Use of org.nd4j.linalg.lossfunctions.impl.LossMSE in project deeplearning4j by deeplearning4j.
The class MultiLayerConfiguration, method fromJson.
/**
 * Create a neural net configuration from JSON.
 *
 * @param json the neural net configuration as a JSON string
 * @return {@link MultiLayerConfiguration}
 */
public static MultiLayerConfiguration fromJson(String json) {
    MultiLayerConfiguration conf;
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    try {
        conf = mapper.readValue(json, MultiLayerConfiguration.class);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }

    //To maintain backward compatibility after the loss function refactoring (configs generated with v0.5.0 or earlier):
    //previously an enum was used for loss functions; now loss function classes are used.
    //In the past, only an OutputLayer or RnnOutputLayer could have used these enums.
    int layerCount = 0;
    JsonNode confs = null;
    for (NeuralNetConfiguration nnc : conf.getConfs()) {
        Layer l = nnc.getLayer();
        if (l instanceof BaseOutputLayer && ((BaseOutputLayer) l).getLossFn() == null) {
            //lossFn field is null -> this may be an old config format, with the lossFunction field holding the enum.
            //If so, try walking the JSON graph to extract the appropriate enum value.
            BaseOutputLayer ol = (BaseOutputLayer) l;
            try {
                JsonNode jsonNode = mapper.readTree(json);
                if (confs == null) {
                    confs = jsonNode.get("confs");
                }
                if (confs instanceof ArrayNode) {
                    ArrayNode layerConfs = (ArrayNode) confs;
                    JsonNode outputLayerNNCNode = layerConfs.get(layerCount);
                    if (outputLayerNNCNode == null)
                        return conf; //Should never happen...
                    JsonNode outputLayerNode = outputLayerNNCNode.get("layer");
                    JsonNode lossFunctionNode = null;
                    if (outputLayerNode.has("output")) {
                        lossFunctionNode = outputLayerNode.get("output").get("lossFunction");
                    } else if (outputLayerNode.has("rnnoutput")) {
                        lossFunctionNode = outputLayerNode.get("rnnoutput").get("lossFunction");
                    }
                    if (lossFunctionNode != null) {
                        String lossFunctionEnumStr = lossFunctionNode.asText();
                        LossFunctions.LossFunction lossFunction = null;
                        try {
                            lossFunction = LossFunctions.LossFunction.valueOf(lossFunctionEnumStr);
                        } catch (Exception e) {
                            log.warn("OutputLayer with null LossFunction or pre-0.6.0 loss function configuration detected: could not parse JSON", e);
                        }
                        if (lossFunction != null) {
                            switch (lossFunction) {
                                case MSE:
                                    ol.setLossFn(new LossMSE());
                                    break;
                                case XENT:
                                    ol.setLossFn(new LossBinaryXENT());
                                    break;
                                case NEGATIVELOGLIKELIHOOD:
                                    ol.setLossFn(new LossNegativeLogLikelihood());
                                    break;
                                case MCXENT:
                                    ol.setLossFn(new LossMCXENT());
                                    break;
                                //Remaining loss functions: TODO
                                case EXPLL:
                                case RMSE_XENT:
                                case SQUARED_LOSS:
                                case RECONSTRUCTION_CROSSENTROPY:
                                case CUSTOM:
                                default:
                                    log.warn("OutputLayer with null LossFunction or pre-0.6.0 loss function configuration detected: could not set loss function for {}", lossFunction);
                                    break;
                            }
                        }
                    }
                } else {
                    log.warn("OutputLayer with null LossFunction or pre-0.6.0 loss function configuration detected: could not parse JSON: layer 'confs' field is not an ArrayNode (is: {})",
                            (confs != null ? confs.getClass() : null));
                }
            } catch (IOException e) {
                log.warn("OutputLayer with null LossFunction or pre-0.6.0 loss function configuration detected: could not parse JSON", e);
                break;
            }
        }

        //Also try to load the old (pre-0.7.2) activation function format if necessary, and create the appropriate IActivation instance
        if (l.getActivationFn() == null) {
            try {
                JsonNode jsonNode = mapper.readTree(json);
                if (confs == null) {
                    confs = jsonNode.get("confs");
                }
                if (confs instanceof ArrayNode) {
                    ArrayNode layerConfs = (ArrayNode) confs;
                    JsonNode outputLayerNNCNode = layerConfs.get(layerCount);
                    if (outputLayerNNCNode == null)
                        return conf; //Should never happen...
                    JsonNode layerWrapperNode = outputLayerNNCNode.get("layer");
                    if (layerWrapperNode == null || layerWrapperNode.size() != 1) {
                        layerCount++; //Increment before skipping, so subsequent layers still index the JSON array correctly
                        continue;
                    }
                    //Should only have 1 element: "dense", "output", etc.
                    JsonNode layerNode = layerWrapperNode.elements().next();
                    JsonNode activationFunction = layerNode.get("activationFunction");
                    if (activationFunction != null) {
                        IActivation ia = Activation.fromString(activationFunction.asText()).getActivationFunction();
                        l.setActivationFn(ia);
                    }
                }
            } catch (IOException e) {
                log.warn("Layer with null ActivationFn field or pre-0.7.2 activation function detected: could not parse JSON", e);
            }
        }
        layerCount++;
    }
    return conf;
}
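For orientation, here is a minimal round-trip sketch pairing fromJson with its counterpart toJson (this is not project source; the layer sizes and builder settings are illustrative assumptions):

MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .seed(12345)
        .list()
        .layer(0, new DenseLayer.Builder().nIn(3).nOut(4).activation(Activation.TANH).build())
        .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                .activation(Activation.IDENTITY).nIn(4).nOut(5).build())
        .build();
String json = conf.toJson(); //Serialize the configuration to JSON
MultiLayerConfiguration restored = MultiLayerConfiguration.fromJson(json); //Round-trips to an equal config
//For a pre-0.6.0 config, fromJson would additionally map the legacy lossFunction
//enum value (e.g. "MSE") onto an ILossFunction instance such as LossMSE, as shown above.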
Use of org.nd4j.linalg.lossfunctions.impl.LossMSE in project deeplearning4j by deeplearning4j.
The class RegressionTest060, method regressionTestMLP2.
@Test
public void regressionTestMLP2() throws Exception {
    File f = new ClassPathResource("regression_testing/060/060_ModelSerializer_Regression_MLP_2.zip").getTempFileFromArchive();
    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l0.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l0.getDist());
    assertEquals(Updater.RMSPROP, l0.getUpdater());
    assertEquals(0.96, l0.getRmsDecay(), 1e-6);
    assertEquals(0.15, l0.getLearningRate(), 1e-6);
    assertEquals(0.6, l0.getDropOut(), 1e-6);
    assertEquals(0.1, l0.getL1(), 1e-6);
    assertEquals(0.2, l0.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("identity", l1.getActivationFn().toString());
    assertEquals(LossFunctions.LossFunction.MSE, l1.getLossFunction());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l1.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l1.getDist());
    assertEquals(Updater.RMSPROP, l1.getUpdater());
    assertEquals(0.96, l1.getRmsDecay(), 1e-6);
    assertEquals(0.15, l1.getLearningRate(), 1e-6);
    assertEquals(0.6, l1.getDropOut(), 1e-6);
    assertEquals(0.1, l1.getL1(), 1e-6);
    assertEquals(0.2, l1.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());
    int updaterSize = net.getUpdater().stateSizeForLayer(net);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
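As a side note, the .zip fixture loaded above is itself a ModelSerializer artifact; a hedged sketch of the write/read round trip (the file name is a placeholder and this is not the actual fixture-generation code):

File f = new File("060_ModelSerializer_Regression_MLP_2.zip"); //Placeholder path
ModelSerializer.writeModel(net, f, true); //true: also persist the updater state
MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(f, true); //true: load the updater state too
assertEquals(net.params(), restored.params()); //Parameters survive the round trip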
Use of org.nd4j.linalg.lossfunctions.impl.LossMSE in project deeplearning4j by deeplearning4j.
The class VaeGradientCheckTests, method testVaePretrainReconstructionDistributions.
@Test
public void testVaePretrainReconstructionDistributions() {
    int inOutSize = 6;
    ReconstructionDistribution[] reconstructionDistributions = new ReconstructionDistribution[] {
            new GaussianReconstructionDistribution(Activation.IDENTITY),
            new GaussianReconstructionDistribution(Activation.TANH),
            new BernoulliReconstructionDistribution(Activation.SIGMOID),
            new CompositeReconstructionDistribution.Builder()
                    .addDistribution(2, new GaussianReconstructionDistribution(Activation.IDENTITY))
                    .addDistribution(2, new BernoulliReconstructionDistribution())
                    .addDistribution(2, new GaussianReconstructionDistribution(Activation.TANH)).build(),
            new ExponentialReconstructionDistribution("identity"),
            new ExponentialReconstructionDistribution("tanh"),
            new LossFunctionWrapper(new ActivationTanH(), new LossMSE()),
            new LossFunctionWrapper(new ActivationIdentity(), new LossMAE()) };

    Nd4j.getRandom().setSeed(12345);
    for (int minibatch : new int[] { 1, 5 }) {
        for (int i = 0; i < reconstructionDistributions.length; i++) {
            INDArray data;
            switch (i) {
                case 0: //Gaussian + identity
                case 1: //Gaussian + tanh
                    data = Nd4j.rand(minibatch, inOutSize);
                    break;
                case 2: //Bernoulli
                    data = Nd4j.create(minibatch, inOutSize);
                    Nd4j.getExecutioner().exec(new BernoulliDistribution(data, 0.5), Nd4j.getRandom());
                    break;
                case 3: //Composite
                    data = Nd4j.create(minibatch, inOutSize);
                    data.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 2)).assign(Nd4j.rand(minibatch, 2));
                    Nd4j.getExecutioner().exec(new BernoulliDistribution(data.get(NDArrayIndex.all(), NDArrayIndex.interval(2, 4)), 0.5), Nd4j.getRandom());
                    data.get(NDArrayIndex.all(), NDArrayIndex.interval(4, 6)).assign(Nd4j.rand(minibatch, 2));
                    break;
                case 4: //Exponential + identity
                case 5: //Exponential + tanh
                    data = Nd4j.rand(minibatch, inOutSize);
                    break;
                case 6: //LossFunctionWrapper: tanh + MSE
                case 7: //LossFunctionWrapper: identity + MAE
                    data = Nd4j.randn(minibatch, inOutSize);
                    break;
                default:
                    throw new RuntimeException();
            }

            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .regularization(true).l2(0.2).l1(0.3)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .learningRate(1.0).seed(12345L)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                    .list()
                    .layer(0, new VariationalAutoencoder.Builder()
                            .nIn(inOutSize).nOut(3)
                            .encoderLayerSizes(5).decoderLayerSizes(6)
                            .pzxActivationFunction(Activation.TANH)
                            .reconstructionDistribution(reconstructionDistributions[i])
                            .activation(Activation.TANH).updater(Updater.SGD).build())
                    .pretrain(true).backprop(false).build();

            MultiLayerNetwork mln = new MultiLayerNetwork(conf);
            mln.init();
            mln.initGradientsView();
            org.deeplearning4j.nn.api.Layer layer = mln.getLayer(0);

            String msg = "testVaePretrainReconstructionDistributions() - " + reconstructionDistributions[i];
            if (PRINT_RESULTS) {
                System.out.println(msg);
                for (int j = 0; j < mln.getnLayers(); j++)
                    System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
            }

            boolean gradOK = GradientCheckUtil.checkGradientsPretrainLayer(layer, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                    DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, data, 12345);
            assertTrue(msg, gradOK);
        }
    }
}
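For reference, LossFunctionWrapper delegates the reconstruction cost to the wrapped ILossFunction; a small standalone sketch of that nd4j API (the signatures below are assumed from the same-era ILossFunction interface, and the arrays are illustrative):

INDArray labels = Nd4j.randn(5, 6); //What the decoder should reconstruct
INDArray preOutput = Nd4j.randn(5, 6); //Pre-activation decoder output
ILossFunction mse = new LossMSE();
double score = mse.computeScore(labels, preOutput, new ActivationTanH(), null, true); //Mean score over the minibatch, no mask
INDArray grad = mse.computeGradient(labels, preOutput, new ActivationTanH(), null); //Gradient w.r.t. preOutput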
Use of org.nd4j.linalg.lossfunctions.impl.LossMSE in project deeplearning4j by deeplearning4j.
The class TestVAE, method testReconstructionErrorSimple.
@Test
public void testReconstructionErrorSimple() {
    int inOutSize = 6;
    ReconstructionDistribution[] reconstructionDistributions = new ReconstructionDistribution[] {
            new LossFunctionWrapper(Activation.TANH, new LossMSE()),
            new LossFunctionWrapper(Activation.IDENTITY, new LossMAE()),
            new CompositeReconstructionDistribution.Builder()
                    .addDistribution(3, new LossFunctionWrapper(Activation.TANH, new LossMSE()))
                    .addDistribution(3, new LossFunctionWrapper(Activation.IDENTITY, new LossMAE())).build() };

    Nd4j.getRandom().setSeed(12345);
    for (int minibatch : new int[] { 1, 5 }) {
        for (int i = 0; i < reconstructionDistributions.length; i++) {
            INDArray data = Nd4j.rand(minibatch, inOutSize).muli(2).subi(1);

            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .regularization(true).l2(0.2).l1(0.3)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .learningRate(1.0).seed(12345L)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                    .list()
                    .layer(0, new VariationalAutoencoder.Builder()
                            .nIn(inOutSize).nOut(3)
                            .encoderLayerSizes(5).decoderLayerSizes(6)
                            .pzxActivationFunction(Activation.TANH)
                            .reconstructionDistribution(reconstructionDistributions[i])
                            .activation(new ActivationTanH()).updater(Updater.SGD).build())
                    .pretrain(true).backprop(false).build();

            MultiLayerNetwork mln = new MultiLayerNetwork(conf);
            mln.init();
            mln.initGradientsView();
            mln.fit(data);

            org.deeplearning4j.nn.layers.variational.VariationalAutoencoder layer =
                    (org.deeplearning4j.nn.layers.variational.VariationalAutoencoder) mln.getLayer(0);
            assertTrue(layer.hasLossFunction());

            Nd4j.getRandom().setSeed(12345);
            INDArray reconstructionError = layer.reconstructionError(data);
            assertArrayEquals(new int[] { minibatch, 1 }, reconstructionError.shape());

            for (int j = 0; j < minibatch; j++) {
                double re = reconstructionError.getDouble(j);
                assertTrue(re >= 0.0);
            }
        }
    }
}
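One design note: reconstructionError applies only because every distribution in this test wraps a deterministic loss function; a hedged sketch of the distinction (reconstructionLogProbability and its sample-count argument are assumptions about the same-era VariationalAutoencoder layer API):

if (layer.hasLossFunction()) {
    INDArray err = layer.reconstructionError(data); //Per-example loss value, shape [minibatch, 1]
} else {
    //Probabilistic reconstruction distributions (Gaussian, Bernoulli, ...) report a log-probability instead
    INDArray logProb = layer.reconstructionLogProbability(data, 16); //16 Monte Carlo samples (illustrative)
}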
Use of org.nd4j.linalg.lossfunctions.impl.LossMSE in project deeplearning4j by deeplearning4j.
The class RegressionTest071, method regressionTestMLP2.
@Test
public void regressionTestMLP2() throws Exception {
    File f = new ClassPathResource("regression_testing/071/071_ModelSerializer_Regression_MLP_2.zip").getTempFileFromArchive();
    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l0.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l0.getDist());
    assertEquals(Updater.RMSPROP, l0.getUpdater());
    assertEquals(0.96, l0.getRmsDecay(), 1e-6);
    assertEquals(0.15, l0.getLearningRate(), 1e-6);
    assertEquals(0.6, l0.getDropOut(), 1e-6);
    assertEquals(0.1, l0.getL1(), 1e-6);
    assertEquals(0.2, l0.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationIdentity);
    assertEquals(LossFunctions.LossFunction.MSE, l1.getLossFunction());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l1.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l1.getDist());
    assertEquals(Updater.RMSPROP, l1.getUpdater());
    assertEquals(0.96, l1.getRmsDecay(), 1e-6);
    assertEquals(0.15, l1.getLearningRate(), 1e-6);
    assertEquals(0.6, l1.getDropOut(), 1e-6);
    assertEquals(0.1, l1.getL1(), 1e-6);
    assertEquals(0.2, l1.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());
    int updaterSize = net.getUpdater().stateSizeForLayer(net);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
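The linspace assertions above rely on the fixture having been saved with deterministic parameters; a hypothetical sketch of how such a fixture could be prepared before serialization (the setParams usage is an assumption, not the actual fixture-generation code):

int n = net.numParams();
net.setParams(Nd4j.linspace(1, n, n)); //Overwrite all parameters with 1..n so exact values can be asserted after reload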