Example 1 with ActivationLReLU

Use of org.nd4j.linalg.activations.impl.ActivationLReLU in the deeplearning4j project.

From class RegressionTest060, method regressionTestMLP2:

@Test
public void regressionTestMLP2() throws Exception {
    // Restore an MLP saved with DL4J 0.6.0 (second argument true: also load the
    // updater state) and verify every configuration field deserializes intact.
    File f = new ClassPathResource("regression_testing/060/060_ModelSerializer_Regression_MLP_2.zip").getTempFileFromArchive();
    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);
    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());
    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l0.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l0.getDist());
    assertEquals(Updater.RMSPROP, l0.getUpdater());
    assertEquals(0.96, l0.getRmsDecay(), 1e-6);
    assertEquals(0.15, l0.getLearningRate(), 1e-6);
    assertEquals(0.6, l0.getDropOut(), 1e-6);
    assertEquals(0.1, l0.getL1(), 1e-6);
    assertEquals(0.2, l0.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);
    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("identity", l1.getActivationFn().toString());
    assertEquals(LossFunctions.LossFunction.MSE, l1.getLossFunction());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    // The output layer shares the dense layer's initialization and updater settings
    assertEquals(WeightInit.DISTRIBUTION, l1.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l1.getDist());
    assertEquals(Updater.RMSPROP, l1.getUpdater());
    assertEquals(0.96, l1.getRmsDecay(), 1e-6);
    assertEquals(0.15, l1.getLearningRate(), 1e-6);
    assertEquals(0.6, l1.getDropOut(), 1e-6);
    assertEquals(0.1, l1.getL1(), 1e-6);
    assertEquals(0.2, l1.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);
    // The saved model's parameters and updater state were set to linspace
    // values before saving, so an exact restore can be asserted.
    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());
    int updaterSize = net.getUpdater().stateSizeForLayer(net);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Also used: LossMSE (org.nd4j.linalg.lossfunctions.impl.LossMSE), ActivationLReLU (org.nd4j.linalg.activations.impl.ActivationLReLU), NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution), File (java.io.File), MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork), ClassPathResource (org.nd4j.linalg.io.ClassPathResource), Test (org.junit.Test)
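
For context, the two-layer configuration these assertions describe could be built roughly as below with the 0.6.x-era builder API. This is a minimal sketch, not the code that produced the saved model; builder method names shifted across 0.x releases, so treat the exact calls (activation(String), regularization(boolean), list()) as assumptions.

import org.deeplearning4j.nn.conf.GradientNormalization;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.distribution.NormalDistribution;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.lossfunctions.LossFunctions;

MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .learningRate(0.15)
        .updater(Updater.RMSPROP).rmsDecay(0.96)
        .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0.1, 1.2))
        .regularization(true).l1(0.1).l2(0.2)
        .dropOut(0.6)
        .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
        .gradientNormalizationThreshold(1.5)
        .list()
        // "leakyrelu" maps to ActivationLReLU when the config is deserialized
        .layer(0, new DenseLayer.Builder().nIn(3).nOut(4).activation("leakyrelu").build())
        .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                .activation("identity").nIn(4).nOut(5).build())
        .backprop(true).pretrain(false)
        .build();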

Example 2 with ActivationLReLU

Use of org.nd4j.linalg.activations.impl.ActivationLReLU in the deeplearning4j project.

From class RegressionTest071, method regressionTestMLP2:

@Test
public void regressionTestMLP2() throws Exception {
    File f = new ClassPathResource("regression_testing/071/071_ModelSerializer_Regression_MLP_2.zip").getTempFileFromArchive();
    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);
    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());
    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l0.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l0.getDist());
    assertEquals(Updater.RMSPROP, l0.getUpdater());
    assertEquals(0.96, l0.getRmsDecay(), 1e-6);
    assertEquals(0.15, l0.getLearningRate(), 1e-6);
    assertEquals(0.6, l0.getDropOut(), 1e-6);
    assertEquals(0.1, l0.getL1(), 1e-6);
    assertEquals(0.2, l0.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);
    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationIdentity);
    assertEquals(LossFunctions.LossFunction.MSE, l1.getLossFunction());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l1.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l1.getDist());
    assertEquals(Updater.RMSPROP, l1.getUpdater());
    assertEquals(0.96, l1.getRmsDecay(), 1e-6);
    assertEquals(0.15, l1.getLearningRate(), 1e-6);
    assertEquals(0.6, l1.getDropOut(), 1e-6);
    assertEquals(0.1, l1.getL1(), 1e-6);
    assertEquals(0.2, l1.getL2(), 1e-6);
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);
    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());
    int updaterSize = net.getUpdater().stateSizeForLayer(net);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Also used: LossMSE (org.nd4j.linalg.lossfunctions.impl.LossMSE), ActivationLReLU (org.nd4j.linalg.activations.impl.ActivationLReLU), NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution), ActivationIdentity (org.nd4j.linalg.activations.impl.ActivationIdentity), File (java.io.File), MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork), ClassPathResource (org.nd4j.linalg.io.ClassPathResource), Test (org.junit.Test)
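
Unlike Examples 1 and 3, which compare the string form "identity", this test asserts the output activation with instanceof ActivationIdentity; both checks pass for the same deserialized object. For reference, ActivationLReLU itself can be exercised directly through nd4j's IActivation interface. A minimal sketch, assuming the default alpha of 0.01:

import org.nd4j.linalg.activations.IActivation;
import org.nd4j.linalg.activations.impl.ActivationLReLU;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

IActivation lrelu = new ActivationLReLU(0.01);
INDArray in = Nd4j.create(new double[] { -2.0, 0.5 });
// Leaky ReLU passes positive values through unchanged and scales negatives
// by alpha, so the expected output is [-0.02, 0.5]. getActivation may modify
// its input in place, hence the dup().
INDArray out = lrelu.getActivation(in.dup(), true);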

Example 3 with ActivationLReLU

Use of org.nd4j.linalg.activations.impl.ActivationLReLU in the deeplearning4j project.

From class RegressionTest050, method regressionTestMLP2 (the 0.5.0-era test omits the gradient-normalization assertions):

@Test
public void regressionTestMLP2() throws Exception {
    File f = new ClassPathResource("regression_testing/050/050_ModelSerializer_Regression_MLP_2.zip").getTempFileFromArchive();
    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);
    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());
    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l0.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l0.getDist());
    assertEquals(Updater.RMSPROP, l0.getUpdater());
    assertEquals(0.96, l0.getRmsDecay(), 1e-6);
    assertEquals(0.15, l0.getLearningRate(), 1e-6);
    assertEquals(0.6, l0.getDropOut(), 1e-6);
    assertEquals(0.1, l0.getL1(), 1e-6);
    assertEquals(0.2, l0.getL2(), 1e-6);
    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("identity", l1.getActivationFn().toString());
    assertEquals(LossFunctions.LossFunction.MSE, l1.getLossFunction());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l1.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l1.getDist());
    assertEquals(Updater.RMSPROP, l1.getUpdater());
    assertEquals(0.96, l1.getRmsDecay(), 1e-6);
    assertEquals(0.15, l1.getLearningRate(), 1e-6);
    assertEquals(0.6, l1.getDropOut(), 1e-6);
    assertEquals(0.1, l1.getL1(), 1e-6);
    assertEquals(0.2, l1.getL2(), 1e-6);
    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());
    int updaterSize = net.getUpdater().stateSizeForLayer(net);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Also used: LossMSE (org.nd4j.linalg.lossfunctions.impl.LossMSE), MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration), ActivationLReLU (org.nd4j.linalg.activations.impl.ActivationLReLU), NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution), File (java.io.File), MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork), ClassPathResource (org.nd4j.linalg.io.ClassPathResource), Test (org.junit.Test)
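
All three tests rest on the same ModelSerializer round trip. A minimal sketch of that round trip, assuming net is an already-initialized MultiLayerNetwork and the code runs inside a test method declared throws Exception, as above; the temp-file name is illustrative:

import java.io.File;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.util.ModelSerializer;

File tmp = File.createTempFile("regression_mlp", ".zip");
// true: persist the updater state alongside the parameters and configuration
ModelSerializer.writeModel(net, tmp, true);
// true: load the updater state back as well
MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(tmp, true);
assertEquals(net.params(), restored.params());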

Aggregations

- File (java.io.File): 3
- NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution): 3
- MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork): 3
- Test (org.junit.Test): 3
- ActivationLReLU (org.nd4j.linalg.activations.impl.ActivationLReLU): 3
- ClassPathResource (org.nd4j.linalg.io.ClassPathResource): 3
- LossMSE (org.nd4j.linalg.lossfunctions.impl.LossMSE): 3
- MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration): 1
- ActivationIdentity (org.nd4j.linalg.activations.impl.ActivationIdentity): 1