Example 76 with MultiLayerNetwork

Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.

The class LayerConfigValidationTest, method testLRPolicyMissingDecayRate.

@Test(expected = IllegalStateException.class)
public void testLRPolicyMissingDecayRate() {
    double lr = 2;
    double power = 3;
    int iterations = 1;
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .iterations(iterations).learningRate(lr)
            .learningRateDecayPolicy(LearningRatePolicy.Inverse).lrPolicyPower(power)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
            .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
}
Also used : MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) Test(org.junit.Test)
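
For contrast, here is a minimal sketch of a configuration that should pass this check, assuming the same 0.x builder API shown above: when LearningRatePolicy.Inverse is selected, lrPolicyDecayRate has to be supplied alongside lrPolicyPower. The values are illustrative only.

// Hedged sketch (not from the test class): Inverse policy with the required decay rate supplied.
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().iterations(1).learningRate(2)
        .learningRateDecayPolicy(LearningRatePolicy.Inverse)
        .lrPolicyDecayRate(5)   // supplying the decay rate is what the test above leaves out
        .lrPolicyPower(3)
        .list()
        .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
        .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build())
        .build();
new MultiLayerNetwork(conf).init();   // expected to initialize without the IllegalStateException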

Example 77 with MultiLayerNetwork

Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.

The class LayerConfigValidationTest, method testLRPolicyMissingSchedule.

@Test(expected = IllegalStateException.class)
public void testLRPolicyMissingSchedule() {
    double lr = 2;
    double lrDecayRate = 5;
    int iterations = 1;
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .iterations(iterations).learningRate(lr)
            .learningRateDecayPolicy(LearningRatePolicy.Schedule).lrPolicyDecayRate(lrDecayRate)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
            .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
}
Also used : MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) Test(org.junit.Test)
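
Again for contrast, a minimal sketch of a Schedule configuration that supplies the missing piece. It assumes the 0.x builder also exposes learningRateSchedule(Map<Integer, Double>), which is not shown in the snippets on this page, and it needs java.util.Map/HashMap; the schedule entries are illustrative.

// Hedged sketch (assumed learningRateSchedule API): iteration -> learning-rate map for the Schedule policy.
Map<Integer, Double> lrSchedule = new HashMap<>();
lrSchedule.put(0, 2.0);   // rate from iteration 0
lrSchedule.put(5, 0.2);   // drop the rate at iteration 5
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().iterations(1).learningRate(2)
        .learningRateDecayPolicy(LearningRatePolicy.Schedule)
        .learningRateSchedule(lrSchedule)
        .list()
        .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
        .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build())
        .build();
new MultiLayerNetwork(conf).init();   // expected to pass validation once the schedule is present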

Example 78 with MultiLayerNetwork

Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.

The class LayerConfigValidationTest, method testAdaDeltaValidation.

@Test
public void testAdaDeltaValidation() {
    // Only warnings are thrown here, not exceptions
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .rho(0.5)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
            .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).rho(0.01).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
}
Also used : MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) Test(org.junit.Test)
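
Because this test only produces warnings, nothing is asserted about the resulting rho values. A hedged sketch of how they could be checked, continuing from the net initialized above and assuming BaseLayer exposes a getRho() getter in the same way getMomentum() and getRmsDecay() are read in the next example:

// Hedged sketch (assumes a BaseLayer.getRho() getter): verify the layer-wise rho override after init().
assertEquals(0.5, ((BaseLayer) net.getLayer(0).conf().getLayer()).getRho(), 1e-3);   // global value
assertEquals(0.01, ((BaseLayer) net.getLayer(1).conf().getLayer()).getRho(), 1e-3);  // per-layer override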

Example 79 with MultiLayerNetwork

Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.

The class LayerConfigValidationTest, method testPredefinedConfigValues.

@Test
public void testPredefinedConfigValues() {
    double expectedMomentum = 0.9;
    double expectedAdamMeanDecay = 0.9;
    double expectedAdamVarDecay = 0.999;
    double expectedRmsDecay = 0.95;
    Distribution expectedDist = new NormalDistribution(0, 1);
    double expectedL1 = 0.0;
    double expectedL2 = 0.0;
    // Nesterovs Updater
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .learningRate(0.3).updater(Updater.NESTEROVS).regularization(true)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.5).build())
            .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).momentum(0.4).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    BaseLayer layerConf = (BaseLayer) net.getLayer(0).conf().getLayer();
    assertEquals(expectedMomentum, layerConf.getMomentum(), 1e-3);
    assertEquals(expectedL1, layerConf.getL1(), 1e-3);
    assertEquals(0.5, layerConf.getL2(), 1e-3);
    BaseLayer layerConf1 = (BaseLayer) net.getLayer(1).conf().getLayer();
    assertEquals(0.4, layerConf1.getMomentum(), 1e-3);
    // Adam Updater
    conf = new NeuralNetConfiguration.Builder()
            .learningRate(0.3).updater(Updater.ADAM).regularization(true)
            .weightInit(WeightInit.DISTRIBUTION)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.5).l1(0.3).build())
            .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build())
            .build();
    net = new MultiLayerNetwork(conf);
    net.init();
    layerConf = (BaseLayer) net.getLayer(0).conf().getLayer();
    assertEquals(0.3, layerConf.getL1(), 1e-3);
    assertEquals(0.5, layerConf.getL2(), 1e-3);
    layerConf1 = (BaseLayer) net.getLayer(1).conf().getLayer();
    assertEquals(expectedAdamMeanDecay, layerConf1.getAdamMeanDecay(), 1e-3);
    assertEquals(expectedAdamVarDecay, layerConf1.getAdamVarDecay(), 1e-3);
    assertEquals(expectedDist, layerConf1.getDist());
    // l1 & l2 local should still be set whether regularization true or false
    assertEquals(expectedL1, layerConf1.getL1(), 1e-3);
    assertEquals(expectedL2, layerConf1.getL2(), 1e-3);
    // RMSProp Updater
    conf = new NeuralNetConfiguration.Builder()
            .learningRate(0.3).updater(Updater.RMSPROP)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
            .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).rmsDecay(0.4).build())
            .build();
    net = new MultiLayerNetwork(conf);
    net.init();
    layerConf = (BaseLayer) net.getLayer(0).conf().getLayer();
    assertEquals(expectedRmsDecay, layerConf.getRmsDecay(), 1e-3);
    assertEquals(expectedL1, layerConf.getL1(), 1e-3);
    assertEquals(expectedL2, layerConf.getL2(), 1e-3);
    layerConf1 = (BaseLayer) net.getLayer(1).conf().getLayer();
    assertEquals(0.4, layerConf1.getRmsDecay(), 1e-3);
}
Also used : NormalDistribution(org.deeplearning4j.nn.conf.distribution.NormalDistribution) GaussianDistribution(org.deeplearning4j.nn.conf.distribution.GaussianDistribution) Distribution(org.deeplearning4j.nn.conf.distribution.Distribution) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) Test(org.junit.Test)
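
The Nesterovs block above relies on the predefined momentum of 0.9 for layer 0 while layer 1 overrides it. The same default-plus-override intent can also be written out explicitly; a minimal sketch, assuming the global builder exposes momentum() just as DenseLayer.Builder does in the test (values illustrative):

// Hedged sketch: spell out the global momentum instead of relying on the predefined default.
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().learningRate(0.3)
        .updater(Updater.NESTEROVS)
        .momentum(0.9)   // assumed global setter; matches the expectedMomentum default checked above
        .list()
        .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())               // inherits momentum 0.9
        .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).momentum(0.4).build()) // overrides to 0.4
        .build();
new MultiLayerNetwork(conf).init();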

Example 80 with MultiLayerNetwork

Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.

The class LayerConfigTest, method testDropoutLayerwiseOverride.

@Test
public void testDropoutLayerwiseOverride() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .dropOut(1.0)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
            .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    assertEquals(1.0, conf.getConf(0).getLayer().getDropOut(), 0.0);
    assertEquals(1.0, conf.getConf(1).getLayer().getDropOut(), 0.0);
    conf = new NeuralNetConfiguration.Builder()
            .dropOut(1.0)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())
            .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).dropOut(2.0).build())
            .build();
    net = new MultiLayerNetwork(conf);
    net.init();
    assertEquals(1.0, conf.getConf(0).getLayer().getDropOut(), 0.0);
    assertEquals(2.0, conf.getConf(1).getLayer().getDropOut(), 0.0);
}
Also used : MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) Test(org.junit.Test)
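
The layer-wise override mechanism is not specific to dropout. A minimal sketch applying it to l2 regularization, assuming the global builder exposes l2() just as DenseLayer.Builder does in Example 79 (values illustrative):

// Hedged sketch: global l2 with a per-layer override, analogous to the dropOut override above.
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().regularization(true)
        .l2(0.5)   // assumed global setter; becomes the default for every layer
        .list()
        .layer(0, new DenseLayer.Builder().nIn(2).nOut(2).build())           // inherits l2 = 0.5
        .layer(1, new DenseLayer.Builder().nIn(2).nOut(2).l2(0.1).build())   // overrides to 0.1
        .build();
new MultiLayerNetwork(conf).init();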

Aggregations

MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork) 326
Test (org.junit.Test) 277
MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration) 206
INDArray (org.nd4j.linalg.api.ndarray.INDArray) 166
NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration) 111
DataSet (org.nd4j.linalg.dataset.DataSet) 91
DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator) 70
IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator) 49
NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution) 43
ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener) 41
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer) 40
DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer) 38
Random (java.util.Random) 34
MnistDataSetIterator (org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) 30
ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) 28
DL4JException (org.deeplearning4j.exception.DL4JException) 20
Layer (org.deeplearning4j.nn.api.Layer) 20
ClassPathResource (org.nd4j.linalg.io.ClassPathResource) 20
File (java.io.File) 19
ComputationGraph (org.deeplearning4j.nn.graph.ComputationGraph) 19