Search in sources :

Example 1 with DataSetIterator

use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j by deeplearning4j.

Source: class CenterLossOutputLayerTest, method testMNISTConfig.

@Test
//Should be run manually
@Ignore
public void testMNISTConfig() throws Exception {
    // Manual smoke test: train the center-loss CNN on MNIST batches and watch the
    // score via ScoreIterationListener; no assertions, inspection only.
    final int batchSize = 64;
    DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, true, 12345);
    ComputationGraph net = getCNNMnistConfig();
    net.init();
    net.setListeners(new ScoreIterationListener(1));
    int minibatchesFit = 0;
    while (minibatchesFit < 50) {
        net.fit(mnistTrain.next());
        // Pause between fits so score output can be watched in real time
        Thread.sleep(1000);
        minibatchesFit++;
    }
    // Keep the JVM alive after training for manual inspection
    Thread.sleep(100000);
}
Also used : MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) ComputationGraph(org.deeplearning4j.nn.graph.ComputationGraph) ScoreIterationListener(org.deeplearning4j.optimize.listeners.ScoreIterationListener) IrisDataSetIterator(org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) Ignore(org.junit.Ignore) Test(org.junit.Test)

Example 2 with DataSetIterator

use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j by deeplearning4j.

Source: class DropoutLayerTest, method testDropoutLayerWithConvMnist.

@Test
public void testDropoutLayerWithConvMnist() throws Exception {
    // Dropout applied via OutputLayer.dropOut(0.25) should be equivalent to an explicit
    // DropoutLayer(0.25) inserted before an otherwise-identical output layer, for a small
    // conv net on MNIST: same parameters after fitting, and same activations when the RNG
    // is seeded identically before each forward pass.
    DataSetIterator iter = new MnistDataSetIterator(2, 2);
    DataSet next = iter.next();
    // Run without separate activation layer
    MultiLayerConfiguration confIntegrated = new NeuralNetConfiguration.Builder().optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1).seed(123).list().layer(0, new ConvolutionLayer.Builder(4, 4).stride(2, 2).nIn(1).nOut(20).activation(Activation.RELU).weightInit(WeightInit.XAVIER).build()).layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).dropOut(0.25).nOut(10).build()).backprop(true).pretrain(false).setInputType(InputType.convolutionalFlat(28, 28, 1)).build();
    MultiLayerNetwork netIntegrated = new MultiLayerNetwork(confIntegrated);
    netIntegrated.init();
    netIntegrated.fit(next);
    // Run with separate activation layer
    MultiLayerConfiguration confSeparate = new NeuralNetConfiguration.Builder().optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1).seed(123).list().layer(0, new ConvolutionLayer.Builder(4, 4).stride(2, 2).nIn(1).nOut(20).activation(Activation.RELU).weightInit(WeightInit.XAVIER).build()).layer(1, new DropoutLayer.Builder(0.25).build()).layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).nOut(10).build()).backprop(true).pretrain(false).setInputType(InputType.convolutionalFlat(28, 28, 1)).build();
    MultiLayerNetwork netSeparate = new MultiLayerNetwork(confSeparate);
    netSeparate.init();
    netSeparate.fit(next);
    // check parameters: conv layer (index 0 in both), output layer (index 1 vs 2)
    assertEquals(netIntegrated.getLayer(0).getParam("W"), netSeparate.getLayer(0).getParam("W"));
    assertEquals(netIntegrated.getLayer(0).getParam("b"), netSeparate.getLayer(0).getParam("b"));
    assertEquals(netIntegrated.getLayer(1).getParam("W"), netSeparate.getLayer(2).getParam("W"));
    assertEquals(netIntegrated.getLayer(1).getParam("b"), netSeparate.getLayer(2).getParam("b"));
    // check activations; re-seed before each forward pass so both nets draw identical dropout masks
    netIntegrated.setInput(next.getFeatureMatrix());
    netSeparate.setInput(next.getFeatureMatrix());
    Nd4j.getRandom().setSeed(12345);
    List<INDArray> actTrainIntegrated = netIntegrated.feedForward(true);
    Nd4j.getRandom().setSeed(12345);
    List<INDArray> actTrainSeparate = netSeparate.feedForward(true);
    assertEquals(actTrainIntegrated.get(1), actTrainSeparate.get(1));
    assertEquals(actTrainIntegrated.get(2), actTrainSeparate.get(3));
    Nd4j.getRandom().setSeed(12345);
    List<INDArray> actTestIntegrated = netIntegrated.feedForward(false);
    Nd4j.getRandom().setSeed(12345);
    List<INDArray> actTestSeparate = netSeparate.feedForward(false);
    // Bug fix: the first test-mode assertion previously compared against actTrainSeparate
    // (train-time activations) instead of actTestSeparate — a copy-paste error that could
    // silently pass or spuriously fail. Compare test-time activations on both sides.
    assertEquals(actTestIntegrated.get(1), actTestSeparate.get(1));
    assertEquals(actTestIntegrated.get(2), actTestSeparate.get(3));
}
Also used : OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) DataSet(org.nd4j.linalg.dataset.DataSet) DropoutLayer(org.deeplearning4j.nn.conf.layers.DropoutLayer) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) ConvolutionLayer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) Test(org.junit.Test)

Example 3 with DataSetIterator

use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j by deeplearning4j.

Source: class BatchNormalizationTest, method checkMeanVarianceEstimateCNN.

@Test
public void checkMeanVarianceEstimateCNN() throws Exception {
    Nd4j.getRandom().setSeed(12345);
    // Verify that the batch norm layer's internal global mean/variance estimates converge
    // to the true statistics of the input distribution, for convolutional (NCHW) input.
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .iterations(1).updater(Updater.RMSPROP).seed(12345).list()
                    .layer(0, new BatchNormalization.Builder().nIn(3).nOut(3).eps(1e-5).decay(0.95).build())
                    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.IDENTITY).nOut(10).build())
                    .backprop(true).pretrain(false)
                    .setInputType(InputType.convolutional(5, 5, 3)).build();
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    final int miniBatchSize = 32;
    // Inputs drawn from U(0,1): 100 minibatches of shape [32, 3, 5, 5]
    List<DataSet> dataSets = new ArrayList<>();
    for (int example = 0; example < 100; example++) {
        dataSets.add(new DataSet(Nd4j.rand(new int[] {miniBatchSize, 3, 5, 5}), Nd4j.rand(miniBatchSize, 10)));
    }
    DataSetIterator iterator = new ListDataSetIterator(dataSets);
    // E[U(0,1)] = 0.5; Var[U(0,1)] = 1/12 * (1-0)^2 = 0.0833
    INDArray expectedMean = Nd4j.valueArrayOf(new int[] {1, 3}, 0.5);
    INDArray expectedVar = Nd4j.valueArrayOf(new int[] {1, 3}, 1 / 12.0);
    for (int epoch = 0; epoch < 10; epoch++) {
        iterator.reset();
        network.fit(iterator);
    }
    INDArray estimatedMean = network.getLayer(0).getParam(BatchNormalizationParamInitializer.GLOBAL_MEAN);
    INDArray estimatedVar = network.getLayer(0).getParam(BatchNormalizationParamInitializer.GLOBAL_VAR);
    assertArrayEquals(expectedMean.data().asFloat(), estimatedMean.data().asFloat(), 0.01f);
    assertArrayEquals(expectedVar.data().asFloat(), estimatedVar.data().asFloat(), 0.01f);
}
Also used : ListDataSetIterator(org.deeplearning4j.datasets.iterator.impl.ListDataSetIterator) DataSet(org.nd4j.linalg.dataset.DataSet) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) BatchNormalization(org.deeplearning4j.nn.conf.layers.BatchNormalization) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) ListDataSetIterator(org.deeplearning4j.datasets.iterator.impl.ListDataSetIterator) Test(org.junit.Test)

Example 4 with DataSetIterator

use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j by deeplearning4j.

Source: class BatchNormalizationTest, method checkSerialization.

@Test
public void checkSerialization() throws Exception {
    // A trained network containing batch norm layers must produce identical inference
    // output after a serialize/deserialize round trip — i.e. the batch norm state
    // (running mean/variance) must be persisted correctly.
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(2).seed(12345).list().layer(0, new ConvolutionLayer.Builder().nIn(1).nOut(6).weightInit(WeightInit.XAVIER).activation(Activation.IDENTITY).build()).layer(1, new BatchNormalization.Builder().build()).layer(2, new ActivationLayer.Builder().activation(Activation.LEAKYRELU).build()).layer(3, new DenseLayer.Builder().nOut(10).activation(Activation.LEAKYRELU).build()).layer(4, new BatchNormalization.Builder().build()).layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).nOut(10).build()).backprop(true).pretrain(false).setInputType(InputType.convolutionalFlat(28, 28, 1)).build();
    MultiLayerNetwork original = new MultiLayerNetwork(conf);
    original.init();
    DataSetIterator mnist = new MnistDataSetIterator(16, true, 12345);
    int minibatchesFit = 0;
    while (minibatchesFit < 20) {
        original.fit(mnist.next());
        minibatchesFit++;
    }
    INDArray input = mnist.next().getFeatureMatrix();
    INDArray expectedOutput = original.output(input, false);
    // Inference output must be deterministic before we compare across the round trip
    assertEquals(expectedOutput, original.output(input, false));
    ByteArrayOutputStream serialized = new ByteArrayOutputStream();
    ModelSerializer.writeModel(original, serialized, true);
    serialized.close();
    ByteArrayInputStream deserializedSource = new ByteArrayInputStream(serialized.toByteArray());
    MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(deserializedSource, true);
    assertEquals(expectedOutput, restored.output(input, false));
}
Also used : MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) ByteArrayOutputStream(java.io.ByteArrayOutputStream) BatchNormalization(org.deeplearning4j.nn.conf.layers.BatchNormalization) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) ByteArrayInputStream(java.io.ByteArrayInputStream) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) ListDataSetIterator(org.deeplearning4j.datasets.iterator.impl.ListDataSetIterator) Test(org.junit.Test)

Example 5 with DataSetIterator

use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j by deeplearning4j.

Source: class BatchNormalizationTest, method checkMeanVarianceEstimate.

@Test
public void checkMeanVarianceEstimate() throws Exception {
    Nd4j.getRandom().setSeed(12345);
    // Verify that the batch norm layer's internal global mean/variance estimates converge
    // to the true statistics of the input distribution, for plain 2d (feed-forward) input.
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .iterations(1).updater(Updater.RMSPROP).seed(12345).list()
                    .layer(0, new BatchNormalization.Builder().nIn(10).nOut(10).eps(1e-5).decay(0.95).build())
                    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.IDENTITY).nIn(10).nOut(10).build())
                    .backprop(true).pretrain(false).build();
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    final int miniBatchSize = 32;
    // Inputs drawn from U(0,1): 200 minibatches of shape [32, 10]
    List<DataSet> dataSets = new ArrayList<>();
    for (int example = 0; example < 200; example++) {
        dataSets.add(new DataSet(Nd4j.rand(miniBatchSize, 10), Nd4j.rand(miniBatchSize, 10)));
    }
    DataSetIterator iterator = new ListDataSetIterator(dataSets);
    // E[U(0,1)] = 0.5; Var[U(0,1)] = 1/12 * (1-0)^2 = 0.0833
    INDArray expectedMean = Nd4j.valueArrayOf(new int[] {1, 10}, 0.5);
    INDArray expectedVar = Nd4j.valueArrayOf(new int[] {1, 10}, 1 / 12.0);
    for (int epoch = 0; epoch < 10; epoch++) {
        iterator.reset();
        network.fit(iterator);
    }
    INDArray estimatedMean = network.getLayer(0).getParam(BatchNormalizationParamInitializer.GLOBAL_MEAN);
    INDArray estimatedVar = network.getLayer(0).getParam(BatchNormalizationParamInitializer.GLOBAL_VAR);
    assertArrayEquals(expectedMean.data().asFloat(), estimatedMean.data().asFloat(), 0.02f);
    assertArrayEquals(expectedVar.data().asFloat(), estimatedVar.data().asFloat(), 0.02f);
}
Also used : ListDataSetIterator(org.deeplearning4j.datasets.iterator.impl.ListDataSetIterator) DataSet(org.nd4j.linalg.dataset.DataSet) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) ListDataSetIterator(org.deeplearning4j.datasets.iterator.impl.ListDataSetIterator) Test(org.junit.Test)

Aggregations

DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator)147 Test (org.junit.Test)133 IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator)90 DataSet (org.nd4j.linalg.dataset.DataSet)79 MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork)70 MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration)63 INDArray (org.nd4j.linalg.api.ndarray.INDArray)61 MnistDataSetIterator (org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator)53 NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration)49 ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener)43 OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer)30 DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer)24 MultiDataSetIterator (org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator)21 ComputationGraphConfiguration (org.deeplearning4j.nn.conf.ComputationGraphConfiguration)19 InMemoryModelSaver (org.deeplearning4j.earlystopping.saver.InMemoryModelSaver)17 MaxEpochsTerminationCondition (org.deeplearning4j.earlystopping.termination.MaxEpochsTerminationCondition)17 ComputationGraph (org.deeplearning4j.nn.graph.ComputationGraph)17 ListDataSetIterator (org.deeplearning4j.datasets.iterator.impl.ListDataSetIterator)16 BaseSparkTest (org.deeplearning4j.spark.BaseSparkTest)16 MaxTimeIterationTerminationCondition (org.deeplearning4j.earlystopping.termination.MaxTimeIterationTerminationCondition)14