Search in sources :

Example 16 with MnistDataSetIterator

use of org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator in project deeplearning4j by deeplearning4j.

The class ParallelWrapperMainTest contains the method runParallelWrapperMain, shown below.

/**
 * End-to-end smoke test for {@link ParallelWrapperMain}: builds and serializes a small
 * LeNet-style MNIST network, then launches the wrapper's CLI entry point against it.
 * No assertions are made; the test passes if runMain completes without throwing.
 *
 * @throws Exception on data download, serialization, or training failure
 */
@Test
public void runParallelWrapperMain() throws Exception {
    int nChannels = 1;
    int outputNum = 10;
    // for GPU you usually want to have higher batchSize
    int batchSize = 128;
    int iterations = 1;
    int seed = 123;
    // Randomize the UI port (9000-9999) so concurrent test runs don't collide on it.
    int uiPort = new Random().nextInt(1000) + 9000;
    System.setProperty("org.deeplearning4j.ui.port", String.valueOf(uiPort));
    log.info("Load data....");
    // NOTE(review): these iterators are never used below — presumably constructing them
    // warms the MNIST download cache for MnistDataSetIteratorProviderFactory; confirm.
    DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, true, 12345);
    DataSetIterator mnistTest = new MnistDataSetIterator(batchSize, false, 12345);
    log.info("Build model....");
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .iterations(iterations)
            .regularization(true).l2(0.0005)
            // .biasLearningRate(0.02)
            .learningRate(0.01)
            .weightInit(WeightInit.XAVIER)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(Updater.NESTEROVS).momentum(0.9)
            .list()
            .layer(0, new ConvolutionLayer.Builder(5, 5).nIn(nChannels).stride(1, 1).nOut(20).activation(Activation.IDENTITY).build())
            .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2).stride(2, 2).build())
            .layer(2, new ConvolutionLayer.Builder(5, 5).stride(1, 1).nOut(50).activation(Activation.IDENTITY).build())
            .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2).stride(2, 2).build())
            .layer(4, new DenseLayer.Builder().activation(Activation.RELU).nOut(500).build())
            .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(outputNum).activation(Activation.SOFTMAX).build())
            .backprop(true).pretrain(false);
    // The builder needs the dimensions of the image along with the number of channels. these are 28x28 images in one channel
    new ConvolutionLayerSetup(builder, 28, 28, 1);
    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    // Serialize the untrained model; ParallelWrapperMain re-loads it from this path.
    File tempModel = new File("tmpmodel.zip");
    tempModel.deleteOnExit();
    ModelSerializer.writeModel(model, tempModel, false);
    File tmp = new File("tmpmodel.bin");
    tmp.deleteOnExit();
    ParallelWrapperMain parallelWrapperMain = new ParallelWrapperMain();
    parallelWrapperMain.runMain(new String[] { "--modelPath", tempModel.getAbsolutePath(), "--dataSetIteratorFactoryClazz", MnistDataSetIteratorProviderFactory.class.getName(), "--modelOutputPath", tmp.getAbsolutePath(), "--uiUrl", "localhost:" + uiPort });
    // NOTE(review): fixed 30 s wait, presumably to let background training/UI threads run
    // before the JVM exits — TODO confirm whether a proper join/await is available.
    Thread.sleep(30000);
}
Also used : MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) ConvolutionLayer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) Random(java.util.Random) DenseLayer(org.deeplearning4j.nn.conf.layers.DenseLayer) ConvolutionLayerSetup(org.deeplearning4j.nn.conf.layers.setup.ConvolutionLayerSetup) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) File(java.io.File) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) Test(org.junit.Test)

Example 17 with MnistDataSetIterator

use of org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator in project deeplearning4j by deeplearning4j.

The class ParameterServerParallelWrapperTest contains the method testWrapper, shown below.

/**
 * Smoke test for {@link ParameterServerParallelWrapper}: fits a small LeNet-style MNIST
 * network through the parameter-server wrapper, then closes it. No assertions are made;
 * the test passes if fit/close complete without throwing.
 *
 * @throws Exception on data download or training failure
 */
@Test
public void testWrapper() throws Exception {
    int nChannels = 1;
    int outputNum = 10;
    // for GPU you usually want to have higher batchSize
    int batchSize = 128;
    int nEpochs = 10;
    int iterations = 1;
    int seed = 123;
    log.info("Load data....");
    DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, 1000);
    // NOTE(review): mnistTest is never used below — presumably kept to warm the MNIST
    // download cache; confirm before removing.
    DataSetIterator mnistTest = new MnistDataSetIterator(batchSize, false, 12345);
    log.info("Build model....");
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .iterations(iterations)
            .regularization(true).l2(0.0005)
            // .biasLearningRate(0.02)
            .learningRate(0.01)
            .weightInit(WeightInit.XAVIER)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(Updater.NESTEROVS).momentum(0.9)
            .list()
            .layer(0, new ConvolutionLayer.Builder(5, 5).nIn(nChannels).stride(1, 1).nOut(20).activation(Activation.IDENTITY).build())
            .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2).stride(2, 2).build())
            .layer(2, new ConvolutionLayer.Builder(5, 5).stride(1, 1).nOut(50).activation(Activation.IDENTITY).build())
            .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2).stride(2, 2).build())
            .layer(4, new DenseLayer.Builder().activation(Activation.RELU).nOut(500).build())
            .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(outputNum).activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1))
            .backprop(true).pretrain(false);
    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    // Use the nEpochs local instead of a duplicated hard-coded 10 (same value, one source of truth).
    ParameterServerParallelWrapper parameterServerParallelWrapper = ParameterServerParallelWrapper.builder().model(model).multiLayerNetwork(model).numEpochs(nEpochs).numWorkers(Runtime.getRuntime().availableProcessors()).statusServerPort(33000).preFetchSize(3).build();
    parameterServerParallelWrapper.fit(mnistTrain);
    parameterServerParallelWrapper.close();
    // NOTE(review): fixed 30 s wait after close(), presumably to let server threads wind
    // down before the JVM exits — TODO confirm whether this is still required.
    Thread.sleep(30000);
}
Also used : OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) SubsamplingLayer(org.deeplearning4j.nn.conf.layers.SubsamplingLayer) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) ConvolutionLayer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) Test(org.junit.Test)

Example 18 with MnistDataSetIterator

use of org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator in project deeplearning4j by deeplearning4j.

The class DataSetIteratorTest contains the method testMnist, shown below.

/**
 * Verifies MnistDataSetIterator against a known-good CSV dump of the first 200 MNIST
 * records: every batch of features and labels must match exactly, and the iterator must
 * be exhausted at the same point as the CSV source.
 *
 * @throws Exception if the CSV fixture cannot be read
 */
@Test
public void testMnist() throws Exception {
    // Expected data: first 200 MNIST rows stored as CSV, label in column 0.
    ClassPathResource csvFixture = new ClassPathResource("mnist_first_200.txt");
    CSVRecordReader csvReader = new CSVRecordReader(0, ",");
    csvReader.initialize(new FileSplit(csvFixture.getTempFileFromArchive()));
    RecordReaderDataSetIterator expectedIter = new RecordReaderDataSetIterator(csvReader, 10, 0, 10);
    // Iterator under test — ctor args per MnistDataSetIterator(batch, numExamples, ...); see API docs.
    MnistDataSetIterator actualIter = new MnistDataSetIterator(10, 200, false, true, false, 0);
    while (expectedIter.hasNext()) {
        DataSet expected = expectedIter.next();
        DataSet actual = actualIter.next();
        // CSV pixels are raw 0-255; divi scales in place to match the iterator's output.
        INDArray expectedFeatures = expected.getFeatureMatrix().divi(255);
        assertEquals(expectedFeatures, actual.getFeatureMatrix());
        assertEquals(expected.getLabels(), actual.getLabels());
    }
    // Both sources hold 200 examples, so the iterator must be drained too.
    assertFalse(actualIter.hasNext());
}
Also used : MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) INDArray(org.nd4j.linalg.api.ndarray.INDArray) DataSet(org.nd4j.linalg.dataset.DataSet) CSVRecordReader(org.datavec.api.records.reader.impl.csv.CSVRecordReader) RecordReaderDataSetIterator(org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator) FileSplit(org.datavec.api.split.FileSplit) ClassPathResource(org.nd4j.linalg.io.ClassPathResource) Test(org.junit.Test)

Example 19 with MnistDataSetIterator

use of org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator in project deeplearning4j by deeplearning4j.

The class MultiLayerTest contains the method testPredict, shown below.

/**
 * Checks that {@link MultiLayerNetwork#predict} returns a non-null label name for a
 * single MNIST example after a minimal one-batch fit.
 *
 * @throws Exception on MNIST download failure
 */
@Test
public void testPredict() throws Exception {
    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().regularization(false).learningRate(1.0).weightInit(WeightInit.XAVIER).seed(12345L).list().layer(0, new DenseLayer.Builder().nIn(784).nOut(50).activation(Activation.RELU).build()).layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(50).nOut(10).build()).pretrain(false).backprop(true).setInputType(InputType.convolutional(28, 28, 1)).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    // Tiny fit: 10 examples in one batch — enough to exercise the training path.
    DataSetIterator ds = new MnistDataSetIterator(10, 10);
    net.fit(ds);
    DataSetIterator testDs = new MnistDataSetIterator(1, 1);
    DataSet testData = testDs.next();
    // Label names must be attached before getLabelName/predict can resolve them.
    testData.setLabelNames(Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
    // Fixed typo in local name: actualLables -> actualLabels.
    String actualLabels = testData.getLabelName(0);
    List<String> prediction = net.predict(testData);
    assertTrue(actualLabels != null);
    assertTrue(prediction.get(0) != null);
}
Also used : BaseOutputLayer(org.deeplearning4j.nn.layers.BaseOutputLayer) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) DataSet(org.nd4j.linalg.dataset.DataSet) IrisDataSetIterator(org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) Test(org.junit.Test)

Example 20 with MnistDataSetIterator

use of org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator in project deeplearning4j by deeplearning4j.

The class TestSparkMultiLayerParameterAveraging contains the method testIterationCounts, shown below.

/**
 * Verifies that SparkDl4jMultiLayer's parameter-averaging training advances the network's
 * iteration count by exactly minibatchesPerWorkerPerEpoch per fit() call, regardless of
 * the averaging frequency (tested at 1, 5, and 10).
 *
 * Relies on class-level members not shown here: sc (a JavaSparkContext) and numExecutors().
 *
 * @throws Exception on MNIST download or Spark failure
 */
@Test
public void testIterationCounts() throws Exception {
    int dataSetObjSize = 5;
    int batchSizePerExecutor = 25;
    List<DataSet> list = new ArrayList<>();
    int minibatchesPerWorkerPerEpoch = 10;
    // Materialize exactly enough MNIST examples for every worker to see
    // minibatchesPerWorkerPerEpoch minibatches of batchSizePerExecutor each.
    DataSetIterator iter = new MnistDataSetIterator(dataSetObjSize, batchSizePerExecutor * numExecutors() * minibatchesPerWorkerPerEpoch, false);
    while (iter.hasNext()) {
        list.add(iter.next());
    }
    // Minimal 2-layer MLP; iterations(1) so each minibatch advances the count by one.
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().updater(Updater.RMSPROP).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1).list().layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(28 * 28).nOut(50).activation(Activation.TANH).build()).layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(50).nOut(10).activation(Activation.SOFTMAX).build()).pretrain(false).backprop(true).build();
    for (int avgFreq : new int[] { 1, 5, 10 }) {
        System.out.println("--- Avg freq " + avgFreq + " ---");
        // Fresh network per averaging frequency (conf.clone()) so counts start at zero.
        SparkDl4jMultiLayer sparkNet = new SparkDl4jMultiLayer(sc, conf.clone(), new ParameterAveragingTrainingMaster.Builder(numExecutors(), dataSetObjSize).batchSizePerWorker(batchSizePerExecutor).averagingFrequency(avgFreq).repartionData(Repartition.Always).build());
        sparkNet.setListeners(new ScoreIterationListener(1));
        JavaRDD<DataSet> rdd = sc.parallelize(list);
        // Before any training the iteration count must be zero...
        assertEquals(0, sparkNet.getNetwork().getLayerWiseConfigurations().getIterationCount());
        sparkNet.fit(rdd);
        // ...one epoch advances it by exactly minibatchesPerWorkerPerEpoch...
        assertEquals(minibatchesPerWorkerPerEpoch, sparkNet.getNetwork().getLayerWiseConfigurations().getIterationCount());
        sparkNet.fit(rdd);
        // ...and a second epoch doubles it (count accumulates across fit calls).
        assertEquals(2 * minibatchesPerWorkerPerEpoch, sparkNet.getNetwork().getLayerWiseConfigurations().getIterationCount());
        sparkNet.getTrainingMaster().deleteTempFiles(sc);
    }
}
Also used : MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) MultiDataSet(org.nd4j.linalg.dataset.MultiDataSet) DataSet(org.nd4j.linalg.dataset.DataSet) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) LabeledPoint(org.apache.spark.mllib.regression.LabeledPoint) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) DenseLayer(org.deeplearning4j.nn.conf.layers.DenseLayer) SparkDl4jMultiLayer(org.deeplearning4j.spark.impl.multilayer.SparkDl4jMultiLayer) ScoreIterationListener(org.deeplearning4j.optimize.listeners.ScoreIterationListener) IrisDataSetIterator(org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) BaseSparkTest(org.deeplearning4j.spark.BaseSparkTest) Test(org.junit.Test)

Aggregations

MnistDataSetIterator (org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator)44 Test (org.junit.Test)41 DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator)40 DataSet (org.nd4j.linalg.dataset.DataSet)31 MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration)26 MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork)26 INDArray (org.nd4j.linalg.api.ndarray.INDArray)22 NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration)15 ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer)12 IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator)11 OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer)11 DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer)10 ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener)10 Evaluation (org.deeplearning4j.eval.Evaluation)7 ComputationGraphConfiguration (org.deeplearning4j.nn.conf.ComputationGraphConfiguration)7 LabeledPoint (org.apache.spark.mllib.regression.LabeledPoint)6 BaseSparkTest (org.deeplearning4j.spark.BaseSparkTest)6 MultiDataSet (org.nd4j.linalg.dataset.MultiDataSet)6 File (java.io.File)4 RecordReaderDataSetIterator (org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator)4