Search in sources :

Example 71 with ClassPathResource

use of org.nd4j.linalg.io.ClassPathResource in project deeplearning4j by deeplearning4j.

The following example is taken from the class RegressionTest071, method regressionTestCNN1.

@Test
public void regressionTestCNN1() throws Exception {
    // Regression test: a CNN saved by DL4J 0.7.1 must deserialize with its full
    // configuration (layers, updater state, and parameters) intact.
    File f = new ClassPathResource("regression_testing/071/071_ModelSerializer_Regression_CNN_1.zip").getTempFileFromArchive();
    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);
    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());
    // Layer 0: convolution layer.
    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(WeightInit.RELU, l0.getWeightInit());
    assertEquals(Updater.RMSPROP, l0.getUpdater());
    assertEquals(0.96, l0.getRmsDecay(), 1e-6);
    assertEquals(0.15, l0.getLearningRate(), 1e-6);
    assertArrayEquals(new int[] { 2, 2 }, l0.getKernelSize());
    assertArrayEquals(new int[] { 1, 1 }, l0.getStride());
    assertArrayEquals(new int[] { 0, 0 }, l0.getPadding());
    assertEquals(l0.getConvolutionMode(), ConvolutionMode.Same);
    // Layer 1: subsampling (max pooling) layer.
    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] { 2, 2 }, l1.getKernelSize());
    assertArrayEquals(new int[] { 1, 1 }, l1.getStride());
    assertArrayEquals(new int[] { 0, 0 }, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    assertEquals(l1.getConvolutionMode(), ConvolutionMode.Same);
    // Layer 2: output layer.
    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    // Fixed: previously asserted on l1 (the subsampling layer); the sigmoid
    // activation belongs to the output layer l2.
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertEquals(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD, l2.getLossFunction());
    //TODO
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood);
    assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    // Fixed: these four assertions duplicated the l0 checks above verbatim;
    // the intent here is to validate the output layer l2.
    assertEquals(WeightInit.RELU, l2.getWeightInit());
    assertEquals(Updater.RMSPROP, l2.getUpdater());
    assertEquals(0.96, l2.getRmsDecay(), 1e-6);
    assertEquals(0.15, l2.getLearningRate(), 1e-6);
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
    // Parameters and updater state were serialized as linspace(1..n); verify
    // both round-trip exactly.
    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());
    int updaterSize = net.getUpdater().stateSizeForLayer(net);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Also used : LossNegativeLogLikelihood(org.nd4j.linalg.lossfunctions.impl.LossNegativeLogLikelihood) CnnToFeedForwardPreProcessor(org.deeplearning4j.nn.conf.preprocessor.CnnToFeedForwardPreProcessor) File(java.io.File) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) ClassPathResource(org.nd4j.linalg.io.ClassPathResource) Test(org.junit.Test)

Example 72 with ClassPathResource

use of org.nd4j.linalg.io.ClassPathResource in project deeplearning4j by deeplearning4j.

The following example is taken from the class ModelGuesserTest, method testModelGuess.

@Test
public void testModelGuess() throws Exception {
    // ModelGuesser should be able to load both an MLP and a CNN Keras model
    // purely from the file, without being told the model type.
    ClassPathResource sequenceResource = new ClassPathResource("modelimport/keras/examples/mnist_mlp/mnist_mlp_tf_model.h5");
    assertTrue(sequenceResource.exists());
    File f = getTempFile(sequenceResource);
    Model guess1 = ModelGuesser.loadModelGuess(f.getAbsolutePath());
    assumeNotNull(guess1);
    ClassPathResource sequenceResource2 = new ClassPathResource("modelimport/keras/examples/mnist_cnn/mnist_cnn_tf_model.h5");
    assertTrue(sequenceResource2.exists());
    // Fixed: previously passed sequenceResource, so the CNN model was never
    // actually loaded and the MLP was tested twice.
    File f2 = getTempFile(sequenceResource2);
    Model guess2 = ModelGuesser.loadModelGuess(f2.getAbsolutePath());
    assumeNotNull(guess2);
}
Also used : Model(org.deeplearning4j.nn.api.Model) File(java.io.File) ClassPathResource(org.nd4j.linalg.io.ClassPathResource) Test(org.junit.Test)

Example 73 with ClassPathResource

use of org.nd4j.linalg.io.ClassPathResource in project deeplearning4j by deeplearning4j.

The following example is taken from the class BarnesHutTsneTest, method testTsne.

@Test
public void testTsne() throws Exception {
    // Smoke test: Barnes-Hut t-SNE should fit a small MNIST subset without error.
    Nd4j.ENFORCE_NUMERICAL_STABILITY = true;
    DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);
    Nd4j.getRandom().setSeed(123);
    // Short run (10 iterations) — we only check that fitting completes.
    BarnesHutTsne tsne = new BarnesHutTsne.Builder()
                    .stopLyingIteration(10)
                    .setMaxIter(10)
                    .theta(0.5)
                    .learningRate(500)
                    .useAdaGrad(false)
                    .build();
    ClassPathResource featureResource = new ClassPathResource("/mnist2500_X.txt");
    File featureFile = featureResource.getTempFileFromArchive();
    // Restrict to the first 100 rows and all 784 pixel columns.
    INDArray features = Nd4j.readNumpy(featureFile.getAbsolutePath(), "   ")
                    .get(NDArrayIndex.interval(0, 100), NDArrayIndex.interval(0, 784));
    ClassPathResource labelResource = new ClassPathResource("mnist2500_labels.txt");
    // NOTE(review): the label subset is read but never used — kept to preserve
    // the original behavior (verifies the resource is readable).
    List<String> labelSubset = IOUtils.readLines(labelResource.getInputStream()).subList(0, 100);
    tsne.fit(features);
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) File(java.io.File) ClassPathResource(org.nd4j.linalg.io.ClassPathResource) Test(org.junit.Test)

Example 74 with ClassPathResource

use of org.nd4j.linalg.io.ClassPathResource in project deeplearning4j by deeplearning4j.

The following example is taken from the class RegressionTest050, method regressionTestMLP2.

@Test
public void regressionTestMLP2() throws Exception {
    // Regression test: an MLP saved by DL4J 0.5.0 must deserialize with its full
    // configuration (layers, updater state, and parameters) intact.
    File f = new ClassPathResource("regression_testing/050/050_ModelSerializer_Regression_MLP_2.zip").getTempFileFromArchive();
    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);
    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());
    // Layer 0: dense hidden layer.
    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.DISTRIBUTION, l0.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l0.getDist());
    assertEquals(Updater.RMSPROP, l0.getUpdater());
    assertEquals(0.96, l0.getRmsDecay(), 1e-6);
    assertEquals(0.15, l0.getLearningRate(), 1e-6);
    assertEquals(0.6, l0.getDropOut(), 1e-6);
    assertEquals(0.1, l0.getL1(), 1e-6);
    assertEquals(0.2, l0.getL2(), 1e-6);
    // Layer 1: output layer.
    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("identity", l1.getActivationFn().toString());
    assertEquals(LossFunctions.LossFunction.MSE, l1.getLossFunction());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    // Fixed: these three assertions duplicated the l0 checks above verbatim;
    // the surrounding assertions (rmsDecay/learningRate/dropOut below) show the
    // intent is to validate the output layer l1.
    assertEquals(WeightInit.DISTRIBUTION, l1.getWeightInit());
    assertEquals(new NormalDistribution(0.1, 1.2), l1.getDist());
    assertEquals(Updater.RMSPROP, l1.getUpdater());
    assertEquals(0.96, l1.getRmsDecay(), 1e-6);
    assertEquals(0.15, l1.getLearningRate(), 1e-6);
    assertEquals(0.6, l1.getDropOut(), 1e-6);
    assertEquals(0.1, l1.getL1(), 1e-6);
    assertEquals(0.2, l1.getL2(), 1e-6);
    // Parameters and updater state were serialized as linspace(1..n); verify
    // both round-trip exactly.
    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());
    int updaterSize = net.getUpdater().stateSizeForLayer(net);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Also used : LossMSE(org.nd4j.linalg.lossfunctions.impl.LossMSE) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) ActivationLReLU(org.nd4j.linalg.activations.impl.ActivationLReLU) NormalDistribution(org.deeplearning4j.nn.conf.distribution.NormalDistribution) File(java.io.File) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) ClassPathResource(org.nd4j.linalg.io.ClassPathResource) Test(org.junit.Test)

Example 75 with ClassPathResource

use of org.nd4j.linalg.io.ClassPathResource in project deeplearning4j by deeplearning4j.

The following example is taken from the class RegressionTest060, method regressionTestCNN1.

@Test
public void regressionTestCNN1() throws Exception {
    // Regression test: a CNN saved by DL4J 0.6.0 must deserialize with its full
    // configuration (layers, updater state, and parameters) intact.
    File f = new ClassPathResource("regression_testing/060/060_ModelSerializer_Regression_CNN_1.zip").getTempFileFromArchive();
    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);
    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());
    // Layer 0: convolution layer.
    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(WeightInit.RELU, l0.getWeightInit());
    assertEquals(Updater.RMSPROP, l0.getUpdater());
    assertEquals(0.96, l0.getRmsDecay(), 1e-6);
    assertEquals(0.15, l0.getLearningRate(), 1e-6);
    assertArrayEquals(new int[] { 2, 2 }, l0.getKernelSize());
    assertArrayEquals(new int[] { 1, 1 }, l0.getStride());
    assertArrayEquals(new int[] { 0, 0 }, l0.getPadding());
    //Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set
    assertEquals(l0.getConvolutionMode(), ConvolutionMode.Truncate);
    // Layer 1: subsampling (max pooling) layer.
    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] { 2, 2 }, l1.getKernelSize());
    assertArrayEquals(new int[] { 1, 1 }, l1.getStride());
    assertArrayEquals(new int[] { 0, 0 }, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    //Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set
    assertEquals(l1.getConvolutionMode(), ConvolutionMode.Truncate);
    // Layer 2: output layer.
    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    // Fixed: previously asserted on l1 (the subsampling layer); the sigmoid
    // activation belongs to the output layer l2.
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertEquals(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD, l2.getLossFunction());
    //TODO
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood);
    assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    // Fixed: these four assertions duplicated the l0 checks above verbatim;
    // the intent here is to validate the output layer l2.
    assertEquals(WeightInit.RELU, l2.getWeightInit());
    assertEquals(Updater.RMSPROP, l2.getUpdater());
    assertEquals(0.96, l2.getRmsDecay(), 1e-6);
    assertEquals(0.15, l2.getLearningRate(), 1e-6);
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
    // Parameters and updater state were serialized as linspace(1..n); verify
    // both round-trip exactly.
    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());
    int updaterSize = net.getUpdater().stateSizeForLayer(net);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
Also used : LossNegativeLogLikelihood(org.nd4j.linalg.lossfunctions.impl.LossNegativeLogLikelihood) CnnToFeedForwardPreProcessor(org.deeplearning4j.nn.conf.preprocessor.CnnToFeedForwardPreProcessor) File(java.io.File) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) ClassPathResource(org.nd4j.linalg.io.ClassPathResource) Test(org.junit.Test)

Aggregations

ClassPathResource (org.nd4j.linalg.io.ClassPathResource)112 Test (org.junit.Test)100 lombok.val (lombok.val)31 INDArray (org.nd4j.linalg.api.ndarray.INDArray)26 SequenceRecordReader (org.datavec.api.records.reader.SequenceRecordReader)23 CSVSequenceRecordReader (org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader)23 DataSet (org.nd4j.linalg.dataset.DataSet)23 File (java.io.File)22 MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork)20 FileSplit (org.datavec.api.split.FileSplit)18 CollectionSequenceRecordReader (org.datavec.api.records.reader.impl.collection.CollectionSequenceRecordReader)14 Ignore (org.junit.Ignore)14 CSVRecordReader (org.datavec.api.records.reader.impl.csv.CSVRecordReader)13 RecordReader (org.datavec.api.records.reader.RecordReader)12 NumberedFileInputSplit (org.datavec.api.split.NumberedFileInputSplit)12 MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration)12 MultiDataSet (org.nd4j.linalg.dataset.api.MultiDataSet)11 MultiDataSetIterator (org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator)8 RecordMetaData (org.datavec.api.records.metadata.RecordMetaData)7 ImageRecordReader (org.datavec.image.recordreader.ImageRecordReader)7