
Example 11 with NormalDistribution

Use of org.deeplearning4j.nn.conf.distribution.NormalDistribution in project deeplearning4j by deeplearning4j.

From the class CNN1DGradientCheckTest, method testCnn1DWithSubsampling1D.

@Test
public void testCnn1DWithSubsampling1D() {
    Nd4j.getRandom().setSeed(12345);
    int[] minibatchSizes = { 1, 3 };
    int length = 7;
    int convNIn = 2;
    int convNOut1 = 3;
    int convNOut2 = 4;
    int finalNOut = 4; // number of output classes used by the labels below
    int[] kernels = { 1, 2, 4 };
    int stride = 1;
    int padding = 0;
    int pnorm = 2;
    Activation[] activations = { Activation.SIGMOID, Activation.TANH };
    SubsamplingLayer.PoolingType[] poolingTypes = new SubsamplingLayer.PoolingType[] { SubsamplingLayer.PoolingType.MAX, SubsamplingLayer.PoolingType.AVG, SubsamplingLayer.PoolingType.PNORM };
    for (Activation afn : activations) {
        for (SubsamplingLayer.PoolingType poolingType : poolingTypes) {
            for (int minibatchSize : minibatchSizes) {
                for (int kernel : kernels) {
                    INDArray input = Nd4j.rand(new int[] { minibatchSize, convNIn, length });
                    INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, length);
                    for (int i = 0; i < minibatchSize; i++) {
                        for (int j = 0; j < length; j++) {
                            labels.putScalar(new int[] { i, i % finalNOut, j }, 1.0);
                        }
                    }
                    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                            .regularization(false).learningRate(1.0).updater(Updater.SGD)
                            .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                            .convolutionMode(ConvolutionMode.Same)
                            .list()
                            .layer(0, new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
                                    .stride(stride).padding(padding).nIn(convNIn).nOut(convNOut1).build())
                            .layer(1, new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
                                    .stride(stride).padding(padding).nIn(convNOut1).nOut(convNOut2).build())
                            .layer(2, new Subsampling1DLayer.Builder(poolingType).kernelSize(kernel)
                                    .stride(stride).padding(padding).pnorm(pnorm).build())
                            .layer(3, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nOut(finalNOut).build())
                            .setInputType(InputType.recurrent(convNIn)).build();
                    String json = conf.toJson();
                    MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json);
                    assertEquals(conf, c2);
                    MultiLayerNetwork net = new MultiLayerNetwork(conf);
                    net.init();
                    String msg = "PoolingType=" + poolingType + ", minibatch=" + minibatchSize + ", activationFn=" + afn + ", kernel = " + kernel;
                    if (PRINT_RESULTS) {
                        System.out.println(msg);
                        for (int j = 0; j < net.getnLayers(); j++) System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams());
                    }
                    boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);
                    assertTrue(msg, gradOK);
                }
            }
        }
    }
}
Also used : Subsampling1DLayer(org.deeplearning4j.nn.conf.layers.Subsampling1DLayer) SubsamplingLayer(org.deeplearning4j.nn.conf.layers.SubsamplingLayer) Activation(org.nd4j.linalg.activations.Activation) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) Convolution1DLayer(org.deeplearning4j.nn.conf.layers.Convolution1DLayer) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) NormalDistribution(org.deeplearning4j.nn.conf.distribution.NormalDistribution) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) Test(org.junit.Test)
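
All of the examples in this group share the same weight-initialization idiom: WeightInit.DISTRIBUTION paired with a dist(...) call. Stripped of the gradient-check machinery, the pattern looks like the following minimal sketch (written against the same 0.x-era API as the tests above; the layer sizes and activations here are arbitrary placeholders):

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.distribution.NormalDistribution;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class NormalDistributionInitSketch {
    public static void main(String[] args) {
        // WeightInit.DISTRIBUTION tells the builder to draw initial weights from
        // the Distribution supplied via dist(); here N(mean = 0, stddev = 1).
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .weightInit(WeightInit.DISTRIBUTION)
                .dist(new NormalDistribution(0, 1))
                .list()
                .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.TANH).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(3).nOut(2).build())
                .build();
        System.out.println(conf.toJson());
    }
}

A tight N(0, 1) initialization like this helps keep the numerical gradient checks well conditioned: parameters start small and centered, so the finite-difference estimates stay close to the analytic gradients.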

Example 12 with NormalDistribution

Use of org.deeplearning4j.nn.conf.distribution.NormalDistribution in project deeplearning4j by deeplearning4j.

From the class GradientCheckTestsComputationGraph, method testBasicTwoOutputs.

@Test
public void testBasicTwoOutputs() {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
            .activation(Activation.TANH).updater(Updater.NONE).learningRate(1.0)
            .graphBuilder()
            .addInputs("in1", "in2")
            .addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
            .addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
            .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                    .nIn(2).nOut(2).activation(Activation.IDENTITY).build(), "d0")
            .addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                    .nIn(2).nOut(2).activation(Activation.IDENTITY).build(), "d1")
            .setOutputs("out1", "out2")
            .pretrain(false).backprop(true).build();
    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();
    System.out.println("Num layers: " + graph.getNumLayers());
    System.out.println("Num params: " + graph.numParams());
    Nd4j.getRandom().setSeed(12345);
    int nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(1, nParams);
    graph.setParams(newParams);
    int[] mbSizes = new int[] { 1, 3, 10 };
    for (int minibatch : mbSizes) {
        INDArray in1 = Nd4j.rand(minibatch, 2);
        INDArray in2 = Nd4j.rand(minibatch, 2);
        INDArray labels1 = Nd4j.rand(minibatch, 2);
        INDArray labels2 = Nd4j.rand(minibatch, 2);
        String testName = "testBasicStackUnstack() - minibatch = " + minibatch;
        if (PRINT_RESULTS) {
            System.out.println(testName);
            for (int j = 0; j < graph.getNumLayers(); j++) System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }
        boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] { in1, in2 }, new INDArray[] { labels1, labels2 });
        assertTrue(testName, gradOK);
    }
}
Also used : NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) NormalDistribution(org.deeplearning4j.nn.conf.distribution.NormalDistribution) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) ComputationGraph(org.deeplearning4j.nn.graph.ComputationGraph) Test(org.junit.Test)
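
The test above only verifies the gradients; to actually train a two-input, two-output graph you pair the arrays positionally in a MultiDataSet. A minimal usage sketch (same API version assumed; graph is the ComputationGraph built above, and the random arrays stand in for real data):

// Inputs and labels are matched to addInputs("in1", "in2") and
// setOutputs("out1", "out2") by position.
org.nd4j.linalg.dataset.MultiDataSet mds = new org.nd4j.linalg.dataset.MultiDataSet(
        new INDArray[] { Nd4j.rand(3, 2), Nd4j.rand(3, 2) },
        new INDArray[] { Nd4j.rand(3, 2), Nd4j.rand(3, 2) });
graph.fit(mds);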

Example 13 with NormalDistribution

Use of org.deeplearning4j.nn.conf.distribution.NormalDistribution in project deeplearning4j by deeplearning4j.

From the class GradientCheckTestsComputationGraph, method testMultipleOutputsLayer.

@Test
public void testMultipleOutputsLayer() {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
            .updater(Updater.NONE).learningRate(1.0).activation(Activation.TANH)
            .graphBuilder()
            .addInputs("i0")
            .addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0")
            .addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
            .addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
            .addLayer("d3", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
            .addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE)
                    .nIn(6).nOut(2).build(), "d1", "d2", "d3")
            .setOutputs("out")
            .pretrain(false).backprop(true).build();
    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();
    int[] minibatchSizes = { 1, 3 };
    for (int mb : minibatchSizes) {
        INDArray input = Nd4j.rand(mb, 2);
        INDArray out = Nd4j.rand(mb, 2);
        String msg = "testMultipleOutputsLayer() - minibatchSize = " + mb;
        if (PRINT_RESULTS) {
            System.out.println(msg);
            for (int j = 0; j < graph.getNumLayers(); j++) System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }
        boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] { input }, new INDArray[] { out });
        assertTrue(msg, gradOK);
    }
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) NormalDistribution(org.deeplearning4j.nn.conf.distribution.NormalDistribution) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) ComputationGraph(org.deeplearning4j.nn.graph.ComputationGraph) Test(org.junit.Test)
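
Note the .nIn(6) on the "out" layer: when addLayer() lists several inputs ("d1", "d2", "d3"), the graph merges them by concatenating activations along the feature dimension, so three 2-wide outputs arrive as one 6-wide input. A rough ND4J illustration of that merge (a sketch of the data layout, not the graph's internal merge vertex):

INDArray d1 = Nd4j.rand(3, 2);
INDArray d2 = Nd4j.rand(3, 2);
INDArray d3 = Nd4j.rand(3, 2);
// Concatenate along dimension 1: three [3, 2] arrays become one [3, 6] array.
INDArray merged = Nd4j.hstack(d1, d2, d3);
System.out.println(java.util.Arrays.toString(merged.shape())); // [3, 6]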

Example 14 with NormalDistribution

Use of org.deeplearning4j.nn.conf.distribution.NormalDistribution in project deeplearning4j by deeplearning4j.

From the class GradientCheckTestsComputationGraph, method testL2NormalizeVertex4d.

@Test
public void testL2NormalizeVertex4d() {
    Nd4j.getRandom().setSeed(12345);
    int h = 4;
    int w = 4;
    int dIn = 2;
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
            .activation(Activation.TANH).updater(Updater.NONE).learningRate(1.0)
            .graphBuilder()
            .addInputs("in1")
            .addLayer("d1", new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(2).build(), "in1")
            .addVertex("norm", new L2NormalizeVertex(), "d1")
            .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                    .nOut(2).activation(Activation.IDENTITY).build(), "norm")
            .setOutputs("out1")
            .setInputTypes(InputType.convolutional(h, w, dIn)).build();
    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();
    int[] mbSizes = new int[] { 1, 3, 10 };
    for (int minibatch : mbSizes) {
        INDArray in1 = Nd4j.rand(new int[] { minibatch, dIn, h, w });
        INDArray labels1 = Nd4j.rand(minibatch, 2);
        String testName = "testL2NormalizeVertex4d() - minibatch = " + minibatch;
        if (PRINT_RESULTS) {
            System.out.println(testName);
            for (int j = 0; j < graph.getNumLayers(); j++) System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }
        boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] { in1 }, new INDArray[] { labels1 });
        assertTrue(testName, gradOK);
    }
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) NormalDistribution(org.deeplearning4j.nn.conf.distribution.NormalDistribution) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) ComputationGraph(org.deeplearning4j.nn.graph.ComputationGraph) Test(org.junit.Test)
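
L2NormalizeVertex rescales each example's activations to unit L2 norm before they reach the output layer. For 2d activations the operation reduces to a row-wise division; the following is an illustrative sketch of that forward pass, not the vertex's implementation (which also handles the 4d convolutional case by normalizing over all non-minibatch dimensions):

INDArray act = Nd4j.rand(3, 5);                      // [minibatch, features]
INDArray rowNorms = act.norm2(1);                    // L2 norm of each example's row
INDArray normalized = act.divColumnVector(rowNorms); // each row now has unit L2 norm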

Example 15 with NormalDistribution

Use of org.deeplearning4j.nn.conf.distribution.NormalDistribution in project deeplearning4j by deeplearning4j.

From the class GradientCheckTestsComputationGraph, method testBasicStackUnstack.

@Test
public void testBasicStackUnstack() {
    int layerSizes = 2;
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
            .activation(Activation.TANH).updater(Updater.NONE).learningRate(1.0)
            .graphBuilder()
            .addInputs("in1", "in2")
            .addLayer("d0", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
            .addLayer("d1", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
            .addVertex("stack", new StackVertex(), "d0", "d1")
            .addLayer("d2", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
            .addVertex("u1", new UnstackVertex(0, 2), "d2")
            .addVertex("u2", new UnstackVertex(1, 2), "d2")
            .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                    .nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "u1")
            .addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                    .nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "u2")
            .setOutputs("out1", "out2")
            .pretrain(false).backprop(true).build();
    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();
    Nd4j.getRandom().setSeed(12345);
    int nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(1, nParams);
    graph.setParams(newParams);
    int[] mbSizes = new int[] { 1, 3, 10 };
    for (int minibatch : mbSizes) {
        INDArray in1 = Nd4j.rand(minibatch, layerSizes);
        INDArray in2 = Nd4j.rand(minibatch, layerSizes);
        INDArray labels1 = Nd4j.rand(minibatch, 2);
        INDArray labels2 = Nd4j.rand(minibatch, 2);
        String testName = "testBasicStackUnstack() - minibatch = " + minibatch;
        if (PRINT_RESULTS) {
            System.out.println(testName);
            for (int j = 0; j < graph.getNumLayers(); j++) System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }
        boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] { in1, in2 }, new INDArray[] { labels1, labels2 });
        assertTrue(testName, gradOK);
    }
}
Also used : NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) NormalDistribution(org.deeplearning4j.nn.conf.distribution.NormalDistribution) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) ComputationGraph(org.deeplearning4j.nn.graph.ComputationGraph) Test(org.junit.Test)
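
StackVertex concatenates its inputs along the minibatch dimension, which is what lets the single "d2" layer process both branches with shared weights, and UnstackVertex(index, numStacks) slices the result back apart. The shape bookkeeping, sketched directly in ND4J (an illustration of the data movement, not the vertex code; NDArrayIndex is org.nd4j.linalg.indexing.NDArrayIndex):

INDArray a = Nd4j.rand(3, 2);          // activations from "d0"
INDArray b = Nd4j.rand(3, 2);          // activations from "d1"
INDArray stacked = Nd4j.vstack(a, b);  // [6, 2]: what "stack" feeds into "d2"
INDArray u1 = stacked.get(NDArrayIndex.interval(0, 3), NDArrayIndex.all()); // UnstackVertex(0, 2)
INDArray u2 = stacked.get(NDArrayIndex.interval(3, 6), NDArrayIndex.all()); // UnstackVertex(1, 2)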

Aggregations

NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution): 90
Test (org.junit.Test): 87
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 76
MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration): 49
MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork): 43
NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 41
Random (java.util.Random): 28
ComputationGraphConfiguration (org.deeplearning4j.nn.conf.ComputationGraphConfiguration): 28
ComputationGraph (org.deeplearning4j.nn.graph.ComputationGraph): 22
GravesLSTM (org.deeplearning4j.nn.layers.recurrent.GravesLSTM): 13
DataSet (org.nd4j.linalg.dataset.DataSet): 13
RnnOutputLayer (org.deeplearning4j.nn.conf.layers.RnnOutputLayer): 12
IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator): 9
RnnToFeedForwardPreProcessor (org.deeplearning4j.nn.conf.preprocessor.RnnToFeedForwardPreProcessor): 6
Activation (org.nd4j.linalg.activations.Activation): 5
DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator): 5
DataNormalization (org.nd4j.linalg.dataset.api.preprocessor.DataNormalization): 5
NormalizerMinMaxScaler (org.nd4j.linalg.dataset.api.preprocessor.NormalizerMinMaxScaler): 5
LossFunction (org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction): 5
DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer): 4