
Example 56 with Gradient

Use of org.deeplearning4j.nn.gradient.Gradient in project deeplearning4j by deeplearning4j.

From the class MultiLayerTestRNN, method testTruncatedBPTTVsBPTT.

@Test
public void testTruncatedBPTTVsBPTT() {
    //Under some (limited) circumstances, we expect BPTT and truncated BPTT to be identical
    //Specifically TBPTT over entire data vector
    int timeSeriesLength = 12;
    int miniBatchSize = 7;
    int nIn = 5;
    int nOut = 4;
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).list()
            .layer(0, new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(nIn).nOut(7)
                    .activation(Activation.TANH).weightInit(WeightInit.DISTRIBUTION)
                    .dist(new NormalDistribution(0, 0.5)).build())
            .layer(1, new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8)
                    .activation(Activation.TANH).weightInit(WeightInit.DISTRIBUTION)
                    .dist(new NormalDistribution(0, 0.5)).build())
            .layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT).nIn(8).nOut(nOut)
                    .activation(Activation.SOFTMAX).weightInit(WeightInit.DISTRIBUTION)
                    .dist(new NormalDistribution(0, 0.5)).build())
            .backprop(true).build();
    assertEquals(BackpropType.Standard, conf.getBackpropType());
    MultiLayerConfiguration confTBPTT = new NeuralNetConfiguration.Builder().seed(12345).list()
            .layer(0, new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(nIn).nOut(7)
                    .activation(Activation.TANH).weightInit(WeightInit.DISTRIBUTION)
                    .dist(new NormalDistribution(0, 0.5)).build())
            .layer(1, new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(7).nOut(8)
                    .activation(Activation.TANH).weightInit(WeightInit.DISTRIBUTION)
                    .dist(new NormalDistribution(0, 0.5)).build())
            .layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT).nIn(8).nOut(nOut)
                    .activation(Activation.SOFTMAX).weightInit(WeightInit.DISTRIBUTION)
                    .dist(new NormalDistribution(0, 0.5)).build())
            .backprop(true).backpropType(BackpropType.TruncatedBPTT)
            .tBPTTBackwardLength(timeSeriesLength).tBPTTForwardLength(timeSeriesLength)
            .build();
    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork mln = new MultiLayerNetwork(conf);
    mln.init();
    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork mlnTBPTT = new MultiLayerNetwork(confTBPTT);
    mlnTBPTT.init();
    assertTrue(mlnTBPTT.getLayerWiseConfigurations().getBackpropType() == BackpropType.TruncatedBPTT);
    assertTrue(mlnTBPTT.getLayerWiseConfigurations().getTbpttFwdLength() == timeSeriesLength);
    assertTrue(mlnTBPTT.getLayerWiseConfigurations().getTbpttBackLength() == timeSeriesLength);
    INDArray inputData = Nd4j.rand(new int[] { miniBatchSize, nIn, timeSeriesLength });
    INDArray labels = Nd4j.rand(new int[] { miniBatchSize, nOut, timeSeriesLength });
    mln.setInput(inputData);
    mln.setLabels(labels);
    mlnTBPTT.setInput(inputData);
    mlnTBPTT.setLabels(labels);
    mln.computeGradientAndScore();
    mlnTBPTT.computeGradientAndScore();
    Pair<Gradient, Double> mlnPair = mln.gradientAndScore();
    Pair<Gradient, Double> tbpttPair = mlnTBPTT.gradientAndScore();
    assertEquals(mlnPair.getFirst().gradientForVariable(), tbpttPair.getFirst().gradientForVariable());
    assertEquals(mlnPair.getSecond(), tbpttPair.getSecond());
    //Check states: expect stateMap to be empty but tBpttStateMap to not be
    Map<String, INDArray> l0StateMLN = mln.rnnGetPreviousState(0);
    Map<String, INDArray> l0StateTBPTT = mlnTBPTT.rnnGetPreviousState(0);
    Map<String, INDArray> l1StateMLN = mln.rnnGetPreviousState(1);
    Map<String, INDArray> l1StateTBPTT = mlnTBPTT.rnnGetPreviousState(1);
    Map<String, INDArray> l0TBPTTStateMLN = ((BaseRecurrentLayer<?>) mln.getLayer(0)).rnnGetTBPTTState();
    Map<String, INDArray> l0TBPTTStateTBPTT = ((BaseRecurrentLayer<?>) mlnTBPTT.getLayer(0)).rnnGetTBPTTState();
    Map<String, INDArray> l1TBPTTStateMLN = ((BaseRecurrentLayer<?>) mln.getLayer(1)).rnnGetTBPTTState();
    Map<String, INDArray> l1TBPTTStateTBPTT = ((BaseRecurrentLayer<?>) mlnTBPTT.getLayer(1)).rnnGetTBPTTState();
    assertTrue(l0StateMLN.isEmpty());
    assertTrue(l0StateTBPTT.isEmpty());
    assertTrue(l1StateMLN.isEmpty());
    assertTrue(l1StateTBPTT.isEmpty());
    assertTrue(l0TBPTTStateMLN.isEmpty());
    assertTrue(l0TBPTTStateTBPTT.size() == 2);
    assertTrue(l1TBPTTStateMLN.isEmpty());
    assertTrue(l1TBPTTStateTBPTT.size() == 2);
    INDArray tbpttActL0 = l0TBPTTStateTBPTT.get(GravesLSTM.STATE_KEY_PREV_ACTIVATION);
    INDArray tbpttActL1 = l1TBPTTStateTBPTT.get(GravesLSTM.STATE_KEY_PREV_ACTIVATION);
    List<INDArray> activations = mln.feedForward(inputData, true);
    INDArray l0Act = activations.get(1);
    INDArray l1Act = activations.get(2);
    INDArray expL0Act = l0Act.tensorAlongDimension(timeSeriesLength - 1, 1, 0);
    INDArray expL1Act = l1Act.tensorAlongDimension(timeSeriesLength - 1, 1, 0);
    assertEquals(tbpttActL0, expL0Act);
    assertEquals(tbpttActL1, expL1Act);
}
Also used : RnnOutputLayer(org.deeplearning4j.nn.conf.layers.RnnOutputLayer) Gradient(org.deeplearning4j.nn.gradient.Gradient) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) GravesLSTM(org.deeplearning4j.nn.layers.recurrent.GravesLSTM) INDArray(org.nd4j.linalg.api.ndarray.INDArray) NormalDistribution(org.deeplearning4j.nn.conf.distribution.NormalDistribution) BaseRecurrentLayer(org.deeplearning4j.nn.layers.recurrent.BaseRecurrentLayer) Test(org.junit.Test)
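
A note on the configuration above: the test only covers the degenerate case in which the truncation length equals the full time-series length, so truncated BPTT must reproduce standard BPTT exactly. In typical use the truncation window is shorter than the sequence, in which case the gradients are approximations and will generally differ. A minimal sketch of that configuration, reusing the variables and builder calls shown in the test (the window length of 50 is an illustrative value, not something taken from the test):

// Truncated BPTT with a window shorter than the sequence; gradients will generally
// differ from full BPTT here, so the equality assertions above would not hold.
MultiLayerConfiguration confShortTbptt = new NeuralNetConfiguration.Builder().seed(12345).list()
        .layer(0, new org.deeplearning4j.nn.conf.layers.GravesLSTM.Builder().nIn(nIn).nOut(7)
                .activation(Activation.TANH).build())
        .layer(1, new RnnOutputLayer.Builder(LossFunction.MCXENT).nIn(7).nOut(nOut)
                .activation(Activation.SOFTMAX).build())
        .backprop(true).backpropType(BackpropType.TruncatedBPTT)
        .tBPTTForwardLength(50).tBPTTBackwardLength(50)
        .build();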

Example 57 with Gradient

Use of org.deeplearning4j.nn.gradient.Gradient in project deeplearning4j by deeplearning4j.

From the class RBMTests, method testGradientFlattening.

@Test
public void testGradientFlattening() {
    INDArray features = Nd4j.create(new double[][] { { 0, 0, 0, 0, 0, 0 } });
    INDArray params = Nd4j.create(new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 });
    INDArray expectedParams = params.dup();
    RBM rbm = getRBMLayer(6, 3, HiddenUnit.BINARY, VisibleUnit.BINARY, params, true, false, 1, LossFunctions.LossFunction.SQUARED_LOSS, 1);
    //        INDArray expectedStepParams = Nd4j.create(new double[] {-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,-0.25,0.0,0.0,0.0,-0.5,-0.5,-0.5,-0.5,-0.5,-0.5});
    rbm.fit(features);
    Gradient g = rbm.gradient();
    List<INDArray> grList = new ArrayList<>();
    grList.add(g.getGradientFor("W"));
    grList.add(g.getGradientFor("b"));
    grList.add(g.getGradientFor("vb"));
    INDArray expectedGradient = Nd4j.toFlattened('f', grList);
    assertEquals(expectedParams.subi(expectedGradient), rbm.params());
}
Also used : Gradient(org.deeplearning4j.nn.gradient.Gradient) INDArray(org.nd4j.linalg.api.ndarray.INDArray) ArrayList(java.util.ArrayList) Test(org.junit.Test)
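
The final assertion expects the parameters after one fit to equal the initial (all-zero) parameters minus the flattened gradient, i.e. the applied update equals the stored gradient. For that subtraction to be meaningful, the per-variable gradients for "W", "b" and "vb" must be concatenated in the same column-major ('f') order as the layer's parameter vector. A minimal sketch of a sanity check on that alignment, continuing from the variables in the test above:

// The concatenated gradient must line up element-for-element with the parameter vector:
// 6*3 weights + 3 hidden biases + 6 visible biases = 27 values in this configuration.
INDArray flat = Nd4j.toFlattened('f', grList);
assertEquals(rbm.params().length(), flat.length());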

Example 58 with Gradient

Use of org.deeplearning4j.nn.gradient.Gradient in project deeplearning4j by deeplearning4j.

From the class BatchNormalizationTest, method testDnnForwardBackward.

@Test
public void testDnnForwardBackward() {
    double eps = 1e-5;
    int nIn = 4;
    int minibatch = 2;
    Nd4j.getRandom().setSeed(12345);
    INDArray input = Nd4j.rand('c', new int[] { minibatch, nIn });
    //TODO: other values for gamma/beta
    INDArray gamma = Nd4j.ones(1, nIn);
    INDArray beta = Nd4j.zeros(1, nIn);
    Layer l = getLayer(nIn, eps, false, -1, -1);
    INDArray mean = input.mean(0);
    INDArray var = input.var(false, 0);
    INDArray xHat = input.subRowVector(mean).divRowVector(Transforms.sqrt(var.add(eps), true));
    INDArray outExpected = xHat.mulRowVector(gamma).addRowVector(beta);
    INDArray out = l.activate(input, true);
    System.out.println(Arrays.toString(outExpected.data().asDouble()));
    System.out.println(Arrays.toString(out.data().asDouble()));
    assertEquals(outExpected, out);
    //-------------------------------------------------------------
    //Check backprop
    //dL/dy
    INDArray epsilon = Nd4j.rand(minibatch, nIn);
    INDArray dldgammaExp = epsilon.mul(xHat).sum(0);
    INDArray dldbetaExp = epsilon.sum(0);
    INDArray dldxhat = epsilon.mulRowVector(gamma);
    INDArray dldvar = dldxhat.mul(input.subRowVector(mean)).mul(-0.5).mulRowVector(Transforms.pow(var.add(eps), -3.0 / 2.0, true)).sum(0);
    INDArray dldmu = dldxhat.mulRowVector(Transforms.pow(var.add(eps), -1.0 / 2.0, true)).neg().sum(0).add(dldvar.mul(input.subRowVector(mean).mul(-2.0).sum(0).div(minibatch)));
    INDArray dldinExp = dldxhat.mulRowVector(Transforms.pow(var.add(eps), -1.0 / 2.0, true)).add(input.subRowVector(mean).mul(2.0 / minibatch).mulRowVector(dldvar)).addRowVector(dldmu.mul(1.0 / minibatch));
    Pair<Gradient, INDArray> p = l.backpropGradient(epsilon);
    INDArray dldgamma = p.getFirst().getGradientFor("gamma");
    INDArray dldbeta = p.getFirst().getGradientFor("beta");
    assertEquals(dldgammaExp, dldgamma);
    assertEquals(dldbetaExp, dldbeta);
    System.out.println("EPSILONS");
    System.out.println(Arrays.toString(dldinExp.data().asDouble()));
    System.out.println(Arrays.toString(p.getSecond().dup().data().asDouble()));
    assertEquals(dldinExp, p.getSecond());
}
Also used : Gradient(org.deeplearning4j.nn.gradient.Gradient) INDArray(org.nd4j.linalg.api.ndarray.INDArray) Layer(org.deeplearning4j.nn.api.Layer) Test(org.junit.Test)
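
The hand-built arrays in this test implement the standard batch-normalization backward pass. Written out (with m the minibatch size, ε the stability constant eps from the test, and the INDArray named epsilon holding ∂L/∂y):

\[
\hat{x}_i = \frac{x_i - \mu}{\sqrt{\sigma^2 + \epsilon}}, \qquad
\frac{\partial L}{\partial \gamma} = \sum_i \frac{\partial L}{\partial y_i}\,\hat{x}_i, \qquad
\frac{\partial L}{\partial \beta} = \sum_i \frac{\partial L}{\partial y_i}, \qquad
\frac{\partial L}{\partial \hat{x}_i} = \frac{\partial L}{\partial y_i}\,\gamma
\]
\[
\frac{\partial L}{\partial \sigma^2} = \sum_i \frac{\partial L}{\partial \hat{x}_i}\,(x_i - \mu)\left(-\tfrac{1}{2}\right)(\sigma^2 + \epsilon)^{-3/2}, \qquad
\frac{\partial L}{\partial \mu} = -\sum_i \frac{\partial L}{\partial \hat{x}_i}\,(\sigma^2 + \epsilon)^{-1/2} + \frac{\partial L}{\partial \sigma^2}\,\frac{\sum_i -2(x_i - \mu)}{m}
\]
\[
\frac{\partial L}{\partial x_i} = \frac{\partial L}{\partial \hat{x}_i}\,(\sigma^2 + \epsilon)^{-1/2} + \frac{\partial L}{\partial \sigma^2}\,\frac{2(x_i - \mu)}{m} + \frac{\partial L}{\partial \mu}\,\frac{1}{m}
\]

These correspond line for line to dldgammaExp, dldbetaExp, dldxhat, dldvar, dldmu and dldinExp in the code above.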

Example 59 with Gradient

Use of org.deeplearning4j.nn.gradient.Gradient in project deeplearning4j by deeplearning4j.

From the class BatchNormalizationTest, method testGradientAndUpdaters.

@Test
public void testGradientAndUpdaters() throws Exception {
    //Global mean/variance are part of the parameter vector. Expect 0 gradient, and no-op updater for these
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
            .updater(Updater.RMSPROP).seed(12345).list()
            .layer(0, new ConvolutionLayer.Builder().nIn(1).nOut(6).weightInit(WeightInit.XAVIER)
                    .activation(Activation.IDENTITY).build())
            .layer(1, new BatchNormalization.Builder().build())
            .layer(2, new ActivationLayer.Builder().activation(Activation.LEAKYRELU).build())
            .layer(3, new DenseLayer.Builder().nOut(10).activation(Activation.LEAKYRELU).build())
            .layer(4, new BatchNormalization.Builder().build())
            .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).weightInit(WeightInit.XAVIER)
                    .activation(Activation.SOFTMAX).nOut(10).build())
            .backprop(true).pretrain(false)
            .setInputType(InputType.convolutionalFlat(28, 28, 1)).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    DataSetIterator iter = new MnistDataSetIterator(16, true, 12345);
    DataSet ds = iter.next();
    net.setInput(ds.getFeatures());
    net.setLabels(ds.getLabels());
    net.computeGradientAndScore();
    Gradient g = net.gradient();
    Map<String, INDArray> map = g.gradientForVariable();
    for (String s : map.keySet()) {
        INDArray grad = map.get(s);
        if (s.endsWith(BatchNormalizationParamInitializer.GLOBAL_MEAN) || s.endsWith(BatchNormalizationParamInitializer.GLOBAL_VAR)) {
            assertEquals(Nd4j.zeros(grad.shape()), grad);
        }
    }
    org.deeplearning4j.nn.api.Updater u = net.getUpdater();
    Field f = MultiLayerUpdater.class.getDeclaredField("layerUpdaters");
    f.setAccessible(true);
    org.deeplearning4j.nn.api.Updater[] updaters = (org.deeplearning4j.nn.api.Updater[]) f.get(u);
    assertNotNull(updaters);
    assertEquals(6, updaters.length);
    for (int i = 0; i <= 5; i++) {
        LayerUpdater lu = (LayerUpdater) updaters[i];
        Map<String, GradientUpdater> guMap = lu.getUpdaterForVariable();
        for (Map.Entry<String, GradientUpdater> entry : guMap.entrySet()) {
            if (i == 1 || i == 4) {
                String param = entry.getKey();
                if (BatchNormalizationParamInitializer.GLOBAL_MEAN.equals(param) || BatchNormalizationParamInitializer.GLOBAL_VAR.equals(param)) {
                    assertTrue(entry.getValue() instanceof NoOpUpdater);
                } else {
                    assertTrue(entry.getValue() instanceof RmsProp);
                }
            } else {
                assertTrue(entry.getValue() instanceof RmsProp);
            }
        }
    }
}
Also used : DataSet(org.nd4j.linalg.dataset.DataSet) BatchNormalization(org.deeplearning4j.nn.conf.layers.BatchNormalization) Field(java.lang.reflect.Field) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) LayerUpdater(org.deeplearning4j.nn.updater.LayerUpdater) Updater(org.deeplearning4j.nn.conf.Updater) MultiLayerUpdater(org.deeplearning4j.nn.updater.MultiLayerUpdater) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) Gradient(org.deeplearning4j.nn.gradient.Gradient) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) LayerUpdater(org.deeplearning4j.nn.updater.LayerUpdater) MultiLayerUpdater(org.deeplearning4j.nn.updater.MultiLayerUpdater) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) ListDataSetIterator(org.deeplearning4j.datasets.iterator.impl.ListDataSetIterator) Test(org.junit.Test)
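
The zero gradients and NoOpUpdater instances are checked because the batch-normalization global mean and variance are not learned by the optimizer at all: they are maintained as running moving-average estimates during forward passes, so any optimizer update to them would be wrong. A minimal sketch of a direct lookup of those gradients for the first BatchNormalization layer (layer index 1), assuming the usual "<layerIndex>_<paramName>" key format for MultiLayerNetwork gradient variables; if that assumption does not hold, the endsWith(...) loop in the test is the safe approach:

// Hypothetical direct-lookup variant of the loop above; the key format is an assumption.
INDArray globalMeanGrad = map.get("1_" + BatchNormalizationParamInitializer.GLOBAL_MEAN);
INDArray globalVarGrad = map.get("1_" + BatchNormalizationParamInitializer.GLOBAL_VAR);
assertEquals(Nd4j.zeros(globalMeanGrad.shape()), globalMeanGrad);
assertEquals(Nd4j.zeros(globalVarGrad.shape()), globalVarGrad);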

Example 60 with Gradient

Use of org.deeplearning4j.nn.gradient.Gradient in project deeplearning4j by deeplearning4j.

From the class BatchNormalizationTest, method testCnnForwardBackward.

@Test
public void testCnnForwardBackward() {
    double eps = 1e-5;
    int nIn = 4;
    int hw = 3;
    int minibatch = 2;
    Nd4j.getRandom().setSeed(12345);
    INDArray input = Nd4j.rand('c', new int[] { minibatch, nIn, hw, hw });
    //TODO: other values for gamma/beta
    INDArray gamma = Nd4j.ones(1, nIn);
    INDArray beta = Nd4j.zeros(1, nIn);
    Layer l = getLayer(nIn, eps, false, -1, -1);
    INDArray mean = input.mean(0, 2, 3);
    INDArray var = input.var(false, 0, 2, 3);
    INDArray xHat = Nd4j.getExecutioner().execAndReturn(new BroadcastSubOp(input, mean, input.dup(), 1));
    Nd4j.getExecutioner().execAndReturn(new BroadcastDivOp(xHat, Transforms.sqrt(var.add(eps), true), xHat, 1));
    INDArray outExpected = Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(xHat, gamma, xHat.dup(), 1));
    Nd4j.getExecutioner().execAndReturn(new BroadcastAddOp(outExpected, beta, outExpected, 1));
    INDArray out = l.activate(input, true);
    System.out.println(Arrays.toString(outExpected.data().asDouble()));
    System.out.println(Arrays.toString(out.data().asDouble()));
    assertEquals(outExpected, out);
    //-------------------------------------------------------------
    //Check backprop
    //dL/dy
    INDArray epsilon = Nd4j.rand('c', new int[] { minibatch, nIn, hw, hw });
    int effectiveMinibatch = minibatch * hw * hw;
    INDArray dldgammaExp = epsilon.mul(xHat).sum(0, 2, 3);
    INDArray dldbetaExp = epsilon.sum(0, 2, 3);
    //epsilon.mulRowVector(gamma);
    INDArray dldxhat = Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(epsilon, gamma, epsilon.dup(), 1));
    INDArray inputSubMean = Nd4j.getExecutioner().execAndReturn(new BroadcastSubOp(input, mean, input.dup(), 1));
    INDArray dldvar = dldxhat.mul(inputSubMean).mul(-0.5);
    dldvar = Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(dldvar, Transforms.pow(var.add(eps), -3.0 / 2.0, true), dldvar.dup(), 1));
    dldvar = dldvar.sum(0, 2, 3);
    INDArray dldmu = Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(dldxhat, Transforms.pow(var.add(eps), -1.0 / 2.0, true), dldxhat.dup(), 1)).neg().sum(0, 2, 3);
    dldmu = dldmu.add(dldvar.mul(inputSubMean.mul(-2.0).sum(0, 2, 3).div(effectiveMinibatch)));
    INDArray dldinExp = Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(dldxhat, Transforms.pow(var.add(eps), -1.0 / 2.0, true), dldxhat.dup(), 1));
    dldinExp = dldinExp.add(Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(inputSubMean.mul(2.0 / effectiveMinibatch), dldvar, inputSubMean.dup(), 1)));
    dldinExp = Nd4j.getExecutioner().execAndReturn(new BroadcastAddOp(dldinExp, dldmu.mul(1.0 / effectiveMinibatch), dldinExp.dup(), 1));
    Pair<Gradient, INDArray> p = l.backpropGradient(epsilon);
    INDArray dldgamma = p.getFirst().getGradientFor("gamma");
    INDArray dldbeta = p.getFirst().getGradientFor("beta");
    assertEquals(dldgammaExp, dldgamma);
    assertEquals(dldbetaExp, dldbeta);
    //        System.out.println("EPSILONS");
    //        System.out.println(Arrays.toString(dldinExp.data().asDouble()));
    //        System.out.println(Arrays.toString(p.getSecond().dup().data().asDouble()));
    assertEquals(dldinExp, p.getSecond());
}
Also used : BroadcastAddOp(org.nd4j.linalg.api.ops.impl.broadcast.BroadcastAddOp) Gradient(org.deeplearning4j.nn.gradient.Gradient) INDArray(org.nd4j.linalg.api.ndarray.INDArray) BroadcastSubOp(org.nd4j.linalg.api.ops.impl.broadcast.BroadcastSubOp) BroadcastMulOp(org.nd4j.linalg.api.ops.impl.broadcast.BroadcastMulOp) Layer(org.deeplearning4j.nn.api.Layer) BroadcastDivOp(org.nd4j.linalg.api.ops.impl.broadcast.BroadcastDivOp) Test(org.junit.Test)
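
Because the activations here are 4d (NCHW), the per-channel vectors gamma, beta, mean and variance cannot be applied with the row-vector helpers used in the dense-layer test; every step instead broadcasts along dimension 1, the channel axis. A minimal standalone sketch of that broadcast pattern (shapes chosen only for illustration):

// Multiply each channel of an NCHW tensor by a per-channel scale via broadcasting along dim 1.
INDArray nchw = Nd4j.rand('c', new int[] { 2, 4, 3, 3 });    // [minibatch, channels, height, width]
INDArray perChannelScale = Nd4j.linspace(1, 4, 4);           // one value per channel
INDArray scaled = Nd4j.getExecutioner().execAndReturn(
        new BroadcastMulOp(nchw, perChannelScale, nchw.dup(), 1));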

Aggregations

Gradient (org.deeplearning4j.nn.gradient.Gradient): 105
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 100
DefaultGradient (org.deeplearning4j.nn.gradient.DefaultGradient): 72
Test (org.junit.Test): 52
NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 35
Pair (org.deeplearning4j.berkeley.Pair): 28
Layer (org.deeplearning4j.nn.api.Layer): 28
Updater (org.deeplearning4j.nn.api.Updater): 25
DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer): 24
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer): 21
MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration): 9
MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork): 8
IActivation (org.nd4j.linalg.activations.IActivation): 6
HashMap (java.util.HashMap): 5
DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator): 5
ArrayList (java.util.ArrayList): 4
IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator): 4
DL4JInvalidInputException (org.deeplearning4j.exception.DL4JInvalidInputException): 4
IOutputLayer (org.deeplearning4j.nn.api.layers.IOutputLayer): 4
ComputationGraphConfiguration (org.deeplearning4j.nn.conf.ComputationGraphConfiguration): 4