Search in sources :

Example 1 with Layer

Use of org.deeplearning4j.nn.api.Layer in the deeplearning4j project.

In class BatchNormalizationTest, the method testCnnForwardPass:

@Test
public void testCnnForwardPass() {
    // Batch normalization over a 4d (CNN) activation: after normalization each
    // channel should have zero mean and unit stdev across the (example, h, w) dims.
    int depthOut = 10;
    Layer bnLayer = getLayer(depthOut, 0.0, false, -1, -1);
    // Parameters per channel: gamma, beta, global mean, global var -> 4 * depthOut
    assertEquals(4 * depthOut, bnLayer.numParams());

    int imgSize = 15;
    Nd4j.getRandom().setSeed(12345);
    INDArray input = Nd4j.rand(12345, 100, depthOut, imgSize, imgSize);
    INDArray activations = bnLayer.activate(input, true);
    assertEquals(4, activations.rank());

    INDArray channelMeans = activations.mean(0, 2, 3);
    INDArray channelStdevs = activations.std(false, 0, 2, 3);
    assertArrayEquals(new float[depthOut], channelMeans.data().asFloat(), 1e-6f);
    assertArrayEquals(Nd4j.ones(1, depthOut).data().asFloat(), channelStdevs.data().asFloat(), 1e-6f);

    // With gamma/beta locked to fixed values: per-channel mean should equal beta
    // and per-channel stdev should equal gamma
    double fixedGamma = 2.0;
    double fixedBeta = 3.0;
    bnLayer = getLayer(depthOut, 0.0, true, fixedGamma, fixedBeta);
    // Locked gamma/beta are not learned, so only global mean/var params remain
    assertEquals(2 * depthOut, bnLayer.numParams());

    activations = bnLayer.activate(input, true);
    channelMeans = activations.mean(0, 2, 3);
    channelStdevs = activations.std(false, 0, 2, 3);
    assertEquals(Nd4j.valueArrayOf(channelMeans.shape(), fixedBeta), channelMeans);
    assertEquals(Nd4j.valueArrayOf(channelStdevs.shape(), fixedGamma), channelStdevs);
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) Layer(org.deeplearning4j.nn.api.Layer) Test(org.junit.Test)

Example 2 with Layer

Use of org.deeplearning4j.nn.api.Layer in the deeplearning4j project.

In class BatchNormalizationTest, the method test2dVs4d:

@Test
public void test2dVs4d() {
    // A batch-norm layer should give identical results whether its input is
    // presented as [m*h*w, nOut] (2d) or as [m, nOut, h, w] (4d).
    Nd4j.getRandom().setSeed(12345);

    int miniBatch = 2;
    int height = 3;
    int width = 3;
    int depth = 2;

    INDArray input2d = Nd4j.rand('c', miniBatch * height * width, depth);
    INDArray input4d = input2d.dup();
    input4d = Shape.newShapeNoCopy(input4d, new int[] { miniBatch, height, width, depth }, false);
    assertNotNull(input4d);
    input4d = input4d.permute(0, 3, 1, 2).dup();
    // Copy the 4d data into an array with non-standard ('f'-order derived)
    // strides so the non-contiguous layout is exercised as well
    INDArray stridedTarget = Nd4j.rand(1, miniBatch * height * width * depth).reshape('f', height, width, miniBatch, depth).permute(2, 3, 1, 0);
    input4d = stridedTarget.assign(input4d);

    Layer layer2d = getLayer(depth);
    Layer layer4d = getLayer(depth);

    INDArray act2d = layer2d.activate(input2d.dup(), true);
    INDArray act4d = layer4d.activate(input4d.dup(), true);
    INDArray act4dAs2d = act4d.permute(0, 2, 3, 1).dup('c');
    act4dAs2d = Shape.newShapeNoCopy(act4dAs2d, new int[] { miniBatch * height * width, depth }, false);
    assertEquals(act2d, act4dAs2d);

    // Backprop: epsilons reshaped the same way must produce matching epsilon-out
    INDArray eps2d = Nd4j.rand('c', miniBatch * height * width, depth);
    INDArray eps4d = eps2d.dup();
    eps4d = Shape.newShapeNoCopy(eps4d, new int[] { miniBatch, height, width, depth }, false);
    assertNotNull(eps4d);
    eps4d = eps4d.permute(0, 3, 1, 2).dup();

    Pair<Gradient, INDArray> bp2d = layer2d.backpropGradient(eps2d);
    Pair<Gradient, INDArray> bp4d = layer4d.backpropGradient(eps4d);
    INDArray epsOut4dAs2d = bp4d.getSecond().permute(0, 2, 3, 1).dup('c');
    epsOut4dAs2d = Shape.newShapeNoCopy(epsOut4dAs2d, new int[] { miniBatch * height * width, depth }, false);
    assertEquals(bp2d.getSecond(), epsOut4dAs2d);
}
Also used : Gradient(org.deeplearning4j.nn.gradient.Gradient) INDArray(org.nd4j.linalg.api.ndarray.INDArray) Layer(org.deeplearning4j.nn.api.Layer) Test(org.junit.Test)

Example 3 with Layer

Use of org.deeplearning4j.nn.api.Layer in the deeplearning4j project.

In class ConvolutionLayerTest, the method testFeatureMapShapeMNIST:

@Test
public void testFeatureMapShapeMNIST() throws Exception {
    // 28x28 MNIST input, 9x9 kernel, stride 1, no padding -> 20x20 feature maps.
    int inputWidth = 28;
    int[] stride = { 1, 1 };
    int[] padding = { 0, 0 };
    int[] kernelSize = { 9, 9 };
    int nChannelsIn = 1;
    int depth = 20;
    // Standard convolution output-size formula; input is square, so this
    // single value covers both spatial dimensions
    int featureMapWidth = (inputWidth + 2 * padding[1] - kernelSize[1]) / stride[1] + 1;

    INDArray input = getMnistData();
    Layer layer = getCNNConfig(nChannelsIn, depth, kernelSize, stride, padding);
    INDArray convActivations = layer.activate(input);

    // Activations are [miniBatch, depth, h, w]
    assertEquals(featureMapWidth, convActivations.size(2));
    assertEquals(depth, convActivations.size(1));
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) ConvolutionLayer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer) Layer(org.deeplearning4j.nn.api.Layer) OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) DenseLayer(org.deeplearning4j.nn.conf.layers.DenseLayer) SubsamplingLayer(org.deeplearning4j.nn.conf.layers.SubsamplingLayer) Test(org.junit.Test)

Example 4 with Layer

Use of org.deeplearning4j.nn.api.Layer in the deeplearning4j project.

In class SubsamplingLayerTest, the method testSubSampleLayerSumBackprop:

@Test(expected = IllegalStateException.class)
public void testSubSampleLayerSumBackprop() throws Exception {
    // Backprop through a SUM-pooling subsampling layer is unsupported and
    // must fail with IllegalStateException (checked via the @Test annotation)
    Layer sumPoolingLayer = getSubsamplingLayer(SubsamplingLayer.PoolingType.SUM);
    sumPoolingLayer.setInput(getData());
    sumPoolingLayer.backpropGradient(epsilon);
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) Layer(org.deeplearning4j.nn.api.Layer) OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) SubsamplingLayer(org.deeplearning4j.nn.conf.layers.SubsamplingLayer) Test(org.junit.Test)

Example 5 with Layer

Use of org.deeplearning4j.nn.api.Layer in the deeplearning4j project.

In class BaseLayerTest, the method testSetExistingParamsConvolutionSingleLayer:

@Test
public void testSetExistingParamsConvolutionSingleLayer() {
    // A freshly configured layer starts with its own (different) parameters;
    // after setParamTable it must report exactly the table it was given.
    Layer convLayer = configureSingleLayer();
    assertNotEquals(paramTable, convLayer.paramTable());

    convLayer.setParamTable(paramTable);
    assertEquals(paramTable, convLayer.paramTable());
}
Also used : ConvolutionLayer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer) Layer(org.deeplearning4j.nn.api.Layer) OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) DenseLayer(org.deeplearning4j.nn.conf.layers.DenseLayer) Test(org.junit.Test)

Aggregations

Layer (org.deeplearning4j.nn.api.Layer)100 INDArray (org.nd4j.linalg.api.ndarray.INDArray)82 Test (org.junit.Test)61 OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer)44 DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer)42 NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration)32 Gradient (org.deeplearning4j.nn.gradient.Gradient)28 DefaultGradient (org.deeplearning4j.nn.gradient.DefaultGradient)23 Updater (org.deeplearning4j.nn.api.Updater)21 FeedForwardLayer (org.deeplearning4j.nn.conf.layers.FeedForwardLayer)20 MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork)20 SubsamplingLayer (org.deeplearning4j.nn.conf.layers.SubsamplingLayer)19 FrozenLayer (org.deeplearning4j.nn.layers.FrozenLayer)16 IOutputLayer (org.deeplearning4j.nn.api.layers.IOutputLayer)15 RecurrentLayer (org.deeplearning4j.nn.api.layers.RecurrentLayer)15 ComputationGraph (org.deeplearning4j.nn.graph.ComputationGraph)14 ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer)9 GraphVertex (org.deeplearning4j.nn.graph.vertex.GraphVertex)8 MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration)6 VertexIndices (org.deeplearning4j.nn.graph.vertex.VertexIndices)5