Example usage of org.deeplearning4j.nn.api.Layer from the deeplearning4j project: class BatchNormalizationTest, method testCnnForwardPass.
@Test
public void testCnnForwardPass() {
    int depthOut = 10;
    Layer bn = getLayer(depthOut, 0.0, false, -1, -1);
    // Trainable gamma/beta plus global mean/var: four parameter vectors per channel.
    assertEquals(4 * depthOut, bn.numParams());

    int spatial = 15;
    Nd4j.getRandom().setSeed(12345);
    INDArray in = Nd4j.rand(12345, 100, depthOut, spatial, spatial);

    INDArray act = bn.activate(in, true);
    assertEquals(4, act.rank());

    // With learned (default) gamma/beta, per-channel activations should be
    // standardized: mean ~ 0, stdev ~ 1 over the batch and spatial dims.
    INDArray chMean = act.mean(0, 2, 3);
    INDArray chStd = act.std(false, 0, 2, 3);
    assertArrayEquals(new float[depthOut], chMean.data().asFloat(), 1e-6f);
    assertArrayEquals(Nd4j.ones(1, depthOut).data().asFloat(), chStd.data().asFloat(), 1e-6f);

    // With gamma/beta locked to fixed values, the normalized output is
    // rescaled/shifted: stdev becomes gamma, mean becomes beta.
    double gamma = 2.0;
    double beta = 3.0;
    bn = getLayer(depthOut, 0.0, true, gamma, beta);
    // Only the global mean/var parameter vectors remain.
    assertEquals(2 * depthOut, bn.numParams());

    act = bn.activate(in, true);
    chMean = act.mean(0, 2, 3);
    chStd = act.std(false, 0, 2, 3);
    assertEquals(Nd4j.valueArrayOf(chMean.shape(), beta), chMean);
    assertEquals(Nd4j.valueArrayOf(chStd.shape(), gamma), chStd);
}
Example usage of org.deeplearning4j.nn.api.Layer from the deeplearning4j project: class BatchNormalizationTest, method test2dVs4d.
@Test
public void test2dVs4d() {
    // Batch norm applied to a 2d activation and to the equivalent 4d (NCHW)
    // activation should agree, both on the forward pass and on backprop.
    Nd4j.getRandom().setSeed(12345);

    int miniBatch = 2;
    int height = 3;
    int width = 3;
    int depth = 2;

    INDArray in2d = Nd4j.rand('c', miniBatch * height * width, depth);

    // View the same values as [m,h,w,c] (no copy), then permute to NCHW.
    INDArray nchw = Shape.newShapeNoCopy(in2d.dup(), new int[] { miniBatch, height, width, depth }, false);
    assertNotNull(nchw);
    nchw = nchw.permute(0, 3, 1, 2).dup();

    // Deliberately build a destination with non-standard strides and copy the
    // values in, so the 4d path is exercised on a non-contiguous layout.
    INDArray strided = Nd4j.rand(1, miniBatch * height * width * depth)
                    .reshape('f', height, width, miniBatch, depth).permute(2, 3, 1, 0);
    INDArray in4d = strided.assign(nchw);

    Layer layer2d = getLayer(depth);
    Layer layer4d = getLayer(depth);

    INDArray out2d = layer2d.activate(in2d.dup(), true);
    INDArray out4d = layer4d.activate(in4d.dup(), true);

    INDArray out4dFlattened = out4d.permute(0, 2, 3, 1).dup('c');
    out4dFlattened = Shape.newShapeNoCopy(out4dFlattened, new int[] { miniBatch * height * width, depth }, false);
    assertEquals(out2d, out4dFlattened);

    // Backprop: feed matching epsilons through both layouts and compare the
    // returned epsilons after flattening the 4d result back to 2d.
    INDArray eps2d = Nd4j.rand('c', miniBatch * height * width, depth);
    INDArray eps4d = Shape.newShapeNoCopy(eps2d.dup(), new int[] { miniBatch, height, width, depth }, false);
    assertNotNull(eps4d);
    eps4d = eps4d.permute(0, 3, 1, 2).dup();

    Pair<Gradient, INDArray> back2d = layer2d.backpropGradient(eps2d);
    Pair<Gradient, INDArray> back4d = layer4d.backpropGradient(eps4d);

    INDArray back4dFlattened = back4d.getSecond().permute(0, 2, 3, 1).dup('c');
    back4dFlattened = Shape.newShapeNoCopy(back4dFlattened, new int[] { miniBatch * height * width, depth }, false);
    assertEquals(back2d.getSecond(), back4dFlattened);
}
Example usage of org.deeplearning4j.nn.api.Layer from the deeplearning4j project: class ConvolutionLayerTest, method testFeatureMapShapeMNIST.
@Test
public void testFeatureMapShapeMNIST() throws Exception {
    // Feature-map size for a valid convolution: (in + 2*pad - kernel)/stride + 1.
    int inputWidth = 28;
    int[] stride = { 1, 1 };
    int[] padding = { 0, 0 };
    int[] kernelSize = { 9, 9 };
    int nChannelsIn = 1;
    int depth = 20;
    int expectedFeatureMapWidth = (inputWidth + padding[1] * 2 - kernelSize[1]) / stride[1] + 1;

    INDArray input = getMnistData();
    Layer layer = getCNNConfig(nChannelsIn, depth, kernelSize, stride, padding);
    INDArray activations = layer.activate(input);

    // Dim 2 is spatial (height/width for square input); dim 1 is channels out.
    assertEquals(expectedFeatureMapWidth, activations.size(2));
    assertEquals(depth, activations.size(1));
}
Example usage of org.deeplearning4j.nn.api.Layer from the deeplearning4j project: class SubsamplingLayerTest, method testSubSampleLayerSumBackprop.
@Test(expected = IllegalStateException.class)
public void testSubSampleLayerSumBackprop() throws Exception {
    // SUM pooling has no backprop implementation here, so backpropGradient
    // on a layer with input set is expected to throw IllegalStateException.
    Layer layer = getSubsamplingLayer(SubsamplingLayer.PoolingType.SUM);
    layer.setInput(getData());
    layer.backpropGradient(epsilon);
}
Example usage of org.deeplearning4j.nn.api.Layer from the deeplearning4j project: class BaseLayerTest, method testSetExistingParamsConvolutionSingleLayer.
@Test
public void testSetExistingParamsConvolutionSingleLayer() {
    // A freshly configured layer starts out with its own parameters,
    // distinct from the externally prepared table...
    Layer underTest = configureSingleLayer();
    assertNotEquals(paramTable, underTest.paramTable());

    // ...and adopts the supplied table wholesale after setParamTable.
    underTest.setParamTable(paramTable);
    assertEquals(paramTable, underTest.paramTable());
}
Aggregations