Use of org.deeplearning4j.nn.conf.distribution.NormalDistribution in project deeplearning4j by deeplearning4j.
From class GlobalPoolingGradientCheckTests, method testLSTMGlobalPoolingBasicMultiLayer.
@Test
public void testLSTMGlobalPoolingBasicMultiLayer() {
    //Basic test of global pooling w/ LSTM
    Nd4j.getRandom().setSeed(12345L);

    int timeSeriesLength = 10;
    int nIn = 5;
    int layerSize = 4;
    int nOut = 2;

    int[] minibatchSizes = new int[] {1, 3};
    PoolingType[] poolingTypes =
                    new PoolingType[] {PoolingType.AVG, PoolingType.SUM, PoolingType.MAX, PoolingType.PNORM};

    for (int miniBatchSize : minibatchSizes) {
        for (PoolingType pt : poolingTypes) {

            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                            .regularization(false).updater(Updater.NONE)
                            .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1.0))
                            .seed(12345L).list()
                            .layer(0, new GravesLSTM.Builder().nIn(nIn).nOut(layerSize)
                                            .activation(Activation.TANH).build())
                            .layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
                            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                            .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
                            .pretrain(false).backprop(true).build();

            MultiLayerNetwork mln = new MultiLayerNetwork(conf);
            mln.init();

            Random r = new Random(12345L);
            INDArray input = Nd4j.zeros(miniBatchSize, nIn, timeSeriesLength);
            for (int i = 0; i < miniBatchSize; i++) {
                for (int j = 0; j < nIn; j++) {
                    for (int k = 0; k < timeSeriesLength; k++) {
                        input.putScalar(new int[] {i, j, k}, r.nextDouble() - 0.5);
                    }
                }
            }

            INDArray labels = Nd4j.zeros(miniBatchSize, nOut);
            for (int i = 0; i < miniBatchSize; i++) {
                int idx = r.nextInt(nOut);
                labels.putScalar(i, idx, 1.0);
            }

            if (PRINT_RESULTS) {
                System.out.println("testLSTMGlobalPoolingBasicMultiLayer() - " + pt + ", minibatch = " + miniBatchSize);
                for (int j = 0; j < mln.getnLayers(); j++)
                    System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
            }

            boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                            DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);

            assertTrue(gradOK);
        }
    }
}
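The constants passed to GradientCheckUtil.checkGradients (DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE) come from shared scaffolding in the test class that this excerpt omits. The following is a hedged reconstruction of that scaffolding; the concrete values and the double-precision switch are assumptions based on common dl4j gradient-check conventions, not copied from this page.

//Hedged reconstruction of the omitted test scaffolding. The constant values
//below are assumptions, not taken from this page. Gradient checks generally
//require double precision: with a finite-difference step of ~1e-6, float
//rounding would swamp the signal.
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.buffer.util.DataTypeUtil;

public class GlobalPoolingGradientCheckTests {

    private static final boolean PRINT_RESULTS = true;
    private static final boolean RETURN_ON_FIRST_FAILURE = false;
    private static final double DEFAULT_EPS = 1e-6;           //finite-difference step size
    private static final double DEFAULT_MAX_REL_ERROR = 1e-3; //max allowed relative error
    private static final double DEFAULT_MIN_ABS_ERROR = 1e-8; //differences below this are ignored

    static {
        DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);
    }

    //... test methods as shown on this page ...
}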
Use of org.deeplearning4j.nn.conf.distribution.NormalDistribution in project deeplearning4j by deeplearning4j.
From class GlobalPoolingGradientCheckTests, method testCnnGlobalPoolingMasking.
@Test
public void testCnnGlobalPoolingMasking() {
    //Global pooling w/ CNN + masking, where mask is along dimension 2, then separately test along dimension 3
    Nd4j.getRandom().setSeed(12345L);

    int inputDepth = 2;
    int inputH = 5;
    int inputW = 5;
    int layerDepth = 3;
    int nOut = 2;

    for (int maskDim = 2; maskDim <= 3; maskDim++) {

        int[] minibatchSizes = new int[] {1, 3};
        PoolingType[] poolingTypes =
                        new PoolingType[] {PoolingType.AVG, PoolingType.SUM, PoolingType.MAX, PoolingType.PNORM};

        for (int miniBatchSize : minibatchSizes) {
            for (PoolingType pt : poolingTypes) {

                int[] kernel;
                int[] stride;
                if (maskDim == 2) {
                    //"time" (variable length) dimension is dimension 2 (height)
                    kernel = new int[] {2, inputW};
                    stride = new int[] {1, inputW};
                } else {
                    //mask along dimension 3 (width) instead
                    kernel = new int[] {inputH, 2};
                    stride = new int[] {inputH, 1};
                }

                MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                                .regularization(false).updater(Updater.NONE)
                                .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1.0))
                                .convolutionMode(ConvolutionMode.Same).seed(12345L).list()
                                .layer(0, new ConvolutionLayer.Builder().kernelSize(kernel).stride(stride)
                                                .nOut(layerDepth).build())
                                .layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
                                .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                                .activation(Activation.SOFTMAX).nOut(nOut).build())
                                .pretrain(false).backprop(true)
                                .setInputType(InputType.convolutional(inputH, inputW, inputDepth)).build();

                MultiLayerNetwork mln = new MultiLayerNetwork(conf);
                mln.init();

                Random r = new Random(12345L);
                INDArray input = Nd4j.rand(new int[] {miniBatchSize, inputDepth, inputH, inputW}).subi(0.5);

                //Mask: one row per example, one column per position along the masked dimension (length 5 here)
                INDArray inputMask;
                if (miniBatchSize == 1) {
                    inputMask = Nd4j.create(new double[] {1, 1, 1, 1, 0});
                } else if (miniBatchSize == 3) {
                    inputMask = Nd4j.create(new double[][] {{1, 1, 1, 1, 1}, {1, 1, 1, 1, 0}, {1, 1, 1, 0, 0}});
                } else {
                    throw new RuntimeException("Unexpected minibatch size: " + miniBatchSize);
                }

                INDArray labels = Nd4j.zeros(miniBatchSize, nOut);
                for (int i = 0; i < miniBatchSize; i++) {
                    int idx = r.nextInt(nOut);
                    labels.putScalar(i, idx, 1.0);
                }

                mln.setLayerMaskArrays(inputMask, null);

                if (PRINT_RESULTS) {
                    System.out.println("testCnnGlobalPoolingMasking() - " + pt + ", minibatch = " + miniBatchSize);
                    for (int j = 0; j < mln.getnLayers(); j++)
                        System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
                }

                boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                                DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);

                assertTrue(gradOK);
            }
        }
    }
}
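All of these loops exercise PoolingType.PNORM with its default exponent. When the exponent matters, the builder exposes it directly; the one-liner below is a sketch, assuming the pnorm(int) setter from the dl4j 0.x GlobalPoolingLayer.Builder API.

//Sketch only: p-norm global pooling with an explicit exponent. The pnorm(int)
//setter is assumed from the dl4j 0.x API; p = 2 computes an L2 norm over the
//pooled dimensions.
GlobalPoolingLayer l2Pool = new GlobalPoolingLayer.Builder()
                .poolingType(PoolingType.PNORM)
                .pnorm(2)
                .build();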
Use of org.deeplearning4j.nn.conf.distribution.NormalDistribution in project deeplearning4j by deeplearning4j.
From class GlobalPoolingGradientCheckTests, method testLSTMWithMasking.
@Test
public void testLSTMWithMasking() {
    //Test of global pooling w/ LSTM + variable-length feature masking
    Nd4j.getRandom().setSeed(12345L);

    int timeSeriesLength = 10;
    int nIn = 5;
    int layerSize = 4;
    int nOut = 2;
    int miniBatchSize = 3;

    PoolingType[] poolingTypes =
                    new PoolingType[] {PoolingType.AVG, PoolingType.SUM, PoolingType.MAX, PoolingType.PNORM};

    for (PoolingType pt : poolingTypes) {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                        .regularization(false).updater(Updater.NONE)
                        .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1.0))
                        .seed(12345L).list()
                        .layer(0, new GravesLSTM.Builder().nIn(nIn).nOut(layerSize)
                                        .activation(Activation.TANH).build())
                        .layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
                        .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                        .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
                        .pretrain(false).backprop(true).build();

        MultiLayerNetwork mln = new MultiLayerNetwork(conf);
        mln.init();

        Random r = new Random(12345L);
        INDArray input = Nd4j.zeros(miniBatchSize, nIn, timeSeriesLength);
        for (int i = 0; i < miniBatchSize; i++) {
            for (int j = 0; j < nIn; j++) {
                for (int k = 0; k < timeSeriesLength; k++) {
                    input.putScalar(new int[] {i, j, k}, r.nextDouble() - 0.5);
                }
            }
        }

        //Example i is masked to length (timeSeriesLength - i): lengths 10, 9, 8
        INDArray featuresMask = Nd4j.create(miniBatchSize, timeSeriesLength);
        for (int i = 0; i < miniBatchSize; i++) {
            int to = timeSeriesLength - i;
            for (int j = 0; j < to; j++) {
                featuresMask.putScalar(i, j, 1.0);
            }
        }

        INDArray labels = Nd4j.zeros(miniBatchSize, nOut);
        for (int i = 0; i < miniBatchSize; i++) {
            int idx = r.nextInt(nOut);
            labels.putScalar(i, idx, 1.0);
        }

        mln.setLayerMaskArrays(featuresMask, null);

        if (PRINT_RESULTS) {
            System.out.println("testLSTMWithMasking() - " + pt + ", minibatch = " + miniBatchSize);
            for (int j = 0; j < mln.getnLayers(); j++)
                System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
        }

        boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                        DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);

        assertTrue(gradOK);
    }
}
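Why masking must reach the pooling layer is easiest to see for average pooling: masked time steps must contribute nothing to the sum, and the divisor must be the number of unmasked steps rather than the full series length. Below is a minimal sketch of that computation using plain loops over the same array shapes as the test above; it illustrates the semantics and is not the dl4j implementation.

//Illustrative sketch of masked average pooling over RNN activations of shape
//[miniBatchSize, channels, timeSeriesLength], with a binary mask of shape
//[miniBatchSize, timeSeriesLength] as constructed in the test above.
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class MaskedPoolingSketch {

    public static INDArray maskedAvgPool(INDArray act, INDArray mask) {
        int mb = act.size(0);
        int ch = act.size(1);
        int ts = act.size(2);
        INDArray out = Nd4j.zeros(mb, ch);
        for (int i = 0; i < mb; i++) {
            double count = 0;
            for (int k = 0; k < ts; k++) {
                count += mask.getDouble(i, k);      //number of valid (unmasked) steps
            }
            for (int j = 0; j < ch; j++) {
                double sum = 0;
                for (int k = 0; k < ts; k++) {
                    sum += act.getDouble(i, j, k) * mask.getDouble(i, k);  //masked steps add 0
                }
                out.putScalar(i, j, sum / count);   //divide by valid step count, not ts
            }
        }
        return out;
    }
}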
Use of org.deeplearning4j.nn.conf.distribution.NormalDistribution in project deeplearning4j by deeplearning4j.
From class GlobalPoolingGradientCheckTests, method testCnnGlobalPoolingBasicMultiLayer.
@Test
public void testCnnGlobalPoolingBasicMultiLayer() {
    //Basic test of global pooling w/ CNN
    Nd4j.getRandom().setSeed(12345L);

    int inputDepth = 3;
    int inputH = 5;
    int inputW = 4;
    int layerDepth = 4;
    int nOut = 2;

    int[] minibatchSizes = new int[] {1, 3};
    PoolingType[] poolingTypes =
                    new PoolingType[] {PoolingType.AVG, PoolingType.SUM, PoolingType.MAX, PoolingType.PNORM};

    for (int miniBatchSize : minibatchSizes) {
        for (PoolingType pt : poolingTypes) {

            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                            .regularization(false).updater(Updater.NONE)
                            .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1.0))
                            .seed(12345L).list()
                            .layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1)
                                            .nOut(layerDepth).build())
                            .layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
                            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                            .activation(Activation.SOFTMAX).nOut(nOut).build())
                            .pretrain(false).backprop(true)
                            .setInputType(InputType.convolutional(inputH, inputW, inputDepth)).build();

            MultiLayerNetwork mln = new MultiLayerNetwork(conf);
            mln.init();

            Random r = new Random(12345L);
            INDArray input = Nd4j.rand(new int[] {miniBatchSize, inputDepth, inputH, inputW}).subi(0.5);

            INDArray labels = Nd4j.zeros(miniBatchSize, nOut);
            for (int i = 0; i < miniBatchSize; i++) {
                int idx = r.nextInt(nOut);
                labels.putScalar(i, idx, 1.0);
            }

            if (PRINT_RESULTS) {
                System.out.println("testCnnGlobalPoolingBasicMultiLayer() - " + pt + ", minibatch = " + miniBatchSize);
                for (int j = 0; j < mln.getnLayers(); j++)
                    System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
            }

            boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                            DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);

            assertTrue(gradOK);
        }
    }
}
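One detail worth calling out: the OutputLayer here omits nIn. Global pooling collapses the convolutional activations from [miniBatchSize, layerDepth, h, w] to [miniBatchSize, layerDepth], and setInputType(...) lets dl4j infer nIn = layerDepth from that. A quick shape check, e.g. appended inside the loop after mln.init() (illustrative, not part of the original test):

//Illustrative shape check: global pooling collapses the spatial dimensions,
//so the final network output has shape [miniBatchSize, nOut].
INDArray out = mln.output(input);
System.out.println(java.util.Arrays.toString(out.shape()));   //e.g. [3, 2]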
Use of org.deeplearning4j.nn.conf.distribution.NormalDistribution in project deeplearning4j by deeplearning4j.
From class GradientCheckTests, method testGravesLSTMBasicMultiLayer.
@Test
public void testGravesLSTMBasicMultiLayer() {
    //Basic test of stacked GravesLSTM layers w/ time-distributed output
    Nd4j.getRandom().setSeed(12345L);

    int timeSeriesLength = 4;
    int nIn = 2;
    int layerSize = 2;
    int nOut = 2;
    int miniBatchSize = 5;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .regularization(false).seed(12345L).list()
                    .layer(0, new GravesLSTM.Builder().nIn(nIn).nOut(layerSize)
                                    .activation(Activation.SIGMOID)
                                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1.0))
                                    .updater(Updater.NONE).build())
                    .layer(1, new GravesLSTM.Builder().nIn(layerSize).nOut(layerSize)
                                    .activation(Activation.SIGMOID)
                                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1.0))
                                    .updater(Updater.NONE).build())
                    .layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
                                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1.0))
                                    .updater(Updater.NONE).build())
                    .pretrain(false).backprop(true).build();

    MultiLayerNetwork mln = new MultiLayerNetwork(conf);
    mln.init();

    Random r = new Random(12345L);
    INDArray input = Nd4j.zeros(miniBatchSize, nIn, timeSeriesLength);
    for (int i = 0; i < miniBatchSize; i++) {
        for (int j = 0; j < nIn; j++) {
            for (int k = 0; k < timeSeriesLength; k++) {
                input.putScalar(new int[] {i, j, k}, r.nextDouble() - 0.5);
            }
        }
    }

    //One-hot labels at every time step: shape [miniBatchSize, nOut, timeSeriesLength]
    INDArray labels = Nd4j.zeros(miniBatchSize, nOut, timeSeriesLength);
    for (int i = 0; i < miniBatchSize; i++) {
        for (int j = 0; j < timeSeriesLength; j++) {
            int idx = r.nextInt(nOut);
            labels.putScalar(new int[] {i, idx, j}, 1.0);
        }
    }

    if (PRINT_RESULTS) {
        System.out.println("testGravesLSTMBasicMultiLayer()");
        for (int j = 0; j < mln.getnLayers(); j++)
            System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
    }

    boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                    DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);

    assertTrue(gradOK);
}
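All five tests delegate the actual verification to GradientCheckUtil.checkGradients. The core idea is a central finite difference per parameter: perturb one parameter by ±eps, rescore the network, and compare the numerical slope against the analytic gradient from backprop. Below is a minimal sketch of that loop, assuming only the MultiLayerNetwork API already used above (params(), setParameters(), score(DataSet), computeGradientAndScore()); it illustrates the technique and is not the actual GradientCheckUtil code.

//Minimal central-difference gradient check sketch (not the dl4j implementation).
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;

public class GradientCheckSketch {

    public static boolean check(MultiLayerNetwork mln, INDArray input, INDArray labels,
                    double eps, double maxRelError) {
        DataSet ds = new DataSet(input, labels);

        //Analytic gradient from backprop, flattened in parameter order
        mln.setInput(input);
        mln.setLabels(labels);
        mln.computeGradientAndScore();
        INDArray analytic = mln.gradient().gradient().dup();

        INDArray params = mln.params().dup();
        for (int i = 0; i < params.length(); i++) {
            double original = params.getDouble(i);

            //Numerical gradient: (score(p+eps) - score(p-eps)) / (2*eps)
            params.putScalar(i, original + eps);
            mln.setParameters(params);
            double scorePlus = mln.score(ds);

            params.putScalar(i, original - eps);
            mln.setParameters(params);
            double scoreMinus = mln.score(ds);

            params.putScalar(i, original);              //restore before the next parameter
            double numeric = (scorePlus - scoreMinus) / (2 * eps);

            double relError = Math.abs(analytic.getDouble(i) - numeric)
                            / (Math.abs(analytic.getDouble(i)) + Math.abs(numeric) + 1e-12);
            if (relError > maxRelError)
                return false;
        }
        mln.setParameters(params);                      //leave the network unperturbed
        return true;
    }
}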