Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.
From the class ConvolutionLayerTest, method testCNNZeroStride:
@Test(expected = Exception.class)
public void testCNNZeroStride() {
    int imageHeight = 20;
    int imageWidth = 23;
    int nChannels = 1;
    int classes = 2;
    int numSamples = 200;
    int kernelHeight = imageHeight;
    int kernelWidth = imageWidth;
    DataSet trainInput;
    // A stride of 0 in the width dimension is invalid, so building/fitting this network should throw.
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(123).iterations(1).list()
                    .layer(0, new ConvolutionLayer.Builder(kernelHeight, kernelWidth).stride(1, 0).nOut(2)
                                    .activation(Activation.RELU).weightInit(WeightInit.XAVIER).build())
                    .layer(1, new OutputLayer.Builder().nOut(classes).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.SOFTMAX).build())
                    .backprop(true).pretrain(false);
    new ConvolutionLayerSetup(builder, imageHeight, imageWidth, nChannels);
    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    INDArray emptyFeatures = Nd4j.zeros(numSamples, imageWidth * imageHeight * nChannels);
    INDArray emptyLabels = Nd4j.zeros(numSamples, classes);
    trainInput = new DataSet(emptyFeatures, emptyLabels);
    model.fit(trainInput);
}
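For contrast, here is a minimal sketch (ours, not part of the original test) of the same setup with a valid stride of (1, 1). Assuming the same imports and API version as the test above, this configuration should build and initialize without the stride-related exception:

// Hedged sketch: identical to testCNNZeroStride, but with a valid stride of (1, 1).
// Assumes the same imports and DL4J version as the test above.
MultiLayerConfiguration.Builder validBuilder = new NeuralNetConfiguration.Builder().seed(123).iterations(1).list()
                .layer(0, new ConvolutionLayer.Builder(20, 23).stride(1, 1).nOut(2)
                                .activation(Activation.RELU).weightInit(WeightInit.XAVIER).build())
                .layer(1, new OutputLayer.Builder().nOut(2).weightInit(WeightInit.XAVIER)
                                .activation(Activation.SOFTMAX).build())
                .backprop(true).pretrain(false);
new ConvolutionLayerSetup(validBuilder, 20, 23, 1);
MultiLayerNetwork validModel = new MultiLayerNetwork(validBuilder.build());
validModel.init(); // initializes without throwing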
Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.
From the class ActivationLayerTest, method testDenseActivationLayer:
@Test
public void testDenseActivationLayer() throws Exception {
    DataSetIterator iter = new MnistDataSetIterator(2, 2);
    DataSet next = iter.next();
    // Run without separate activation layer
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1).seed(123).list()
                    .layer(0, new DenseLayer.Builder().nIn(28 * 28 * 1).nOut(10).activation(Activation.RELU)
                                    .weightInit(WeightInit.XAVIER).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                    .backprop(true).pretrain(false).build();
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.fit(next);
    // Run with separate activation layer: an identity dense layer followed by an explicit ReLU ActivationLayer
    MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1).seed(123).list()
                    .layer(0, new DenseLayer.Builder().nIn(28 * 28 * 1).nOut(10).activation(Activation.IDENTITY)
                                    .weightInit(WeightInit.XAVIER).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.ActivationLayer.Builder()
                                    .activation(Activation.RELU).build())
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                    .backprop(true).pretrain(false).build();
    MultiLayerNetwork network2 = new MultiLayerNetwork(conf2);
    network2.init();
    network2.fit(next);
    // check parameters: the ActivationLayer has no parameters, so network2's output layer sits at index 2
    assertEquals(network.getLayer(0).getParam("W"), network2.getLayer(0).getParam("W"));
    assertEquals(network.getLayer(1).getParam("W"), network2.getLayer(2).getParam("W"));
    assertEquals(network.getLayer(0).getParam("b"), network2.getLayer(0).getParam("b"));
    assertEquals(network.getLayer(1).getParam("b"), network2.getLayer(2).getParam("b"));
    // check activations: feedForward returns the input at index 0, and network2 has one extra layer,
    // so its activations are shifted by one index relative to network
    network.init();
    network.setInput(next.getFeatureMatrix());
    List<INDArray> activations = network.feedForward(true);
    network2.init();
    network2.setInput(next.getFeatureMatrix());
    List<INDArray> activations2 = network2.feedForward(true);
    assertEquals(activations.get(1).reshape(activations2.get(2).shape()), activations2.get(2));
    assertEquals(activations.get(2), activations2.get(3));
}
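As an additional sanity check (our sketch, not part of the original test), the two networks can also be compared on their final predictions. Since the parameter and activation checks above pass, the outputs should match as well:

// Hedged sketch: compare final outputs of the fused-activation and split-activation networks.
// Assumes the networks are in the same re-initialized state as at the end of the test above.
INDArray out1 = network.output(next.getFeatureMatrix(), false);
INDArray out2 = network2.output(next.getFeatureMatrix(), false);
assertEquals(out1, out2);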
Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.
From the class BaseLayerTest, method configureMultiLayer:
public MultiLayerNetwork configureMultiLayer() {
    int nIn = 2;
    int nOut = 2;
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(nOut).build())
                    .layer(1, new OutputLayer.Builder().nIn(nIn).nOut(nOut).build()).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    return net;
}
Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.
From the class BaseLayerTest, method testSetExistingParamsDenseMultiLayer:
@Test
public void testSetExistingParamsDenseMultiLayer() {
    MultiLayerNetwork net = configureMultiLayer();
    // paramTable is a Map<String, INDArray> field initialized elsewhere in BaseLayerTest
    for (Layer layer : net.getLayers()) {
        assertNotEquals(paramTable, layer.paramTable());
        layer.setParamTable(paramTable);
        assertEquals(paramTable, layer.paramTable());
    }
}
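The paramTable field used above is defined elsewhere in BaseLayerTest and is not shown on this page. A hypothetical sketch of what such a table could contain for these 2-in/2-out layers (the values here are illustrative only; "W" and "b" are the standard DL4J parameter keys, as used in the getParam calls earlier on this page):

// Hypothetical sketch: a parameter table shaped for the 2-in/2-out layers above.
Map<String, INDArray> paramTable = new HashMap<>();
paramTable.put("W", Nd4j.ones(2, 2));  // weights: nIn x nOut
paramTable.put("b", Nd4j.zeros(1, 2)); // biases: 1 x nOut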
Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.
From the class TestConvolutionModes, method testSameModeActivationSizes:
@Test
public void testSameModeActivationSizes() {
    int inH = 3;
    int inW = 4;
    int inDepth = 3;
    int minibatch = 5;
    int sH = 2;
    int sW = 2;
    int kH = 3;
    int kW = 3;
    Layer[] l = new Layer[2];
    l[0] = new ConvolutionLayer.Builder().nOut(4).kernelSize(kH, kW).stride(sH, sW).build();
    l[1] = new SubsamplingLayer.Builder().kernelSize(kH, kW).stride(sH, sW).build();
    for (int i = 0; i < l.length; i++) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().convolutionMode(ConvolutionMode.Same).list()
                        .layer(0, l[i]).layer(1, new OutputLayer.Builder().nOut(3).build())
                        .setInputType(InputType.convolutional(inH, inW, inDepth)).build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        INDArray inData = Nd4j.create(minibatch, inDepth, inH, inW);
        List<INDArray> activations = net.feedForward(inData);
        INDArray actL0 = activations.get(1);
        // In Same mode the output spatial size is ceil(in / stride), independent of kernel size
        int outH = (int) Math.ceil(inH / ((double) sH));
        int outW = (int) Math.ceil(inW / ((double) sW));
        System.out.println(Arrays.toString(actL0.shape()));
        // The convolution layer (i == 0) outputs nOut = 4 channels; the subsampling layer (i == 1) preserves depth
        assertArrayEquals(new int[] { minibatch, (i == 0 ? 4 : inDepth), outH, outW }, actL0.shape());
    }
}
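The expected shape in the assertion follows the ConvolutionMode.Same size rule: padding is chosen so that the output spatial size depends only on the input size and stride, out = ceil(in / stride). A minimal sketch of that computation (the helper name is ours, not part of the DL4J API):

// Hypothetical helper: output spatial size under ConvolutionMode.Same.
static int sameModeOutputSize(int inSize, int stride) {
    return (int) Math.ceil(inSize / (double) stride);
}
// For the test above: inH = 3, sH = 2 -> outH = 2; inW = 4, sW = 2 -> outW = 2.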