
Example 16 with MultiLayerNetwork

Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.

Class ConvolutionLayerTest, method testCNNZeroStride.

@Test(expected = Exception.class)
public void testCNNZeroStride() {
    int imageHeight = 20;
    int imageWidth = 23;
    int nChannels = 1;
    int classes = 2;
    int numSamples = 200;
    int kernelHeight = imageHeight;
    int kernelWidth = imageWidth;
    DataSet trainInput;
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
            .seed(123)
            .iterations(1)
            .list()
            .layer(0, new ConvolutionLayer.Builder(kernelHeight, kernelWidth)
                    .stride(1, 0) // invalid: both stride values must be >= 1, which is what this test expects to trip on
                    .nOut(2)
                    .activation(Activation.RELU)
                    .weightInit(WeightInit.XAVIER)
                    .build())
            .layer(1, new OutputLayer.Builder()
                    .nOut(classes)
                    .weightInit(WeightInit.XAVIER)
                    .activation(Activation.SOFTMAX)
                    .build())
            .backprop(true).pretrain(false);
    new ConvolutionLayerSetup(builder, imageHeight, imageWidth, nChannels);
    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    INDArray emptyFeatures = Nd4j.zeros(numSamples, imageWidth * imageHeight * nChannels);
    INDArray emptyLabels = Nd4j.zeros(numSamples, classes);
    trainInput = new DataSet(emptyFeatures, emptyLabels);
    model.fit(trainInput);
}
Also used: OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer), DataSet (org.nd4j.linalg.dataset.DataSet), NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration), ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer), MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration), INDArray (org.nd4j.linalg.api.ndarray.INDArray), ConvolutionLayerSetup (org.deeplearning4j.nn.conf.layers.setup.ConvolutionLayerSetup), MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork), Test (org.junit.Test)
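
This test passes only if building or fitting the network throws, because a stride of 0 is not a valid convolution stride. For contrast, here is a minimal sketch of a valid variant of the same configuration; it is illustrative only (not part of ConvolutionLayerTest) and reuses the local variables and the deprecated ConvolutionLayerSetup helper from the test above.

// Illustrative sketch: the same network with a valid stride of (1, 1)
MultiLayerConfiguration.Builder validBuilder = new NeuralNetConfiguration.Builder()
        .seed(123)
        .iterations(1)
        .list()
        .layer(0, new ConvolutionLayer.Builder(kernelHeight, kernelWidth)
                .stride(1, 1) // both stride values >= 1
                .nOut(2)
                .activation(Activation.RELU)
                .weightInit(WeightInit.XAVIER)
                .build())
        .layer(1, new OutputLayer.Builder()
                .nOut(classes)
                .weightInit(WeightInit.XAVIER)
                .activation(Activation.SOFTMAX)
                .build())
        .backprop(true).pretrain(false);
new ConvolutionLayerSetup(validBuilder, imageHeight, imageWidth, nChannels);
MultiLayerNetwork validModel = new MultiLayerNetwork(validBuilder.build());
validModel.init(); // expected to initialize without throwing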

Example 17 with MultiLayerNetwork

Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.

Class ActivationLayerTest, method testDenseActivationLayer.

@Test
public void testDenseActivationLayer() throws Exception {
    DataSetIterator iter = new MnistDataSetIterator(2, 2);
    DataSet next = iter.next();
    // Run without separate activation layer
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .iterations(1)
            .seed(123)
            .list()
            .layer(0, new DenseLayer.Builder()
                    .nIn(28 * 28 * 1).nOut(10)
                    .activation(Activation.RELU)
                    .weightInit(WeightInit.XAVIER)
                    .build())
            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .weightInit(WeightInit.XAVIER)
                    .activation(Activation.SOFTMAX)
                    .nIn(10).nOut(10)
                    .build())
            .backprop(true).pretrain(false)
            .build();
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.fit(next);
    // Run with separate activation layer
    MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .iterations(1)
            .seed(123)
            .list()
            .layer(0, new DenseLayer.Builder()
                    .nIn(28 * 28 * 1).nOut(10)
                    .activation(Activation.IDENTITY)
                    .weightInit(WeightInit.XAVIER)
                    .build())
            .layer(1, new org.deeplearning4j.nn.conf.layers.ActivationLayer.Builder()
                    .activation(Activation.RELU)
                    .build())
            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .weightInit(WeightInit.XAVIER)
                    .activation(Activation.SOFTMAX)
                    .nIn(10).nOut(10)
                    .build())
            .backprop(true).pretrain(false)
            .build();
    MultiLayerNetwork network2 = new MultiLayerNetwork(conf2);
    network2.init();
    network2.fit(next);
    // check parameters
    assertEquals(network.getLayer(0).getParam("W"), network2.getLayer(0).getParam("W"));
    assertEquals(network.getLayer(1).getParam("W"), network2.getLayer(2).getParam("W"));
    assertEquals(network.getLayer(0).getParam("b"), network2.getLayer(0).getParam("b"));
    assertEquals(network.getLayer(1).getParam("b"), network2.getLayer(2).getParam("b"));
    // check activations
    network.init();
    network.setInput(next.getFeatureMatrix());
    List<INDArray> activations = network.feedForward(true);
    network2.init();
    network2.setInput(next.getFeatureMatrix());
    List<INDArray> activations2 = network2.feedForward(true);
    assertEquals(activations.get(1).reshape(activations2.get(2).shape()), activations2.get(2));
    assertEquals(activations.get(2), activations2.get(3));
}
Also used: OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer), MnistDataSetIterator (org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator), DataSet (org.nd4j.linalg.dataset.DataSet), NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration), MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration), DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer), INDArray (org.nd4j.linalg.api.ndarray.INDArray), MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork), DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator), Test (org.junit.Test)
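
The point of this test is that a DenseLayer with a built-in RELU activation is equivalent to an identity DenseLayer followed by a standalone ActivationLayer. As a hypothetical follow-up (not part of ActivationLayerTest), the same equivalence can be checked on the final network outputs, assuming the two fitted networks from the test above:

// Sketch: compare final outputs of the two equivalent networks
INDArray in = next.getFeatureMatrix();
INDArray outFused = network.output(in, false);  // dense layer with built-in RELU
INDArray outSplit = network2.output(in, false); // identity dense layer followed by ActivationLayer(RELU)
assertEquals(outFused, outSplit);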

Example 18 with MultiLayerNetwork

Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.

Class BaseLayerTest, method configureMultiLayer.

public MultiLayerNetwork configureMultiLayer() {
    int nIn = 2;
    int nOut = 2;
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .list()
            .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(nOut).build())
            .layer(1, new OutputLayer.Builder().nIn(nIn).nOut(nOut).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    return net;
}
Also used: OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer), MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration), MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork)
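
A small hypothetical usage of this helper, not part of BaseLayerTest, just to show what the returned network contains with nIn = nOut = 2:

// Sketch: inspect the network built by configureMultiLayer()
MultiLayerNetwork net = configureMultiLayer();
System.out.println(net.numParams());                  // 2 layers x (2*2 weights + 2 biases) = 12 parameters
System.out.println(Arrays.toString(net.params().shape())); // single flattened parameter view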

Example 19 with MultiLayerNetwork

Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.

Class BaseLayerTest, method testSetExistingParamsDenseMultiLayer.

@Test
public void testSetExistingParamsDenseMultiLayer() {
    MultiLayerNetwork net = configureMultiLayer();
    for (Layer layer : net.getLayers()) {
        assertNotEquals(paramTable, layer.paramTable());
        layer.setParamTable(paramTable);
        assertEquals(paramTable, layer.paramTable());
    }
}
Also used: MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork), ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer), Layer (org.deeplearning4j.nn.api.Layer), OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer), DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer), Test (org.junit.Test)
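
The paramTable field is initialized elsewhere in BaseLayerTest and is not shown in this snippet. A hedged sketch of what a compatible table for the 2-in, 2-out layers above could look like; the keys follow DL4J's DefaultParamInitializer convention, while the values are purely illustrative:

// Illustrative only: a parameter table matching a 2-in, 2-out dense or output layer
Map<String, INDArray> paramTable = new LinkedHashMap<>();
paramTable.put("W", Nd4j.ones(2, 2));  // weight matrix (DefaultParamInitializer.WEIGHT_KEY)
paramTable.put("b", Nd4j.zeros(1, 2)); // bias row vector (DefaultParamInitializer.BIAS_KEY)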

Example 20 with MultiLayerNetwork

Use of org.deeplearning4j.nn.multilayer.MultiLayerNetwork in project deeplearning4j by deeplearning4j.

Class TestConvolutionModes, method testSameModeActivationSizes.

@Test
public void testSameModeActivationSizes() {
    int inH = 3;
    int inW = 4;
    int inDepth = 3;
    int minibatch = 5;
    int sH = 2;
    int sW = 2;
    int kH = 3;
    int kW = 3;
    Layer[] l = new Layer[2];
    l[0] = new ConvolutionLayer.Builder().nOut(4).kernelSize(kH, kW).stride(sH, sW).build();
    l[1] = new SubsamplingLayer.Builder().kernelSize(kH, kW).stride(sH, sW).build();
    for (int i = 0; i < l.length; i++) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .convolutionMode(ConvolutionMode.Same)
                .list()
                .layer(0, l[i])
                .layer(1, new OutputLayer.Builder().nOut(3).build())
                .setInputType(InputType.convolutional(inH, inW, inDepth))
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        INDArray inData = Nd4j.create(minibatch, inDepth, inH, inW);
        List<INDArray> activations = net.feedForward(inData);
        INDArray actL0 = activations.get(1);
        int outH = (int) Math.ceil(inH / ((double) sH));
        int outW = (int) Math.ceil(inW / ((double) sW));
        System.out.println(Arrays.toString(actL0.shape()));
        assertArrayEquals(new int[] { minibatch, (i == 0 ? 4 : inDepth), outH, outW }, actL0.shape());
    }
}
Also used: MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration), INDArray (org.nd4j.linalg.api.ndarray.INDArray), ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer), MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork), Test (org.junit.Test)
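
With ConvolutionMode.Same, the spatial output size depends only on the input size and stride, not on the kernel size: outH = ceil(inH / sH) and outW = ceil(inW / sW). For the values above this gives outH = ceil(3 / 2) = 2 and outW = ceil(4 / 2) = 2, so the convolution layer should produce activations of shape [5, 4, 2, 2] and the subsampling layer, which leaves depth unchanged, [5, 3, 2, 2], which is exactly what the assertion checks.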

Aggregations

MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork): 326
Test (org.junit.Test): 277
MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration): 206
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 166
NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 111
DataSet (org.nd4j.linalg.dataset.DataSet): 91
DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator): 70
IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator): 49
NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution): 43
ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener): 41
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer): 40
DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer): 38
Random (java.util.Random): 34
MnistDataSetIterator (org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator): 30
ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer): 28
DL4JException (org.deeplearning4j.exception.DL4JException): 20
Layer (org.deeplearning4j.nn.api.Layer): 20
ClassPathResource (org.nd4j.linalg.io.ClassPathResource): 20
File (java.io.File): 19
ComputationGraph (org.deeplearning4j.nn.graph.ComputationGraph): 19