Search in sources:

Example 56 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in the project deeplearning4j by deeplearning4j.

The class ConvolutionLayerTest defines the method testCNNBiasInit.

@Test
public void testCNNBiasInit() {
    // Convolutional layer whose bias vector is initialized to 1.
    ConvolutionLayer layerConf = new ConvolutionLayer.Builder().nIn(1).nOut(3).biasInit(1).build();
    NeuralNetConfiguration netConf = new NeuralNetConfiguration.Builder().layer(layerConf).build();

    // Allocate the flat parameter view and instantiate the layer from it.
    int paramCount = netConf.getLayer().initializer().numParams(netConf);
    INDArray paramView = Nd4j.create(1, paramCount);
    Layer layer = netConf.getLayer().instantiate(netConf, null, 0, paramView, true);

    // The bias parameter should be stored as a single row.
    assertEquals(1, layer.getParam("b").size(0));
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) ConvolutionLayer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer) Layer(org.deeplearning4j.nn.api.Layer) OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) DenseLayer(org.deeplearning4j.nn.conf.layers.DenseLayer) SubsamplingLayer(org.deeplearning4j.nn.conf.layers.SubsamplingLayer) ConvolutionLayer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer) Test(org.junit.Test)

Example 57 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in the project deeplearning4j by deeplearning4j.

The class BatchNormalizationTest defines the method getLayer.

/**
 * Builds and instantiates a {@code BatchNormalization} layer for testing.
 *
 * @param nOut          layer output size
 * @param epsilon       numerical-stability epsilon for the normalization
 * @param lockGammaBeta if true, gamma/beta are fixed to the supplied values
 * @param gamma         gamma value used when locked
 * @param beta          beta value used when locked
 * @return an instantiated layer with its backprop gradient view set (when it has parameters)
 */
protected static Layer getLayer(int nOut, double epsilon, boolean lockGammaBeta, double gamma, double beta) {
    BatchNormalization.Builder builder = new BatchNormalization.Builder().nOut(nOut).eps(epsilon);
    if (lockGammaBeta) {
        builder.lockGammaBeta(true).gamma(gamma).beta(beta);
    }
    NeuralNetConfiguration conf =
                    new NeuralNetConfiguration.Builder().iterations(1).layer(builder.build()).build();

    // A locked-parameter configuration may have zero trainable params; only then is no view allocated.
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray paramView = (numParams > 0) ? Nd4j.create(1, numParams) : null;

    Layer layer = conf.getLayer().instantiate(conf, null, 0, paramView, true);
    if (numParams > 0) {
        layer.setBackpropGradientsViewArray(Nd4j.create(1, numParams));
    }
    return layer;
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) Layer(org.deeplearning4j.nn.api.Layer) BatchNormalization(org.deeplearning4j.nn.conf.layers.BatchNormalization)

Example 58 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in the project deeplearning4j by deeplearning4j.

The class TestVAE defines the method testInitialization.

@Test
public void testInitialization() {
    // Single VAE layer: 10 inputs, 5 latent units, one encoder layer (12) and one decoder layer (13).
    MultiLayerConfiguration mlc = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .nIn(10).nOut(5).encoderLayerSizes(12).decoderLayerSizes(13).build())
                    .build();
    NeuralNetConfiguration layerConf = mlc.getConf(0);
    org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder vae =
                    (org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) layerConf.getLayer();

    int allParams = vae.initializer().numParams(layerConf);

    // Expected count, term by term: encoder (10->12 + bias), encoder -> p(z|x)
    // (12 -> mean+logvar of 5, + bias), decoder (5->13 + bias), p(x|z)
    // (13 -> mean+logvar of 10, + bias).
    int expNumParams = (10 * 12 + 12) + (12 * (2 * 5) + (2 * 5)) + (5 * 13 + 13) + (13 * (2 * 10) + (2 * 10));
    assertEquals(expNumParams, allParams);

    MultiLayerNetwork net = new MultiLayerNetwork(mlc);
    net.init();
    System.out.println("Exp num params: " + expNumParams);
    assertEquals(expNumParams, net.getLayer(0).params().length());

    // Summing the individual parameter arrays must give the same total.
    Map<String, INDArray> paramTable = net.getLayer(0).paramTable();
    int totalFromTable = 0;
    for (INDArray arr : paramTable.values()) {
        totalFromTable += arr.length();
    }
    assertEquals(expNumParams, totalFromTable);
    assertEquals(expNumParams, net.getLayer(0).numParams());
}
Also used : org.deeplearning4j.nn.conf.layers.variational(org.deeplearning4j.nn.conf.layers.variational) VariationalAutoencoder(org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) VariationalAutoencoder(org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) Test(org.junit.Test)

Example 59 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in the project deeplearning4j by deeplearning4j.

The class TestVAE defines the method testParamGradientOrderAndViews.

@Test
public void testParamGradientOrderAndViews() {
    // Parameters, gradient views, and computed gradients must share the same key
    // iteration order, and each gradient must be the exact gradient-view object.
    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration mlc = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .nIn(10).nOut(5).encoderLayerSizes(12, 13).decoderLayerSizes(14, 15).build())
                    .build();
    NeuralNetConfiguration c = mlc.getConf(0);
    org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder vae = (org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) c.getLayer();
    MultiLayerNetwork net = new MultiLayerNetwork(mlc);
    net.init();
    net.initGradientsView();
    org.deeplearning4j.nn.layers.variational.VariationalAutoencoder layer = (org.deeplearning4j.nn.layers.variational.VariationalAutoencoder) net.getLayer(0);

    Map<String, INDArray> layerParams = layer.paramTable();
    Map<String, INDArray> layerGradViews = layer.getGradientViews();

    layer.setInput(Nd4j.rand(3, 10));
    layer.computeGradientAndScore();
    Gradient g = layer.gradient();
    Map<String, INDArray> grads = g.gradientForVariable();

    // All three maps must describe the same set of parameters.
    assertEquals(layerParams.size(), layerGradViews.size());
    assertEquals(layerParams.size(), grads.size());

    //Iteration order should be consistent due to linked hashmaps
    Iterator<String> pIter = layerParams.keySet().iterator();
    Iterator<String> gvIter = layerGradViews.keySet().iterator();
    Iterator<String> gIter = grads.keySet().iterator();
    while (pIter.hasNext()) {
        String p = pIter.next();
        String gv = gvIter.next();
        String gr = gIter.next();
        assertEquals(p, gv);
        assertEquals(p, gr);

        INDArray pArr = layerParams.get(p);
        INDArray gvArr = layerGradViews.get(p);
        INDArray gArr = grads.get(p);
        assertArrayEquals(pArr.shape(), gvArr.shape());
        //Should be the exact same object due to view mechanics
        assertTrue(gvArr == gArr);
    }
}
Also used : Gradient(org.deeplearning4j.nn.gradient.Gradient) org.deeplearning4j.nn.conf.layers.variational(org.deeplearning4j.nn.conf.layers.variational) VariationalAutoencoder(org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) VariationalAutoencoder(org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) Test(org.junit.Test)

Example 60 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in the project deeplearning4j by deeplearning4j.

The class TestVAE defines the method testPretrainParamsDuringBackprop.

@Test
public void testPretrainParamsDuringBackprop() {
    //Idea: pretrain-specific parameters shouldn't change during backprop
    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration mlc = new NeuralNetConfiguration.Builder().seed(12345).list()
                    .layer(0, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .nIn(10).nOut(5).encoderLayerSizes(12, 13).decoderLayerSizes(14, 15).build())
                    .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE)
                                    .nIn(5).nOut(6).activation(new ActivationTanH()).build())
                    .pretrain(true).backprop(true).build();
    MultiLayerNetwork net = new MultiLayerNetwork(mlc);
    net.init();
    net.initGradientsView();
    org.deeplearning4j.nn.layers.variational.VariationalAutoencoder layer = (org.deeplearning4j.nn.layers.variational.VariationalAutoencoder) net.getLayer(0);

    INDArray input = Nd4j.rand(3, 10);
    net.pretrain(input);

    //Get a snapshot of the pretrain params after fitting:
    Map<String, INDArray> layerParams = layer.paramTable();
    Map<String, INDArray> pretrainParamsBefore = new HashMap<>();
    for (String s : layerParams.keySet()) {
        if (layer.isPretrainParam(s)) {
            // dup(): detach the snapshot from the live parameter view
            pretrainParamsBefore.put(s, layerParams.get(s).dup());
        }
    }

    // Run several backprop fits with pretraining disabled...
    INDArray features = Nd4j.rand(3, 10);
    INDArray labels = Nd4j.rand(3, 6);
    net.getLayerWiseConfigurations().setPretrain(false);
    for (int i = 0; i < 3; i++) {
        net.fit(features, labels);
    }

    // ...and verify none of the pretrain-only parameters moved.
    Map<String, INDArray> layerParamsAfter = layer.paramTable();
    for (String s : pretrainParamsBefore.keySet()) {
        INDArray before = pretrainParamsBefore.get(s);
        INDArray after = layerParamsAfter.get(s);
        assertEquals(before, after);
    }
}
Also used : OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) org.deeplearning4j.nn.conf.layers.variational(org.deeplearning4j.nn.conf.layers.variational) HashMap(java.util.HashMap) VariationalAutoencoder(org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) VariationalAutoencoder(org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) ActivationTanH(org.nd4j.linalg.activations.impl.ActivationTanH) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) Test(org.junit.Test)

Aggregations

NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration)83 INDArray (org.nd4j.linalg.api.ndarray.INDArray)65 Test (org.junit.Test)55 Layer (org.deeplearning4j.nn.api.Layer)29 Gradient (org.deeplearning4j.nn.gradient.Gradient)26 DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer)24 Updater (org.deeplearning4j.nn.api.Updater)22 OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer)21 DefaultGradient (org.deeplearning4j.nn.gradient.DefaultGradient)21 DataSet (org.nd4j.linalg.dataset.DataSet)14 MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration)11 ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener)9 MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork)8 IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator)6 UniformDistribution (org.deeplearning4j.nn.conf.distribution.UniformDistribution)6 RnnOutputLayer (org.deeplearning4j.nn.layers.recurrent.RnnOutputLayer)6 MnistDataFetcher (org.deeplearning4j.datasets.fetchers.MnistDataFetcher)4 Evaluation (org.deeplearning4j.eval.Evaluation)4 Model (org.deeplearning4j.nn.api.Model)4 ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer)4