
Example 51 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.

From the class DenseTest, method testDenseBiasInit:

@Test
public void testDenseBiasInit() {
    DenseLayer build = new DenseLayer.Builder().nIn(1).nOut(3).biasInit(1).build();
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().layer(build).build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    Layer layer = conf.getLayer().instantiate(conf, null, 0, params, true);
    assertEquals(1, layer.getParam("b").size(0));
}
Also used: DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer), INDArray (org.nd4j.linalg.api.ndarray.INDArray), NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration), Layer (org.deeplearning4j.nn.api.Layer), OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer), Test (org.junit.Test)
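
A brief follow-up sketch, not part of the original test, that also checks the bias values themselves rather than only the shape; it assumes the layer variable from testDenseBiasInit above and the same pre-1.0 DL4J API:

// Hypothetical extension of testDenseBiasInit: verify the bias contents, not just the shape.
INDArray bias = layer.getParam("b");
// biasInit(1) is expected to fill every bias entry with 1.0
assertEquals(3, bias.length());
assertEquals(1.0, bias.getDouble(0), 0.0);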

Example 52 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.

From the class OutputLayerTest, method testBinary:

@Test
public void testBinary() {
    Nd4j.MAX_ELEMENTS_PER_SLICE = Integer.MAX_VALUE;
    Nd4j.MAX_SLICES_TO_PRINT = Integer.MAX_VALUE;
    DataBuffer.Type initialType = Nd4j.dataType();
    DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);
    INDArray data = Nd4j.create(new double[][] { { 1, 1, 1, 0, 0, 0 }, { 1, 0, 1, 0, 0, 0 }, { 1, 1, 1, 0, 0, 0 }, { 0, 0, 1, 1, 1, 0 }, { 0, 0, 1, 1, 0, 0 }, { 0, 0, 1, 1, 1, 0 } });
    INDArray data2 = Nd4j.create(new double[][] { { 1, 0 }, { 1, 0 }, { 1, 0 }, { 0, 1 }, { 0, 1 }, { 0, 1 } });
    DataSet dataset = new DataSet(data, data2);
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .seed(123).iterations(200).learningRate(1e-2)
            .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder()
                    .nIn(6).nOut(2).weightInit(WeightInit.ZERO).updater(Updater.SGD)
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build())
            .build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    OutputLayer o = (OutputLayer) conf.getLayer().instantiate(conf, null, 0, params, true);
    o.setBackpropGradientsViewArray(Nd4j.create(1, params.length()));
    o.setListeners(new ScoreIterationListener(1));
    o.fit(dataset);
    DataTypeUtil.setDTypeForContext(initialType);
}
Also used: RnnOutputLayer (org.deeplearning4j.nn.layers.recurrent.RnnOutputLayer), INDArray (org.nd4j.linalg.api.ndarray.INDArray), DataSet (org.nd4j.linalg.dataset.DataSet), NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration), ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener), DataBuffer (org.nd4j.linalg.api.buffer.DataBuffer), Test (org.junit.Test)
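
As a hedged follow-on, not in the original test, the fitted layer could be evaluated on the same data, mirroring the pattern of the Iris tests below; o, data and data2 are the variables defined above:

// Hypothetical evaluation of the trained output layer on its training data.
Evaluation eval = new Evaluation();
INDArray output = o.output(data);
eval.eval(data2, output);
System.out.println(eval.stats());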

Example 53 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.

From the class OutputLayerTest, method testIris2:

@Test
public void testIris2() {
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .iterations(10).learningRate(1e-1)
            .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder()
                    .nIn(4).nOut(3).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    OutputLayer l = (OutputLayer) conf.getLayer().instantiate(conf, Collections.<IterationListener>singletonList(new ScoreIterationListener(1)), 0, params, true);
    l.setBackpropGradientsViewArray(Nd4j.create(1, params.length()));
    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    DataSet next = iter.next();
    next.shuffle();
    SplitTestAndTrain trainTest = next.splitTestAndTrain(110);
    trainTest.getTrain().normalizeZeroMeanZeroUnitVariance();
    l.fit(trainTest.getTrain());
    DataSet test = trainTest.getTest();
    test.normalizeZeroMeanZeroUnitVariance();
    Evaluation eval = new Evaluation();
    INDArray output = l.output(test.getFeatureMatrix());
    eval.eval(test.getLabels(), output);
    log.info("Score " + eval.stats());
}
Also used: RnnOutputLayer (org.deeplearning4j.nn.layers.recurrent.RnnOutputLayer), Evaluation (org.deeplearning4j.eval.Evaluation), IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator), DataSet (org.nd4j.linalg.dataset.DataSet), NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration), INDArray (org.nd4j.linalg.api.ndarray.INDArray), IterationListener (org.deeplearning4j.optimize.api.IterationListener), ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener), DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator), SplitTestAndTrain (org.nd4j.linalg.dataset.SplitTestAndTrain), Test (org.junit.Test)
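
For comparison, a minimal sketch, not taken from the project sources, of how the same single output layer is typically expressed as a MultiLayerNetwork in the pre-1.0 DL4J API used by these examples (MultiLayerConfiguration and MultiLayerNetwork also appear in the aggregations below):

// Sketch: equivalent configuration wrapped in a MultiLayerConfiguration / MultiLayerNetwork.
MultiLayerConfiguration mlConf = new NeuralNetConfiguration.Builder()
        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
        .iterations(10).learningRate(1e-1)
        .list()
        .layer(0, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder()
                .nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
                .activation(Activation.SOFTMAX)
                .lossFunction(LossFunctions.LossFunction.MCXENT).build())
        .build();
MultiLayerNetwork net = new MultiLayerNetwork(mlConf);
net.init();
net.setListeners(new ScoreIterationListener(1));
// (data normalization from the original test omitted for brevity)
net.fit(new IrisDataSetIterator(150, 150));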

Example 54 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.

From the class OutputLayerTest, method testIris (identical to testIris2 above except that it uses LINE_GRADIENT_DESCENT with 5 iterations instead of stochastic gradient descent with 10):

@Test
public void testIris() {
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
            .iterations(5).learningRate(1e-1)
            .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder()
                    .nIn(4).nOut(3).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    OutputLayer l = (OutputLayer) conf.getLayer().instantiate(conf, Collections.<IterationListener>singletonList(new ScoreIterationListener(1)), 0, params, true);
    l.setBackpropGradientsViewArray(Nd4j.create(1, params.length()));
    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    DataSet next = iter.next();
    next.shuffle();
    SplitTestAndTrain trainTest = next.splitTestAndTrain(110);
    trainTest.getTrain().normalizeZeroMeanZeroUnitVariance();
    l.fit(trainTest.getTrain());
    DataSet test = trainTest.getTest();
    test.normalizeZeroMeanZeroUnitVariance();
    Evaluation eval = new Evaluation();
    INDArray output = l.output(test.getFeatureMatrix());
    eval.eval(test.getLabels(), output);
    log.info("Score " + eval.stats());
}
Also used: RnnOutputLayer (org.deeplearning4j.nn.layers.recurrent.RnnOutputLayer), Evaluation (org.deeplearning4j.eval.Evaluation), IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator), DataSet (org.nd4j.linalg.dataset.DataSet), NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration), INDArray (org.nd4j.linalg.api.ndarray.INDArray), IterationListener (org.deeplearning4j.optimize.api.IterationListener), ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener), DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator), SplitTestAndTrain (org.nd4j.linalg.dataset.SplitTestAndTrain), Test (org.junit.Test)

Example 55 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.

From the class BaseLayerTest, method configureSingleLayer:

public Layer configureSingleLayer() {
    int nIn = 2;
    int nOut = 2;
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().layer(new ConvolutionLayer.Builder().nIn(nIn).nOut(nOut).build()).build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    return conf.getLayer().instantiate(conf, null, 0, params, true);
}
Also used: INDArray (org.nd4j.linalg.api.ndarray.INDArray), NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration), ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer)
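
A hypothetical usage sketch for the helper above, not part of BaseLayerTest, showing that the returned layer owns the parameter view allocated from numParams:

// Sketch only: assumes configureSingleLayer() as defined above is in scope.
Layer convLayer = configureSingleLayer();
// numParams() reports the total number of weight and bias values backed by 'params'
System.out.println("parameter count: " + convLayer.numParams());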

Aggregations

NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 83 uses
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 65 uses
Test (org.junit.Test): 55 uses
Layer (org.deeplearning4j.nn.api.Layer): 29 uses
Gradient (org.deeplearning4j.nn.gradient.Gradient): 26 uses
DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer): 24 uses
Updater (org.deeplearning4j.nn.api.Updater): 22 uses
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer): 21 uses
DefaultGradient (org.deeplearning4j.nn.gradient.DefaultGradient): 21 uses
DataSet (org.nd4j.linalg.dataset.DataSet): 14 uses
MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration): 11 uses
ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener): 9 uses
MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork): 8 uses
IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator): 6 uses
UniformDistribution (org.deeplearning4j.nn.conf.distribution.UniformDistribution): 6 uses
RnnOutputLayer (org.deeplearning4j.nn.layers.recurrent.RnnOutputLayer): 6 uses
MnistDataFetcher (org.deeplearning4j.datasets.fetchers.MnistDataFetcher): 4 uses
Evaluation (org.deeplearning4j.eval.Evaluation): 4 uses
Model (org.deeplearning4j.nn.api.Model): 4 uses
ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer): 4 uses