Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.
From the class DenseTest, method testDenseBiasInit:
@Test
public void testDenseBiasInit() {
    // Single DenseLayer whose bias elements are all initialized to 1.0
    DenseLayer build = new DenseLayer.Builder().nIn(1).nOut(3).biasInit(1).build();
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().layer(build).build();

    // Allocate the parameter view array and instantiate the layer against it
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    Layer layer = conf.getLayer().instantiate(conf, null, 0, params, true);

    // The bias "b" is stored as a 1 x nOut row vector, so size(0) is 1
    assertEquals(1, layer.getParam("b").size(0));
}
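Since biasInit(1) fills the bias during initialization, the same test could go one step further and check the values themselves. A minimal sketch, assuming the same JUnit assertions and Nd4j helpers already used above:

// Hypothetical follow-up check: the bias should be a 1 x 3 row vector of ones
INDArray expectedBias = Nd4j.ones(1, 3);
assertEquals(expectedBias, layer.getParam("b"));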
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.
From the class OutputLayerTest, method testBinary:
@Test
public void testBinary() {
    Nd4j.MAX_ELEMENTS_PER_SLICE = Integer.MAX_VALUE;
    Nd4j.MAX_SLICES_TO_PRINT = Integer.MAX_VALUE;

    // Run in double precision; restore the original data type at the end
    DataBuffer.Type initialType = Nd4j.dataType();
    DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);

    // Six examples with six binary features and two one-hot label columns
    INDArray data = Nd4j.create(new double[][] { { 1, 1, 1, 0, 0, 0 }, { 1, 0, 1, 0, 0, 0 }, { 1, 1, 1, 0, 0, 0 },
                    { 0, 0, 1, 1, 1, 0 }, { 0, 0, 1, 1, 0, 0 }, { 0, 0, 1, 1, 1, 0 } });
    INDArray data2 = Nd4j.create(new double[][] { { 1, 0 }, { 1, 0 }, { 1, 0 }, { 0, 1 }, { 0, 1 }, { 0, 1 } });
    DataSet dataset = new DataSet(data, data2);

    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .seed(123).iterations(200).learningRate(1e-2)
                    .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder().nIn(6).nOut(2)
                                    .weightInit(WeightInit.ZERO).updater(Updater.SGD).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build())
                    .build();

    // Instantiate the output layer against parameter and gradient view arrays
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    OutputLayer o = (OutputLayer) conf.getLayer().instantiate(conf, null, 0, params, true);
    o.setBackpropGradientsViewArray(Nd4j.create(1, params.length()));
    o.setListeners(new ScoreIterationListener(1));

    o.fit(dataset);
    DataTypeUtil.setDTypeForContext(initialType);
}
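Once fit(dataset) has run, the trained layer's predictions can be inspected with the same output(...) call used in the Iris tests below. A minimal sketch, assuming the layer variable o is still in scope:

// Hypothetical follow-up: softmax probabilities for the six training examples
INDArray probabilities = o.output(data);
System.out.println(probabilities);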
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.
From the class OutputLayerTest, method testIris2:
@Test
public void testIris2() {
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .iterations(10).learningRate(1e-1)
                    .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder().nIn(4).nOut(3)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .build();

    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    OutputLayer l = (OutputLayer) conf.getLayer().instantiate(conf,
                    Collections.<IterationListener>singletonList(new ScoreIterationListener(1)), 0, params, true);
    l.setBackpropGradientsViewArray(Nd4j.create(1, params.length()));

    // Load all 150 Iris examples in one batch, shuffle, and split into 110 train / 40 test
    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    DataSet next = iter.next();
    next.shuffle();
    SplitTestAndTrain trainTest = next.splitTestAndTrain(110);

    // Normalize and fit on the training split
    trainTest.getTrain().normalizeZeroMeanZeroUnitVariance();
    l.fit(trainTest.getTrain());

    // Normalize the test split and evaluate the predictions
    DataSet test = trainTest.getTest();
    test.normalizeZeroMeanZeroUnitVariance();
    Evaluation eval = new Evaluation();
    INDArray output = l.output(test.getFeatureMatrix());
    eval.eval(test.getLabels(), output);
    log.info("Score " + eval.stats());
}
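Beyond the full stats() string, the Evaluation object also exposes individual metrics. A minimal sketch of logging only the accuracy, assuming the standard Evaluation API of this deeplearning4j version:

// Hypothetical follow-up: report a single metric instead of the full report
double accuracy = eval.accuracy();
log.info("Accuracy: " + accuracy);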
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.
From the class OutputLayerTest, method testIris:
@Test
public void testIris() {
    // Same setup as testIris2, but trained with line gradient descent for 5 iterations
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
                    .iterations(5).learningRate(1e-1)
                    .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder().nIn(4).nOut(3)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .build();

    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    OutputLayer l = (OutputLayer) conf.getLayer().instantiate(conf,
                    Collections.<IterationListener>singletonList(new ScoreIterationListener(1)), 0, params, true);
    l.setBackpropGradientsViewArray(Nd4j.create(1, params.length()));

    // Load, shuffle, and split the Iris data set into 110 train / 40 test examples
    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    DataSet next = iter.next();
    next.shuffle();
    SplitTestAndTrain trainTest = next.splitTestAndTrain(110);

    trainTest.getTrain().normalizeZeroMeanZeroUnitVariance();
    l.fit(trainTest.getTrain());

    // Normalize the test split and evaluate the predictions
    DataSet test = trainTest.getTest();
    test.normalizeZeroMeanZeroUnitVariance();
    Evaluation eval = new Evaluation();
    INDArray output = l.output(test.getFeatureMatrix());
    eval.eval(test.getLabels(), output);
    log.info("Score " + eval.stats());
}
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.
From the class BaseLayerTest, method configureSingleLayer:
public Layer configureSingleLayer() {
    int nIn = 2;
    int nOut = 2;

    // Layer-only configuration wrapping a single ConvolutionLayer
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                    .layer(new ConvolutionLayer.Builder().nIn(nIn).nOut(nOut).build()).build();

    // Allocate the parameter view array and instantiate the layer against it
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    return conf.getLayer().instantiate(conf, null, 0, params, true);
}
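A hedged usage sketch for this helper, assuming it is called from a test method in the same BaseLayerTest class with the usual static JUnit imports:

// Hypothetical usage: the helper returns a fully initialized layer backed by the parameter view
Layer layer = configureSingleLayer();
assertNotNull(layer.params());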