Use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j (by deeplearning4j).
Class MultiLayerTest, method testDbn.
@Test
public void testDbn() throws Exception {
Nd4j.MAX_SLICES_TO_PRINT = -1;
Nd4j.MAX_ELEMENTS_PER_SLICE = -1;
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
    .iterations(100)
    .momentum(0.9)
    .optimizationAlgo(OptimizationAlgorithm.LBFGS)
    .regularization(true).l2(2e-4)
    .list()
    .layer(0, new RBM.Builder(RBM.HiddenUnit.GAUSSIAN, RBM.VisibleUnit.GAUSSIAN)
        .nIn(4).nOut(3)
        .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0, 1))
        .activation(Activation.TANH)
        .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE)
        .build())
    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
        .nIn(3).nOut(3)
        .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0, 1))
        .activation(Activation.SOFTMAX)
        .build())
    .build();
MultiLayerNetwork d = new MultiLayerNetwork(conf);
DataSetIterator iter = new IrisDataSetIterator(150, 150);
DataSet next = iter.next();
Nd4j.writeTxt(next.getFeatureMatrix(), "iris.txt", "\t");
next.normalizeZeroMeanZeroUnitVariance();
SplitTestAndTrain testAndTrain = next.splitTestAndTrain(110);
DataSet train = testAndTrain.getTrain();
d.fit(train);
DataSet test = testAndTrain.getTest();
Evaluation eval = new Evaluation();
INDArray output = d.output(test.getFeatureMatrix());
eval.eval(test.getLabels(), output);
log.info("Score " + eval.stats());
}
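The normalization step above uses the in-place DataSet.normalizeZeroMeanZeroUnitVariance() call. A minimal alternative sketch using ND4J's NormalizerStandardize preprocessor (fitting statistics on the training split and reusing them for the test split; this is not what the test above does, just an illustration):
// Sketch: standardize features with a fitted normalizer instead of the in-place DataSet call.
NormalizerStandardize normalizer = new NormalizerStandardize();
normalizer.fit(train);          // collect mean/std statistics from the training split only
normalizer.transform(train);    // standardize the training features in place
normalizer.transform(test);     // apply the same training statistics to the held-out split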
Use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j (by deeplearning4j).
Class MultiLayerTest, method testPredict.
@Test
public void testPredict() throws Exception {
Nd4j.getRandom().setSeed(12345);
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
    .regularization(false)
    .learningRate(1.0)
    .weightInit(WeightInit.XAVIER)
    .seed(12345L)
    .list()
    .layer(0, new DenseLayer.Builder().nIn(784).nOut(50).activation(Activation.RELU).build())
    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
        .activation(Activation.SOFTMAX).nIn(50).nOut(10).build())
    .pretrain(false).backprop(true)
    .setInputType(InputType.convolutional(28, 28, 1))
    .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
DataSetIterator ds = new MnistDataSetIterator(10, 10);
net.fit(ds);
DataSetIterator testDs = new MnistDataSetIterator(1, 1);
DataSet testData = testDs.next();
testData.setLabelNames(Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
String actualLabels = testData.getLabelName(0);
List<String> prediction = net.predict(testData);
assertTrue(actualLabels != null);
assertTrue(prediction.get(0) != null);
}
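predict can also be called on the raw feature matrix to get class indices directly. A minimal sketch continuing from the testData and net above, mapping the predicted index back to a label name (the log call assumes the same logger as the other tests):
// Sketch: predict class indices from raw features and map them back to label names.
int[] predictedClasses = net.predict(testData.getFeatureMatrix()); // one class index per example
String predictedLabel = testData.getLabelName(predictedClasses[0]); // look up the name for that class index
log.info("Predicted label: " + predictedLabel);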
Use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j (by deeplearning4j).
Class MultiLayerTest, method testBackProp.
@Test
public void testBackProp() {
Nd4j.getRandom().setSeed(123);
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
    .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
    .iterations(5)
    .seed(123)
    .list()
    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3)
        .weightInit(WeightInit.XAVIER).activation(Activation.TANH).build())
    .layer(1, new DenseLayer.Builder().nIn(3).nOut(2)
        .weightInit(WeightInit.XAVIER).activation(Activation.TANH).build())
    .layer(2, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
        .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
        .nIn(2).nOut(3).build())
    .backprop(true).pretrain(false)
    .build();
MultiLayerNetwork network = new MultiLayerNetwork(conf);
network.init();
network.setListeners(new ScoreIterationListener(1));
DataSetIterator iter = new IrisDataSetIterator(150, 150);
DataSet next = iter.next();
next.normalizeZeroMeanZeroUnitVariance();
SplitTestAndTrain trainTest = next.splitTestAndTrain(110);
network.setInput(trainTest.getTrain().getFeatureMatrix());
network.setLabels(trainTest.getTrain().getLabels());
network.init();
network.fit(trainTest.getTrain());
DataSet test = trainTest.getTest();
Evaluation eval = new Evaluation();
INDArray output = network.output(test.getFeatureMatrix());
eval.eval(test.getLabels(), output);
log.info("Score " + eval.stats());
}
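The test above only logs the evaluation report. If a pass/fail criterion is wanted, the Evaluation object also exposes summary metrics; a minimal sketch continuing from the eval above (the 0.5 threshold is an arbitrary example, not a value from the original test):
// Sketch: turn the evaluation into an assertion instead of only logging the report.
double accuracy = eval.accuracy(); // fraction of correctly classified test examples
double f1 = eval.f1();             // overall F1 score across the three Iris classes
assertTrue("Accuracy unexpectedly low: " + accuracy, accuracy > 0.5); // arbitrary illustrative threshold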
Use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j (by deeplearning4j).
Class BackPropMLPTest, method testMLPTrivial.
@Test
public void testMLPTrivial() {
//Simplest possible case: 1 hidden layer, 1 hidden neuron, batch size of 1.
MultiLayerNetwork network = new MultiLayerNetwork(getIrisMLPSimpleConfig(new int[] { 1 }, Activation.SIGMOID));
network.setListeners(new ScoreIterationListener(1));
network.init();
DataSetIterator iter = new IrisDataSetIterator(1, 10);
while (iter.hasNext()) network.fit(iter.next());
}
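The while loop above consumes the DataSetIterator exactly once. Training for several epochs requires resetting the iterator between passes; a minimal sketch using the same iter and network (the epoch count is arbitrary and not part of the original test):
// Sketch: multiple epochs over the same DataSetIterator by resetting it between passes.
int nEpochs = 5; // arbitrary epoch count for illustration
for (int epoch = 0; epoch < nEpochs; epoch++) {
    iter.reset();                 // rewind the iterator to the start of the data
    while (iter.hasNext()) {
        network.fit(iter.next()); // one parameter update per mini-batch
    }
}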
Use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j (by deeplearning4j).
Class BackPropMLPTest, method testMLP2.
@Test
public void testMLP2() {
//Simple mini-batch test with multiple hidden layers
MultiLayerConfiguration conf = getIrisMLPSimpleConfig(new int[] { 5, 15, 3 }, Activation.TANH);
System.out.println(conf);
MultiLayerNetwork network = new MultiLayerNetwork(conf);
network.init();
DataSetIterator iter = new IrisDataSetIterator(12, 120);
while (iter.hasNext()) {
network.fit(iter.next());
}
}
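To inspect what this mini-batch training produced, the network can be evaluated directly against a DataSetIterator; a minimal sketch using a fresh Iris iterator (here evaluating on the same data that was used for training, purely for illustration):
// Sketch: evaluate the trained network against a DataSetIterator.
DataSetIterator evalIter = new IrisDataSetIterator(150, 150); // full Iris data set in a single batch
Evaluation eval = network.evaluate(evalIter);
System.out.println(eval.stats()); // confusion matrix plus accuracy/precision/recall/F1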