Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j: class MultiLayerTest, method testIterationCountAndPersistence.
@Test
public void testIterationCountAndPersistence() throws IOException {
    Nd4j.getRandom().setSeed(123);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .iterations(1).seed(123).list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.TANH).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(3).nOut(3).build())
                    .backprop(true).pretrain(false).build();

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();

    DataSetIterator iter = new IrisDataSetIterator(50, 150);
    assertEquals(0, network.getLayerWiseConfigurations().getIterationCount());
    network.fit(iter);
    // 150 examples at batch size 50 = 3 minibatches, so one epoch advances the count by 3
    assertEquals(3, network.getLayerWiseConfigurations().getIterationCount());
    iter.reset();
    network.fit(iter);
    assertEquals(6, network.getLayerWiseConfigurations().getIterationCount());
    iter.reset();
    // Fitting a single DataSet (one minibatch) advances the count by 1
    network.fit(iter.next());
    assertEquals(7, network.getLayerWiseConfigurations().getIterationCount());

    // Round-trip through ModelSerializer; the iteration count should be persisted
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ModelSerializer.writeModel(network, baos, true);
    byte[] asBytes = baos.toByteArray();
    ByteArrayInputStream bais = new ByteArrayInputStream(asBytes);
    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(bais, true);
    assertEquals(7, net.getLayerWiseConfigurations().getIterationCount());
}
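The expected values follow from simple minibatch arithmetic: Iris has 150 examples, so a batch size of 50 gives 3 minibatches per epoch; each fit(iter) call advances the iteration count by 3, and fit(DataSet) on a single batch advances it by 1. A worked version of that bookkeeping (plain Java, illustrative only, not DL4J API):

    int totalExamples = 150, batchSize = 50;
    int minibatchesPerEpoch = totalExamples / batchSize;  // 3
    int iterationCount = 0;
    iterationCount += minibatchesPerEpoch;  // first fit(iter)   -> 3
    iterationCount += minibatchesPerEpoch;  // second fit(iter)  -> 6
    iterationCount += 1;                    // fit(iter.next())  -> 7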
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j: class MultiLayerTest, method testBatchNorm.
@Test
public void testBatchNorm() {
    Nd4j.getRandom().setSeed(123);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
                    .iterations(5).seed(123).list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.TANH).build())
                    .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.TANH).build())
                    .layer(2, new BatchNormalization.Builder().nOut(2).build())
                    .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).nIn(2).nOut(3).build())
                    .backprop(true).pretrain(false).build();

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.setListeners(new ScoreIterationListener(1));

    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    DataSet next = iter.next();
    // Standardize features in place before training
    next.normalizeZeroMeanZeroUnitVariance();
    SplitTestAndTrain trainTest = next.splitTestAndTrain(110);
    network.setLabels(trainTest.getTrain().getLabels());
    network.init();
    network.fit(trainTest.getTrain());
}
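normalizeZeroMeanZeroUnitVariance() standardizes each feature column of the DataSet in place. On newer ND4J versions where that convenience method is deprecated, NormalizerStandardize performs the same preprocessing; a minimal sketch, assuming the standard ND4J preprocessor API (verify against your version):

    // Minimal sketch, assuming org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize
    NormalizerStandardize normalizer = new NormalizerStandardize();
    normalizer.fit(next);        // collect per-feature mean and standard deviation
    normalizer.transform(next);  // standardize in place: (x - mean) / std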
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j: class BackTrackLineSearchTest, method testBackTrackLineGradientDescent.
@Test
public void testBackTrackLineGradientDescent() {
    OptimizationAlgorithm optimizer = OptimizationAlgorithm.LINE_GRADIENT_DESCENT;

    DataSetIterator irisIter = new IrisDataSetIterator(1, 1);
    DataSet data = irisIter.next();

    MultiLayerNetwork network = new MultiLayerNetwork(getIrisMultiLayerConfig(Activation.SIGMOID, 100, optimizer));
    network.init();
    IterationListener listener = new ScoreIterationListener(1);
    network.setListeners(Collections.singletonList(listener));

    double oldScore = network.score(data);
    network.fit(data.getFeatureMatrix(), data.getLabels());
    double score = network.score();
    // The score after fitting should be strictly lower than before
    assertTrue(score < oldScore);
}
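getIrisMultiLayerConfig is a helper defined elsewhere in BackTrackLineSearchTest and is not shown in this excerpt. As a purely hypothetical reconstruction of what such a builder might look like, matching the call site's arguments (activation, 100 hidden units, optimizer) but not the test's actual code:

    // Hypothetical sketch - the real helper lives in BackTrackLineSearchTest
    private static MultiLayerConfiguration getIrisMultiLayerConfig(Activation activation, int hiddenSize,
                    OptimizationAlgorithm optimizer) {
        return new NeuralNetConfiguration.Builder()
                        .optimizationAlgo(optimizer).iterations(1).seed(12345L).list()
                        .layer(0, new DenseLayer.Builder().nIn(4).nOut(hiddenSize)
                                        .weightInit(WeightInit.XAVIER).activation(activation).build())
                        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                        .nIn(hiddenSize).nOut(3).activation(Activation.SOFTMAX).build())
                        .backprop(true).pretrain(false).build();
    }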
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j: class TestOptimizers, method testOptimizersMLP.
@Test
public void testOptimizersMLP() {
    //Check that the score actually decreases over time
    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    OptimizationAlgorithm[] toTest = { OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT,
                    OptimizationAlgorithm.LINE_GRADIENT_DESCENT, OptimizationAlgorithm.CONJUGATE_GRADIENT,
                    OptimizationAlgorithm.LBFGS };
    DataSet ds = iter.next();
    ds.normalizeZeroMeanZeroUnitVariance();

    for (OptimizationAlgorithm oa : toTest) {
        int nIter = 10;
        MultiLayerNetwork network = new MultiLayerNetwork(getMLPConfigIris(oa, nIter));
        network.init();
        double score = network.score(ds);
        assertTrue(score != 0.0 && !Double.isNaN(score));

        if (PRINT_OPT_RESULTS)
            System.out.println("testOptimizersMLP() - " + oa);

        int nCallsToOptimizer = 30;
        double[] scores = new double[nCallsToOptimizer + 1];
        scores[0] = score;
        for (int i = 0; i < nCallsToOptimizer; i++) {
            network.fit(ds);
            double scoreAfter = network.score(ds);
            scores[i + 1] = scoreAfter;
            assertTrue("Score is NaN after optimization", !Double.isNaN(scoreAfter));
            assertTrue("OA= " + oa + ", before= " + score + ", after= " + scoreAfter, scoreAfter <= score);
            score = scoreAfter;
        }

        if (PRINT_OPT_RESULTS)
            System.out.println(oa + " - " + Arrays.toString(scores));
    }
}
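Beyond the per-step assertion inside the loop, the collected scores array also supports a single monotonicity check over the whole run. A small helper sketch (hypothetical, not part of the test class):

    // Hypothetical helper: verify the score never increases across optimizer calls
    static void assertMonotoneNonIncreasing(double[] scores) {
        for (int i = 1; i < scores.length; i++) {
            assertTrue("Score increased at step " + i + ": " + scores[i - 1] + " -> " + scores[i],
                            scores[i] <= scores[i - 1]);
        }
    }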
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j: class TestParamAndGradientIterationListener, method test.
@Test
public void test() {
    IrisDataSetIterator iter = new IrisDataSetIterator(30, 150);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .learningRate(1e-5).iterations(1).list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(20).build())
                    .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build())
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(30).nOut(3).build())
                    .pretrain(false).backprop(true).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    IterationListener listener = ParamAndGradientIterationListener.builder().outputToFile(true)
                    .file(new File(System.getProperty("java.io.tmpdir") + "/paramAndGradTest.txt"))
                    .outputToConsole(true).outputToLogger(false).iterations(2).printHeader(true).printMean(false)
                    .printMinMax(false).printMeanAbsValue(true).delimiter("\t").build();
    net.setListeners(listener);
    for (int i = 0; i < 2; i++) {
        net.fit(iter);
    }
}
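With this configuration the listener writes tab-delimited parameter and gradient statistics to paramAndGradTest.txt every 2 iterations. A quick way to inspect the output after the run (plain JDK I/O, not part of the test; the caller must handle or declare IOException):

    // Print the listener's output file: a header row, then one stats row per logged iteration
    for (String line : Files.readAllLines(
                    Paths.get(System.getProperty("java.io.tmpdir"), "paramAndGradTest.txt"))) {
        System.out.println(line);
    }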