use of org.deeplearning4j.optimize.api.IterationListener in project deeplearning4j by deeplearning4j.
the class BackTrackLineSearchTest method testBackTrackLineHessian.
@Test(expected = Exception.class)
public void testBackTrackLineHessian() {
    OptimizationAlgorithm optimizer = OptimizationAlgorithm.HESSIAN_FREE;
    DataSet data = irisIter.next();

    MultiLayerNetwork network = new MultiLayerNetwork(getIrisMultiLayerConfig(Activation.RELU, 100, optimizer));
    network.init();
    IterationListener listener = new ScoreIterationListener(1);
    network.setListeners(Collections.singletonList(listener));

    // With the HESSIAN_FREE optimizer, fit() is expected to throw (see expected = Exception.class)
    network.fit(data.getFeatureMatrix(), data.getLabels());
}
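All of these examples attach a ScoreIterationListener, but the IterationListener interface itself is small enough to implement directly. Below is a minimal sketch of a custom listener, assuming the 0.x-era org.deeplearning4j.optimize.api.IterationListener interface with invoked(), invoke() and iterationDone(Model, int); the class name and logging behaviour are hypothetical, and later releases fold this interface into TrainingListener, so check the version in use.

import org.deeplearning4j.nn.api.Model;
import org.deeplearning4j.optimize.api.IterationListener;

// Hypothetical custom listener: logs the model score every 'frequency' iterations.
public class LoggingIterationListener implements IterationListener {

    private final int frequency;
    private boolean invoked = false;

    public LoggingIterationListener(int frequency) {
        this.frequency = frequency;
    }

    @Override
    public boolean invoked() {
        return invoked;
    }

    @Override
    public void invoke() {
        this.invoked = true;
    }

    @Override
    public void iterationDone(Model model, int iteration) {
        invoke();
        if (iteration % frequency == 0) {
            System.out.println("Iteration " + iteration + ", score = " + model.score());
        }
    }
}

Such a listener can then be passed to network.setListeners(...) exactly like the ScoreIterationListener above.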
use of org.deeplearning4j.optimize.api.IterationListener in project deeplearning4j by deeplearning4j.
the class TestSparkComputationGraph method testBasic.
@Test
public void testBasic() throws Exception {
    JavaSparkContext sc = this.sc;

    // Load the iris CSV as MultiDataSets: columns 0-3 are features, column 4 becomes a one-hot label with 3 classes
    RecordReader rr = new CSVRecordReader(0, ",");
    rr.initialize(new FileSplit(new ClassPathResource("iris.txt").getTempFileFromArchive()));
    MultiDataSetIterator iter = new RecordReaderMultiDataSetIterator.Builder(1)
            .addReader("iris", rr)
            .addInput("iris", 0, 3)
            .addOutputOneHot("iris", 4, 3)
            .build();
    List<MultiDataSet> list = new ArrayList<>(150);
    while (iter.hasNext()) list.add(iter.next());

    ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .learningRate(0.1)
            .graphBuilder()
            .addInputs("in")
            .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in")
            .addLayer("out", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3).build(), "dense")
            .setOutputs("out")
            .pretrain(false)
            .backprop(true)
            .build();
    ComputationGraph cg = new ComputationGraph(config);
    cg.init();

    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
    SparkComputationGraph scg = new SparkComputationGraph(sc, cg, tm);
    scg.setListeners(Collections.singleton((IterationListener) new ScoreIterationListener(1)));

    JavaRDD<MultiDataSet> rdd = sc.parallelize(list);
    scg.fitMultiDataSet(rdd);

    // Try: fitting using DataSet
    DataSetIterator iris = new IrisDataSetIterator(1, 150);
    List<DataSet> list2 = new ArrayList<>();
    while (iris.hasNext()) list2.add(iris.next());
    JavaRDD<DataSet> rddDS = sc.parallelize(list2);
    scg.fit(rddDS);
}
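The cast to IterationListener inside Collections.singleton(...) appears to be there for type inference: without it the call would produce a Set<ScoreIterationListener>, which would not match a setListeners parameter declared as a collection of IterationListener. An equivalent formulation pins the element type explicitly instead of casting:

    scg.setListeners(Collections.<IterationListener>singleton(new ScoreIterationListener(1)));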
use of org.deeplearning4j.optimize.api.IterationListener in project deeplearning4j by deeplearning4j.
the class EvalTest method testIris.
@Test
public void testIris() {
    // Network config
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
            .iterations(1)
            .seed(42)
            .learningRate(1e-6)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(4).nOut(2).activation(Activation.TANH)
                    .weightInit(WeightInit.XAVIER).build())
            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .nIn(2).nOut(3).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build())
            .build();

    // Instantiate model
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    model.setListeners(Arrays.asList((IterationListener) new ScoreIterationListener(1)));

    // Train-test split
    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    DataSet next = iter.next();
    next.shuffle();
    SplitTestAndTrain trainTest = next.splitTestAndTrain(5, new Random(42));

    // Train
    DataSet train = trainTest.getTrain();
    train.normalizeZeroMeanZeroUnitVariance();

    // Test
    DataSet test = trainTest.getTest();
    test.normalizeZeroMeanZeroUnitVariance();
    INDArray testFeature = test.getFeatureMatrix();
    INDArray testLabel = test.getLabels();

    // Fit the model
    model.fit(train);

    // Get predictions from the test features
    INDArray testPredictedLabel = model.output(testFeature);

    // Eval with the class number specified
    Evaluation eval = new Evaluation(3);
    eval.eval(testLabel, testPredictedLabel);
    double eval1F1 = eval.f1();
    double eval1Acc = eval.accuracy();

    // Eval without the class number
    Evaluation eval2 = new Evaluation();
    eval2.eval(testLabel, testPredictedLabel);
    double eval2F1 = eval2.f1();
    double eval2Acc = eval2.accuracy();

    // Assert the two implementations give the same f1 and accuracy (since there is only one batch)
    assertTrue(eval1F1 == eval2F1 && eval1Acc == eval2Acc);

    Evaluation evalViaMethod = model.evaluate(new ListDataSetIterator(Collections.singletonList(test)));
    checkEvaluationEquality(eval, evalViaMethod);

    System.out.println(eval.getConfusionMatrix().toString());
    System.out.println(eval.getConfusionMatrix().toCSV());
    System.out.println(eval.getConfusionMatrix().toHTML());
    System.out.println(eval.confusionToString());
}
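Beyond the confusion-matrix dumps at the end, the same Evaluation object can also print a consolidated summary. A small follow-up using only methods already seen in these examples:

    System.out.println(eval.stats());
    System.out.println("F1 = " + eval.f1() + ", accuracy = " + eval.accuracy());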
use of org.deeplearning4j.optimize.api.IterationListener in project deeplearning4j by deeplearning4j.
the class DataSetIteratorTest method runCifar.
public void runCifar(boolean preProcessCifar) throws Exception {
    final int height = 32;
    final int width = 32;
    int channels = 3;
    int outputNum = CifarLoader.NUM_LABELS;
    int numSamples = 10;
    int batchSize = 5;
    int iterations = 1;
    int seed = 123;
    int listenerFreq = iterations;

    CifarDataSetIterator cifar = new CifarDataSetIterator(batchSize, numSamples,
            new int[] { height, width, channels }, preProcessCifar, true);

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .iterations(iterations)
            .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .list()
            .layer(0, new ConvolutionLayer.Builder(5, 5).nIn(channels).nOut(6)
                    .weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
            .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 }).build())
            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(outputNum)
                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build())
            .backprop(true)
            .pretrain(false)
            .setInputType(InputType.convolutionalFlat(height, width, channels));

    MultiLayerNetwork model = new MultiLayerNetwork(builder.build());
    model.init();
    model.setListeners(Arrays.asList((IterationListener) new ScoreIterationListener(listenerFreq)));
    model.fit(cifar);

    // Switch the iterator to the test set and evaluate
    cifar.test(10);
    Evaluation eval = new Evaluation(cifar.getLabels());
    while (cifar.hasNext()) {
        DataSet testDS = cifar.next(batchSize);
        INDArray output = model.output(testDS.getFeatureMatrix());
        eval.eval(testDS.getLabels(), output);
    }
    System.out.println(eval.stats(true));
}
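Note that runCifar is a parameterized helper rather than a test itself; it would presumably be driven by test methods covering both CIFAR preprocessing modes. A hypothetical pair of callers (the actual test names in the repository may differ):

    @Test
    public void testCifarIteratorRaw() throws Exception {
        runCifar(false);
    }

    @Test
    public void testCifarIteratorPreProcessed() throws Exception {
        runCifar(true);
    }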
use of org.deeplearning4j.optimize.api.IterationListener in project deeplearning4j by deeplearning4j.
the class MultiLayerNeuralNetConfigurationTest method testIterationListener.
@Test
public void testIterationListener() {
    // Listener attached after init()
    MultiLayerNetwork model1 = new MultiLayerNetwork(getConf());
    model1.init();
    model1.setListeners(Collections.singletonList((IterationListener) new ScoreIterationListener(1)));

    // Listener attached before init()
    MultiLayerNetwork model2 = new MultiLayerNetwork(getConf());
    model2.setListeners(Collections.singletonList((IterationListener) new ScoreIterationListener(1)));
    model2.init();

    // In both cases every layer should end up with exactly one listener
    Layer[] l1 = model1.getLayers();
    for (int i = 0; i < l1.length; i++)
        assertTrue(l1[i].getListeners() != null && l1[i].getListeners().size() == 1);

    Layer[] l2 = model2.getLayers();
    for (int i = 0; i < l2.length; i++)
        assertTrue(l2[i].getListeners() != null && l2[i].getListeners().size() == 1);
}
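The two models differ only in whether setListeners(...) is called before or after init(); the assertions check that the listener is propagated down to every layer in either ordering, so listener registration does not depend on the network already being initialized.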