Use of org.deeplearning4j.nn.conf.ComputationGraphConfiguration in the deeplearning4j project: class TestEarlyStoppingSparkCompGraph, method testListeners.
@Test
public void testListeners() {
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .iterations(1).updater(Updater.SGD).weightInit(WeightInit.XAVIER)
                    .graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                    .setOutputs("0").pretrain(false).backprop(true).build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));

    JavaRDD<DataSet> irisData = getIris();
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES))
                    .scoreCalculator(new SparkLossCalculatorComputationGraph(
                                    irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
                    .modelSaver(saver).build();

    LoggingEarlyStoppingListener listener = new LoggingEarlyStoppingListener();
    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(
                    getContext().sc(), tm, esConf, net, irisData.map(new DataSetToMultiDataSetFn()));
    trainer.setListener(listener);
    trainer.fit();

    assertEquals(1, listener.onStartCallCount);
    assertEquals(5, listener.onEpochCallCount);
    assertEquals(1, listener.onCompletionCallCount);
}
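The LoggingEarlyStoppingListener used here is a test helper whose source is not part of this snippet. The assertions only require a listener that counts its callbacks, so a minimal sketch might look like the following (assuming the EarlyStoppingListener interface from org.deeplearning4j.earlystopping.listener; the real helper presumably also logs):

    //Counting listener sketch. Assumption: the real test helper's fields and logging are not shown here.
    public class LoggingEarlyStoppingListener implements EarlyStoppingListener<ComputationGraph> {

        public int onStartCallCount = 0;
        public int onEpochCallCount = 0;
        public int onCompletionCallCount = 0;

        @Override
        public void onStart(EarlyStoppingConfiguration<ComputationGraph> esConfig, ComputationGraph net) {
            onStartCallCount++; //Expected once per fit() call
        }

        @Override
        public void onEpoch(int epochNum, double score, EarlyStoppingConfiguration<ComputationGraph> esConfig,
                        ComputationGraph net) {
            onEpochCallCount++; //Expected once per epoch: 5 times under MaxEpochsTerminationCondition(5)
        }

        @Override
        public void onCompletion(EarlyStoppingResult<ComputationGraph> esResult) {
            onCompletionCallCount++; //Expected once, when training terminates
        }
    }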
Use of org.deeplearning4j.nn.conf.ComputationGraphConfiguration in the deeplearning4j project: class TestEarlyStoppingSparkCompGraph, method testTimeTermination.
@Test
public void testTimeTermination() {
    //Test termination after max time
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .iterations(1).updater(Updater.SGD).learningRate(1e-6).weightInit(WeightInit.XAVIER)
                    .graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                    .setOutputs("0").pretrain(false).backprop(true).build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));

    JavaRDD<DataSet> irisData = getIris();
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(10000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(3, TimeUnit.SECONDS),
                                    new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
                    .scoreCalculator(new SparkLossCalculatorComputationGraph(
                                    irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
                    .modelSaver(saver).build();

    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(
                    getContext().sc(), tm, esConf, net, irisData.map(new DataSetToMultiDataSetFn()));

    long startTime = System.currentTimeMillis();
    EarlyStoppingResult<ComputationGraph> result = trainer.fit();
    long endTime = System.currentTimeMillis();
    int durationSeconds = (int) ((endTime - startTime) / 1000);

    assertTrue(durationSeconds >= 3);
    assertTrue(durationSeconds <= 9);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition, result.getTerminationReason());
    String expDetails = new MaxTimeIterationTerminationCondition(3, TimeUnit.SECONDS).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
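Both early-stopping tests map the DataSet RDD through DataSetToMultiDataSetFn, because the ComputationGraph-based Spark trainer and loss calculator consume MultiDataSet rather than DataSet. The adapter ships with deeplearning4j-spark; conceptually it is a one-line Spark function, roughly:

    //Conceptual sketch of deeplearning4j-spark's DataSetToMultiDataSetFn: a Spark Function
    //that wraps each DataSet's features and labels as a single-input/single-output MultiDataSet.
    public class DataSetToMultiDataSetFn implements Function<DataSet, MultiDataSet> {
        @Override
        public MultiDataSet call(DataSet ds) throws Exception {
            return ds.toMultiDataSet();
        }
    }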
Use of org.deeplearning4j.nn.conf.ComputationGraphConfiguration in the deeplearning4j project: class TestEarlyStoppingSparkCompGraph, method testBadTuning.
@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .iterations(1).updater(Updater.SGD)
                    .learningRate(2.0) //Intentionally huge LR
                    .weightInit(WeightInit.XAVIER)
                    .graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
                                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "in")
                    .setOutputs("0").pretrain(false).backprop(true).build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));

    JavaRDD<DataSet> irisData = getIris();
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES),
                                    new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
                    .scoreCalculator(new SparkLossCalculatorComputationGraph(
                                    irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
                    .modelSaver(saver).build();

    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(
                    getContext().sc(), tm, esConf, net, irisData.map(new DataSetToMultiDataSetFn()));
    EarlyStoppingResult<ComputationGraph> result = trainer.fit();

    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition, result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(7.5).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
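All three early-stopping tests call a getIris() helper that is defined elsewhere in the test class and is not shown in these snippets. A plausible sketch, assuming it simply parallelizes the 150-example Iris dataset as one-example DataSet objects (the real helper's batch size may differ):

    //Plausible definition of the getIris() helper (assumption: actual implementation not shown here).
    private JavaRDD<DataSet> getIris() {
        DataSetIterator iter = new IrisDataSetIterator(1, 150); //One example per DataSet object
        List<DataSet> list = new ArrayList<>(150);
        while (iter.hasNext()) {
            list.add(iter.next());
        }
        return getContext().parallelize(list);
    }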
Use of org.deeplearning4j.nn.conf.ComputationGraphConfiguration in the deeplearning4j project: class TestSparkComputationGraph, method testDistributedScoring.
@Test
public void testDistributedScoring() {
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                    .regularization(true).l1(0.1).l2(0.1).seed(123)
                    .updater(Updater.NESTEROVS).learningRate(0.1).momentum(0.9)
                    .graphBuilder().addInputs("in")
                    .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder()
                                    .nIn(nIn).nOut(3).activation(Activation.TANH).build(), "in")
                    .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .nIn(3).nOut(nOut).activation(Activation.SOFTMAX).build(), "0")
                    .setOutputs("1").backprop(true).pretrain(false).build();
    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
    SparkComputationGraph sparkNet = new SparkComputationGraph(sc, conf, tm);
    ComputationGraph netCopy = sparkNet.getNetwork().clone();

    int nRows = 100;
    INDArray features = Nd4j.rand(nRows, nIn);
    INDArray labels = Nd4j.zeros(nRows, nOut);
    Random r = new Random(12345);
    for (int i = 0; i < nRows; i++) {
        labels.putScalar(new int[] {i, r.nextInt(nOut)}, 1.0);
    }

    INDArray localScoresWithReg = netCopy.scoreExamples(new DataSet(features, labels), true);
    INDArray localScoresNoReg = netCopy.scoreExamples(new DataSet(features, labels), false);

    List<Tuple2<String, DataSet>> dataWithKeys = new ArrayList<>();
    for (int i = 0; i < nRows; i++) {
        DataSet ds = new DataSet(features.getRow(i).dup(), labels.getRow(i).dup());
        dataWithKeys.add(new Tuple2<>(String.valueOf(i), ds));
    }
    JavaPairRDD<String, DataSet> dataWithKeysRdd = sc.parallelizePairs(dataWithKeys);

    JavaPairRDD<String, Double> sparkScoresWithReg = sparkNet.scoreExamples(dataWithKeysRdd, true, 4);
    JavaPairRDD<String, Double> sparkScoresNoReg = sparkNet.scoreExamples(dataWithKeysRdd, false, 4);
    Map<String, Double> sparkScoresWithRegMap = sparkScoresWithReg.collectAsMap();
    Map<String, Double> sparkScoresNoRegMap = sparkScoresNoReg.collectAsMap();

    for (int i = 0; i < nRows; i++) {
        double scoreRegExp = localScoresWithReg.getDouble(i);
        double scoreRegAct = sparkScoresWithRegMap.get(String.valueOf(i));
        assertEquals(scoreRegExp, scoreRegAct, 1e-5);
        double scoreNoRegExp = localScoresNoReg.getDouble(i);
        double scoreNoRegAct = sparkScoresNoRegMap.get(String.valueOf(i));
        assertEquals(scoreNoRegExp, scoreNoRegAct, 1e-5);
    }

    List<DataSet> dataNoKeys = new ArrayList<>();
    for (int i = 0; i < nRows; i++) {
        dataNoKeys.add(new DataSet(features.getRow(i).dup(), labels.getRow(i).dup()));
    }
    JavaRDD<DataSet> dataNoKeysRdd = sc.parallelize(dataNoKeys);

    List<Double> scoresWithReg = new ArrayList<>(sparkNet.scoreExamples(dataNoKeysRdd, true, 4).collect());
    List<Double> scoresNoReg = new ArrayList<>(sparkNet.scoreExamples(dataNoKeysRdd, false, 4).collect());
    Collections.sort(scoresWithReg);
    Collections.sort(scoresNoReg);

    double[] localScoresWithRegDouble = localScoresWithReg.data().asDouble();
    double[] localScoresNoRegDouble = localScoresNoReg.data().asDouble();
    Arrays.sort(localScoresWithRegDouble);
    Arrays.sort(localScoresNoRegDouble);

    for (int i = 0; i < localScoresWithRegDouble.length; i++) {
        assertEquals(localScoresWithRegDouble[i], scoresWithReg.get(i), 1e-5);
        assertEquals(localScoresNoRegDouble[i], scoresNoReg.get(i), 1e-5);
    }
}
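Every test above constructs its TrainingMaster through the positional ParameterAveragingTrainingMaster constructor, which is hard to read. Assuming the arguments map to saveUpdater, numWorkers, rddDataSetNumExamples, batchSizePerWorker, averagingFrequency, and prefetchNumBatches (in that order), the equivalent builder form is self-documenting:

    //Builder-style sketch equivalent to new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0).
    //Assumption: the positional constructor maps to these builder setters.
    TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(numExecutors(), 1) //numWorkers, examples per RDD object
                    .saveUpdater(true) //Retain updater state (e.g. momentum) across averaging
                    .batchSizePerWorker(10) //Minibatch size used on each worker
                    .averagingFrequency(1) //Average parameters after every minibatch
                    .workerPrefetchNumBatches(0) //No asynchronous prefetching
                    .build();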
Use of org.deeplearning4j.nn.conf.ComputationGraphConfiguration in the deeplearning4j project: class TestMiscFunctions, method testFeedForwardWithKeyGraph.
@Test
public void testFeedForwardWithKeyGraph() {
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().weightInit(WeightInit.XAVIER)
                    .graphBuilder().addInputs("in1", "in2")
                    .addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in1")
                    .addLayer("1", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in2")
                    .addLayer("2", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(6).nOut(3)
                                    .activation(Activation.SOFTMAX).build(), "0", "1")
                    .setOutputs("2").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    DataSet ds = iter.next();

    List<INDArray> expected = new ArrayList<>();
    List<Tuple2<Integer, INDArray[]>> mapFeatures = new ArrayList<>();
    int count = 0;
    int arrayCount = 0;
    Random r = new Random(12345);
    while (count < 150) {
        int exampleCount = r.nextInt(5) + 1; //1 to 5 inclusive examples
        if (count + exampleCount > 150)
            exampleCount = 150 - count;
        INDArray subset = ds.getFeatures().get(NDArrayIndex.interval(count, count + exampleCount), NDArrayIndex.all());
        expected.add(net.outputSingle(false, subset, subset));
        mapFeatures.add(new Tuple2<>(arrayCount, new INDArray[] {subset, subset}));
        arrayCount++;
        count += exampleCount;
    }

    JavaPairRDD<Integer, INDArray[]> rdd = sc.parallelizePairs(mapFeatures);
    SparkComputationGraph graph = new SparkComputationGraph(sc, net, null);
    Map<Integer, INDArray[]> map = graph.feedForwardWithKey(rdd, 16).collectAsMap();

    for (int i = 0; i < expected.size(); i++) {
        INDArray exp = expected.get(i);
        INDArray act = map.get(i)[0];
        assertEquals(exp, act);
    }
}
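The keyed feed-forward pattern in this test generalizes to any inference job where distributed predictions must be matched back to their source records: the key survives the round trip, so partition ordering does not matter. A minimal usage sketch, assuming the initialized two-input net and the JavaSparkContext sc from above:

    //Keyed inference sketch (assumes 'sc' and the initialized two-input 'net' from the test above).
    List<Tuple2<Integer, INDArray[]>> toScore = new ArrayList<>();
    toScore.add(new Tuple2<>(0, new INDArray[] {Nd4j.rand(3, 4), Nd4j.rand(3, 4)})); //Key 0: 3 examples
    toScore.add(new Tuple2<>(1, new INDArray[] {Nd4j.rand(5, 4), Nd4j.rand(5, 4)})); //Key 1: 5 examples

    SparkComputationGraph sparkGraph = new SparkComputationGraph(sc, net, null); //No TrainingMaster needed for inference
    Map<Integer, INDArray[]> outputs = sparkGraph.feedForwardWithKey(sc.parallelizePairs(toScore), 16).collectAsMap();
    INDArray predsForKey0 = outputs.get(0)[0]; //One INDArray per graph output; shape [3, 3] here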