Use of org.deeplearning4j.nn.conf.ComputationGraphConfiguration in project deeplearning4j by deeplearning4j.
From class TestEarlyStoppingCompGraph, method testListeners.
@Test
public void testListeners() {
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .iterations(1).updater(Updater.SGD).weightInit(WeightInit.XAVIER)
                    .graphBuilder()
                    .addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                    .setOutputs("0")
                    .pretrain(false).backprop(true)
                    .build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));

    //Single batch of the full Iris dataset (150 examples), so one iteration == one epoch
    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES))
                    .scoreCalculator(new DataSetLossCalculatorCG(irisIter, true))
                    .modelSaver(saver)
                    .build();

    LoggingEarlyStoppingListener listener = new LoggingEarlyStoppingListener();
    IEarlyStoppingTrainer trainer = new EarlyStoppingGraphTrainer(esConf, net, irisIter, listener);
    trainer.fit();

    //Training runs for exactly 5 epochs: onEpoch fires once per epoch, onStart/onCompletion once each
    assertEquals(1, listener.onStartCallCount);
    assertEquals(5, listener.onEpochCallCount);
    assertEquals(1, listener.onCompletionCallCount);
}
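The LoggingEarlyStoppingListener referenced above is a test helper that simply counts callback invocations. A minimal sketch of such a listener follows, assuming the EarlyStoppingListener callback signatures from the DL4J 0.x API; verify them against the version in use.

//Minimal counting listener; field names match the assertions in the test above.
//Assumption: EarlyStoppingListener callback signatures as in DL4J 0.x.
private static class LoggingEarlyStoppingListener implements EarlyStoppingListener<ComputationGraph> {

    int onStartCallCount = 0;
    int onEpochCallCount = 0;
    int onCompletionCallCount = 0;

    @Override
    public void onStart(EarlyStoppingConfiguration<ComputationGraph> esConfig, ComputationGraph net) {
        onStartCallCount++; //expected exactly once per fit()
    }

    @Override
    public void onEpoch(int epochNum, double score, EarlyStoppingConfiguration<ComputationGraph> esConfig,
                    ComputationGraph net) {
        onEpochCallCount++; //expected once per completed epoch (5 with MaxEpochsTerminationCondition(5))
    }

    @Override
    public void onCompletion(EarlyStoppingResult<ComputationGraph> esResult) {
        onCompletionCallCount++; //expected exactly once, after termination
    }
}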
Use of org.deeplearning4j.nn.conf.ComputationGraphConfiguration in project deeplearning4j by deeplearning4j.
From class TestEarlyStoppingCompGraph, method testTimeTermination.
@Test
public void testTimeTermination() {
    //test termination after max time
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .iterations(1).updater(Updater.SGD).learningRate(1e-6).weightInit(WeightInit.XAVIER)
                    .graphBuilder()
                    .addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                    .setOutputs("0")
                    .pretrain(false).backprop(true)
                    .build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(10000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(3, TimeUnit.SECONDS),
                                    //Initial score is ~2.5, so this max-score condition should never fire;
                                    //the 3-second time limit is expected to terminate training instead
                                    new MaxScoreIterationTerminationCondition(7.5))
                    .modelSaver(saver)
                    .build();

    IEarlyStoppingTrainer trainer = new EarlyStoppingGraphTrainer(esConf, net, irisIter);

    long startTime = System.currentTimeMillis();
    EarlyStoppingResult result = trainer.fit();
    long endTime = System.currentTimeMillis();
    int durationSeconds = (int) ((endTime - startTime) / 1000);

    assertTrue(durationSeconds >= 3);
    assertTrue(durationSeconds <= 9);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition, result.getTerminationReason());
    String expDetails = new MaxTimeIterationTerminationCondition(3, TimeUnit.SECONDS).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
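The assertions above check only the termination reason and details. A short sketch of inspecting the rest of the EarlyStoppingResult, using accessors from the DL4J 0.x API (note that getBestModel() can return null in this particular test, since no score calculator was configured):

//Inspecting the remainder of the early stopping result (API as in DL4J 0.x):
ComputationGraph best = (ComputationGraph) result.getBestModel(); //null here: no scoreCalculator was set
System.out.println("Total epochs trained: " + result.getTotalEpochs());
System.out.println("Best epoch: " + result.getBestModelEpoch() + ", best score: " + result.getBestModelScore());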
Use of org.deeplearning4j.nn.conf.ComputationGraphConfiguration in project deeplearning4j by deeplearning4j.
From class TestKryoWarning, method doTestCG.
private static void doTestCG(SparkConf sparkConf) {
    JavaSparkContext sc = new JavaSparkContext(sparkConf);
    try {
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                        .graphBuilder()
                        .addInputs("in")
                        .addLayer("0", new OutputLayer.Builder().nIn(10).nOut(10).build(), "in")
                        .setOutputs("0")
                        .pretrain(false).backprop(true)
                        .build();
        TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(1).build();
        //Constructing the SparkComputationGraph is the point of this helper: it should
        //log a warning when Kryo serialization is misconfigured in the SparkConf
        SparkListenable scg = new SparkComputationGraph(sc, conf, tm);
    } finally {
        sc.stop();
    }
}
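The tests in TestKryoWarning call this helper with differently configured SparkConf instances. A hypothetical invocation that exercises the Kryo path is sketched below; the master and app name are illustrative, while spark.serializer and the KryoSerializer class name are standard Spark settings.

//Illustrative only: a local SparkConf with Kryo enabled, the case this helper is meant to exercise
SparkConf sparkConf = new SparkConf()
                .setMaster("local[*]")
                .setAppName("TestKryoWarning")
                .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
doTestCG(sparkConf); //expected to trigger the Kryo configuration warning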
Use of org.deeplearning4j.nn.conf.ComputationGraphConfiguration in project deeplearning4j by deeplearning4j.
From class GradientCheckTestsComputationGraph, method testBasicTwoOutputs.
@Test
public void testBasicTwoOutputs() {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                    .activation(Activation.TANH).updater(Updater.NONE).learningRate(1.0)
                    .graphBuilder()
                    .addInputs("in1", "in2")
                    .addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
                    .addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
                    .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                                    .nIn(2).nOut(2).activation(Activation.IDENTITY).build(), "d0")
                    .addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                                    .nIn(2).nOut(2).activation(Activation.IDENTITY).build(), "d1")
                    .setOutputs("out1", "out2")
                    .pretrain(false).backprop(true)
                    .build();
    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    System.out.println("Num layers: " + graph.getNumLayers());
    System.out.println("Num params: " + graph.numParams());

    Nd4j.getRandom().setSeed(12345);
    int nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(1, nParams);
    graph.setParams(newParams);

    int[] mbSizes = new int[] { 1, 3, 10 };
    for (int minibatch : mbSizes) {
        INDArray in1 = Nd4j.rand(minibatch, 2);
        INDArray in2 = Nd4j.rand(minibatch, 2);
        INDArray labels1 = Nd4j.rand(minibatch, 2);
        INDArray labels2 = Nd4j.rand(minibatch, 2);

        String testName = "testBasicTwoOutputs() - minibatch = " + minibatch;
        if (PRINT_RESULTS) {
            System.out.println(testName);
            for (int j = 0; j < graph.getNumLayers(); j++)
                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                        DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE,
                        new INDArray[] { in1, in2 }, new INDArray[] { labels1, labels2 });
        assertTrue(testName, gradOK);
    }
}
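The checkGradients call relies on class-level constants defined elsewhere in GradientCheckTestsComputationGraph and not shown in this excerpt. Representative declarations follow; the exact values are assumptions based on DL4J's gradient check tests (which also set ND4J to double precision globally, since finite-difference gradient checks are unreliable in float):

//Assumed class-level constants for the gradient checks (values typical of DL4J's tests, not from this excerpt)
private static final boolean PRINT_RESULTS = true;
private static final boolean RETURN_ON_FIRST_FAILURE = false;
private static final double DEFAULT_EPS = 1e-6; //finite-difference step size
private static final double DEFAULT_MAX_REL_ERROR = 1e-3; //maximum allowed relative error
private static final double DEFAULT_MIN_ABS_ERROR = 1e-8; //differences below this are ignored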
Use of org.deeplearning4j.nn.conf.ComputationGraphConfiguration in project deeplearning4j by deeplearning4j.
From class GradientCheckTestsComputationGraph, method testMultipleOutputsLayer.
@Test
public void testMultipleOutputsLayer() {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                    .updater(Updater.NONE).learningRate(1.0).activation(Activation.TANH)
                    .graphBuilder()
                    .addInputs("i0")
                    .addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0")
                    .addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
                    .addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
                    .addLayer("d3", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
                    //"out" receives d1, d2 and d3 (implicitly merged), hence nIn = 3 * 2 = 6
                    .addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE)
                                    .nIn(6).nOut(2).build(), "d1", "d2", "d3")
                    .setOutputs("out")
                    .pretrain(false).backprop(true)
                    .build();
    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    int[] minibatchSizes = { 1, 3 };
    for (int mb : minibatchSizes) {
        INDArray input = Nd4j.rand(mb, 2);
        INDArray out = Nd4j.rand(mb, 2);

        String msg = "testMultipleOutputsLayer() - minibatchSize = " + mb;
        if (PRINT_RESULTS) {
            System.out.println(msg);
            for (int j = 0; j < graph.getNumLayers(); j++)
                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                        DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE,
                        new INDArray[] { input }, new INDArray[] { out });
        assertTrue(msg, gradOK);
    }
}
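When addLayer receives several input names, as "out" does above, DL4J implicitly concatenates their activations via a merge vertex, which is why the output layer declares nIn(6) for three nOut(2) feeders. The same graph can be written with the merge made explicit; a sketch, assuming MergeVertex from org.deeplearning4j.nn.conf.graph:

//Equivalent configuration with the concatenation made explicit via a MergeVertex
ComputationGraphConfiguration explicitMerge = new NeuralNetConfiguration.Builder()
                .graphBuilder()
                .addInputs("i0")
                .addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "i0")
                .addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
                .addLayer("d2", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
                .addLayer("d3", new DenseLayer.Builder().nIn(2).nOut(2).build(), "d0")
                //Concatenates the 2-wide activations of d1, d2, d3 into a single width-6 input
                .addVertex("merge", new MergeVertex(), "d1", "d2", "d3")
                .addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE)
                                .nIn(6).nOut(2).build(), "merge")
                .setOutputs("out")
                .pretrain(false).backprop(true)
                .build();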