
Example 11 with TrainingMaster

Use of org.deeplearning4j.spark.api.TrainingMaster in project deeplearning4j by deeplearning4j.

From class TestEarlyStoppingSparkCompGraph, method testTimeTermination.

@Test
public void testTimeTermination() {
    //test termination after max time
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
                    .updater(Updater.SGD).learningRate(1e-6).weightInit(WeightInit.XAVIER)
                    .graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                    .setOutputs("0").pretrain(false).backprop(true).build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));
    JavaRDD<DataSet> irisData = getIris();
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(10000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(3, TimeUnit.SECONDS),
                                    new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
                    .scoreCalculator(new SparkLossCalculatorComputationGraph(irisData.map(new DataSetToMultiDataSetFn()),
                                    true, sc.sc()))
                    .modelSaver(saver).build();
    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(getContext().sc(), tm, esConf, net, irisData.map(new DataSetToMultiDataSetFn()));
    long startTime = System.currentTimeMillis();
    EarlyStoppingResult result = trainer.fit();
    long endTime = System.currentTimeMillis();
    int durationSeconds = (int) (endTime - startTime) / 1000;
    assertTrue(durationSeconds >= 3);
    assertTrue(durationSeconds <= 9);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition, result.getTerminationReason());
    String expDetails = new MaxTimeIterationTerminationCondition(3, TimeUnit.SECONDS).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
Also used: OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) DataSet(org.nd4j.linalg.dataset.DataSet) TrainingMaster(org.deeplearning4j.spark.api.TrainingMaster) ParameterAveragingTrainingMaster(org.deeplearning4j.spark.impl.paramavg.ParameterAveragingTrainingMaster) EarlyStoppingConfiguration(org.deeplearning4j.earlystopping.EarlyStoppingConfiguration) DataSetToMultiDataSetFn(org.deeplearning4j.spark.impl.graph.dataset.DataSetToMultiDataSetFn) SparkLossCalculatorComputationGraph(org.deeplearning4j.spark.earlystopping.SparkLossCalculatorComputationGraph) ComputationGraph(org.deeplearning4j.nn.graph.ComputationGraph) ScoreIterationListener(org.deeplearning4j.optimize.listeners.ScoreIterationListener) InMemoryModelSaver(org.deeplearning4j.earlystopping.saver.InMemoryModelSaver) MaxEpochsTerminationCondition(org.deeplearning4j.earlystopping.termination.MaxEpochsTerminationCondition) SparkEarlyStoppingGraphTrainer(org.deeplearning4j.spark.earlystopping.SparkEarlyStoppingGraphTrainer) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) EarlyStoppingResult(org.deeplearning4j.earlystopping.EarlyStoppingResult) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) MaxScoreIterationTerminationCondition(org.deeplearning4j.earlystopping.termination.MaxScoreIterationTerminationCondition) MaxTimeIterationTerminationCondition(org.deeplearning4j.earlystopping.termination.MaxTimeIterationTerminationCondition) Test(org.junit.Test)
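
A note on the TrainingMaster construction used above: the six-argument ParameterAveragingTrainingMaster constructor packs its settings positionally, which is hard to read at a glance. A rough Builder-based equivalent is sketched below; the positional order (saveUpdater, numWorkers, rddDataSetNumExamples, batchSizePerWorker, averagingFrequency, prefetchNumBatches) is an assumption about this DL4J version, so check it against the constructor Javadoc before relying on it.

    //Sketch only: Builder form assumed equivalent to new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0)
    TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(numExecutors(), 1) //numWorkers, examples per DataSet object in the RDD
                    .saveUpdater(true)
                    .batchSizePerWorker(10)
                    .averagingFrequency(1)
                    .workerPrefetchNumBatches(0)
                    .build();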

Example 12 with TrainingMaster

Use of org.deeplearning4j.spark.api.TrainingMaster in project deeplearning4j by deeplearning4j.

From class TestEarlyStoppingSparkCompGraph, method testBadTuning.

@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
                    .updater(Updater.SGD)
                    .learningRate(2.0) //Intentionally huge LR
                    .weightInit(WeightInit.XAVIER)
                    .graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
                                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "in")
                    .setOutputs("0").pretrain(false).backprop(true).build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));
    JavaRDD<DataSet> irisData = getIris();
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES),
                                    new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
                    .scoreCalculator(new SparkLossCalculatorComputationGraph(irisData.map(new DataSetToMultiDataSetFn()),
                                    true, sc.sc()))
                    .modelSaver(saver).build();
    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(getContext().sc(), tm, esConf, net, irisData.map(new DataSetToMultiDataSetFn()));
    EarlyStoppingResult result = trainer.fit();
    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition, result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(7.5).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
Also used: InMemoryModelSaver(org.deeplearning4j.earlystopping.saver.InMemoryModelSaver) MaxEpochsTerminationCondition(org.deeplearning4j.earlystopping.termination.MaxEpochsTerminationCondition) SparkEarlyStoppingGraphTrainer(org.deeplearning4j.spark.earlystopping.SparkEarlyStoppingGraphTrainer) DataSet(org.nd4j.linalg.dataset.DataSet) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) ParameterAveragingTrainingMaster(org.deeplearning4j.spark.impl.paramavg.ParameterAveragingTrainingMaster) TrainingMaster(org.deeplearning4j.spark.api.TrainingMaster) EarlyStoppingResult(org.deeplearning4j.earlystopping.EarlyStoppingResult) EarlyStoppingConfiguration(org.deeplearning4j.earlystopping.EarlyStoppingConfiguration) SparkLossCalculatorComputationGraph(org.deeplearning4j.spark.earlystopping.SparkLossCalculatorComputationGraph) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) DataSetToMultiDataSetFn(org.deeplearning4j.spark.impl.graph.dataset.DataSetToMultiDataSetFn) ComputationGraph(org.deeplearning4j.nn.graph.ComputationGraph) MaxScoreIterationTerminationCondition(org.deeplearning4j.earlystopping.termination.MaxScoreIterationTerminationCondition) ScoreIterationListener(org.deeplearning4j.optimize.listeners.ScoreIterationListener) MaxTimeIterationTerminationCondition(org.deeplearning4j.earlystopping.termination.MaxTimeIterationTerminationCondition) Test(org.junit.Test)
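
The assertions above hinge on the run stopping because of the MaxScoreIterationTerminationCondition rather than the time or epoch limits. If you want to sanity-check a termination condition outside a full training run, the IterationTerminationCondition contract (initialize() followed by terminate(lastMiniBatchScore), as I understand this API version) can be exercised directly; a minimal sketch:

    MaxScoreIterationTerminationCondition cond = new MaxScoreIterationTerminationCondition(7.5);
    cond.initialize();
    boolean stopAtStart = cond.terminate(2.5);   //false: the initial Iris score (~2.5) is below the 7.5 ceiling
    boolean stopDiverged = cond.terminate(50.0); //true: a diverged score exceeds the ceiling and ends training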

Example 13 with TrainingMaster

Use of org.deeplearning4j.spark.api.TrainingMaster in project deeplearning4j by deeplearning4j.

From class TestSparkComputationGraph, method testDistributedScoring.

@Test
public void testDistributedScoring() {
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                    .regularization(true).l1(0.1).l2(0.1).seed(123)
                    .updater(Updater.NESTEROVS).learningRate(0.1).momentum(0.9)
                    .graphBuilder().addInputs("in")
                    .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(nIn).nOut(3)
                                    .activation(Activation.TANH).build(), "in")
                    .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .nIn(3).nOut(nOut).activation(Activation.SOFTMAX).build(), "0")
                    .setOutputs("1").backprop(true).pretrain(false).build();
    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
    SparkComputationGraph sparkNet = new SparkComputationGraph(sc, conf, tm);
    ComputationGraph netCopy = sparkNet.getNetwork().clone();
    int nRows = 100;
    INDArray features = Nd4j.rand(nRows, nIn);
    INDArray labels = Nd4j.zeros(nRows, nOut);
    Random r = new Random(12345);
    for (int i = 0; i < nRows; i++) {
        labels.putScalar(new int[] { i, r.nextInt(nOut) }, 1.0);
    }
    INDArray localScoresWithReg = netCopy.scoreExamples(new DataSet(features, labels), true);
    INDArray localScoresNoReg = netCopy.scoreExamples(new DataSet(features, labels), false);
    List<Tuple2<String, DataSet>> dataWithKeys = new ArrayList<>();
    for (int i = 0; i < nRows; i++) {
        DataSet ds = new DataSet(features.getRow(i).dup(), labels.getRow(i).dup());
        dataWithKeys.add(new Tuple2<>(String.valueOf(i), ds));
    }
    JavaPairRDD<String, DataSet> dataWithKeysRdd = sc.parallelizePairs(dataWithKeys);
    JavaPairRDD<String, Double> sparkScoresWithReg = sparkNet.scoreExamples(dataWithKeysRdd, true, 4);
    JavaPairRDD<String, Double> sparkScoresNoReg = sparkNet.scoreExamples(dataWithKeysRdd, false, 4);
    Map<String, Double> sparkScoresWithRegMap = sparkScoresWithReg.collectAsMap();
    Map<String, Double> sparkScoresNoRegMap = sparkScoresNoReg.collectAsMap();
    for (int i = 0; i < nRows; i++) {
        double scoreRegExp = localScoresWithReg.getDouble(i);
        double scoreRegAct = sparkScoresWithRegMap.get(String.valueOf(i));
        assertEquals(scoreRegExp, scoreRegAct, 1e-5);
        double scoreNoRegExp = localScoresNoReg.getDouble(i);
        double scoreNoRegAct = sparkScoresNoRegMap.get(String.valueOf(i));
        assertEquals(scoreNoRegExp, scoreNoRegAct, 1e-5);
    //            System.out.println(scoreRegExp + "\t" + scoreRegAct + "\t" + scoreNoRegExp + "\t" + scoreNoRegAct);
    }
    List<DataSet> dataNoKeys = new ArrayList<>();
    for (int i = 0; i < nRows; i++) {
        dataNoKeys.add(new DataSet(features.getRow(i).dup(), labels.getRow(i).dup()));
    }
    JavaRDD<DataSet> dataNoKeysRdd = sc.parallelize(dataNoKeys);
    List<Double> scoresWithReg = new ArrayList<>(sparkNet.scoreExamples(dataNoKeysRdd, true, 4).collect());
    List<Double> scoresNoReg = new ArrayList<>(sparkNet.scoreExamples(dataNoKeysRdd, false, 4).collect());
    Collections.sort(scoresWithReg);
    Collections.sort(scoresNoReg);
    double[] localScoresWithRegDouble = localScoresWithReg.data().asDouble();
    double[] localScoresNoRegDouble = localScoresNoReg.data().asDouble();
    Arrays.sort(localScoresWithRegDouble);
    Arrays.sort(localScoresNoRegDouble);
    for (int i = 0; i < localScoresWithRegDouble.length; i++) {
        assertEquals(localScoresWithRegDouble[i], scoresWithReg.get(i), 1e-5);
        assertEquals(localScoresNoRegDouble[i], scoresNoReg.get(i), 1e-5);
    //            System.out.println(localScoresWithRegDouble[i] + "\t" + scoresWithReg.get(i) + "\t" + localScoresNoRegDouble[i] + "\t" + scoresNoReg.get(i));
    }
}
Also used: OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) DataSet(org.nd4j.linalg.dataset.DataSet) MultiDataSet(org.nd4j.linalg.dataset.api.MultiDataSet) TrainingMaster(org.deeplearning4j.spark.api.TrainingMaster) ParameterAveragingTrainingMaster(org.deeplearning4j.spark.impl.paramavg.ParameterAveragingTrainingMaster) ComputationGraph(org.deeplearning4j.nn.graph.ComputationGraph) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) Tuple2(scala.Tuple2) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) BaseSparkTest(org.deeplearning4j.spark.BaseSparkTest) Test(org.junit.Test)
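
The keyed half of this test builds the JavaPairRDD by slicing rows out of the feature and label matrices by hand. DataSet.asList() does the same splitting in one call, so a shorter setup (a hypothetical variation, not the test's own code) could look like this:

    //Sketch: one single-example DataSet per row, keyed by its row index
    List<DataSet> singles = new DataSet(features, labels).asList();
    List<Tuple2<String, DataSet>> keyed = new ArrayList<>();
    for (int i = 0; i < singles.size(); i++) {
        keyed.add(new Tuple2<>(String.valueOf(i), singles.get(i)));
    }
    JavaPairRDD<String, DataSet> keyedRdd = sc.parallelizePairs(keyed);

The third argument to scoreExamples (4 here) controls the minibatch size used when scoring on each worker, as far as this API version goes.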

Example 14 with TrainingMaster

Use of org.deeplearning4j.spark.api.TrainingMaster in project deeplearning4j by deeplearning4j.

From class TestCompareParameterAveragingSparkVsSingleMachine, method testOneExecutorGraph.

@Test
public void testOneExecutorGraph() {
    //Idea: single worker/executor on Spark should give identical results to a single machine
    int miniBatchSize = 10;
    int nWorkers = 1;
    for (boolean saveUpdater : new boolean[] { true, false }) {
        JavaSparkContext sc = getContext(nWorkers);
        try {
            //Do training locally, for 3 minibatches
            int[] seeds = { 1, 2, 3 };
            ComputationGraph net = new ComputationGraph(getGraphConf(12345, Updater.RMSPROP));
            net.init();
            INDArray initialParams = net.params().dup();
            for (int i = 0; i < seeds.length; i++) {
                DataSet ds = getOneDataSet(miniBatchSize, seeds[i]);
                if (!saveUpdater)
                    net.setUpdater(null);
                net.fit(ds);
            }
            INDArray finalParams = net.params().dup();
            //Do training on Spark with one executor, for 3 separate minibatches
            TrainingMaster tm = getTrainingMaster(1, miniBatchSize, saveUpdater);
            SparkComputationGraph sparkNet = new SparkComputationGraph(sc, getGraphConf(12345, Updater.RMSPROP), tm);
            sparkNet.setCollectTrainingStats(true);
            INDArray initialSparkParams = sparkNet.getNetwork().params().dup();
            for (int i = 0; i < seeds.length; i++) {
                List<DataSet> list = getOneDataSetAsIndividalExamples(miniBatchSize, seeds[i]);
                JavaRDD<DataSet> rdd = sc.parallelize(list);
                sparkNet.fit(rdd);
            }
            INDArray finalSparkParams = sparkNet.getNetwork().params().dup();
            assertEquals(initialParams, initialSparkParams);
            assertNotEquals(initialParams, finalParams);
            assertEquals(finalParams, finalSparkParams);
        } finally {
            sc.stop();
        }
    }
}
Also used: SparkComputationGraph(org.deeplearning4j.spark.impl.graph.SparkComputationGraph) INDArray(org.nd4j.linalg.api.ndarray.INDArray) DataSet(org.nd4j.linalg.dataset.DataSet) JavaSparkContext(org.apache.spark.api.java.JavaSparkContext) ComputationGraph(org.deeplearning4j.nn.graph.ComputationGraph) TrainingMaster(org.deeplearning4j.spark.api.TrainingMaster) Test(org.junit.Test)
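
The getTrainingMaster(1, miniBatchSize, saveUpdater) helper is defined elsewhere in the test class and is not shown on this page. A plausible sketch of such a helper, assuming the first argument is the averaging frequency and the second the per-worker batch size (this is a guess at the helper, not its actual body):

    private static TrainingMaster getTrainingMaster(int avgFrequency, int batchSizePerWorker, boolean saveUpdater) {
        return new ParameterAveragingTrainingMaster.Builder(1) //1 example per DataSet object in the RDD
                        .averagingFrequency(avgFrequency)
                        .batchSizePerWorker(batchSizePerWorker)
                        .saveUpdater(saveUpdater)
                        .workerPrefetchNumBatches(0)
                        .build();
    }

With a single worker and the same updater handling, parameter averaging reduces to plain single-machine training, which is why the test expects the final parameters to match exactly.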

Example 15 with TrainingMaster

Use of org.deeplearning4j.spark.api.TrainingMaster in project deeplearning4j by deeplearning4j.

From class TestListeners, method testStatsCollection.

@Test
public void testStatsCollection() {
    JavaSparkContext sc = getContext();
    int nExecutors = numExecutors();
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(123)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.RELU).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .nIn(100).nOut(3).activation(Activation.SOFTMAX).weightInit(WeightInit.XAVIER).build())
                    .pretrain(false).backprop(true).build();
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(1).batchSizePerWorker(5).averagingFrequency(6).build();
    SparkDl4jMultiLayer net = new SparkDl4jMultiLayer(sc, conf, tm);
    //In-memory
    StatsStorage ss = new MapDBStatsStorage();
    net.setListeners(ss, Collections.singletonList(new StatsListener(null)));
    List<DataSet> list = new IrisDataSetIterator(120, 150).next().asList();
    //120 examples, 4 executors, 30 examples per executor -> 6 updates of size 5 per executor
    JavaRDD<DataSet> rdd = sc.parallelize(list);
    net.fit(rdd);
    List<String> sessions = ss.listSessionIDs();
    System.out.println("Sessions: " + sessions);
    assertEquals(1, sessions.size());
    String sid = sessions.get(0);
    List<String> typeIDs = ss.listTypeIDsForSession(sid);
    List<String> workers = ss.listWorkerIDsForSession(sid);
    System.out.println(sid + "\t" + typeIDs + "\t" + workers);
    List<Persistable> lastUpdates = ss.getLatestUpdateAllWorkers(sid, StatsListener.TYPE_ID);
    System.out.println(lastUpdates);
    System.out.println("Static info:");
    for (String wid : workers) {
        Persistable staticInfo = ss.getStaticInfo(sid, StatsListener.TYPE_ID, wid);
        System.out.println(sid + "\t" + wid);
    }
    assertEquals(1, typeIDs.size());
    assertEquals(numExecutors(), workers.size());
    String firstWorker = workers.get(0);
    String firstWorkerSubstring = workers.get(0).substring(0, firstWorker.length() - 1);
    for (String wid : workers) {
        String widSubstring = wid.substring(0, wid.length() - 1);
        assertEquals(firstWorkerSubstring, widSubstring);
        String counterVal = wid.substring(wid.length() - 1, wid.length());
        int cv = Integer.parseInt(counterVal);
        assertTrue(0 <= cv && cv < numExecutors());
    }
}
Also used: Persistable(org.deeplearning4j.api.storage.Persistable) IrisDataSetIterator(org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator) DataSet(org.nd4j.linalg.dataset.DataSet) TrainingMaster(org.deeplearning4j.spark.api.TrainingMaster) ParameterAveragingTrainingMaster(org.deeplearning4j.spark.impl.paramavg.ParameterAveragingTrainingMaster) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) SparkDl4jMultiLayer(org.deeplearning4j.spark.impl.multilayer.SparkDl4jMultiLayer) MapDBStatsStorage(org.deeplearning4j.ui.storage.mapdb.MapDBStatsStorage) JavaSparkContext(org.apache.spark.api.java.JavaSparkContext) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) StatsStorage(org.deeplearning4j.api.storage.StatsStorage) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) StatsListener(org.deeplearning4j.ui.stats.StatsListener) BaseSparkTest(org.deeplearning4j.spark.BaseSparkTest) Test(org.junit.Test)
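
The stats collected here stay in the in-memory MapDBStatsStorage and are only inspected programmatically. If you also want to view the collected metrics in the DL4J training UI, the same storage instance can be attached to the UI server (this needs the deeplearning4j-ui module and is not part of the test above); a brief sketch, assuming the UIServer API from the same release line:

    UIServer uiServer = UIServer.getInstance(); //org.deeplearning4j.ui.api.UIServer
    uiServer.attach(ss);                        //serve the stats populated by the StatsListener above
    //then browse to http://localhost:9000/train (default port) to view the charts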

Aggregations

TrainingMaster (org.deeplearning4j.spark.api.TrainingMaster): 15 usages
Test (org.junit.Test): 13 usages
DataSet (org.nd4j.linalg.dataset.DataSet): 12 usages
ComputationGraph (org.deeplearning4j.nn.graph.ComputationGraph): 10 usages
ParameterAveragingTrainingMaster (org.deeplearning4j.spark.impl.paramavg.ParameterAveragingTrainingMaster): 10 usages
NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 9 usages
JavaSparkContext (org.apache.spark.api.java.JavaSparkContext): 8 usages
ComputationGraphConfiguration (org.deeplearning4j.nn.conf.ComputationGraphConfiguration): 8 usages
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer): 6 usages
ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener): 6 usages
EarlyStoppingConfiguration (org.deeplearning4j.earlystopping.EarlyStoppingConfiguration): 5 usages
InMemoryModelSaver (org.deeplearning4j.earlystopping.saver.InMemoryModelSaver): 5 usages
MaxEpochsTerminationCondition (org.deeplearning4j.earlystopping.termination.MaxEpochsTerminationCondition): 5 usages
SparkEarlyStoppingGraphTrainer (org.deeplearning4j.spark.earlystopping.SparkEarlyStoppingGraphTrainer): 5 usages
SparkLossCalculatorComputationGraph (org.deeplearning4j.spark.earlystopping.SparkLossCalculatorComputationGraph): 5 usages
DataSetToMultiDataSetFn (org.deeplearning4j.spark.impl.graph.dataset.DataSetToMultiDataSetFn): 5 usages
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 5 usages
MaxTimeIterationTerminationCondition (org.deeplearning4j.earlystopping.termination.MaxTimeIterationTerminationCondition): 4 usages
SparkComputationGraph (org.deeplearning4j.spark.impl.graph.SparkComputationGraph): 4 usages
IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator): 3 usages