Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j.
The class TestSparkMultiLayerParameterAveraging, method testSeedRepeatability.
@Test
public void testSeedRepeatability() throws Exception {
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .seed(12345)
        .updater(Updater.RMSPROP)
        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
        .iterations(1)
        .weightInit(WeightInit.XAVIER)
        .list()
        .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder()
                .nIn(4).nOut(4).activation(Activation.TANH).build())
        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                .nIn(4).nOut(3).activation(Activation.SOFTMAX).build())
        .pretrain(false).backprop(true)
        .build();
// Three networks built from the same configuration, each after resetting the ND4J RNG seed
Nd4j.getRandom().setSeed(12345);
MultiLayerNetwork n1 = new MultiLayerNetwork(conf);
n1.init();
Nd4j.getRandom().setSeed(12345);
MultiLayerNetwork n2 = new MultiLayerNetwork(conf);
n2.init();
Nd4j.getRandom().setSeed(12345);
MultiLayerNetwork n3 = new MultiLayerNetwork(conf);
n3.init();
SparkDl4jMultiLayer sparkNet1 = new SparkDl4jMultiLayer(sc, n1, new ParameterAveragingTrainingMaster.Builder(1)
        .workerPrefetchNumBatches(5)
        .batchSizePerWorker(5)
        .averagingFrequency(1)
        .repartionData(Repartition.Always)
        .rngSeed(12345)
        .build());
// Training master IDs are only unique if they are created at least 1 ms apart...
Thread.sleep(100);
SparkDl4jMultiLayer sparkNet2 = new SparkDl4jMultiLayer(sc, n2, new ParameterAveragingTrainingMaster.Builder(1)
        .workerPrefetchNumBatches(5)
        .batchSizePerWorker(5)
        .averagingFrequency(1)
        .repartionData(Repartition.Always)
        .rngSeed(12345)
        .build());
Thread.sleep(100);
// Same settings but a different training-master RNG seed: results should diverge from sparkNet1/sparkNet2
SparkDl4jMultiLayer sparkNet3 = new SparkDl4jMultiLayer(sc, n3, new ParameterAveragingTrainingMaster.Builder(1)
        .workerPrefetchNumBatches(5)
        .batchSizePerWorker(5)
        .averagingFrequency(1)
        .repartionData(Repartition.Always)
        .rngSeed(98765)
        .build());
List<DataSet> data = new ArrayList<>();
DataSetIterator iter = new IrisDataSetIterator(1, 150);
while (iter.hasNext()) data.add(iter.next());
JavaRDD<DataSet> rdd = sc.parallelize(data);
sparkNet1.fit(rdd);
sparkNet2.fit(rdd);
sparkNet3.fit(rdd);
INDArray p1 = sparkNet1.getNetwork().params();
INDArray p2 = sparkNet2.getNetwork().params();
INDArray p3 = sparkNet3.getNetwork().params();
sparkNet1.getTrainingMaster().deleteTempFiles(sc);
sparkNet2.getTrainingMaster().deleteTempFiles(sc);
sparkNet3.getTrainingMaster().deleteTempFiles(sc);
assertEquals(p1, p2); // identical seeds -> identical fitted parameters
assertNotEquals(p1, p3); // different training-master seed -> different parameters
}
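For reference, IrisDataSetIterator takes a batch size and a total example count, so the new IrisDataSetIterator(1, 150) call above produces 150 single-example DataSet objects, which gives the repartitioning and averaging logic individual elements to distribute. A minimal sketch of that iteration pattern (standalone, not part of the test; imports as in the snippets on this page):

DataSetIterator it = new IrisDataSetIterator(1, 150); // (batchSize, numExamples)
int count = 0;
while (it.hasNext()) {
    DataSet d = it.next(); // one Iris example per DataSet at batch size 1
    count++;
}
// count ends at 150: the whole Iris dataset, one example per element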
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j.
The class TestSparkMultiLayerParameterAveraging, method testRunIteration.
@Test
public void testRunIteration() {
DataSet dataSet = new IrisDataSetIterator(5, 5).next();
List<DataSet> list = dataSet.asList();
JavaRDD<DataSet> data = sc.parallelize(list);
SparkDl4jMultiLayer sparkNetCopy = new SparkDl4jMultiLayer(sc, getBasicConf(),
        new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 5, 1, 0));
MultiLayerNetwork networkCopy = sparkNetCopy.fit(data);
INDArray expectedParams = networkCopy.params();
SparkDl4jMultiLayer sparkNet = getBasicNetwork();
MultiLayerNetwork network = sparkNet.fit(data);
INDArray actualParams = network.params();
assertEquals(expectedParams.size(1), actualParams.size(1)); // parameter vectors have the same length
}
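The positional ParameterAveragingTrainingMaster constructor used here is hard to read. Assuming its arguments map to (saveUpdater, numWorkers, rddDataSetNumExamples, batchSizePerWorker, averagingFrequency, prefetchNumBatches), which matches the values passed above, the equivalent Builder form used elsewhere on this page would look roughly like this; the argument mapping is an assumption, so check the constructor Javadoc for the DL4J version in use:

ParameterAveragingTrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(numExecutors(), 1)
        .saveUpdater(true)           // assumed: first positional argument
        .batchSizePerWorker(5)
        .averagingFrequency(1)
        .workerPrefetchNumBatches(0) // assumed: last positional argument, no prefetch
        .build();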
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j.
The class TestSerialization, method testModelSerde.
@Test
public void testModelSerde() throws Exception {
ObjectMapper mapper = getMapper();
NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
        .momentum(0.9f)
        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
        .iterations(1000)
        .learningRate(1e-1f)
        .layer(new org.deeplearning4j.nn.conf.layers.AutoEncoder.Builder()
                .nIn(4).nOut(3)
                .corruptionLevel(0.6)
                .sparsity(0.5)
                .lossFunction(LossFunctions.LossFunction.RECONSTRUCTION_CROSSENTROPY)
                .build())
        .build();
DataSet d2 = new IrisDataSetIterator(150, 150).next();
INDArray input = d2.getFeatureMatrix();
int numParams = conf.getLayer().initializer().numParams(conf);
INDArray params = Nd4j.create(1, numParams);
AutoEncoder da = (AutoEncoder) conf.getLayer().instantiate(conf,
        Arrays.asList(new ScoreIterationListener(1), new HistogramIterationListener(1)), 0, params, true);
da.setInput(input);
ModelAndGradient g = new ModelAndGradient(da);
String json = mapper.writeValueAsString(g);
ModelAndGradient read = mapper.readValue(json, ModelAndGradient.class);
assertEquals(g, read);
}
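Stripped of the DL4J specifics, the assertion at the end is the standard Jackson round-trip equality check. A generic sketch, with MyBean as a hypothetical stand-in for any type that implements equals() (the test's getMapper() presumably returns a preconfigured ObjectMapper):

ObjectMapper mapper = new ObjectMapper();
String json = mapper.writeValueAsString(original);  // serialize to JSON
MyBean copy = mapper.readValue(json, MyBean.class); // deserialize back
assertEquals(original, copy);                       // round trip must preserve equality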
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j.
The class TestPlayUI, method testUICompGraph.
@Test
@Ignore
public void testUICompGraph() throws Exception {
StatsStorage ss = new InMemoryStatsStorage();
UIServer uiServer = UIServer.getInstance();
uiServer.attach(ss);
ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
        .graphBuilder()
        .addInputs("in")
        .addLayer("L0", new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build(), "in")
        .addLayer("L1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                .activation(Activation.SOFTMAX).nIn(4).nOut(3).build(), "L0")
        .pretrain(false).backprop(true)
        .setOutputs("L1")
        .build();
ComputationGraph net = new ComputationGraph(conf);
net.init();
net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));
DataSetIterator iter = new IrisDataSetIterator(150, 150);
for (int i = 0; i < 100; i++) {
net.fit(iter);
Thread.sleep(100);
}
// Keep the JVM alive so the UI remains available for manual inspection
Thread.sleep(100000);
}
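After a run like the loop above, the fitted graph could also be scored against the same data. A sketch, assuming the evaluate(DataSetIterator) convenience method of this DL4J generation, and evaluating on the training data purely for illustration:

DataSetIterator evalIter = new IrisDataSetIterator(150, 150); // whole dataset as one batch
Evaluation eval = net.evaluate(evalIter);
System.out.println(eval.stats()); // accuracy, precision, recall, F1 per class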
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j.
The class TestPlayUI, method testUI_VAE.
@Test
@Ignore
public void testUI_VAE() throws Exception {
// Variational autoencoder - for unsupervised layerwise pretraining
StatsStorage ss = new InMemoryStatsStorage();
UIServer uiServer = UIServer.getInstance();
uiServer.attach(ss);
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
        .iterations(1)
        .learningRate(1e-5)
        .list()
        .layer(0, new VariationalAutoencoder.Builder()
                .nIn(4).nOut(3)
                .encoderLayerSizes(10, 11).decoderLayerSizes(12, 13)
                .weightInit(WeightInit.XAVIER)
                .pzxActivationFunction("identity")
                .reconstructionDistribution(new GaussianReconstructionDistribution())
                .activation(Activation.LEAKYRELU)
                .updater(Updater.SGD)
                .build())
        .layer(1, new VariationalAutoencoder.Builder()
                .nIn(3).nOut(3)
                .encoderLayerSizes(7).decoderLayerSizes(8)
                .weightInit(WeightInit.XAVIER)
                .pzxActivationFunction("identity")
                .reconstructionDistribution(new GaussianReconstructionDistribution())
                .activation(Activation.LEAKYRELU)
                .updater(Updater.SGD)
                .build())
        .layer(2, new OutputLayer.Builder().nIn(3).nOut(3).build())
        .pretrain(true).backprop(true)
        .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));
DataSetIterator iter = new IrisDataSetIterator(150, 150);
for (int i = 0; i < 50; i++) {
net.fit(iter);
Thread.sleep(100);
}
// Keep the JVM alive so the UI remains available for manual inspection
Thread.sleep(100000);
}
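Because the configuration sets pretrain(true) and backprop(true), each fit() call here first runs unsupervised layerwise pretraining of the two VAE layers and then supervised backprop. If only the pretraining pass is wanted, it can be invoked directly; a sketch, assuming the MultiLayerNetwork.pretrain(DataSetIterator) overload of this DL4J generation:

net.pretrain(new IrisDataSetIterator(150, 150)); // unsupervised layerwise pretraining only; the output layer is not trained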