Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j.
In class TestDecayPolicies, method testOriginalLearningRateUnchanged:
@Test
public void testOriginalLearningRateUnchanged() {
    // Confirm the learning rate stored in the layer configuration is unchanged,
    // even as the per-parameter learning rate map is updated by the decay policy
    DataSet ds = new IrisDataSetIterator(150, 150).next();
    ds.normalizeZeroMeanZeroUnitVariance();
    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .regularization(false)
            .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
            .learningRate(1.0)
            .learningRateDecayPolicy(LearningRatePolicy.Score)
            .lrPolicyDecayRate(0.10)
            .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
            .updater(org.deeplearning4j.nn.conf.Updater.SGD)
            .seed(12345L)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.SIGMOID).build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                    .activation(Activation.TANH).nIn(3).nOut(3).build())
            .pretrain(false).backprop(true)
            .build();
    MultiLayerNetwork mln = new MultiLayerNetwork(conf);
    mln.init();
    // Run a number of iterations of learning
    mln.setInput(ds.getFeatureMatrix());
    mln.setLabels(ds.getLabels());
    mln.computeGradientAndScore();
    for (int j = 0; j < 1; j++) mln.fit(ds);
    mln.computeGradientAndScore();
    // The per-layer configured learning rates should still be the initial value
    double lr0 = mln.getLayer(0).conf().getLayer().getLearningRate();
    double lr1 = mln.getLayer(1).conf().getLayer().getLearningRate();
    assertEquals(1.0, lr0, 0.0);
    assertEquals(1.0, lr1, 0.0);
}
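The point of the assertion is that DL4J's score-based decay leaves the learning rate stored in each layer's configuration at its original value. As a rough, framework-free illustration of that separation, the sketch below keeps a base rate immutable and decays only a derived effective rate when the score fails to improve; the class, fields, and decay rule here are hypothetical and only loosely modeled on LearningRatePolicy.Score, not DL4J internals.

// Hypothetical sketch of score-based learning rate decay: the base rate is
// never mutated; only the derived effective rate shrinks when the score stalls.
public class ScoreDecaySketch {
    public static void main(String[] args) {
        final double baseLearningRate = 1.0;  // analogous to .learningRate(1.0) above
        final double decayRate = 0.10;        // analogous to .lrPolicyDecayRate(0.10)
        double effectiveLearningRate = baseLearningRate;
        double bestScore = Double.MAX_VALUE;
        double[] scores = {0.90, 0.85, 0.87, 0.80, 0.82}; // made-up per-iteration scores
        for (double score : scores) {
            if (score >= bestScore) {
                // Score did not improve: shrink only the derived effective rate
                effectiveLearningRate *= decayRate;
            } else {
                bestScore = score;
            }
            System.out.printf("score=%.2f base=%.2f effective=%.4f%n",
                    score, baseLearningRate, effectiveLearningRate);
        }
        // The base rate is untouched, mirroring what the test asserts per layer
    }
}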
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j.
In class ModelSerializerTest, method testWriteMlnModelInputStream:
@Test
public void testWriteMlnModelInputStream() throws Exception {
    int nIn = 5;
    int nOut = 6;
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .regularization(true).l1(0.01).l2(0.01)
            .learningRate(0.1)
            .activation(Activation.TANH)
            .weightInit(WeightInit.XAVIER)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(20).build())
            .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build())
            .layer(2, new OutputLayer.Builder()
                    .lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    File tempFile = File.createTempFile("tsfs", "fdfsdf");
    tempFile.deleteOnExit();
    FileOutputStream fos = new FileOutputStream(tempFile);
    ModelSerializer.writeModel(net, fos, true);
    // Check that a DataNormalization can be added to the model file and restored intact
    NormalizerMinMaxScaler scaler = new NormalizerMinMaxScaler();
    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    scaler.fit(iter);
    ModelSerializer.addNormalizerToModel(tempFile, scaler);
    NormalizerMinMaxScaler restoredScaler = ModelSerializer.restoreNormalizerFromFile(tempFile);
    assertNotNull(scaler.getMax());
    assertEquals(scaler.getMax(), restoredScaler.getMax());
    assertEquals(scaler.getMin(), restoredScaler.getMin());
    FileInputStream fis = new FileInputStream(tempFile);
    MultiLayerNetwork network = ModelSerializer.restoreMultiLayerNetwork(fis);
    assertEquals(network.getLayerWiseConfigurations().toJson(), net.getLayerWiseConfigurations().toJson());
    assertEquals(net.params(), network.params());
    assertEquals(net.getUpdater(), network.getUpdater());
}
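ModelSerializer also has File-based overloads that make the round trip shorter when explicit streams are not needed. The following is a minimal sketch under the same imports and DL4J version as the test above; the tiny one-layer configuration exists only to keep the example self-contained.

@Test
public void testFileRoundTripSketch() throws Exception {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .list()
            .layer(0, new OutputLayer.Builder()
                    .lossFunction(LossFunctions.LossFunction.MSE).nIn(4).nOut(2).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    File modelFile = File.createTempFile("model", ".zip");
    modelFile.deleteOnExit();
    // true: also persist the updater state so training can be resumed
    ModelSerializer.writeModel(net, modelFile, true);
    MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(modelFile);
    assertEquals(net.params(), restored.params());
}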
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j.
In class SerializationUtilsTest, method testWriteRead:
@Test
public void testWriteRead() {
    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    String irisData = "irisData.dat";
    DataSet freshDataSet = iter.next(150);
    SerializationUtils.saveObject(freshDataSet, new File(irisData));
    DataSet readDataSet = SerializationUtils.readObject(new File(irisData));
    assertEquals(freshDataSet.getFeatureMatrix(), readDataSet.getFeatureMatrix());
    assertEquals(freshDataSet.getLabels(), readDataSet.getLabels());
    try {
        FileUtils.forceDelete(new File(irisData));
    } catch (IOException e) {
        e.printStackTrace();
    }
}
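SerializationUtils here goes through generic Java object serialization. DataSet also ships its own binary save/load (the Spark test further down uses the same save(File) call), which avoids Java serialization overhead; a hedged sketch of the equivalent round trip under the same imports:

@Test
public void testDataSetSaveLoadSketch() throws Exception {
    DataSet fresh = new IrisDataSetIterator(150, 150).next();
    File f = File.createTempFile("irisData", ".bin");
    f.deleteOnExit();
    fresh.save(f);              // nd4j's own binary format
    DataSet loaded = new DataSet();
    loaded.load(f);
    assertEquals(fresh.getFeatureMatrix(), loaded.getFeatureMatrix());
    assertEquals(fresh.getLabels(), loaded.getLabels());
}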
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j.
In class TestParallelEarlyStoppingUI, method testParallelStatsListenerCompatibility:
@Test
@Ignore // To be run manually
public void testParallelStatsListenerCompatibility() throws Exception {
    UIServer uiServer = UIServer.getInstance();
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .iterations(1)
            .updater(Updater.SGD)
            .weightInit(WeightInit.XAVIER)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
            .layer(1, new OutputLayer.Builder().nIn(3).nOut(3)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .pretrain(false).backprop(true)
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    // It is important that the UI can report results from parallel training:
    // StatsListener can fail if certain properties are not set in the model
    StatsStorage statsStorage = new InMemoryStatsStorage();
    net.setListeners(new StatsListener(statsStorage));
    uiServer.attach(statsStorage);
    DataSetIterator irisIter = new IrisDataSetIterator(50, 500);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
            new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(500))
                    .scoreCalculator(new DataSetLossCalculator(irisIter, true))
                    .evaluateEveryNEpochs(2)
                    .modelSaver(saver)
                    .build();
    IEarlyStoppingTrainer<MultiLayerNetwork> trainer =
            new EarlyStoppingParallelTrainer<>(esConf, net, irisIter, null, 3, 6, 2);
    EarlyStoppingResult<MultiLayerNetwork> result = trainer.fit();
    System.out.println(result);
    assertEquals(EarlyStoppingResult.TerminationReason.EpochTerminationCondition,
            result.getTerminationReason());
}
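For comparison, the single-machine equivalent of EarlyStoppingParallelTrainer is EarlyStoppingTrainer, driven by the same EarlyStoppingConfiguration. A hedged sketch, reusing the esConf, net, and irisIter variables from the test above:

// Single-machine counterpart of the parallel trainer (no worker/thread arguments)
IEarlyStoppingTrainer<MultiLayerNetwork> localTrainer =
        new EarlyStoppingTrainer(esConf, net, irisIter);
EarlyStoppingResult<MultiLayerNetwork> localResult = localTrainer.fit();
System.out.println("Termination reason: " + localResult.getTerminationReason());
System.out.println("Best epoch: " + localResult.getBestModelEpoch());
MultiLayerNetwork bestModel = localResult.getBestModel();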
Use of org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator in project deeplearning4j by deeplearning4j.
In class TestPreProcessedData, method testPreprocessedDataCompGraphDataSet:
@Test
public void testPreprocessedDataCompGraphDataSet() {
    // Test _loading_ of preprocessed DataSet data
    int dataSetObjSize = 5;
    int batchSizePerExecutor = 10;
    String path = FilenameUtils.concat(System.getProperty("java.io.tmpdir"), "dl4j_testpreprocdata2");
    File f = new File(path);
    if (f.exists())
        f.delete();
    f.mkdir();
    DataSetIterator iter = new IrisDataSetIterator(5, 150);
    int i = 0;
    while (iter.hasNext()) {
        File f2 = new File(FilenameUtils.concat(path, "data" + (i++) + ".bin"));
        iter.next().save(f2);
    }
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .updater(Updater.RMSPROP)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .iterations(1)
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder()
                    .nIn(4).nOut(3).activation(Activation.TANH).build(), "in")
            .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .nIn(3).nOut(3).activation(Activation.SOFTMAX).build(), "0")
            .setOutputs("1")
            .pretrain(false).backprop(true)
            .build();
    SparkComputationGraph sparkNet = new SparkComputationGraph(sc, conf,
            new ParameterAveragingTrainingMaster.Builder(numExecutors(), dataSetObjSize)
                    .batchSizePerWorker(batchSizePerExecutor)
                    .averagingFrequency(1)
                    .repartionData(Repartition.Always)
                    .build());
    sparkNet.setCollectTrainingStats(true);
    sparkNet.fit("file:///" + path.replaceAll("\\\\", "/"));
    SparkTrainingStats sts = sparkNet.getSparkTrainingStats();
    // 4 'fits' per averaging (4 executors, averaging frequency 1); 10 examples each -> 40 examples
    // per fit. 150 / 40 = 3 averagings (rounded down); 3 * 4 = 12 fits expected
    int expNumFits = 12;
    // Unfortunately, perfect partitioning is not guaranteed by SparkUtils.balancedRandomSplit
    // (especially if the original partitions are all of size 1, which appears to occur at least
    // some of the time), but we should get close to what we expect...
    assertTrue(Math.abs(expNumFits - sts.getValue("ParameterAveragingWorkerFitTimesMs").size()) < 3);
    assertEquals(3, sts.getValue("ParameterAveragingMasterMapPartitionsTimesMs").size());
}
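The expected-fit arithmetic in the comments above can be checked in isolation; this small standalone snippet just reproduces the numbers from the test setup (4 executors is an assumption taken from the comment, since numExecutors() is defined by the test base class):

// Standalone arithmetic behind expNumFits = 12 (no DL4J calls involved)
int numExecutors = 4;          // assumed, per the comment in the test
int batchSizePerExecutor = 10; // .batchSizePerWorker(10)
int totalExamples = 150;       // full Iris dataset
int examplesPerAveraging = numExecutors * batchSizePerExecutor; // 40
int numAveragings = totalExamples / examplesPerAveraging;       // 150 / 40 = 3 (rounded down)
int expNumFits = numAveragings * numExecutors;                  // 3 * 4 = 12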