Use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j by deeplearning4j.

From the class ManualTests, method testCNNActivationsVisualization:
/**
 * This test is for manual execution only; it exists to train a working CNN and visualize its layers.
 *
 * @throws Exception
 */
@Test
public void testCNNActivationsVisualization() throws Exception {
    final int numRows = 40;
    final int numColumns = 40;
    int nChannels = 3;
    int outputNum = LFWLoader.NUM_LABELS;
    int numSamples = LFWLoader.NUM_IMAGES; // numSamples/10;
    boolean useSubset = false;
    int batchSize = 200;
    int iterations = 5;
    int splitTrainNum = (int) (batchSize * .8);
    int seed = 123;
    int listenerFreq = iterations / 5;
    DataSet lfwNext;
    SplitTestAndTrain trainTest;
    DataSet trainInput;
    List<INDArray> testInput = new ArrayList<>();
    List<INDArray> testLabels = new ArrayList<>();

    log.info("Load data....");
    DataSetIterator lfw = new LFWDataSetIterator(batchSize, numSamples,
                    new int[] {numRows, numColumns, nChannels}, outputNum, useSubset, true, 1.0,
                    new Random(seed));
log.info("Build model....");
MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed).iterations(iterations).activation(Activation.RELU).weightInit(WeightInit.XAVIER).gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(0.01).momentum(0.9).regularization(true).updater(Updater.ADAGRAD).useDropConnect(true).list().layer(0, new ConvolutionLayer.Builder(4, 4).name("cnn1").nIn(nChannels).stride(1, 1).nOut(20).build()).layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 }).name("pool1").build()).layer(2, new ConvolutionLayer.Builder(3, 3).name("cnn2").stride(1, 1).nOut(40).build()).layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 }).name("pool2").build()).layer(4, new ConvolutionLayer.Builder(3, 3).name("cnn3").stride(1, 1).nOut(60).build()).layer(5, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 }).name("pool3").build()).layer(6, new ConvolutionLayer.Builder(2, 2).name("cnn3").stride(1, 1).nOut(80).build()).layer(7, new DenseLayer.Builder().name("ffn1").nOut(160).dropOut(0.5).build()).layer(8, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(outputNum).activation(Activation.SOFTMAX).build()).backprop(true).pretrain(false);
new ConvolutionLayerSetup(builder, numRows, numColumns, nChannels);
MultiLayerNetwork model = new MultiLayerNetwork(builder.build());
model.init();
log.info("Train model....");
model.setListeners(Arrays.asList(new ScoreIterationListener(listenerFreq), new ConvolutionalIterationListener(listenerFreq)));
while (lfw.hasNext()) {
lfwNext = lfw.next();
lfwNext.scale();
// train set that is the result
trainTest = lfwNext.splitTestAndTrain(splitTrainNum, new Random(seed));
// get feature matrix and labels for training
trainInput = trainTest.getTrain();
testInput.add(trainTest.getTest().getFeatureMatrix());
testLabels.add(trainTest.getTest().getLabels());
model.fit(trainInput);
}
log.info("Evaluate model....");
Evaluation eval = new Evaluation(lfw.getLabels());
for (int i = 0; i < testInput.size(); i++) {
INDArray output = model.output(testInput.get(i));
eval.eval(testLabels.get(i), output);
}
INDArray output = model.output(testInput.get(0));
eval.eval(testLabels.get(0), output);
log.info(eval.stats());
log.info("****************Example finished********************");
}
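The ConvolutionalIterationListener registered above is what renders the layer activations during training. For a quick look at per-layer activations without the listener machinery, MultiLayerNetwork.feedForward can be used directly. A minimal sketch, reusing the model and testInput variables from the test above:

// Minimal sketch: inspect per-layer activations for one held-out example.
// `model` and `testInput` are the trained network and held-out features
// from the test above; List/Arrays are java.util, INDArray is ND4J.
List<INDArray> activations = model.feedForward(testInput.get(0));
for (int l = 0; l < activations.size(); l++) {
    // feedForward returns the input at index 0, then one entry per layer
    log.info("activation {} shape: {}", l, Arrays.toString(activations.get(l).shape()));
}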
Use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j by deeplearning4j.

From the class TestPlayUI, method testUICompGraph:
@Test
@Ignore
public void testUICompGraph() throws Exception {
    StatsStorage ss = new InMemoryStatsStorage();
    UIServer uiServer = UIServer.getInstance();
    uiServer.attach(ss);

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                    .graphBuilder()
                    .addInputs("in")
                    .addLayer("L0", new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build(), "in")
                    .addLayer("L1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(4).nOut(3).build(), "L0")
                    .pretrain(false).backprop(true)
                    .setOutputs("L1")
                    .build();
    ComputationGraph net = new ComputationGraph(conf);
    net.init();
    net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));

    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    for (int i = 0; i < 100; i++) {
        net.fit(iter);
        Thread.sleep(100);
    }
    Thread.sleep(100000);
}
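Once the StatsListener is attached and stats stream into the StatsStorage, the training dashboard is served by the UI server, by default at http://localhost:9000. The same wiring works for MultiLayerNetwork as well as ComputationGraph; a minimal sketch, with an illustrative two-layer configuration rather than anything from this test:

// Minimal sketch of the same UI wiring for a MultiLayerNetwork; the
// network configuration below is illustrative, not from the test above.
StatsStorage ss = new InMemoryStatsStorage();
UIServer.getInstance().attach(ss);

MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build())
                .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                .activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
                .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));
net.fit(new IrisDataSetIterator(150, 150)); // stats stream to the UI as training runs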
Use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j by deeplearning4j.

From the class TestPlayUI, method testUI_VAE:
@Test
@Ignore
public void testUI_VAE() throws Exception {
    // Variational autoencoder - for unsupervised layerwise pretraining
    StatsStorage ss = new InMemoryStatsStorage();
    UIServer uiServer = UIServer.getInstance();
    uiServer.attach(ss);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .iterations(1).learningRate(1e-5)
                    .list()
                    .layer(0, new VariationalAutoencoder.Builder()
                                    .nIn(4).nOut(3)
                                    .encoderLayerSizes(10, 11).decoderLayerSizes(12, 13)
                                    .weightInit(WeightInit.XAVIER)
                                    .pzxActivationFunction("identity")
                                    .reconstructionDistribution(new GaussianReconstructionDistribution())
                                    .activation(Activation.LEAKYRELU).updater(Updater.SGD).build())
                    .layer(1, new VariationalAutoencoder.Builder()
                                    .nIn(3).nOut(3)
                                    .encoderLayerSizes(7).decoderLayerSizes(8)
                                    .weightInit(WeightInit.XAVIER)
                                    .pzxActivationFunction("identity")
                                    .reconstructionDistribution(new GaussianReconstructionDistribution())
                                    .activation(Activation.LEAKYRELU).updater(Updater.SGD).build())
                    .layer(2, new OutputLayer.Builder().nIn(3).nOut(3).build())
                    .pretrain(true).backprop(true)
                    .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));

    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    for (int i = 0; i < 50; i++) {
        net.fit(iter);
        Thread.sleep(100);
    }
    Thread.sleep(100000);
}
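Because pretrain(true) is set, net.fit first runs unsupervised layerwise pretraining on the two VAE layers before backprop on the whole stack, and the UI shows both phases. After training, the fitted VAE layers can be queried directly; a minimal sketch, assuming net is the trained network above and using method names from deeplearning4j's VariationalAutoencoder layer implementation (treat the details as an assumption, not verified against this test):

// Minimal sketch: encode Iris features through the first (pretrained) VAE
// layer and decode them back. `net` is assumed to be the trained network.
org.deeplearning4j.nn.layers.variational.VariationalAutoencoder vae =
                (org.deeplearning4j.nn.layers.variational.VariationalAutoencoder) net.getLayer(0);
INDArray features = new IrisDataSetIterator(150, 150).next().getFeatures();
INDArray latent = vae.activate(features, false);            // mean of p(z|x)
INDArray reconstruction = vae.generateAtMeanGivenZ(latent); // mean of p(x|z)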
Use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j by deeplearning4j.

From the class TestAsyncIterator, method testBasic:
@Test
public void testBasic() {
    // Basic test: make sure the async iterator returns the right number of
    // elements, hasNext() works, etc.
    int size = 13;
    DataSetIterator baseIter = new TestIterator(size, 0);

    // Async iterator with queue size of 1
    DataSetIterator async = new AsyncDataSetIterator(baseIter, 1);
    for (int i = 0; i < size; i++) {
        assertTrue(async.hasNext());
        DataSet ds = async.next();
        assertEquals(ds.getFeatureMatrix().getDouble(0), i, 0.0);
        assertEquals(ds.getLabels().getDouble(0), i, 0.0);
    }
    assertFalse(async.hasNext());
    async.reset();
    assertEquals(baseIter.cursor(), 0);
    assertTrue(async.hasNext());
    ((AsyncDataSetIterator) async).shutdown();

    // Async iterator with queue size of 5
    baseIter = new TestIterator(size, 5);
    async = new AsyncDataSetIterator(baseIter, 5);
    for (int i = 0; i < size; i++) {
        assertTrue(async.hasNext());
        DataSet ds = async.next();
        assertEquals(ds.getFeatureMatrix().getDouble(0), i, 0.0);
        assertEquals(ds.getLabels().getDouble(0), i, 0.0);
    }
    assertFalse(async.hasNext());
    async.reset();
    assertEquals(baseIter.cursor(), 0);
    assertTrue(async.hasNext());
    ((AsyncDataSetIterator) async).shutdown();

    // Async iterator with queue size of 100
    baseIter = new TestIterator(size, 100);
    async = new AsyncDataSetIterator(baseIter, 100);
    for (int i = 0; i < size; i++) {
        assertTrue(async.hasNext());
        DataSet ds = async.next();
        while (ds == null)
            ds = async.next();
        assertEquals(ds.getFeatureMatrix().getDouble(0), i, 0.0);
        assertEquals(ds.getLabels().getDouble(0), i, 0.0);
    }
    assertFalse(async.hasNext());
    async.reset();
    assertEquals(baseIter.cursor(), 0);
    assertTrue(async.hasNext());
    ((AsyncDataSetIterator) async).shutdown();

    // Test iteration where performance is limited by baseIterator.next() speed
    baseIter = new TestIterator(size, 1000);
    async = new AsyncDataSetIterator(baseIter, 5);
    for (int i = 0; i < size; i++) {
        assertTrue(async.hasNext());
        DataSet ds = async.next();
        assertEquals(ds.getFeatureMatrix().getDouble(0), i, 0.0);
        assertEquals(ds.getLabels().getDouble(0), i, 0.0);
    }
    assertFalse(async.hasNext());
    async.reset();
    assertEquals(baseIter.cursor(), 0);
    assertTrue(async.hasNext());
    ((AsyncDataSetIterator) async).shutdown();
}
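TestIterator is a small helper defined in TestAsyncIterator and not shown here. From the way the test uses it, each call to next() returns a single-example DataSet whose feature and label both hold the current cursor value, after sleeping for a configurable number of milliseconds to simulate a slow data source. A sketch consistent with that usage (not the project's actual implementation; the remaining interface methods are elided):

// Sketch of a TestIterator-like helper, reconstructed from the assertions
// in testBasic(); the real class in TestAsyncIterator may differ.
private static class TestIterator implements DataSetIterator {
    private final int size;
    private final long delayMs; // simulated cost of each next() call
    private int cursor = 0;

    TestIterator(int size, long delayMs) {
        this.size = size;
        this.delayMs = delayMs;
    }

    @Override
    public boolean hasNext() {
        return cursor < size;
    }

    @Override
    public DataSet next() {
        try {
            Thread.sleep(delayMs); // simulate an expensive underlying source
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        // Feature and label both equal the cursor index, matching the
        // assertEquals(...getDouble(0), i, 0.0) checks in the test.
        DataSet ds = new DataSet(Nd4j.scalar((double) cursor), Nd4j.scalar((double) cursor));
        cursor++;
        return ds;
    }

    @Override
    public void reset() {
        cursor = 0;
    }

    @Override
    public int cursor() {
        return cursor;
    }

    // Remaining DataSetIterator methods (batch(), totalExamples(), getLabels(),
    // setPreProcessor(), etc.) omitted for brevity.
}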
Use of org.nd4j.linalg.dataset.api.iterator.DataSetIterator in project deeplearning4j by deeplearning4j.

From the class TestAsyncIterator, method testResetWhileBlocking:
@Test
public void testResetWhileBlocking() {
    int size = 6;

    // Test reset while the prefetch thread is blocked on baseIterator.next()
    DataSetIterator baseIter = new TestIterator(size, 1000);
    AsyncDataSetIterator async = new AsyncDataSetIterator(baseIter);
    async.next();
    // Prefetch thread should now be waiting on baseIter.next()
    async.reset();
    for (int i = 0; i < size; i++) {
        assertTrue(async.hasNext());
        DataSet ds = async.next();
        assertEquals(ds.getFeatureMatrix().getDouble(0), i, 0.0);
        assertEquals(ds.getLabels().getDouble(0), i, 0.0);
    }
    assertFalse(async.hasNext());
    async.shutdown();

    // Test reset while the prefetch thread is blocked on blockingQueue.put()
    baseIter = new TestIterator(size, 0);
    async = new AsyncDataSetIterator(baseIter);
    async.next();
    async.next();
    // Prefetch thread should now be waiting on the blocking queue
    async.reset();
    for (int i = 0; i < size; i++) {
        assertTrue(async.hasNext());
        DataSet ds = async.next();
        assertEquals(ds.getFeatureMatrix().getDouble(0), i, 0.0);
        assertEquals(ds.getLabels().getDouble(0), i, 0.0);
    }
    assertFalse(async.hasNext());
    async.shutdown();
}
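Outside of these tests, the usual reason to reach for AsyncDataSetIterator is to overlap data loading with training, and wrapping is a one-liner. A minimal sketch, where slowIter stands in for any expensive DataSetIterator and model for an initialized MultiLayerNetwork (both names are illustrative):

// Minimal sketch: prefetch up to 4 batches on a background thread so the
// training loop rarely waits on ETL. `slowIter` and `model` are assumed.
DataSetIterator prefetching = new AsyncDataSetIterator(slowIter, 4);
model.fit(prefetching);
((AsyncDataSetIterator) prefetching).shutdown(); // stop the prefetch thread when done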