Use of org.deeplearning4j.datasets.iterator.impl.CifarDataSetIterator in project deeplearning4j by deeplearning4j.
In the class DataSetIteratorTest, method testCifarIterator:
@Test
public void testCifarIterator() throws Exception {
    int numExamples = 1;
    int row = 28;
    int col = 28;
    int channels = 1;
    CifarDataSetIterator iter = new CifarDataSetIterator(numExamples, numExamples, new int[] { row, col, channels });
    assertTrue(iter.hasNext());
    DataSet data = iter.next();
    assertEquals(numExamples, data.getLabels().size(0));
    assertEquals(channels * row * col, data.getFeatureMatrix().ravel().length());
}
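For comparison, a minimal standalone sketch (not taken from the test suite) using the same three-argument constructor shown above, but with the standard 32x32x3 CIFAR-10 dimensions. The batch size, example count, and method name are illustrative assumptions, and the same imports as the test class above are assumed.

public void cifarIteratorSketch() throws Exception {
    int batchSize = 2;
    int numExamples = 4;
    int height = 32, width = 32, channels = 3;
    // Same constructor shape as the test above: (batch size, total examples, image dimensions)
    CifarDataSetIterator iter = new CifarDataSetIterator(batchSize, numExamples,
                    new int[] { height, width, channels });
    while (iter.hasNext()) {
        DataSet batch = iter.next();
        // Each example is returned flattened to channels * height * width feature values
        System.out.println(java.util.Arrays.toString(batch.getFeatureMatrix().shape()));
    }
}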
Use of org.deeplearning4j.datasets.iterator.impl.CifarDataSetIterator in project deeplearning4j by deeplearning4j.
In the class DataSetIteratorTest, method runCifar:
public void runCifar(boolean preProcessCifar) throws Exception {
    final int height = 32;
    final int width = 32;
    int channels = 3;
    int outputNum = CifarLoader.NUM_LABELS;
    int numSamples = 10;
    int batchSize = 5;
    int iterations = 1;
    int seed = 123;
    int listenerFreq = iterations;
    // Training iterator over the CIFAR images (train = true)
    CifarDataSetIterator cifar = new CifarDataSetIterator(batchSize, numSamples,
                    new int[] { height, width, channels }, preProcessCifar, true);
    // 5x5 convolution -> 2x2 max pooling -> softmax output over the CIFAR labels
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
                    .seed(seed)
                    .iterations(iterations)
                    .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .list()
                    .layer(0, new ConvolutionLayer.Builder(5, 5).nIn(channels).nOut(6)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
                    .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 }).build())
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(outputNum)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build())
                    .backprop(true).pretrain(false)
                    .setInputType(InputType.convolutionalFlat(height, width, channels));
    MultiLayerNetwork model = new MultiLayerNetwork(builder.build());
    model.init();
    model.setListeners(Arrays.asList((IterationListener) new ScoreIterationListener(listenerFreq)));
    model.fit(cifar);
    // Switch the iterator to the test set and evaluate batch by batch
    cifar.test(10);
    Evaluation eval = new Evaluation(cifar.getLabels());
    while (cifar.hasNext()) {
        DataSet testDS = cifar.next(batchSize);
        INDArray output = model.output(testDS.getFeatureMatrix());
        eval.eval(testDS.getLabels(), output);
    }
    System.out.println(eval.stats(true));
}
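The explicit evaluation loop above can also be expressed through MultiLayerNetwork.evaluate(DataSetIterator), which runs the same predict-and-score pass internally. The condensed variant below is a sketch, not part of the original test:

cifar.test(10);                           // switch to test data, as in runCifar above
Evaluation eval = model.evaluate(cifar);  // iterates the DataSetIterator and accumulates statistics
System.out.println(eval.stats());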
Use of org.deeplearning4j.datasets.iterator.impl.CifarDataSetIterator in project deeplearning4j by deeplearning4j.
In the class MultipleEpochsIteratorTest, method testCifarDataSetIteratorReset:
// use when checking the cifar dataset iterator
@Ignore
@Test
public void testCifarDataSetIteratorReset() {
    int epochs = 2;
    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .regularization(false).learningRate(1.0).weightInit(WeightInit.XAVIER).seed(12345L)
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(400).nOut(50).activation(Activation.RELU).build())
                    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(50).nOut(10).build())
                    .pretrain(false).backprop(true)
                    .inputPreProcessor(0, new CnnToFeedForwardPreProcessor(20, 20, 1))
                    .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    net.setListeners(new ScoreIterationListener(1));
    MultipleEpochsIterator ds = new MultipleEpochsIterator(epochs, new CifarDataSetIterator(10, 20, new int[] { 20, 20, 1 }));
    net.fit(ds);
    assertEquals(epochs, ds.epochs);
    assertEquals(2, ds.batch);
}
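A MultipleEpochsIterator can also be consumed by hand rather than through net.fit(ds). The sketch below is illustrative and assumes, as the assertions above suggest, that hasNext()/next() keep serving batches and resetting the wrapped iterator until the requested number of epochs has been consumed:

MultipleEpochsIterator iter = new MultipleEpochsIterator(2,
                new CifarDataSetIterator(10, 20, new int[] { 20, 20, 1 }));
while (iter.hasNext()) {
    DataSet batch = iter.next();
    // ... consume the batch, e.g. net.fit(batch)
}
// After the loop, the public 'epochs' counter should reflect the requested epoch count,
// mirroring the assertEquals(epochs, ds.epochs) check in the test above.
System.out.println("epochs consumed: " + iter.epochs);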