Use of org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator in project deeplearning4j by deeplearning4j.
From the class ConvolutionLayerTest, method testTwdFirstLayer.
@Test
public void testTwdFirstLayer() throws Exception {
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(123).iterations(5)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).l2(2e-4)
                    .regularization(true).momentum(0.9).updater(Updater.NESTEROVS)
                    .useDropConnect(true).dropOut(0.5).list()
                    // 16 filters, kernel size 8, stride 4
                    .layer(0, new ConvolutionLayer.Builder(8, 8).stride(4, 4).nOut(16).dropOut(0.5)
                                    .activation(Activation.RELU).weightInit(WeightInit.XAVIER).build())
                    // 32 filters, kernel size 4, stride 2
                    .layer(1, new ConvolutionLayer.Builder(4, 4).stride(2, 2).nOut(32).dropOut(0.5)
                                    .activation(Activation.RELU).weightInit(WeightInit.XAVIER).build())
                    // fully connected with 256 rectified units
                    .layer(2, new DenseLayer.Builder().nOut(256).activation(Activation.RELU)
                                    .weightInit(WeightInit.XAVIER).dropOut(0.5).build())
                    // output layer
                    .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.SQUARED_LOSS).nOut(10)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build())
                    .setInputType(InputType.convolutionalFlat(28, 28, 1)).backprop(true).pretrain(false);
    DataSetIterator iter = new MnistDataSetIterator(10, 10);
    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.fit(iter.next());
}
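The snippets on this page construct MnistDataSetIterator in three different ways. A minimal sketch of the constructor variants used here, assuming the overloads present in DL4J at the time (argument meanings: batch size, number of examples, binarization flag, train/test selection, RNG seed); check the exact signatures against your DL4J version:

    // Batch size 10, restricted to the first 10 examples
    DataSetIterator small = new MnistDataSetIterator(10, 10);
    // Same, but binarize pixel values to {0, 1} instead of scaling to [0, 1]
    DataSetIterator binarized = new MnistDataSetIterator(10, 10, true);
    // Batch size 16 over the full training set, with RNG seed 12345
    DataSetIterator train = new MnistDataSetIterator(16, true, 12345);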
Use of org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator in project deeplearning4j by deeplearning4j.
From the class ConvolutionLayerTest, method testCNNMLNBackprop.
@Test
public void testCNNMLNBackprop() throws Exception {
    int numSamples = 10;
    int batchSize = 10;
    DataSetIterator mnistIter = new MnistDataSetIterator(batchSize, numSamples, true);

    // Train two identically configured models on the same data
    MultiLayerNetwork model = getCNNMLNConfig(true, false);
    model.fit(mnistIter);
    MultiLayerNetwork model2 = getCNNMLNConfig(true, false);
    model2.fit(mnistIter);

    // Both models should reach the same F1 score on the same test batch
    DataSet test = mnistIter.next();
    Evaluation eval = new Evaluation();
    INDArray output = model.output(test.getFeatureMatrix());
    eval.eval(test.getLabels(), output);
    double f1Score = eval.f1();
    Evaluation eval2 = new Evaluation();
    INDArray output2 = model2.output(test.getFeatureMatrix());
    eval2.eval(test.getLabels(), output2);
    double f1Score2 = eval2.f1();
    assertEquals(f1Score, f1Score2, 1e-4);
}
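The helper getCNNMLNConfig(boolean backprop, boolean pretrain) is defined elsewhere in ConvolutionLayerTest and is not reproduced on this page. A hypothetical reconstruction for readers who want to run the test standalone; the layer choices and hyperparameters below are illustrative assumptions, not the repository's actual helper:

    private static MultiLayerNetwork getCNNMLNConfig(boolean backprop, boolean pretrain) {
        // Illustrative only: a small conv + max-pooling + softmax classifier for 28x28 MNIST input
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(123)
                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
                        .list()
                        .layer(0, new ConvolutionLayer.Builder(new int[] {10, 10}).nOut(6).build())
                        .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                                        .stride(1, 1).build())
                        .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
                                        .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build())
                        .setInputType(InputType.convolutionalFlat(28, 28, 1))
                        .backprop(backprop).pretrain(pretrain).build();
        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        return model;
    }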
Use of org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator in project deeplearning4j by deeplearning4j.
From the class BatchNormalizationTest, method testGradientAndUpdaters.
@Test
public void testGradientAndUpdaters() throws Exception {
    //Global mean/variance are part of the parameter vector. Expect 0 gradient, and no-op updater for these
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
                    .updater(Updater.RMSPROP).seed(12345).list()
                    .layer(0, new ConvolutionLayer.Builder().nIn(1).nOut(6).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.IDENTITY).build())
                    .layer(1, new BatchNormalization.Builder().build())
                    .layer(2, new ActivationLayer.Builder().activation(Activation.LEAKYRELU).build())
                    .layer(3, new DenseLayer.Builder().nOut(10).activation(Activation.LEAKYRELU).build())
                    .layer(4, new BatchNormalization.Builder().build())
                    .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).nOut(10).build())
                    .backprop(true).pretrain(false).setInputType(InputType.convolutionalFlat(28, 28, 1)).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    DataSetIterator iter = new MnistDataSetIterator(16, true, 12345);
    DataSet ds = iter.next();
    net.setInput(ds.getFeatures());
    net.setLabels(ds.getLabels());
    net.computeGradientAndScore();

    // Gradients for the global mean/variance parameters should be exactly zero
    Gradient g = net.gradient();
    Map<String, INDArray> map = g.gradientForVariable();
    for (String s : map.keySet()) {
        INDArray grad = map.get(s);
        if (s.endsWith(BatchNormalizationParamInitializer.GLOBAL_MEAN)
                        || s.endsWith(BatchNormalizationParamInitializer.GLOBAL_VAR)) {
            assertEquals(Nd4j.zeros(grad.shape()), grad);
        }
    }

    // The per-layer updaters are held in a private field; inspect them via reflection
    org.deeplearning4j.nn.api.Updater u = net.getUpdater();
    Field f = MultiLayerUpdater.class.getDeclaredField("layerUpdaters");
    f.setAccessible(true);
    org.deeplearning4j.nn.api.Updater[] updaters = (org.deeplearning4j.nn.api.Updater[]) f.get(u);
    assertNotNull(updaters);
    assertEquals(6, updaters.length);
    for (int i = 0; i <= 5; i++) {
        LayerUpdater lu = (LayerUpdater) updaters[i];
        Map<String, GradientUpdater> guMap = lu.getUpdaterForVariable();
        for (Map.Entry<String, GradientUpdater> entry : guMap.entrySet()) {
            // Layers 1 and 4 are the BatchNormalization layers
            if (i == 1 || i == 4) {
                String param = entry.getKey();
                if (BatchNormalizationParamInitializer.GLOBAL_MEAN.equals(param)
                                || BatchNormalizationParamInitializer.GLOBAL_VAR.equals(param)) {
                    assertTrue(entry.getValue() instanceof NoOpUpdater);
                } else {
                    assertTrue(entry.getValue() instanceof RmsProp);
                }
            } else {
                assertTrue(entry.getValue() instanceof RmsProp);
            }
        }
    }
}
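Background on why the test expects this: batch normalization's global mean and variance are running statistics updated during the forward pass, not parameters learned by gradient descent, so their gradients are zero and they are paired with a NoOpUpdater. A small sketch of the parameter-key convention the assertions above rely on; the "layerIndex_paramName" key format and the constant values ("mean", "var") are assumptions about this DL4J version:

    // Gradient map keys prefix the parameter name with the layer index, e.g. "1_mean"
    String meanKey = 1 + "_" + BatchNormalizationParamInitializer.GLOBAL_MEAN;
    INDArray meanGrad = net.gradient().gradientForVariable().get(meanKey);
    // No gradient flows into the moving average
    assertEquals(Nd4j.zeros(meanGrad.shape()), meanGrad);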
Use of org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator in project deeplearning4j by deeplearning4j.
From the class BatchNormalizationTest, method testCNNBNActivationCombo.
@Test
public void testCNNBNActivationCombo() throws Exception {
    DataSetIterator iter = new MnistDataSetIterator(2, 2);
    DataSet next = iter.next();
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(2).seed(123)
                    .list()
                    .layer(0, new ConvolutionLayer.Builder().nIn(1).nOut(6).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.IDENTITY).build())
                    .layer(1, new BatchNormalization.Builder().build())
                    .layer(2, new ActivationLayer.Builder().activation(Activation.RELU).build())
                    .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).nOut(10).build())
                    .backprop(true).pretrain(false).setInputType(InputType.convolutionalFlat(28, 28, 1)).build();
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.fit(next);
    assertNotEquals(null, network.getLayer(0).getParam("W"));
    assertNotEquals(null, network.getLayer(0).getParam("b"));
}
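"W" and "b" are DL4J's standard weight and bias parameter keys for the convolution layer. The same arrays are also reachable through the network-level parameter table, where keys are prefixed with the layer index; a short sketch, assuming the "layerIndex_paramName" key format:

    // Layer 0's weights under the network-wide key "0_W"
    Map<String, INDArray> paramTable = network.paramTable();
    INDArray convWeights = paramTable.get("0_W");
    // Same underlying parameters as network.getLayer(0).getParam("W")
    assertNotNull(convWeights);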
Use of org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator in project deeplearning4j by deeplearning4j.
From the class BatchNormalizationTest, method testDBNBNMultiLayer.
@Test
public void testDBNBNMultiLayer() throws Exception {
    DataSetIterator iter = new MnistDataSetIterator(2, 2);
    DataSet next = iter.next();

    // Run with separate activation layer
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(2).seed(123)
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(28 * 28).nOut(10).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.RELU).build())
                    .layer(1, new BatchNormalization.Builder().nOut(10).build())
                    .layer(2, new ActivationLayer.Builder().activation(Activation.RELU).build())
                    .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                    .backprop(true).pretrain(false).build();
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.setInput(next.getFeatureMatrix());
    INDArray activationsActual = network.preOutput(next.getFeatureMatrix());
    assertEquals(10, activationsActual.shape()[1], 1e-2);

    network.fit(next);
    INDArray actualGammaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.GAMMA);
    INDArray actualBetaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.BETA);
    assertTrue(actualGammaParam != null);
    assertTrue(actualBetaParam != null);
}
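As a related check, a freshly initialized BatchNormalization layer should expose deterministic gamma/beta values before any fitting: DL4J initializes gamma to 1 and beta to 0 by default (an assumption about the default Builder settings; both can also be fixed explicitly via gamma(double)/beta(double)). A sketch on a newly initialized copy of the same configuration:

    MultiLayerNetwork fresh = new MultiLayerNetwork(conf);
    fresh.init();
    INDArray gamma = fresh.getLayer(1).getParam(BatchNormalizationParamInitializer.GAMMA); // expected: all 1.0
    INDArray beta = fresh.getLayer(1).getParam(BatchNormalizationParamInitializer.BETA); // expected: all 0.0
    assertEquals(Nd4j.ones(1, 10), gamma);
    assertEquals(Nd4j.zeros(1, 10), beta);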