Use of org.deeplearning4j.nn.conf.MultiLayerConfiguration in project deeplearning4j by deeplearning4j.
The class RBMTests, method getMultiLayerRBMNet.
private static MultiLayerNetwork getMultiLayerRBMNet(boolean backprop, boolean pretrain, INDArray input,
                int nOut1, int nOut2, int nOut3, WeightInit weightInit) {
    MultiLayerConfiguration rbm = new NeuralNetConfiguration.Builder()
                    .seed(0xDEADBEEF)
                    .iterations(1000)
                    .biasInit(0)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(Updater.NONE)
                    .epsilon(1)
                    .weightInit(weightInit)
                    .list(new org.deeplearning4j.nn.conf.layers.RBM.Builder()
                                    .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE).nOut(nOut1).build(),
                          new org.deeplearning4j.nn.conf.layers.RBM.Builder()
                                    .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE).nOut(nOut2).build(),
                          new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                                    .activation(Activation.RELU).nOut(nOut3).build())
                    .pretrain(pretrain)
                    .backprop(backprop)
                    .setInputType(InputType.feedForward(input.columns()))
                    .build();
    MultiLayerNetwork network = new MultiLayerNetwork(rbm);
    network.init();
    return network;
}
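A minimal usage sketch of the helper above; the input shape, layer sizes, and the WeightInit.XAVIER choice here are illustrative, not taken from the original test:

    // Hypothetical call: a 784-column input feeding a 500-250-10 RBM stack.
    INDArray features = Nd4j.rand(100, 784);                 // 100 examples, 784 input columns
    MultiLayerNetwork net = getMultiLayerRBMNet(true, true,  // backprop and pretrain both enabled
                    features, 500, 250, 10, WeightInit.XAVIER);
    INDArray out = net.output(features);                     // forward pass; shape [100, 10]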
Use of org.deeplearning4j.nn.conf.MultiLayerConfiguration in project deeplearning4j by deeplearning4j.
The class TestCustomLayers, method testCustomOutputLayerMLN.
@Test
public void testCustomOutputLayerMLN() {
    //First: ensure that the CustomOutputLayer class is registered with the configuration mapper
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    AnnotatedClass ac = AnnotatedClass.construct(Layer.class,
                    mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
                    mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    boolean found = false;
    for (NamedType nt : types) {
        if (nt.getType() == CustomOutputLayer.class) {
            found = true;
            break;
        }
    }
    assertTrue("CustomOutputLayer: not registered with NeuralNetConfiguration mapper", found);
    //Second: create a MultiLayerConfiguration with one, and check that JSON and YAML round-trips work
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).learningRate(0.1)
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new CustomOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                    .pretrain(false).backprop(true).build();
    String json = conf.toJson();
    String yaml = conf.toYaml();
    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);
    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);
    //Third: check initialization
    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    assertTrue(net.getLayer(1) instanceof CustomOutputLayerImpl);
    //Fourth: compare to an equivalent standard output layer (should be identical)
    MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345).learningRate(0.1)
                    .weightInit(WeightInit.XAVIER)
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                    .pretrain(false).backprop(true).build();
    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
    net2.init();
    assertEquals(net2.params(), net.params());
    INDArray testFeatures = Nd4j.rand(1, 10);
    INDArray testLabels = Nd4j.zeros(1, 10);
    testLabels.putScalar(0, 3, 1.0);
    DataSet ds = new DataSet(testFeatures, testLabels);
    assertEquals(net2.output(testFeatures), net.output(testFeatures));
    assertEquals(net2.score(ds), net.score(ds), 1e-6);
}
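The registration check at the top of the test is plain Jackson polymorphic-subtype machinery. A stand-alone sketch of the same mechanism, using hypothetical Shape/Circle POJOs rather than DL4J classes (DL4J's own mapper discovers Layer subtypes for you):

    import com.fasterxml.jackson.annotation.JsonTypeInfo;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.jsontype.NamedType;

    // Hypothetical base/subtype pair, standing in for Layer/CustomOutputLayer.
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type")
    abstract class Shape { }

    class Circle extends Shape {
        public double radius;
    }

    public class SubtypeDemo {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            // The equivalent of what the test asserts: the subtype must be known to the
            // mapper, or {"type":"circle",...} cannot be resolved back to Circle.
            mapper.registerSubtypes(new NamedType(Circle.class, "circle"));
            Shape s = mapper.readValue("{\"type\":\"circle\",\"radius\":2.0}", Shape.class);
            System.out.println(s.getClass().getSimpleName());   // prints: Circle
        }
    }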
Use of org.deeplearning4j.nn.conf.MultiLayerConfiguration in project deeplearning4j by deeplearning4j.
The class TestCustomLayers, method checkInitializationFF.
@Test
public void checkInitializationFF() {
    //Actually create a network with a custom layer; check initialization and forward pass
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().learningRate(0.1)
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(9).nOut(10).build())
                    .layer(1, new CustomLayer(3.14159))    //hard-coded nIn/nOut of 10
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(11).build())
                    .pretrain(false).backprop(true).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    assertEquals(9 * 10 + 10, net.getLayer(0).numParams());
    assertEquals(10 * 10 + 10, net.getLayer(1).numParams());
    assertEquals(10 * 11 + 11, net.getLayer(2).numParams());
    //Check for exceptions...
    net.output(Nd4j.rand(1, 9));
    net.fit(new DataSet(Nd4j.rand(1, 9), Nd4j.rand(1, 11)));
}
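The asserted parameter counts follow from the fully connected layout: each layer holds an nIn x nOut weight matrix plus one bias per output unit, so numParams = nIn * nOut + nOut. A plain-Java sanity check of that arithmetic:

    // Dense-layer parameter count: weights (nIn x nOut) plus nOut biases.
    static long denseParams(long nIn, long nOut) {
        return nIn * nOut + nOut;
    }
    // denseParams(9, 10)  == 100  -> layer 0
    // denseParams(10, 10) == 110  -> layer 1 (CustomLayer hard-codes nIn = nOut = 10)
    // denseParams(10, 11) == 121  -> layer 2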
Use of org.deeplearning4j.nn.conf.MultiLayerConfiguration in project deeplearning4j by deeplearning4j.
The class TestCustomLayers, method testJsonMultiLayerNetwork.
@Test
public void testJsonMultiLayerNetwork() {
    //First: ensure that the CustomLayer class is registered with the configuration mapper
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    AnnotatedClass ac = AnnotatedClass.construct(Layer.class,
                    mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
                    mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    boolean found = false;
    for (NamedType nt : types) {
        if (nt.getType() == CustomLayer.class) {
            found = true;
            break;
        }
    }
    assertTrue("CustomLayer: not registered with NeuralNetConfiguration mapper", found);
    //Second: create a MultiLayerConfiguration with one, and check that JSON and YAML round-trips work
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().learningRate(0.1)
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new CustomLayer(3.14159))
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                    .pretrain(false).backprop(true).build();
    String json = conf.toJson();
    String yaml = conf.toYaml();
    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);
    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);
}
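Beyond the in-memory round trip exercised here, the same pair of calls is what you would use to persist a configuration. A brief sketch continuing from the conf built above (the file path is illustrative, and java.nio.file / StandardCharsets imports are assumed):

    // Persist the configuration as JSON and restore it later (path is illustrative).
    Files.write(Paths.get("conf.json"), conf.toJson().getBytes(StandardCharsets.UTF_8));
    String loaded = new String(Files.readAllBytes(Paths.get("conf.json")), StandardCharsets.UTF_8);
    MultiLayerConfiguration restored = MultiLayerConfiguration.fromJson(loaded);
    // restored.equals(conf) should hold, exactly as the assertions above verify.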
Use of org.deeplearning4j.nn.conf.MultiLayerConfiguration in project deeplearning4j by deeplearning4j.
The class OutputLayerTest, method testOutputLayersRnnForwardPass.
@Test
public void testOutputLayersRnnForwardPass() {
    //Test standard OutputLayer with RNNs: expect all outputs etc. to be 2d
    int nIn = 2;
    int nOut = 5;
    int layerSize = 4;
    int timeSeriesLength = 6;
    int miniBatchSize = 3;
    Random r = new Random(12345L);
    INDArray input = Nd4j.zeros(miniBatchSize, nIn, timeSeriesLength);
    for (int i = 0; i < miniBatchSize; i++) {
        for (int j = 0; j < nIn; j++) {
            for (int k = 0; k < timeSeriesLength; k++) {
                input.putScalar(new int[] {i, j, k}, r.nextDouble() - 0.5);
            }
        }
    }
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345L)
                    .list()
                    .layer(0, new GravesLSTM.Builder().nIn(nIn).nOut(layerSize)
                                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                                    .activation(Activation.TANH).updater(Updater.NONE).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
                                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                                    .updater(Updater.NONE).build())
                    .inputPreProcessor(1, new RnnToFeedForwardPreProcessor())
                    .build();
    MultiLayerNetwork mln = new MultiLayerNetwork(conf);
    mln.init();
    INDArray out2d = mln.feedForward(input).get(2);
    assertArrayEquals(out2d.shape(), new int[] {miniBatchSize * timeSeriesLength, nOut});
    INDArray out = mln.output(input);
    assertArrayEquals(out.shape(), new int[] {miniBatchSize * timeSeriesLength, nOut});
    INDArray act = mln.activate();
    assertArrayEquals(act.shape(), new int[] {miniBatchSize * timeSeriesLength, nOut});
    INDArray preout = mln.preOutput(input);
    assertArrayEquals(preout.shape(), new int[] {miniBatchSize * timeSeriesLength, nOut});
    //As above, but with RnnOutputLayer: expect all activations etc. to be 3d
    MultiLayerConfiguration confRnn = new NeuralNetConfiguration.Builder().seed(12345L)
                    .list()
                    .layer(0, new GravesLSTM.Builder().nIn(nIn).nOut(layerSize)
                                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                                    .activation(Activation.TANH).updater(Updater.NONE).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.RnnOutputLayer.Builder(LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut)
                                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                                    .updater(Updater.NONE).build())
                    .build();
    MultiLayerNetwork mlnRnn = new MultiLayerNetwork(confRnn);
    mlnRnn.init();
    INDArray out3d = mlnRnn.feedForward(input).get(2);
    assertArrayEquals(out3d.shape(), new int[] {miniBatchSize, nOut, timeSeriesLength});
    INDArray outRnn = mlnRnn.output(input);
    assertArrayEquals(outRnn.shape(), new int[] {miniBatchSize, nOut, timeSeriesLength});
    INDArray actRnn = mlnRnn.activate();
    assertArrayEquals(actRnn.shape(), new int[] {miniBatchSize, nOut, timeSeriesLength});
    INDArray preoutRnn = mlnRnn.preOutput(input);
    assertArrayEquals(preoutRnn.shape(), new int[] {miniBatchSize, nOut, timeSeriesLength});
}
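The 2d-versus-3d contrast comes from the RnnToFeedForwardPreProcessor in the first network, which folds the time axis into the minibatch axis before the dense output layer. A conceptual sketch of the equivalent reshape (the real preprocessor also handles mask arrays and array ordering):

    // [miniBatch, size, timeSeriesLength] -> [miniBatch * timeSeriesLength, size]
    INDArray rnnActivations = Nd4j.rand(3, 4, 6);   // [mb = 3, size = 4, tsLength = 6]
    INDArray ff = rnnActivations.permute(0, 2, 1)   // -> [mb, tsLength, size]
                    .dup('c')                       // contiguous copy before reshape
                    .reshape(3 * 6, 4);             // -> [18, 4]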