Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.
From the class LocalResponseTest, method doBefore:
@Before
public void doBefore() {
NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).seed(123)
                .layer(new LocalResponseNormalization.Builder().k(2).n(5).alpha(1e-4).beta(0.75).build())
                .build();
layer = new LocalResponseNormalization().instantiate(conf, null, 0, null, false);
activationsActual = layer.activate(x);
}
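The fields layer, activationsActual, and x are declared elsewhere in the test class and are not shown in this snippet. A minimal sketch of how they might be declared is given below; the input shape and random values are assumptions for illustration only, not taken from the original test.
// Hypothetical field declarations, assuming an NCHW input of shape [minibatch, channels, height, width].
private INDArray x = Nd4j.rand(new int[] {2, 7, 3, 2});
private Layer layer;
private INDArray activationsActual;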
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.
From the class ConvolutionLayerTest, method getCNNConfig:
//////////////////////////////////////////////////////////////////////////////////
private static Layer getCNNConfig(int nIn, int nOut, int[] kernelSize, int[] stride, int[] padding) {
ConvolutionLayer layer = new ConvolutionLayer.Builder(kernelSize, stride, padding)
                .nIn(nIn).nOut(nOut).activation(Activation.SIGMOID).build();
NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().iterations(1).layer(layer).build();
int numParams = conf.getLayer().initializer().numParams(conf);
INDArray params = Nd4j.create(1, numParams);
return conf.getLayer().instantiate(conf, null, 0, params, true);
}
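A hypothetical call to this helper might look as follows; the argument values and the input array are illustrative assumptions, not part of the original test.
// Illustrative usage: a 2x2 kernel, stride 1, no padding, single input channel.
Layer conv = getCNNConfig(1, 2, new int[] {2, 2}, new int[] {1, 1}, new int[] {0, 0});
INDArray out = conv.activate(Nd4j.rand(new int[] {1, 1, 8, 8}));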
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.
From the class RBMTests, method testMnist:
@Test
public void testMnist() throws Exception {
MnistDataFetcher fetcher = new MnistDataFetcher(true);
Nd4j.ENFORCE_NUMERICAL_STABILITY = true;
NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().iterations(30)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(1e-1f)
                .layer(new org.deeplearning4j.nn.conf.layers.RBM.Builder().nIn(784).nOut(600)
                                .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(1, 1e-5))
                                .lossFunction(LossFunctions.LossFunction.RECONSTRUCTION_CROSSENTROPY).build())
                .build();
conf.setPretrain(true);
org.deeplearning4j.nn.conf.layers.RBM layerConf = (org.deeplearning4j.nn.conf.layers.RBM) conf.getLayer();
fetcher.fetch(10);
DataSet d2 = fetcher.next();
org.nd4j.linalg.api.rng.distribution.Distribution dist = Nd4j.getDistributions().createNormal(1, 1e-5);
System.out.println(dist.sample(new int[] { layerConf.getNIn(), layerConf.getNOut() }));
INDArray input = d2.getFeatureMatrix();
int numParams = conf.getLayer().initializer().numParams(conf);
INDArray params = Nd4j.create(1, numParams);
RBM rbm = (RBM) conf.getLayer().instantiate(conf, null, 0, params, true);
rbm.fit(input);
}
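As a hedged follow-up that is not part of the original test, the fitted layer's weight matrix could be inspected to confirm it matches the configured dimensions, assuming the weights are stored under the standard DefaultParamInitializer.WEIGHT_KEY ("W").
// Sketch (assumption): after fit(), the weight matrix should have shape [nIn, nOut] = [784, 600].
INDArray w = rbm.getParam(DefaultParamInitializer.WEIGHT_KEY);
assertEquals(784, w.size(0));
assertEquals(600, w.size(1));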
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.
From the class TestCustomLayers, method testCustomOutputLayerMLN:
@Test
public void testCustomOutputLayerMLN() {
//First: Ensure that the CustomOutputLayer class is registered
ObjectMapper mapper = NeuralNetConfiguration.mapper();
AnnotatedClass ac = AnnotatedClass.construct(Layer.class, mapper.getSerializationConfig().getAnnotationIntrospector(), null);
Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
                mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
Set<Class<?>> registeredSubtypes = new HashSet<>();
boolean found = false;
for (NamedType nt : types) {
System.out.println(nt);
// registeredSubtypes.add(nt.getType());
if (nt.getType() == CustomOutputLayer.class)
found = true;
}
assertTrue("CustomOutputLayer: not registered with NeuralNetConfiguration mapper", found);
//Second: let's create a MultiLayerConfiguration with one, and check that the JSON and YAML config actually work...
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).learningRate(0.1).list()
                .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                .layer(1, new CustomOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                .pretrain(false).backprop(true).build();
String json = conf.toJson();
String yaml = conf.toYaml();
System.out.println(json);
MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
assertEquals(conf, confFromJson);
MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
assertEquals(conf, confFromYaml);
//Third: check initialization
Nd4j.getRandom().setSeed(12345);
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
assertTrue(net.getLayer(1) instanceof CustomOutputLayerImpl);
//Fourth: compare to an equivalent standard output layer (should be identical)
MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345).learningRate(0.1)
                .weightInit(WeightInit.XAVIER).list()
                .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                .pretrain(false).backprop(true).build();
Nd4j.getRandom().setSeed(12345);
MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
net2.init();
assertEquals(net2.params(), net.params());
INDArray testFeatures = Nd4j.rand(1, 10);
INDArray testLabels = Nd4j.zeros(1, 10);
testLabels.putScalar(0, 3, 1.0);
DataSet ds = new DataSet(testFeatures, testLabels);
assertEquals(net2.output(testFeatures), net.output(testFeatures));
assertEquals(net2.score(ds), net.score(ds), 1e-6);
}
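The comparison could be extended with a single training step on the same DataSet; this is a sketch under the assumption that both networks use identical default updaters and seeds, and it is not part of the original test.
// Hypothetical extension: after one identical fit() call, the custom-layer network and the
// standard network should remain in lockstep.
net.fit(ds);
net2.fit(ds);
assertEquals(net2.params(), net.params());
assertEquals(net2.score(ds), net.score(ds), 1e-6);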
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j.
From the class AutoEncoderTest, method testAutoEncoderBiasInit:
@Test
public void testAutoEncoderBiasInit() {
org.deeplearning4j.nn.conf.layers.AutoEncoder build =
                new org.deeplearning4j.nn.conf.layers.AutoEncoder.Builder().nIn(1).nOut(3).biasInit(1).build();
NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().layer(build).build();
// int numParams = LayerFactories.getFactory(conf).initializer().numParams(conf,true);
int numParams = conf.getLayer().initializer().numParams(conf);
INDArray params = Nd4j.create(1, numParams);
Layer layer = conf.getLayer().instantiate(conf, null, 0, params, true);
assertEquals(1, layer.getParam("b").size(0));
}
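Beyond the size check, one might also verify the bias values themselves; this short sketch is an assumption for illustration and does not appear in the original test.
// Hypothetical follow-up: every bias entry should equal the configured biasInit value of 1.0.
INDArray b = layer.getParam("b");
for (int i = 0; i < b.length(); i++) {
    assertEquals(1.0, b.getDouble(i), 1e-6);
}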