
Example 6 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in the deeplearning4j project.

From the class LocalResponseTest, method doBefore.

@Before
public void doBefore() {
    // Single-layer configuration: an LRN layer with AlexNet-style defaults and a fixed seed
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                    .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
                    .seed(123)
                    .layer(new LocalResponseNormalization.Builder().k(2).n(5).alpha(1e-4).beta(0.75).build())
                    .build();
    // LRN has no learnable parameters, hence the null params view; activate runs a forward pass
    layer = new LocalResponseNormalization().instantiate(conf, null, 0, null, false);
    activationsActual = layer.activate(x);
}
Also used: LocalResponseNormalization(org.deeplearning4j.nn.conf.layers.LocalResponseNormalization) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) Before(org.junit.Before)
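
For reference, the hyperparameters above (k = 2, n = 5, alpha = 1e-4, beta = 0.75) match the AlexNet-style cross-channel local response normalization. Assuming DL4J follows that standard formulation (a reasonable reading of the builder parameter names, not something this test asserts), the normalized activation for channel i at spatial position (x, y) is:

b_{x,y}^{i} = \frac{a_{x,y}^{i}}{\left( k + \alpha \sum_{j=\max(0,\, i-n/2)}^{\min(N-1,\, i+n/2)} \left( a_{x,y}^{j} \right)^{2} \right)^{\beta}}

where N is the total number of channels and the sum runs over the n channels adjacent to channel i.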

Example 7 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in the deeplearning4j project.

From the class ConvolutionLayerTest, method getCNNConfig.

private static Layer getCNNConfig(int nIn, int nOut, int[] kernelSize, int[] stride, int[] padding) {
    // Single convolution layer with a sigmoid activation
    ConvolutionLayer layer = new ConvolutionLayer.Builder(kernelSize, stride, padding)
                    .nIn(nIn).nOut(nOut)
                    .activation(Activation.SIGMOID)
                    .build();
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().iterations(1).layer(layer).build();
    // Allocate a flattened parameter view sized by the layer's initializer,
    // then instantiate the layer implementation backed by that view
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    return conf.getLayer().instantiate(conf, null, 0, params, true);
}
Also used: INDArray(org.nd4j.linalg.api.ndarray.INDArray) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) ConvolutionLayer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer)
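
The helper above packages the whole build-allocate-instantiate sequence. A minimal call sketch (the argument values are hypothetical, chosen for illustration rather than taken from the test class):

// Hypothetical values: 1 input channel, 2 output feature maps,
// a 2x2 kernel, stride 1 in both dimensions, and no padding
Layer cnnLayer = getCNNConfig(1, 2, new int[] {2, 2}, new int[] {1, 1}, new int[] {0, 0});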

Example 8 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in the deeplearning4j project.

From the class RBMTests, method testMnist.

@Test
public void testMnist() throws Exception {
    MnistDataFetcher fetcher = new MnistDataFetcher(true);
    Nd4j.ENFORCE_NUMERICAL_STABILITY = true;
    // Single RBM layer: 784 visible units (28x28 MNIST pixels) to 600 hidden units,
    // weights drawn from a narrow normal distribution centered at 1
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                    .iterations(30)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .learningRate(1e-1f)
                    .layer(new org.deeplearning4j.nn.conf.layers.RBM.Builder()
                                    .nIn(784).nOut(600)
                                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(1, 1e-5))
                                    .lossFunction(LossFunctions.LossFunction.RECONSTRUCTION_CROSSENTROPY)
                                    .build())
                    .build();
    conf.setPretrain(true);
    org.deeplearning4j.nn.conf.layers.RBM layerConf = (org.deeplearning4j.nn.conf.layers.RBM) conf.getLayer();
    // Fetch a small batch of MNIST examples
    fetcher.fetch(10);
    DataSet d2 = fetcher.next();
    org.nd4j.linalg.api.rng.distribution.Distribution dist = Nd4j.getDistributions().createNormal(1, 1e-5);
    System.out.println(dist.sample(new int[] { layerConf.getNIn(), layerConf.getNOut() }));
    INDArray input = d2.getFeatureMatrix();
    // Allocate the flattened parameter view, instantiate the RBM, and pretrain it on the batch
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    RBM rbm = (RBM) conf.getLayer().instantiate(conf, null, 0, params, true);
    rbm.fit(input);
}
Also used: Distribution (org.nd4j.linalg.api.rng.distribution.Distribution) DataSet (org.nd4j.linalg.dataset.DataSet) NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration) MnistDataFetcher (org.deeplearning4j.datasets.fetchers.MnistDataFetcher) INDArray (org.nd4j.linalg.api.ndarray.INDArray) NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution) Test (org.junit.Test)
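
As a sanity check on the parameter view allocated above: assuming the RBM's flattened parameters consist of the weight matrix plus the hidden and visible bias vectors (the usual RBM layout, an assumption rather than something this test verifies), numParams works out as:

// Assumed layout: weights (nIn x nOut) + hidden bias (nOut) + visible bias (nIn)
int expected = 784 * 600 + 600 + 784;   // = 471,784 parameters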

Example 9 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in the deeplearning4j project.

From the class TestCustomLayers, method testCustomOutputLayerMLN.

@Test
public void testCustomOutputLayerMLN() {
    // First: ensure that the CustomOutputLayer class is registered with the configuration's JSON mapper
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    AnnotatedClass ac = AnnotatedClass.construct(Layer.class,
                    mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
                    mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    boolean found = false;
    for (NamedType nt : types) {
        System.out.println(nt);
        if (nt.getType() == CustomOutputLayer.class)
            found = true;
    }
    assertTrue("CustomOutputLayer: not registered with NeuralNetConfiguration mapper", found);
    // Second: create a MultiLayerConfiguration containing the custom layer and check that
    // the JSON and YAML round-trips actually work
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .seed(12345).learningRate(0.1)
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new CustomOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                    .pretrain(false).backprop(true)
                    .build();
    String json = conf.toJson();
    String yaml = conf.toYaml();
    System.out.println(json);
    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);
    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);
    //Third: check initialization
    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    assertTrue(net.getLayer(1) instanceof CustomOutputLayerImpl);
    //Fourth: compare to an equivalent standard output layer (should be identical)
    MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder()
                    .seed(12345).learningRate(0.1)
                    .weightInit(WeightInit.XAVIER)
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                    .pretrain(false).backprop(true)
                    .build();
    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
    net2.init();
    assertEquals(net2.params(), net.params());
    INDArray testFeatures = Nd4j.rand(1, 10);
    INDArray testLabels = Nd4j.zeros(1, 10);
    testLabels.putScalar(0, 3, 1.0);
    DataSet ds = new DataSet(testFeatures, testLabels);
    assertEquals(net2.output(testFeatures), net.output(testFeatures));
    assertEquals(net2.score(ds), net.score(ds), 1e-6);
}
Also used: OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) CustomOutputLayer(org.deeplearning4j.nn.layers.custom.testclasses.CustomOutputLayer) DataSet(org.nd4j.linalg.dataset.DataSet) NamedType(org.nd4j.shade.jackson.databind.jsontype.NamedType) CustomOutputLayerImpl(org.deeplearning4j.nn.layers.custom.testclasses.CustomOutputLayerImpl) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) AnnotatedClass(org.nd4j.shade.jackson.databind.introspect.AnnotatedClass) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) ObjectMapper(org.nd4j.shade.jackson.databind.ObjectMapper) Test(org.junit.Test)
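
The first part of the test inspects the subtypes that DL4J's mapper resolved automatically. As a generic illustration of the underlying Jackson mechanism (this uses the plain Jackson API; DL4J's own registration path may differ), a polymorphic subtype can also be registered by hand:

// Register a subtype under an explicit JSON type name on a plain ObjectMapper
ObjectMapper plainMapper = new ObjectMapper();
plainMapper.registerSubtypes(new NamedType(CustomOutputLayer.class, "CustomOutputLayer"));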

Example 10 with NeuralNetConfiguration

Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in the deeplearning4j project.

From the class AutoEncoderTest, method testAutoEncoderBiasInit.

@Test
public void testAutoEncoderBiasInit() {
    // Autoencoder layer: 1 input, 3 hidden units, all biases initialized to 1
    org.deeplearning4j.nn.conf.layers.AutoEncoder build = new org.deeplearning4j.nn.conf.layers.AutoEncoder.Builder()
                    .nIn(1).nOut(3).biasInit(1).build();
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().layer(build).build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    Layer layer = conf.getLayer().instantiate(conf, null, 0, params, true);
    // The hidden bias "b" is stored as a row vector, so its first dimension is 1
    assertEquals(1, layer.getParam("b").size(0));
}
Also used: INDArray(org.nd4j.linalg.api.ndarray.INDArray) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) Layer(org.deeplearning4j.nn.api.Layer) Test(org.junit.Test)
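
For this configuration the size of the parameter view can be checked by hand, assuming the autoencoder stores a weight matrix plus hidden and visible bias vectors (the common layout; an assumption, not something the test asserts):

// Assumed layout: W (nIn x nOut) + hidden bias b (nOut) + visible bias vb (nIn)
int expected = 1 * 3 + 3 + 1;   // = 7 parameters for nIn = 1, nOut = 3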

Aggregations

NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 83 usages
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 65 usages
Test (org.junit.Test): 55 usages
Layer (org.deeplearning4j.nn.api.Layer): 29 usages
Gradient (org.deeplearning4j.nn.gradient.Gradient): 26 usages
DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer): 24 usages
Updater (org.deeplearning4j.nn.api.Updater): 22 usages
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer): 21 usages
DefaultGradient (org.deeplearning4j.nn.gradient.DefaultGradient): 21 usages
DataSet (org.nd4j.linalg.dataset.DataSet): 14 usages
MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration): 11 usages
ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener): 9 usages
MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork): 8 usages
IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator): 6 usages
UniformDistribution (org.deeplearning4j.nn.conf.distribution.UniformDistribution): 6 usages
RnnOutputLayer (org.deeplearning4j.nn.layers.recurrent.RnnOutputLayer): 6 usages
MnistDataFetcher (org.deeplearning4j.datasets.fetchers.MnistDataFetcher): 4 usages
Evaluation (org.deeplearning4j.eval.Evaluation): 4 usages
Model (org.deeplearning4j.nn.api.Model): 4 usages
ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer): 4 usages