
Example 61 with DataSet

Use of org.nd4j.linalg.dataset.DataSet in project deeplearning4j by deeplearning4j.

The class TestUpdaters, method testUpdaters.

@Test
public void testUpdaters() throws Exception {
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(Updater.NESTEROVS).momentum(0.9)
                    .graphBuilder()
                    .addInputs("input") // 40x40x1
                    .addLayer("l0_cnn", new ConvolutionLayer.Builder(new int[] { 3, 3 }, new int[] { 1, 1 },
                                    new int[] { 1, 1 }).nOut(100).build(), "input") // out: 40x40x100
                    .addLayer("l1_max", new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 },
                                    new int[] { 2, 2 }, new int[] { 1, 1 }).build(), "l0_cnn") // out: 21x21x100
                    .addLayer("l2_cnn", new ConvolutionLayer.Builder(new int[] { 3, 3 }, new int[] { 2, 2 },
                                    new int[] { 1, 1 }).nOut(200).build(), "l1_max") // out: 11x11x200
                    .addLayer("l3_max", new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 3, 3 },
                                    new int[] { 2, 2 }, new int[] { 1, 1 }).build(), "l2_cnn") // out: 6x6x200
                    .addLayer("l4_fc", new DenseLayer.Builder().nOut(1024).build(), "l3_max") // out: 1x1x1024
                    .addLayer("l5_out", new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                    .nOut(10).activation(Activation.SOFTMAX).build(), "l4_fc")
                    .setOutputs("l5_out")
                    .backprop(true).pretrain(false)
                    .setInputTypes(InputType.convolutional(40, 40, 1))
                    .build();
    //First: check that the nIns are set properly...
    Map<String, GraphVertex> map = conf.getVertices();
    LayerVertex l0_cnn = (LayerVertex) map.get("l0_cnn");
    LayerVertex l2_cnn = (LayerVertex) map.get("l2_cnn");
    LayerVertex l4_fc = (LayerVertex) map.get("l4_fc");
    LayerVertex l5_out = (LayerVertex) map.get("l5_out");
    assertEquals(1, ((FeedForwardLayer) l0_cnn.getLayerConf().getLayer()).getNIn());
    assertEquals(100, ((FeedForwardLayer) l2_cnn.getLayerConf().getLayer()).getNIn());
    assertEquals(6 * 6 * 200, ((FeedForwardLayer) l4_fc.getLayerConf().getLayer()).getNIn());
    assertEquals(1024, ((FeedForwardLayer) l5_out.getLayerConf().getLayer()).getNIn());
    //Check updaters state:
    ComputationGraph g = new ComputationGraph(conf);
    g.init();
    g.initGradientsView();
    ComputationGraphUpdater updater = g.getUpdater();
    //First: get the updaters array
    Field layerUpdatersField = updater.getClass().getDeclaredField("layerUpdaters");
    layerUpdatersField.setAccessible(true);
    org.deeplearning4j.nn.api.Updater[] layerUpdaters = (org.deeplearning4j.nn.api.Updater[]) layerUpdatersField.get(updater);
    //And get the map between names and updater indexes
    Field layerUpdatersMapField = updater.getClass().getDeclaredField("layerUpdatersMap");
    layerUpdatersMapField.setAccessible(true);
    Map<String, Integer> layerUpdatersMap = (Map<String, Integer>) layerUpdatersMapField.get(updater);
    //Go through each layer; check that the updater state size matches the parameters size
    org.deeplearning4j.nn.api.Layer[] layers = g.getLayers();
    for (org.deeplearning4j.nn.api.Layer l : layers) {
        String layerName = l.conf().getLayer().getLayerName();
        int nParams = l.numParams();
        Map<String, INDArray> paramTable = l.paramTable();
        Map<String, Integer> parameterSizeCounts = new LinkedHashMap<>();
        for (Map.Entry<String, INDArray> e : paramTable.entrySet()) {
            parameterSizeCounts.put(e.getKey(), e.getValue().length());
        }
        int updaterIdx = layerUpdatersMap.get(layerName);
        org.deeplearning4j.nn.api.Updater u = layerUpdaters[updaterIdx];
        LayerUpdater lu = (LayerUpdater) u;
        Field updaterForVariableField = LayerUpdater.class.getDeclaredField("updaterForVariable");
        updaterForVariableField.setAccessible(true);
        Map<String, GradientUpdater> updaterForVariable = (Map<String, GradientUpdater>) updaterForVariableField.get(lu);
        Map<String, Integer> updaterStateSizeCounts = new HashMap<>();
        for (Map.Entry<String, GradientUpdater> entry : updaterForVariable.entrySet()) {
            GradientUpdater gu = entry.getValue();
            Nesterovs nesterovs = (Nesterovs) gu;
            INDArray v = nesterovs.getV();
            int length = (v == null ? -1 : v.length());
            updaterStateSizeCounts.put(entry.getKey(), length);
        }
        //Check subsampling layers:
        if (l.numParams() == 0) {
            assertEquals(0, updaterForVariable.size());
        }
        System.out.println(layerName + "\t" + nParams + "\t" + parameterSizeCounts + "\t Updater size: " + updaterStateSizeCounts);
        //Now, with nesterov updater: 1 history value per parameter
        for (String s : parameterSizeCounts.keySet()) {
            int paramSize = parameterSizeCounts.get(s);
            int updaterSize = updaterStateSizeCounts.get(s);
            assertEquals(layerName + "/" + s, paramSize, updaterSize);
        }
    }
    //minibatch, depth, height, width
    INDArray in = Nd4j.create(2, 1, 40, 40);
    INDArray l = Nd4j.create(2, 10);
    DataSet ds = new DataSet(in, l);
    g.fit(ds);
}
Also used : LayerVertex(org.deeplearning4j.nn.conf.graph.LayerVertex) GraphVertex(org.deeplearning4j.nn.conf.graph.GraphVertex) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) Updater(org.deeplearning4j.nn.conf.Updater) ComputationGraphUpdater(org.deeplearning4j.nn.updater.graph.ComputationGraphUpdater) LayerUpdater(org.deeplearning4j.nn.updater.LayerUpdater) GradientUpdater(org.nd4j.linalg.learning.GradientUpdater) Nesterovs(org.nd4j.linalg.learning.Nesterovs) INDArray(org.nd4j.linalg.api.ndarray.INDArray) DataSet(org.nd4j.linalg.dataset.DataSet) Field(java.lang.reflect.Field) Map(java.util.Map) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) Test(org.junit.Test)
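
The core DataSet usage in this example is the two-argument constructor: features and labels wrapped together, then passed straight to ComputationGraph.fit(). A minimal, self-contained sketch of that pattern (shapes match the 40x40x1 input and 10 output classes above; the class name and random data are illustrative only):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;

public class DataSetConstructionSketch {
    public static void main(String[] args) {
        // CNN features are rank 4: [minibatch, channels, height, width]
        INDArray features = Nd4j.rand(new int[] { 2, 1, 40, 40 });
        // Labels are one-hot rows: [minibatch, numClasses]
        INDArray labels = Nd4j.zeros(2, 10);
        labels.putScalar(new int[] { 0, 3 }, 1.0);
        labels.putScalar(new int[] { 1, 7 }, 1.0);
        DataSet ds = new DataSet(features, labels);
        System.out.println("numExamples: " + ds.numExamples()); // 2
        // An initialized ComputationGraph 'g' would then consume this via g.fit(ds)
    }
}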

Example 62 with DataSet

Use of org.nd4j.linalg.dataset.DataSet in project deeplearning4j by deeplearning4j.

The class ComputationGraphTestRNN, method checkMaskArrayClearance.

@Test
public void checkMaskArrayClearance() {
    for (boolean tbptt : new boolean[] { true, false }) {
        //Simple "does it throw an exception" type test...
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().iterations(1).seed(12345)
                        .graphBuilder().addInputs("in")
                        .addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE)
                                        .activation(Activation.IDENTITY).nIn(1).nOut(1).build(), "in")
                        .setOutputs("out")
                        .backpropType(tbptt ? BackpropType.TruncatedBPTT : BackpropType.Standard)
                        .tBPTTForwardLength(8).tBPTTBackwardLength(8).build();
        ComputationGraph net = new ComputationGraph(conf);
        net.init();
        MultiDataSet data = new MultiDataSet(new INDArray[] { Nd4j.linspace(1, 10, 10).reshape(1, 1, 10) },
                        new INDArray[] { Nd4j.linspace(2, 20, 10).reshape(1, 1, 10) },
                        new INDArray[] { Nd4j.ones(10) }, new INDArray[] { Nd4j.ones(10) });
        net.fit(data);
        assertNull(net.getInputMaskArrays());
        assertNull(net.getLabelMaskArrays());
        for (Layer l : net.getLayers()) {
            assertNull(l.getMaskArray());
        }
        DataSet ds = new DataSet(data.getFeatures(0), data.getLabels(0), data.getFeaturesMaskArray(0), data.getLabelsMaskArray(0));
        net.fit(ds);
        assertNull(net.getInputMaskArrays());
        assertNull(net.getLabelMaskArrays());
        for (Layer l : net.getLayers()) {
            assertNull(l.getMaskArray());
        }
        net.fit(data.getFeatures(), data.getLabels(), data.getFeaturesMaskArrays(), data.getLabelsMaskArrays());
        assertNull(net.getInputMaskArrays());
        assertNull(net.getLabelMaskArrays());
        for (Layer l : net.getLayers()) {
            assertNull(l.getMaskArray());
        }
        MultiDataSetIterator iter = new IteratorMultiDataSetIterator(Collections.singletonList((org.nd4j.linalg.dataset.api.MultiDataSet) data).iterator(), 1);
        net.fit(iter);
        assertNull(net.getInputMaskArrays());
        assertNull(net.getLabelMaskArrays());
        for (Layer l : net.getLayers()) {
            assertNull(l.getMaskArray());
        }
        DataSetIterator iter2 = new IteratorDataSetIterator(Collections.singletonList(ds).iterator(), 1);
        net.fit(iter2);
        assertNull(net.getInputMaskArrays());
        assertNull(net.getLabelMaskArrays());
        for (Layer l : net.getLayers()) {
            assertNull(l.getMaskArray());
        }
    }
}
Also used : DataSet(org.nd4j.linalg.dataset.DataSet) MultiDataSet(org.nd4j.linalg.dataset.MultiDataSet) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) MultiDataSetIterator(org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator) IteratorDataSetIterator(org.deeplearning4j.datasets.iterator.IteratorDataSetIterator) IteratorMultiDataSetIterator(org.deeplearning4j.datasets.iterator.IteratorMultiDataSetIterator) Layer(org.deeplearning4j.nn.api.Layer) RnnOutputLayer(org.deeplearning4j.nn.conf.layers.RnnOutputLayer) DenseLayer(org.deeplearning4j.nn.conf.layers.DenseLayer) BaseRecurrentLayer(org.deeplearning4j.nn.layers.recurrent.BaseRecurrentLayer) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) Test(org.junit.Test)
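
The four-argument DataSet constructor used above attaches per-time-step mask arrays, which is how variable-length sequences are represented. A minimal sketch of that constructor, assuming a single sequence of length 10 in which only the first 6 steps are valid (class name and mask pattern are illustrative):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;

public class MaskedDataSetSketch {
    public static void main(String[] args) {
        // RNN features/labels are rank 3: [minibatch, size, timeSteps]
        INDArray features = Nd4j.linspace(1, 10, 10).reshape(1, 1, 10);
        INDArray labels = Nd4j.linspace(2, 20, 10).reshape(1, 1, 10);
        // Masks are [minibatch, timeSteps]: 1.0 = real step, 0.0 = padding
        INDArray mask = Nd4j.zeros(1, 10);
        mask.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, 6)).assign(1.0);
        DataSet ds = new DataSet(features, labels, mask, mask);
        System.out.println("features mask present: " + (ds.getFeaturesMaskArray() != null));
    }
}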

Example 63 with DataSet

Use of org.nd4j.linalg.dataset.DataSet in project deeplearning4j by deeplearning4j.

The class MultiNeuralNetConfLayerBuilderTest, method testRbmSetup.

@Test
public void testRbmSetup() throws Exception {
    MultiLayerConfiguration multiLayerConfiguration = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).seed(123).iterations(5)
                    .maxNumLineSearchIterations(10) // Magical Optimisation Stuff
                    .regularization(true).list()
                    .layer(0, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN).nIn(784).nOut(1000)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
                    .layer(1, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN).nIn(1000).nOut(500)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
                    .layer(2, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN).nIn(500).nOut(250)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
                    .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.SOFTMAX).nIn(250).nOut(10).build())
                    .pretrain(false).backprop(true).build();
    MultiLayerNetwork network = new MultiLayerNetwork(multiLayerConfiguration);
    network.init();
    DataSet d = new MnistDataSetIterator(2, 2).next();
    org.deeplearning4j.nn.api.Layer firstRbm = network.getLayer(0);
    org.deeplearning4j.nn.api.Layer secondRbm = network.getLayer(1);
    org.deeplearning4j.nn.api.Layer thirdRbm = network.getLayer(2);
    org.deeplearning4j.nn.api.Layer fourthRbm = network.getLayer(3);
    INDArray[] weightMatrices = new INDArray[] { firstRbm.getParam(DefaultParamInitializer.WEIGHT_KEY), secondRbm.getParam(DefaultParamInitializer.WEIGHT_KEY), thirdRbm.getParam(DefaultParamInitializer.WEIGHT_KEY), fourthRbm.getParam(DefaultParamInitializer.WEIGHT_KEY) };
    INDArray[] hiddenBiases = new INDArray[] { firstRbm.getParam(DefaultParamInitializer.BIAS_KEY), secondRbm.getParam(DefaultParamInitializer.BIAS_KEY), thirdRbm.getParam(DefaultParamInitializer.BIAS_KEY), fourthRbm.getParam(DefaultParamInitializer.BIAS_KEY) };
    int[][] shapeAssertions = new int[][] { { 784, 1000 }, { 1000, 500 }, { 500, 250 }, { 250, 10 } };
    int[][] biasAssertions = new int[][] { { 1, 1000 }, { 1, 500 }, { 1, 250 }, { 1, 10 } };
    for (int i = 0; i < shapeAssertions.length; i++) {
        assertArrayEquals(shapeAssertions[i], weightMatrices[i].shape());
        assertArrayEquals(biasAssertions[i], hiddenBiases[i].shape());
    }
    network.fit(d);
}
Also used : OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) MnistDataSetIterator(org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator) DataSet(org.nd4j.linalg.dataset.DataSet) INDArray(org.nd4j.linalg.api.ndarray.INDArray) RBM(org.deeplearning4j.nn.conf.layers.RBM) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) Test(org.junit.Test)
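
Here the DataSet is produced by an iterator rather than built by hand: MnistDataSetIterator.next() returns a minibatch with flattened 784-element feature rows and one-hot label rows. A minimal sketch of that pattern, assuming the iterator can fetch or locate the MNIST data as it does in the test:

import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator;
import org.nd4j.linalg.dataset.DataSet;

public class MnistDataSetSketch {
    public static void main(String[] args) throws Exception {
        // batch size 2, limited to 2 examples in total
        MnistDataSetIterator iter = new MnistDataSetIterator(2, 2);
        DataSet d = iter.next();
        // Features: [2, 784] (28*28 flattened); labels: [2, 10] one-hot
        System.out.println("features columns: " + d.getFeatures().columns()); // 784
        System.out.println("labels columns: " + d.getLabels().columns()); // 10
    }
}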

Example 64 with DataSet

Use of org.nd4j.linalg.dataset.DataSet in project deeplearning4j by deeplearning4j.

The class GradientCheckTests, method testRbm.

@Test
public void testRbm() {
    //As above (testGradientMLP2LayerIrisSimple()) but with L2, L1, and both L2/L1 applied
    //Need to run gradient through updater, so that L2 can be applied
    RBM.HiddenUnit[] hiddenFunc = { RBM.HiddenUnit.BINARY, RBM.HiddenUnit.RECTIFIED };
    //If true: run some backprop steps first
    boolean[] characteristic = { false, true };
    LossFunction[] lossFunctions = { LossFunction.MSE, LossFunction.KL_DIVERGENCE };
    //i.e., lossFunctions[i] used with outputActivations[i] here
    String[] outputActivations = { "softmax", "sigmoid" };
    DataNormalization scaler = new NormalizerMinMaxScaler();
    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    scaler.fit(iter);
    iter.setPreProcessor(scaler);
    DataSet ds = iter.next();
    INDArray input = ds.getFeatureMatrix();
    INDArray labels = ds.getLabels();
    double[] l2vals = { 0.4, 0.0, 0.4 };
    //i.e., use l2vals[i] with l1vals[i]
    double[] l1vals = { 0.0, 0.5, 0.5 };
    for (RBM.HiddenUnit hidunit : hiddenFunc) {
        for (boolean doLearningFirst : characteristic) {
            for (int i = 0; i < lossFunctions.length; i++) {
                for (int k = 0; k < l2vals.length; k++) {
                    LossFunction lf = lossFunctions[i];
                    String outputActivation = outputActivations[i];
                    double l2 = l2vals[k];
                    double l1 = l1vals[k];
                    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                                    .regularization(true).l2(l2).l1(l1).learningRate(1.0)
                                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(12345L)
                                    .list()
                                    .layer(0, new RBM.Builder(hidunit, RBM.VisibleUnit.BINARY).nIn(4).nOut(3)
                                                    .weightInit(WeightInit.UNIFORM).updater(Updater.SGD).build())
                                    .layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3).weightInit(WeightInit.XAVIER)
                                                    .updater(Updater.SGD).activation(outputActivation).build())
                                    .pretrain(false).backprop(true).build();
                    MultiLayerNetwork mln = new MultiLayerNetwork(conf);
                    mln.init();
                    if (doLearningFirst) {
                        //Run a number of iterations of learning
                        mln.setInput(ds.getFeatures());
                        mln.setLabels(ds.getLabels());
                        mln.computeGradientAndScore();
                        double scoreBefore = mln.score();
                        for (int j = 0; j < 10; j++) mln.fit(ds);
                        mln.computeGradientAndScore();
                        double scoreAfter = mln.score();
                        //Can't test in 'characteristic mode of operation' if not learning
                        String msg = "testGradMLP2LayerIrisSimple() - score did not (sufficiently) decrease during learning - activationFn=" + hidunit.toString() + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst + ", l2=" + l2 + ", l1=" + l1 + " (before=" + scoreBefore + ", scoreAfter=" + scoreAfter + ")";
                        assertTrue(msg, scoreAfter < scoreBefore);
                    }
                    if (PRINT_RESULTS) {
                        System.out.println("testGradientMLP2LayerIrisSimpleRandom() - activationFn=" + hidunit.toString() + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst + ", l2=" + l2 + ", l1=" + l1);
                        for (int j = 0; j < mln.getnLayers(); j++) System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
                    }
                    boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);
                    String msg = "testGradMLP2LayerIrisSimple() - activationFn=" + hidunit.toString() + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst + ", l2=" + l2 + ", l1=" + l1;
                    assertTrue(msg, gradOK);
                }
            }
        }
    }
}
Also used : NormalizerMinMaxScaler(org.nd4j.linalg.dataset.api.preprocessor.NormalizerMinMaxScaler) DataNormalization(org.nd4j.linalg.dataset.api.preprocessor.DataNormalization) IrisDataSetIterator(org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator) DataSetIterator(org.nd4j.linalg.dataset.api.iterator.DataSetIterator) DataSet(org.nd4j.linalg.dataset.DataSet) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) INDArray(org.nd4j.linalg.api.ndarray.INDArray) LossFunction(org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction) Test(org.junit.Test)
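
Before gradient checking, the test rescales the Iris features to [0, 1] by fitting a NormalizerMinMaxScaler on the iterator and installing it as a pre-processor, so every subsequent next() returns a normalized DataSet. A minimal sketch of that normalization pattern in isolation, using the same classes as the test (the class name is illustrative):

import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;
import org.nd4j.linalg.dataset.api.preprocessor.NormalizerMinMaxScaler;

public class MinMaxScalingSketch {
    public static void main(String[] args) {
        DataSetIterator iter = new IrisDataSetIterator(150, 150);
        // First pass over the data collects per-column min/max statistics
        DataNormalization scaler = new NormalizerMinMaxScaler();
        scaler.fit(iter);
        // Installed as a pre-processor, the scaler normalizes each batch on next()
        iter.setPreProcessor(scaler);
        DataSet ds = iter.next();
        System.out.println("column minima after scaling: " + ds.getFeatures().min(0)); // ~0
        System.out.println("column maxima after scaling: " + ds.getFeatures().max(0)); // ~1
    }
}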

Example 65 with DataSet

Use of org.nd4j.linalg.dataset.DataSet in project deeplearning4j by deeplearning4j.

The class GradientCheckTestsComputationGraph, method testBasicIris.

@Test
public void testBasicIris() {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                    .updater(Updater.NONE).learningRate(1.0)
                    .graphBuilder().addInputs("input")
                    .addLayer("firstLayer", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(), "input")
                    .addLayer("outputLayer", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(), "firstLayer")
                    .setOutputs("outputLayer").pretrain(false).backprop(true).build();
    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();
    Nd4j.getRandom().setSeed(12345);
    int nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(1, nParams);
    graph.setParams(newParams);
    DataSet ds = new IrisDataSetIterator(150, 150).next();
    INDArray min = ds.getFeatureMatrix().min(0);
    INDArray max = ds.getFeatureMatrix().max(0);
    ds.getFeatureMatrix().subiRowVector(min).diviRowVector(max.sub(min));
    INDArray input = ds.getFeatureMatrix();
    INDArray labels = ds.getLabels();
    if (PRINT_RESULTS) {
        System.out.println("testBasicIris()");
        for (int j = 0; j < graph.getNumLayers(); j++) System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
    }
    boolean gradOK = GradientCheckUtil.checkGradients(graph, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, new INDArray[] { input }, new INDArray[] { labels });
    String msg = "testBasicIris()";
    assertTrue(msg, gradOK);
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) IrisDataSetIterator(org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator) NormalDistribution(org.deeplearning4j.nn.conf.distribution.NormalDistribution) DataSet(org.nd4j.linalg.dataset.DataSet) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) ComputationGraph(org.deeplearning4j.nn.graph.ComputationGraph) Test(org.junit.Test)
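
testBasicIris() normalizes differently again: in place, by row-vector arithmetic directly on the feature matrix, rather than through a pre-processor. A minimal sketch of that manual min-max step (the random stand-in matrix is illustrative):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class ManualMinMaxSketch {
    public static void main(String[] args) {
        INDArray features = Nd4j.rand(150, 4).muli(10); // stand-in for the Iris feature matrix
        // Column-wise minima and maxima, each a [1, 4] row vector
        INDArray min = features.min(0);
        INDArray max = features.max(0);
        // In place: (x - min) / (max - min), applied column by column
        features.subiRowVector(min).diviRowVector(max.sub(min));
        System.out.println("min after: " + features.min(0)); // ~0
        System.out.println("max after: " + features.max(0)); // ~1
    }
}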

Aggregations

DataSet (org.nd4j.linalg.dataset.DataSet): 334 usages
Test (org.junit.Test): 226 usages
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 194 usages
MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork): 93 usages
DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator): 82 usages
NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 79 usages
MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration): 73 usages
IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator): 62 usages
ArrayList (java.util.ArrayList): 50 usages
MnistDataSetIterator (org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator): 41 usages
ScoreIterationListener (org.deeplearning4j.optimize.listeners.ScoreIterationListener): 38 usages
BaseSparkTest (org.deeplearning4j.spark.BaseSparkTest): 34 usages
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer): 32 usages
DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer): 31 usages
MultiDataSet (org.nd4j.linalg.dataset.MultiDataSet): 31 usages
ComputationGraph (org.deeplearning4j.nn.graph.ComputationGraph): 25 usages
SequenceRecordReader (org.datavec.api.records.reader.SequenceRecordReader): 24 usages
ComputationGraphConfiguration (org.deeplearning4j.nn.conf.ComputationGraphConfiguration): 24 usages
CSVSequenceRecordReader (org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader): 23 usages
ClassPathResource (org.nd4j.linalg.io.ClassPathResource): 23 usages