Use of org.deeplearning4j.nn.layers.OutputLayer in the deeplearning4j project: the `call` method of class DL4jWorker.
/**
 * Fits the wrapped layer on a single mini-batch and returns its updated parameters.
 *
 * @param v1 the mini-batch to train on
 * @return the layer's parameter vector after fitting
 * @throws Exception if fitting fails; rethrown after reporting the batch size
 */
@Override
public INDArray call(DataSet v1) throws Exception {
    try {
        // Local alias avoids repeated casting of the stored network reference.
        Layer layer = (Layer) this.network;
        if (layer instanceof OutputLayer) {
            // An output layer trains supervised, so it consumes features and labels.
            ((OutputLayer) layer).fit(v1);
        } else {
            // All other layers train on the feature matrix alone.
            layer.fit(v1.getFeatureMatrix());
        }
        return layer.params();
    } catch (Exception e) {
        // Report which batch failed (by example count) before propagating.
        System.err.println("Error with dataset " + v1.numExamples());
        throw e;
    }
}
Use of org.deeplearning4j.nn.layers.OutputLayer in the deeplearning4j project: the `testSingleMaxLineSearch` method of class BackTrackLineSearchTest.
/**
 * With at most 100 line-search iterations, a single backtracking line search on a
 * freshly initialized softmax/NLL output layer should accept the full step (1.0).
 *
 * Fix: removed the unused locals {@code score1}/{@code score2} — {@code score1} was
 * assigned but never read, and {@code score2} was never assigned.
 */
@Test
public void testSingleMaxLineSearch() throws Exception {
    OutputLayer layer = getIrisLogisticLayerConfig(Activation.SOFTMAX, 100, LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD);
    int nParams = layer.numParams();
    layer.setBackpropGradientsViewArray(Nd4j.create(1, nParams));
    layer.setInput(irisData.getFeatureMatrix());
    layer.setLabels(irisData.getLabels());
    // Populate gradient and score state before running the line search.
    layer.computeGradientAndScore();
    BackTrackLineSearch lineSearch = new BackTrackLineSearch(layer, new NegativeDefaultStepFunction(), layer.getOptimizer());
    double step = lineSearch.optimize(layer.params(), layer.gradient().gradient(), layer.gradient().gradient());
    assertEquals(1.0, step, 1e-3);
}
Use of org.deeplearning4j.nn.layers.OutputLayer in the deeplearning4j project: the `testMultMinLineSearch` method of class BackTrackLineSearchTest.
/**
 * Verifies that applying the step size found by backtracking line search actually
 * decreases the layer's score (loss): the score after stepping must be strictly
 * lower than the score before.
 */
@Test
public void testMultMinLineSearch() throws Exception {
    OutputLayer layer = getIrisLogisticLayerConfig(Activation.SOFTMAX, 100, LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD);
    int paramCount = layer.numParams();
    layer.setBackpropGradientsViewArray(Nd4j.create(1, paramCount));
    layer.setInput(irisData.getFeatureMatrix());
    layer.setLabels(irisData.getLabels());
    // Compute gradient/score state for the initial parameters.
    layer.computeGradientAndScore();
    double scoreBefore = layer.score();
    // Keep a copy of the original gradient; the search may mutate the live one.
    INDArray originalGradient = layer.gradient().gradient().dup();
    NegativeDefaultStepFunction stepFunction = new NegativeDefaultStepFunction();
    BackTrackLineSearch search = new BackTrackLineSearch(layer, stepFunction, layer.getOptimizer());
    double stepSize = search.optimize(layer.params(), layer.gradient().gradient(), layer.gradient().gradient());
    // Apply the found step along the original gradient direction.
    INDArray updatedParams = layer.params();
    stepFunction.step(updatedParams, originalGradient, stepSize);
    layer.setParams(updatedParams);
    layer.computeGradientAndScore();
    double scoreAfter = layer.score();
    assertTrue("score1=" + scoreBefore + ", score2=" + scoreAfter, scoreBefore > scoreAfter);
}
Use of org.deeplearning4j.nn.layers.OutputLayer in the deeplearning4j project: the `testIris2` method of class TestSparkLayer.
/**
 * End-to-end check of distributed single-layer training on Spark: trains a softmax
 * output layer on the full Iris dataset (normalized, shuffled, parallelized as an
 * RDD) and prints an evaluation of its predictions against the true labels.
 */
@Test
public void testIris2() throws Exception {
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .iterations(10)
                    .learningRate(1e-1)
                    .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .nIn(4).nOut(3)
                                    .weightInit(WeightInit.XAVIER)
                                    .activation(Activation.SOFTMAX)
                                    .build())
                    .build();
    System.out.println("Initializing network");
    SparkDl4jLayer sparkLayer = new SparkDl4jLayer(sc, conf);
    // Load all 150 Iris examples in one batch, then normalize and shuffle.
    DataSet iris = new IrisDataSetIterator(150, 150).next();
    iris.normalizeZeroMeanZeroUnitVariance();
    iris.shuffle();
    List<DataSet> examples = iris.asList();
    JavaRDD<DataSet> trainingData = sc.parallelize(examples);
    // Fit on the cluster; the trained layer comes back as an OutputLayer.
    OutputLayer trained = (OutputLayer) sparkLayer.fitDataSet(trainingData);
    Evaluation evaluation = new Evaluation();
    evaluation.eval(iris.getLabels(), trained.output(iris.getFeatureMatrix()));
    System.out.println(evaluation.stats());
}
Use of org.deeplearning4j.nn.layers.OutputLayer in the deeplearning4j project: the `getIrisLogisticLayerConfig` helper method of class BackTrackLineSearchTest.
/**
 * Builds a single softmax-style output layer (4 inputs, 3 outputs) for the Iris
 * line-search tests.
 *
 * @param activationFunction activation applied to the layer output
 * @param maxIterations maximum number of line-search iterations allowed
 * @param lossFunction loss function the layer optimizes
 * @return the instantiated, Xavier-initialized output layer
 */
private static OutputLayer getIrisLogisticLayerConfig(Activation activationFunction, int maxIterations, LossFunctions.LossFunction lossFunction) {
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                    .seed(12345L)
                    .iterations(1)
                    .miniBatch(true)
                    .maxNumLineSearchIterations(maxIterations)
                    .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(lossFunction)
                                    .nIn(4).nOut(3)
                                    .activation(activationFunction)
                                    .weightInit(WeightInit.XAVIER)
                                    .build())
                    .build();
    // Allocate a flat parameter view sized by the layer's initializer.
    int paramCount = conf.getLayer().initializer().numParams(conf);
    INDArray paramView = Nd4j.create(1, paramCount);
    return (OutputLayer) conf.getLayer().instantiate(conf, null, 0, paramView, true);
}
Aggregations