Use of org.deeplearning4j.spark.impl.multilayer.SparkDl4jMultiLayer in project deeplearning4j by deeplearning4j.
From the class TestSparkMultiLayerParameterAveraging, method testFromSvmLight.
@Test
public void testFromSvmLight() throws Exception {
    // Load the iris dataset in svmLight format and convert each sparse feature vector to a dense one
    JavaRDD<LabeledPoint> data = MLUtils.loadLibSVMFile(sc.sc(),
                    new ClassPathResource("svmLight/iris_svmLight_0.txt").getTempFileFromArchive().getAbsolutePath())
                    .toJavaRDD().map(new Function<LabeledPoint, LabeledPoint>() {
                        @Override
                        public LabeledPoint call(LabeledPoint v1) throws Exception {
                            return new LabeledPoint(v1.label(), Vectors.dense(v1.features().toArray()));
                        }
                    });
    DataSet d = new IrisDataSetIterator(150, 150).next();
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(123)
                    .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).iterations(100)
                    .miniBatch(true).maxNumLineSearchIterations(10).list()
                    .layer(0, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN).nIn(4).nOut(100)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.RELU)
                                    .lossFunction(LossFunctions.LossFunction.RMSE_XENT).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .nIn(100).nOut(3).activation(Activation.SOFTMAX).weightInit(WeightInit.XAVIER)
                                    .build())
                    .backprop(false).build();
    // Note: this locally built network is initialized but unused below; the Spark
    // training uses getBasicConf() instead.
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    System.out.println("Initializing network");
    SparkDl4jMultiLayer master = new SparkDl4jMultiLayer(sc, getBasicConf(),
                    new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 5, 1, 0));
    // Fit directly on the JavaRDD<LabeledPoint>, then evaluate on the locally loaded iris DataSet
    MultiLayerNetwork network2 = master.fitLabeledPoint(data);
    Evaluation evaluation = new Evaluation();
    evaluation.eval(d.getLabels(), network2.output(d.getFeatureMatrix()));
    System.out.println(evaluation.stats());
}
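The six-argument ParameterAveragingTrainingMaster constructor used above is terse; the builder form makes the same settings explicit. A minimal sketch of what we read the positional arguments to mean (saveUpdater, numWorkers, rddDataSetNumExamples, batchSizePerWorker, averagingFrequency, prefetchNumBatches) -- this mapping is our assumption, not part of the test:

// Assumed builder equivalent of new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 5, 1, 0)
TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(numExecutors(), 1) // numWorkers, examples per RDD DataSet
                .saveUpdater(true)           // distribute updater state along with parameters
                .batchSizePerWorker(5)       // minibatch size used by each worker
                .averagingFrequency(1)       // average parameters after every worker minibatch
                .workerPrefetchNumBatches(0) // no asynchronous minibatch prefetch on workers
                .build();
SparkDl4jMultiLayer master = new SparkDl4jMultiLayer(sc, getBasicConf(), tm);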
Use of org.deeplearning4j.spark.impl.multilayer.SparkDl4jMultiLayer in project deeplearning4j by deeplearning4j.
From the class TestSparkMultiLayerParameterAveraging, method testROC.
@Test
public void testROC() {
    int nArrays = 100;
    int minibatch = 64;
    int steps = 20;
    int nIn = 5;
    int nOut = 2;
    int layerSize = 10;
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().weightInit(WeightInit.XAVIER).list()
                    .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize).build())
                    .layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    Nd4j.getRandom().setSeed(12345);
    Random r = new Random(12345);
    // Local ROC evaluation on random features with random one-hot labels: the reference result
    ROC local = new ROC(steps);
    List<DataSet> dsList = new ArrayList<>();
    for (int i = 0; i < nArrays; i++) {
        INDArray features = Nd4j.rand(minibatch, nIn);
        INDArray p = net.output(features);
        INDArray l = Nd4j.zeros(minibatch, 2);
        for (int j = 0; j < minibatch; j++) {
            l.putScalar(j, r.nextInt(2), 1.0);
        }
        local.eval(l, p);
        dsList.add(new DataSet(features, l));
    }
    // Evaluate the same data distributed via Spark; local and Spark ROC results should match
    SparkDl4jMultiLayer sparkNet = new SparkDl4jMultiLayer(sc, net, null);
    JavaRDD<DataSet> rdd = sc.parallelize(dsList);
    ROC sparkROC = sparkNet.evaluateROC(rdd, steps, 32);
    assertEquals(local.calculateAUC(), sparkROC.calculateAUC(), 1e-6);
    double[][] arrLocal = local.getResultsAsArray();
    double[][] arrSpark = sparkROC.getResultsAsArray();
    assertArrayEquals(arrLocal[0], arrSpark[0], 1e-6);
    assertArrayEquals(arrLocal[1], arrSpark[1], 1e-6);
}
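For reference, ROC here is org.deeplearning4j.eval.ROC: constructed with a positive steps value, it evaluates at a fixed set of thresholds, which is why the local and Spark curves can be compared exactly. A minimal local-only sketch with made-up values (the row semantics of getResultsAsArray(), FPR in row 0 and TPR in row 1, are our assumption):

// Standalone ROC usage sketch; labels and probabilities are [nExamples, 2] matrices
ROC roc = new ROC(20); // 20 threshold steps
INDArray labels = Nd4j.create(new double[][] {{1, 0}, {0, 1}, {0, 1}});
INDArray probs = Nd4j.create(new double[][] {{0.9, 0.1}, {0.2, 0.8}, {0.6, 0.4}});
roc.eval(labels, probs);
System.out.println("AUC: " + roc.calculateAUC());
double[][] results = roc.getResultsAsArray(); // assumed: row 0 = false positive rates, row 1 = true positive rates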
Use of org.deeplearning4j.spark.impl.multilayer.SparkDl4jMultiLayer in project deeplearning4j by deeplearning4j.
From the class TestListeners, method testStatsCollection.
@Test
public void testStatsCollection() {
    JavaSparkContext sc = getContext();
    int nExecutors = numExecutors();
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(123)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1).list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(100).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.RELU).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .nIn(100).nOut(3).activation(Activation.SOFTMAX).weightInit(WeightInit.XAVIER)
                                    .build())
                    .pretrain(false).backprop(true).build();
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    TrainingMaster tm = new ParameterAveragingTrainingMaster.Builder(1).batchSizePerWorker(5)
                    .averagingFrequency(6).build();
    SparkDl4jMultiLayer net = new SparkDl4jMultiLayer(sc, conf, tm);
    //In-memory
    StatsStorage ss = new MapDBStatsStorage();
    net.setListeners(ss, Collections.singletonList(new StatsListener(null)));
    List<DataSet> list = new IrisDataSetIterator(120, 150).next().asList();
    //120 examples, 4 executors, 30 examples per executor -> 6 updates of size 5 per executor
    JavaRDD<DataSet> rdd = sc.parallelize(list);
    net.fit(rdd);
    List<String> sessions = ss.listSessionIDs();
    System.out.println("Sessions: " + sessions);
    assertEquals(1, sessions.size());
    String sid = sessions.get(0);
    List<String> typeIDs = ss.listTypeIDsForSession(sid);
    List<String> workers = ss.listWorkerIDsForSession(sid);
    System.out.println(sid + "\t" + typeIDs + "\t" + workers);
    List<Persistable> lastUpdates = ss.getLatestUpdateAllWorkers(sid, StatsListener.TYPE_ID);
    System.out.println(lastUpdates);
    System.out.println("Static info:");
    for (String wid : workers) {
        Persistable staticInfo = ss.getStaticInfo(sid, StatsListener.TYPE_ID, wid);
        System.out.println(sid + "\t" + wid);
    }
    assertEquals(1, typeIDs.size());
    assertEquals(numExecutors(), workers.size());
    //Worker IDs should be identical except for a trailing counter digit in [0, numExecutors())
    String firstWorker = workers.get(0);
    String firstWorkerSubstring = workers.get(0).substring(0, firstWorker.length() - 1);
    for (String wid : workers) {
        String widSubstring = wid.substring(0, wid.length() - 1);
        assertEquals(firstWorkerSubstring, widSubstring);
        String counterVal = wid.substring(wid.length() - 1, wid.length());
        int cv = Integer.parseInt(counterVal);
        assertTrue(0 <= cv && cv < numExecutors());
    }
}
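The stats collected in MapDBStatsStorage are not only useful for assertions; the same storage can back the DL4J training UI. A minimal sketch, assuming the deeplearning4j-ui artifact is on the classpath (UIServer and its default port are not part of this test):

// Visualize collected stats in the DL4J training UI
// (UIServer is org.deeplearning4j.ui.api.UIServer, from the deeplearning4j-ui module)
StatsStorage ss = new MapDBStatsStorage();
UIServer uiServer = UIServer.getInstance(); // starts the UI server, typically at http://localhost:9000
uiServer.attach(ss);                        // the UI renders whatever the listeners write into ss
net.setListeners(ss, Collections.singletonList(new StatsListener(null)));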