Use of org.nd4j.linalg.io.ClassPathResource in project deeplearning4j by deeplearning4j.
In the class RecordReaderMultiDataSetIteratorTest, the method testSplittingCSV:
@Test
public void testSplittingCSV() throws Exception {
    //Here's the idea: take Iris, and split it up into 2 input arrays and 2 output arrays
    //Inputs: column 0, and columns 1-2
    //Outputs: column 3, and column 4 -> one-hot
    //Need to manually extract
    RecordReader rr = new CSVRecordReader(0, ",");
    rr.initialize(new FileSplit(new ClassPathResource("iris.txt").getTempFileFromArchive()));
    RecordReaderDataSetIterator rrdsi = new RecordReaderDataSetIterator(rr, 10, 4, 3);

    RecordReader rr2 = new CSVRecordReader(0, ",");
    rr2.initialize(new FileSplit(new ClassPathResource("iris.txt").getTempFileFromArchive()));
    MultiDataSetIterator rrmdsi = new RecordReaderMultiDataSetIterator.Builder(10)
                    .addReader("reader", rr2)
                    .addInput("reader", 0, 0)
                    .addInput("reader", 1, 2)
                    .addOutput("reader", 3, 3)
                    .addOutputOneHot("reader", 4, 3)
                    .build();

    while (rrdsi.hasNext()) {
        DataSet ds = rrdsi.next();
        INDArray fds = ds.getFeatureMatrix();
        INDArray lds = ds.getLabels();

        MultiDataSet mds = rrmdsi.next();
        assertEquals(2, mds.getFeatures().length);
        assertEquals(2, mds.getLabels().length);
        assertNull(mds.getFeaturesMaskArrays());
        assertNull(mds.getLabelsMaskArrays());
        INDArray[] fmds = mds.getFeatures();
        INDArray[] lmds = mds.getLabels();
        assertNotNull(fmds);
        assertNotNull(lmds);
        for (int i = 0; i < fmds.length; i++) assertNotNull(fmds[i]);
        for (int i = 0; i < lmds.length; i++) assertNotNull(lmds[i]);

        //Get the subsets of the original iris data
        INDArray expIn1 = fds.get(NDArrayIndex.all(), NDArrayIndex.point(0));
        INDArray expIn2 = fds.get(NDArrayIndex.all(), NDArrayIndex.interval(1, 2, true));
        INDArray expOut1 = fds.get(NDArrayIndex.all(), NDArrayIndex.point(3));
        INDArray expOut2 = lds;

        assertEquals(expIn1, fmds[0]);
        assertEquals(expIn2, fmds[1]);
        assertEquals(expOut1, lmds[0]);
        assertEquals(expOut2, lmds[1]);
    }
    assertFalse(rrmdsi.hasNext());
}
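For reference, a minimal sketch (not part of the test) of what the split pipeline emits; the shapes assume the mini-batch size of 10 and the column splits configured above:

    MultiDataSet first = rrmdsi.next();
    System.out.println(java.util.Arrays.toString(first.getFeatures(0).shape())); // [10, 1] <- column 0
    System.out.println(java.util.Arrays.toString(first.getFeatures(1).shape())); // [10, 2] <- columns 1-2
    System.out.println(java.util.Arrays.toString(first.getLabels(0).shape()));   // [10, 1] <- column 3
    System.out.println(java.util.Arrays.toString(first.getLabels(1).shape()));   // [10, 3] <- column 4, one-hot
    rrmdsi.reset(); // rewind before reusing the iterator, e.g. in the comparison loop above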
Use of org.nd4j.linalg.io.ClassPathResource in project deeplearning4j by deeplearning4j.
In the class RecordReaderMultiDataSetIteratorTest, the method testImagesRRDMSI_Batched:
@Test
public void testImagesRRDMSI_Batched() throws Exception {
    File parentDir = Files.createTempDir();
    parentDir.deleteOnExit();
    String str1 = FilenameUtils.concat(parentDir.getAbsolutePath(), "Zico/");
    String str2 = FilenameUtils.concat(parentDir.getAbsolutePath(), "Ziwang_Xu/");
    File f1 = new File(str1);
    File f2 = new File(str2);
    f1.mkdirs();
    f2.mkdirs();
    writeStreamToFile(new File(FilenameUtils.concat(f1.getPath(), "Zico_0001.jpg")),
                    new ClassPathResource("lfwtest/Zico/Zico_0001.jpg").getInputStream());
    writeStreamToFile(new File(FilenameUtils.concat(f2.getPath(), "Ziwang_Xu_0001.jpg")),
                    new ClassPathResource("lfwtest/Ziwang_Xu/Ziwang_Xu_0001.jpg").getInputStream());

    int outputNum = 2;
    ParentPathLabelGenerator labelMaker = new ParentPathLabelGenerator();
    ImageRecordReader rr1 = new ImageRecordReader(10, 10, 1, labelMaker);
    ImageRecordReader rr1s = new ImageRecordReader(5, 5, 1, labelMaker);
    rr1.initialize(new FileSplit(parentDir));
    rr1s.initialize(new FileSplit(parentDir));

    MultiDataSetIterator trainDataIterator = new RecordReaderMultiDataSetIterator.Builder(2)
                    .addReader("rr1", rr1)
                    .addReader("rr1s", rr1s)
                    .addInput("rr1", 0, 0)
                    .addInput("rr1s", 0, 0)
                    .addOutputOneHot("rr1s", 1, outputNum)
                    .build();

    //Now, do the same thing with single-output RecordReaderDataSetIterators, and check we get the same results:
    ImageRecordReader rr1_b = new ImageRecordReader(10, 10, 1, labelMaker);
    ImageRecordReader rr1s_b = new ImageRecordReader(5, 5, 1, labelMaker);
    rr1_b.initialize(new FileSplit(parentDir));
    rr1s_b.initialize(new FileSplit(parentDir));
    DataSetIterator dsi1 = new RecordReaderDataSetIterator(rr1_b, 2, 1, 2);
    DataSetIterator dsi2 = new RecordReaderDataSetIterator(rr1s_b, 2, 1, 2);

    MultiDataSet mds = trainDataIterator.next();
    DataSet d1 = dsi1.next();
    DataSet d2 = dsi2.next();
    assertEquals(d1.getFeatureMatrix(), mds.getFeatures(0));
    assertEquals(d2.getFeatureMatrix(), mds.getFeatures(1));
    assertEquals(d1.getLabels(), mds.getLabels(0));
}
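A short sketch (again, not from the test) of the layout the two-reader pipeline produces; the shapes assume batch size 2, 1 channel, and the 10x10 / 5x5 readers above, with image features in NCHW order:

    trainDataIterator.reset(); // the test above already consumed the single batch
    MultiDataSet batch = trainDataIterator.next();
    System.out.println(java.util.Arrays.toString(batch.getFeatures(0).shape())); // [2, 1, 10, 10]
    System.out.println(java.util.Arrays.toString(batch.getFeatures(1).shape())); // [2, 1, 5, 5]
    System.out.println(java.util.Arrays.toString(batch.getLabels(0).shape()));   // [2, 2] one-hot over 2 classes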
Use of org.nd4j.linalg.io.ClassPathResource in project deeplearning4j by deeplearning4j.
In the class RegressionTest050, the method regressionTestMLP1:
@Test
public void regressionTestMLP1() throws Exception {
    File f = new ClassPathResource("regression_testing/050/050_ModelSerializer_Regression_MLP_1.zip").getTempFileFromArchive();
    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);
    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(WeightInit.XAVIER, l0.getWeightInit());
    assertEquals(Updater.NESTEROVS, l0.getUpdater());
    assertEquals(0.9, l0.getMomentum(), 1e-6);
    assertEquals(0.15, l0.getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertEquals(LossFunctions.LossFunction.MCXENT, l1.getLossFunction());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(WeightInit.XAVIER, l1.getWeightInit());
    assertEquals(Updater.NESTEROVS, l1.getUpdater());
    assertEquals(0.9, l1.getMomentum(), 1e-6);
    assertEquals(0.15, l1.getLearningRate(), 1e-6);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());
    int updaterSize = net.getUpdater().stateSizeForLayer(net);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
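The fixture restored here is simply a network that was saved with ModelSerializer under DL4J 0.5.0. A hedged sketch of the save side (the output file name is illustrative, and net stands in for any trained MultiLayerNetwork):

    File out = new File("050_ModelSerializer_Regression_MLP_1.zip");
    // The final 'true' also serializes the updater state, which is why the test
    // can assert on net.getUpdater().getStateViewArray() after restoring.
    ModelSerializer.writeModel(net, out, true);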
Use of org.nd4j.linalg.io.ClassPathResource in project deeplearning4j by deeplearning4j.
In the class RegressionTest050, the method regressionTestCNN1:
@Test
public void regressionTestCNN1() throws Exception {
    File f = new ClassPathResource("regression_testing/050/050_ModelSerializer_Regression_CNN_1.zip").getTempFileFromArchive();
    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);
    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(WeightInit.RELU, l0.getWeightInit());
    assertEquals(Updater.RMSPROP, l0.getUpdater());
    assertEquals(0.96, l0.getRmsDecay(), 1e-6);
    assertEquals(0.15, l0.getLearningRate(), 1e-6);
    assertArrayEquals(new int[] { 2, 2 }, l0.getKernelSize());
    assertArrayEquals(new int[] { 1, 1 }, l0.getStride());
    assertArrayEquals(new int[] { 0, 0 }, l0.getPadding());
    //Pre-0.7.0: no ConvolutionMode. Want to default to Truncate here if not set
    assertEquals(ConvolutionMode.Truncate, l0.getConvolutionMode());

    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] { 2, 2 }, l1.getKernelSize());
    assertArrayEquals(new int[] { 1, 1 }, l1.getStride());
    assertArrayEquals(new int[] { 0, 0 }, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    //Pre-0.7.0: no ConvolutionMode. Want to default to Truncate here if not set
    assertEquals(ConvolutionMode.Truncate, l1.getConvolutionMode());

    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertEquals(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD, l2.getLossFunction());
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood); //TODO
    assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals(WeightInit.RELU, l2.getWeightInit());
    assertEquals(Updater.RMSPROP, l2.getUpdater());
    assertEquals(0.96, l2.getRmsDecay(), 1e-6);
    assertEquals(0.15, l2.getLearningRate(), 1e-6);

    int numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams), net.params());
    int updaterSize = net.getUpdater().stateSizeForLayer(net);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize), net.getUpdater().getStateViewArray());
}
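The ConvolutionMode assertions pin down a 0.7.0 behavior change: models saved before the enum existed must deserialize with Truncate. On 0.7.0+ the mode can be set explicitly when building a layer; a sketch (the values are illustrative, mirroring the config asserted above):

    ConvolutionLayer conv = new ConvolutionLayer.Builder(2, 2)
                    .nIn(3).nOut(3)
                    .stride(1, 1)
                    .convolutionMode(ConvolutionMode.Truncate) // or Strict / Same
                    .build();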
Use of org.nd4j.linalg.io.ClassPathResource in project deeplearning4j by deeplearning4j.
In the class RegressionTest060, the method regressionTestCGLSTM1:
@Test
public void regressionTestCGLSTM1() throws Exception {
    File f = new ClassPathResource("regression_testing/060/060_ModelSerializer_Regression_CG_LSTM_1.zip").getTempFileFromArchive();
    ComputationGraph net = ModelSerializer.restoreComputationGraph(f, true);
    ComputationGraphConfiguration conf = net.getConfiguration();
    assertEquals(3, conf.getVertices().size());
    assertTrue(conf.isBackprop());
    assertFalse(conf.isPretrain());

    GravesLSTM l0 = (GravesLSTM) ((LayerVertex) conf.getVertices().get("0")).getLayerConf().getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 = (GravesBidirectionalLSTM) ((LayerVertex) conf.getVertices().get("1")).getLayerConf().getLayer();
    assertEquals("softsign", l1.getActivationFn().toString());
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) ((LayerVertex) conf.getVertices().get("2")).getLayerConf().getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals("softmax", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
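The second argument to restoreComputationGraph selects whether updater state (momentum, RMS history, and so on) is loaded alongside the parameters; for example:

    ComputationGraph forTraining = ModelSerializer.restoreComputationGraph(f, true);   // resume training
    ComputationGraph forInference = ModelSerializer.restoreComputationGraph(f, false); // inference only, smaller footprint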