
Example 11 with CSVSequenceRecordReader

Use of org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader in project deeplearning4j by deeplearning4j.

From the class RecordReaderMultiDataSetIteratorTest, method testsBasic.

@Test
public void testsBasic() throws Exception {
    //Load details from CSV files; single input/output -> compare to RecordReaderDataSetIterator
    RecordReader rr = new CSVRecordReader(0, ",");
    rr.initialize(new FileSplit(new ClassPathResource("iris.txt").getTempFileFromArchive()));
    RecordReaderDataSetIterator rrdsi = new RecordReaderDataSetIterator(rr, 10, 4, 3);
    RecordReader rr2 = new CSVRecordReader(0, ",");
    rr2.initialize(new FileSplit(new ClassPathResource("iris.txt").getTempFileFromArchive()));
    MultiDataSetIterator rrmdsi = new RecordReaderMultiDataSetIterator.Builder(10).addReader("reader", rr2).addInput("reader", 0, 3).addOutputOneHot("reader", 4, 3).build();
    while (rrdsi.hasNext()) {
        DataSet ds = rrdsi.next();
        INDArray fds = ds.getFeatureMatrix();
        INDArray lds = ds.getLabels();
        MultiDataSet mds = rrmdsi.next();
        assertEquals(1, mds.getFeatures().length);
        assertEquals(1, mds.getLabels().length);
        assertNull(mds.getFeaturesMaskArrays());
        assertNull(mds.getLabelsMaskArrays());
        INDArray fmds = mds.getFeatures(0);
        INDArray lmds = mds.getLabels(0);
        assertNotNull(fmds);
        assertNotNull(lmds);
        assertEquals(fds, fmds);
        assertEquals(lds, lmds);
    }
    assertFalse(rrmdsi.hasNext());
    //Need to manually extract the sequence files from the classpath archive to temp files so they can be referenced by path
    for (int i = 0; i < 3; i++) {
        new ClassPathResource(String.format("csvsequence_%d.txt", i)).getTempFileFromArchive();
        new ClassPathResource(String.format("csvsequencelabels_%d.txt", i)).getTempFileFromArchive();
        new ClassPathResource(String.format("csvsequencelabelsShort_%d.txt", i)).getTempFileFromArchive();
    }
    //Load time series from CSV sequence files; compare to SequenceRecordReaderDataSetIterator
    ClassPathResource resource = new ClassPathResource("csvsequence_0.txt");
    String featuresPath = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    resource = new ClassPathResource("csvsequencelabels_0.txt");
    String labelsPath = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    SequenceRecordReader featureReader = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader = new CSVSequenceRecordReader(1, ",");
    featureReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    SequenceRecordReaderDataSetIterator iter = new SequenceRecordReaderDataSetIterator(featureReader, labelReader, 1, 4, false);
    SequenceRecordReader featureReader2 = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader2 = new CSVSequenceRecordReader(1, ",");
    featureReader2.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader2.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    MultiDataSetIterator srrmdsi = new RecordReaderMultiDataSetIterator.Builder(1).addSequenceReader("in", featureReader2).addSequenceReader("out", labelReader2).addInput("in").addOutputOneHot("out", 0, 4).build();
    while (iter.hasNext()) {
        DataSet ds = iter.next();
        INDArray fds = ds.getFeatureMatrix();
        INDArray lds = ds.getLabels();
        MultiDataSet mds = srrmdsi.next();
        assertEquals(1, mds.getFeatures().length);
        assertEquals(1, mds.getLabels().length);
        assertNull(mds.getFeaturesMaskArrays());
        assertNull(mds.getLabelsMaskArrays());
        INDArray fmds = mds.getFeatures(0);
        INDArray lmds = mds.getLabels(0);
        assertNotNull(fmds);
        assertNotNull(lmds);
        assertEquals(fds, fmds);
        assertEquals(lds, lmds);
    }
    assertFalse(srrmdsi.hasNext());
}
Also used : CSVSequenceRecordReader(org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader) SequenceRecordReader(org.datavec.api.records.reader.SequenceRecordReader) RecordReader(org.datavec.api.records.reader.RecordReader) CSVRecordReader(org.datavec.api.records.reader.impl.csv.CSVRecordReader) ImageRecordReader(org.datavec.image.recordreader.ImageRecordReader) FileSplit(org.datavec.api.split.FileSplit) NumberedFileInputSplit(org.datavec.api.split.NumberedFileInputSplit) ClassPathResource(org.nd4j.linalg.io.ClassPathResource) DataSet(org.nd4j.linalg.dataset.DataSet) MultiDataSet(org.nd4j.linalg.dataset.api.MultiDataSet) MultiDataSetIterator(org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator) INDArray(org.nd4j.linalg.api.ndarray.INDArray) Test(org.junit.Test)
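In this test the RecordReaderMultiDataSetIterator.Builder maps columns of the iris CSV to named inputs and outputs: addInput("reader", 0, 3) takes columns 0 through 3 (the four iris features) as a single input array, and addOutputOneHot("reader", 4, 3) one-hot encodes column 4 into 3 classes, which matches what RecordReaderDataSetIterator(rr, 10, 4, 3) produces in the single-input/single-output case. A minimal sketch of that builder in isolation, assuming RecordReaderMultiDataSetIterator lives in org.deeplearning4j.datasets.datavec (class and variable names below are illustrative, not from the test):

import org.datavec.api.records.reader.RecordReader;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;
import org.deeplearning4j.datasets.datavec.RecordReaderMultiDataSetIterator;
import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator;
import org.nd4j.linalg.io.ClassPathResource;

public class IrisMultiDataSetSketch {
    public static void main(String[] args) throws Exception {
        RecordReader csv = new CSVRecordReader(0, ",");   //no header lines to skip, comma-delimited
        csv.initialize(new FileSplit(new ClassPathResource("iris.txt").getTempFileFromArchive()));
        MultiDataSetIterator iter = new RecordReaderMultiDataSetIterator.Builder(10)   //mini-batch size 10
                .addReader("reader", csv)
                .addInput("reader", 0, 3)          //columns 0..3 -> one feature array (4 values)
                .addOutputOneHot("reader", 4, 3)   //column 4 -> one-hot label over 3 classes
                .build();
        System.out.println(iter.next());           //one MultiDataSet with a single feature and a single label array
    }
}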

Example 12 with CSVSequenceRecordReader

Use of org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader in project deeplearning4j by deeplearning4j.

From the class RecordReaderMultiDataSetIteratorTest, method testVariableLengthTS.

@Test
public void testVariableLengthTS() throws Exception {
    //Need to manually extract the sequence files from the classpath archive to temp files so they can be referenced by path
    for (int i = 0; i < 3; i++) {
        new ClassPathResource(String.format("csvsequence_%d.txt", i)).getTempFileFromArchive();
        new ClassPathResource(String.format("csvsequencelabels_%d.txt", i)).getTempFileFromArchive();
        new ClassPathResource(String.format("csvsequencelabelsShort_%d.txt", i)).getTempFileFromArchive();
    }
    //Set up SequenceRecordReaderDataSetIterators for comparison
    ClassPathResource resource = new ClassPathResource("csvsequence_0.txt");
    String featuresPath = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    resource = new ClassPathResource("csvsequencelabelsShort_0.txt");
    String labelsPath = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    SequenceRecordReader featureReader = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader = new CSVSequenceRecordReader(1, ",");
    featureReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    SequenceRecordReader featureReader2 = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader2 = new CSVSequenceRecordReader(1, ",");
    featureReader2.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader2.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    SequenceRecordReaderDataSetIterator iterAlignStart = new SequenceRecordReaderDataSetIterator(featureReader, labelReader, 1, 4, false, SequenceRecordReaderDataSetIterator.AlignmentMode.ALIGN_START);
    SequenceRecordReaderDataSetIterator iterAlignEnd = new SequenceRecordReaderDataSetIterator(featureReader2, labelReader2, 1, 4, false, SequenceRecordReaderDataSetIterator.AlignmentMode.ALIGN_END);
    //Set up the equivalent RecordReaderMultiDataSetIterators for both alignment modes
    SequenceRecordReader featureReader3 = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader3 = new CSVSequenceRecordReader(1, ",");
    featureReader3.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader3.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    SequenceRecordReader featureReader4 = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader4 = new CSVSequenceRecordReader(1, ",");
    featureReader4.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader4.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    RecordReaderMultiDataSetIterator rrmdsiStart = new RecordReaderMultiDataSetIterator.Builder(1).addSequenceReader("in", featureReader3).addSequenceReader("out", labelReader3).addInput("in").addOutputOneHot("out", 0, 4).sequenceAlignmentMode(RecordReaderMultiDataSetIterator.AlignmentMode.ALIGN_START).build();
    RecordReaderMultiDataSetIterator rrmdsiEnd = new RecordReaderMultiDataSetIterator.Builder(1).addSequenceReader("in", featureReader4).addSequenceReader("out", labelReader4).addInput("in").addOutputOneHot("out", 0, 4).sequenceAlignmentMode(RecordReaderMultiDataSetIterator.AlignmentMode.ALIGN_END).build();
    while (iterAlignStart.hasNext()) {
        DataSet dsStart = iterAlignStart.next();
        DataSet dsEnd = iterAlignEnd.next();
        MultiDataSet mdsStart = rrmdsiStart.next();
        MultiDataSet mdsEnd = rrmdsiEnd.next();
        assertEquals(1, mdsStart.getFeatures().length);
        assertEquals(1, mdsStart.getLabels().length);
        //assertEquals(1, mdsStart.getFeaturesMaskArrays().length); //Features data is always longer -> don't need mask arrays for it
        assertEquals(1, mdsStart.getLabelsMaskArrays().length);
        assertEquals(1, mdsEnd.getFeatures().length);
        assertEquals(1, mdsEnd.getLabels().length);
        //assertEquals(1, mdsEnd.getFeaturesMaskArrays().length);
        assertEquals(1, mdsEnd.getLabelsMaskArrays().length);
        assertEquals(dsStart.getFeatureMatrix(), mdsStart.getFeatures(0));
        assertEquals(dsStart.getLabels(), mdsStart.getLabels(0));
        assertEquals(dsStart.getLabelsMaskArray(), mdsStart.getLabelsMaskArray(0));
        assertEquals(dsEnd.getFeatureMatrix(), mdsEnd.getFeatures(0));
        assertEquals(dsEnd.getLabels(), mdsEnd.getLabels(0));
        assertEquals(dsEnd.getLabelsMaskArray(), mdsEnd.getLabelsMaskArray(0));
    }
    assertFalse(rrmdsiStart.hasNext());
    assertFalse(rrmdsiEnd.hasNext());
}
Also used : CSVSequenceRecordReader(org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader) SequenceRecordReader(org.datavec.api.records.reader.SequenceRecordReader) NumberedFileInputSplit(org.datavec.api.split.NumberedFileInputSplit) ClassPathResource(org.nd4j.linalg.io.ClassPathResource) DataSet(org.nd4j.linalg.dataset.DataSet) MultiDataSet(org.nd4j.linalg.dataset.api.MultiDataSet) Test(org.junit.Test)
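For reference, the difference between the two alignment modes compared here: with ALIGN_START the shorter label sequence is placed at the first time steps and the padded tail is masked out, while ALIGN_END places it at the last time steps and masks out the padded head. A minimal sketch of what the label mask arrays would look like for a 4-step feature sequence paired with a 2-step label sequence (the concrete lengths are assumptions for illustration, not taken from the csvsequencelabelsShort files):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class AlignmentModeSketch {
    public static void main(String[] args) {
        //Hypothetical 2-step label sequence inside a 4-step feature sequence.
        //ALIGN_START: labels occupy time steps 0-1, the padded tail is masked out.
        INDArray maskAlignStart = Nd4j.create(new double[] { 1, 1, 0, 0 });
        //ALIGN_END: labels occupy time steps 2-3, the padded head is masked out.
        INDArray maskAlignEnd = Nd4j.create(new double[] { 0, 0, 1, 1 });
        System.out.println("ALIGN_START mask: " + maskAlignStart);
        System.out.println("ALIGN_END mask:   " + maskAlignEnd);
    }
}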

Example 13 with CSVSequenceRecordReader

Use of org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader in project deeplearning4j by deeplearning4j.

From the class RecordReaderDataSetiteratorTest, method testSequenceRecordReader.

@Test
public void testSequenceRecordReader() throws Exception {
    //Need to manually extract the sequence files from the classpath archive to temp files so they can be referenced by path
    for (int i = 0; i < 3; i++) {
        new ClassPathResource(String.format("csvsequence_%d.txt", i)).getTempFileFromArchive();
        new ClassPathResource(String.format("csvsequencelabels_%d.txt", i)).getTempFileFromArchive();
    }
    ClassPathResource resource = new ClassPathResource("csvsequence_0.txt");
    String featuresPath = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    resource = new ClassPathResource("csvsequencelabels_0.txt");
    String labelsPath = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    SequenceRecordReader featureReader = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader = new CSVSequenceRecordReader(1, ",");
    featureReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    SequenceRecordReaderDataSetIterator iter = new SequenceRecordReaderDataSetIterator(featureReader, labelReader, 1, 4, false);
    assertEquals(3, iter.inputColumns());
    assertEquals(4, iter.totalOutcomes());
    List<DataSet> dsList = new ArrayList<>();
    while (iter.hasNext()) {
        dsList.add(iter.next());
    }
    //3 files
    assertEquals(3, dsList.size());
    for (int i = 0; i < 3; i++) {
        DataSet ds = dsList.get(i);
        INDArray features = ds.getFeatureMatrix();
        INDArray labels = ds.getLabels();
        //1 example in mini-batch
        assertEquals(1, features.size(0));
        assertEquals(1, labels.size(0));
        //3 values per line/time step
        assertEquals(3, features.size(1));
        //1 value per line, but 4 possible values -> one-hot vector
        assertEquals(4, labels.size(1));
        //sequence length = 4
        assertEquals(4, features.size(2));
        assertEquals(4, labels.size(2));
    }
    //Check features vs. expected:
    INDArray expF0 = Nd4j.create(1, 3, 4);
    expF0.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0, 1, 2 }));
    expF0.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 10, 11, 12 }));
    expF0.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 20, 21, 22 }));
    expF0.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 30, 31, 32 }));
    assertEquals(dsList.get(0).getFeatureMatrix(), expF0);
    INDArray expF1 = Nd4j.create(1, 3, 4);
    expF1.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 100, 101, 102 }));
    expF1.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 110, 111, 112 }));
    expF1.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 120, 121, 122 }));
    expF1.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 130, 131, 132 }));
    assertEquals(dsList.get(1).getFeatureMatrix(), expF1);
    INDArray expF2 = Nd4j.create(1, 3, 4);
    expF2.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 200, 201, 202 }));
    expF2.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 210, 211, 212 }));
    expF2.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 220, 221, 222 }));
    expF2.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 230, 231, 232 }));
    assertEquals(dsList.get(2).getFeatureMatrix(), expF2);
    //Check labels vs. expected:
    INDArray expL0 = Nd4j.create(1, 4, 4);
    expL0.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 1, 0, 0, 0 }));
    expL0.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 0, 1, 0, 0 }));
    expL0.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0, 0, 1, 0 }));
    expL0.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 0, 0, 0, 1 }));
    assertEquals(dsList.get(0).getLabels(), expL0);
    INDArray expL1 = Nd4j.create(1, 4, 4);
    expL1.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0, 0, 0, 1 }));
    expL1.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 0, 0, 1, 0 }));
    expL1.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0, 1, 0, 0 }));
    expL1.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 1, 0, 0, 0 }));
    assertEquals(dsList.get(1).getLabels(), expL1);
    INDArray expL2 = Nd4j.create(1, 4, 4);
    expL2.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0, 1, 0, 0 }));
    expL2.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 1, 0, 0, 0 }));
    expL2.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0, 0, 0, 1 }));
    expL2.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 0, 0, 1, 0 }));
    assertEquals(dsList.get(2).getLabels(), expL2);
}
Also used : CSVSequenceRecordReader(org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader) SequenceRecordReader(org.datavec.api.records.reader.SequenceRecordReader) CollectionSequenceRecordReader(org.datavec.api.records.reader.impl.collection.CollectionSequenceRecordReader) NumberedFileInputSplit(org.datavec.api.split.NumberedFileInputSplit) ClassPathResource(org.nd4j.linalg.io.ClassPathResource) DataSet(org.nd4j.linalg.dataset.DataSet) INDArray(org.nd4j.linalg.api.ndarray.INDArray) Test(org.junit.Test)
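The expected arrays above make the layout of the underlying resources easy to infer: each csvsequence_%d.txt presumably contains one header line (skipped because of the numLinesToSkip = 1 constructor argument) followed by four comma-separated rows, one per time step, and the replaceAll("0", "%d") call simply turns the extracted file's absolute path into the printf-style template that NumberedFileInputSplit expands back to the files numbered 0 through 2. A small sketch, using a temporary file whose header text is made up (only the numeric rows follow from expF0), that reproduces the first expected sequence:

import java.io.File;
import java.nio.file.Files;
import java.util.Arrays;
import org.datavec.api.records.reader.SequenceRecordReader;
import org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader;
import org.datavec.api.split.FileSplit;

public class CsvSequenceFileSketch {
    public static void main(String[] args) throws Exception {
        File f = Files.createTempFile("csvsequence_0", ".txt").toFile();
        Files.write(f.toPath(), Arrays.asList(
                "a,b,c",      //header line, skipped by the reader (header text is an assumption)
                "0,1,2",      //time step 0
                "10,11,12",   //time step 1
                "20,21,22",   //time step 2
                "30,31,32")); //time step 3
        SequenceRecordReader reader = new CSVSequenceRecordReader(1, ",");
        reader.initialize(new FileSplit(f));
        //Prints a 4-element list (time steps), each with 3 values -> feature shape [1, 3, 4] per example
        System.out.println(reader.sequenceRecord());
    }
}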

Example 14 with CSVSequenceRecordReader

Use of org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader in project deeplearning4j by deeplearning4j.

From the class RecordReaderDataSetiteratorTest, method testSequenceRecordReaderSingleReader.

@Test
public void testSequenceRecordReaderSingleReader() throws Exception {
    //Need to manually extract the sequence files from the classpath archive to temp files so they can be referenced by path
    for (int i = 0; i < 3; i++) {
        new ClassPathResource(String.format("csvsequenceSingle_%d.txt", i)).getTempFileFromArchive();
    }
    ClassPathResource resource = new ClassPathResource("csvsequenceSingle_0.txt");
    String path = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    SequenceRecordReader reader = new CSVSequenceRecordReader(1, ",");
    reader.initialize(new NumberedFileInputSplit(path, 0, 2));
    SequenceRecordReaderDataSetIterator iteratorClassification = new SequenceRecordReaderDataSetIterator(reader, 1, 3, 0, false);
    SequenceRecordReader reader2 = new CSVSequenceRecordReader(1, ",");
    reader2.initialize(new NumberedFileInputSplit(path, 0, 2));
    SequenceRecordReaderDataSetIterator iteratorRegression = new SequenceRecordReaderDataSetIterator(reader2, 1, 3, 0, true);
    INDArray expF0 = Nd4j.create(1, 2, 4);
    expF0.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 1, 2 }));
    expF0.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 11, 12 }));
    expF0.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 21, 22 }));
    expF0.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 31, 32 }));
    INDArray expF1 = Nd4j.create(1, 2, 4);
    expF1.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 101, 102 }));
    expF1.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 111, 112 }));
    expF1.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 121, 122 }));
    expF1.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 131, 132 }));
    INDArray expF2 = Nd4j.create(1, 2, 4);
    expF2.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 201, 202 }));
    expF2.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 211, 212 }));
    expF2.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 221, 222 }));
    expF2.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 231, 232 }));
    INDArray[] expF = new INDArray[] { expF0, expF1, expF2 };
    //Expected out for classification:
    INDArray expOut0 = Nd4j.create(1, 3, 4);
    expOut0.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 1, 0, 0 }));
    expOut0.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 0, 1, 0 }));
    expOut0.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0, 0, 1 }));
    expOut0.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 1, 0, 0 }));
    INDArray expOut1 = Nd4j.create(1, 3, 4);
    expOut1.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0, 1, 0 }));
    expOut1.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 0, 0, 1 }));
    expOut1.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 1, 0, 0 }));
    expOut1.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 0, 0, 1 }));
    INDArray expOut2 = Nd4j.create(1, 3, 4);
    expOut2.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0, 1, 0 }));
    expOut2.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 1, 0, 0 }));
    expOut2.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0, 1, 0 }));
    expOut2.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 0, 0, 1 }));
    INDArray[] expOutClassification = new INDArray[] { expOut0, expOut1, expOut2 };
    //Expected out for regression:
    INDArray expOutR0 = Nd4j.create(1, 1, 4);
    expOutR0.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0 }));
    expOutR0.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 1 }));
    expOutR0.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 2 }));
    expOutR0.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 0 }));
    INDArray expOutR1 = Nd4j.create(1, 1, 4);
    expOutR1.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 1 }));
    expOutR1.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 2 }));
    expOutR1.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0 }));
    expOutR1.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 2 }));
    INDArray expOutR2 = Nd4j.create(1, 1, 4);
    expOutR2.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 1 }));
    expOutR2.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 0 }));
    expOutR2.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 1 }));
    expOutR2.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 2 }));
    INDArray[] expOutRegression = new INDArray[] { expOutR0, expOutR1, expOutR2 };
    int countC = 0;
    while (iteratorClassification.hasNext()) {
        DataSet ds = iteratorClassification.next();
        INDArray f = ds.getFeatures();
        INDArray l = ds.getLabels();
        assertNull(ds.getFeaturesMaskArray());
        assertNull(ds.getLabelsMaskArray());
        assertArrayEquals(new int[] { 1, 2, 4 }, f.shape());
        //One-hot representation
        assertArrayEquals(new int[] { 1, 3, 4 }, l.shape());
        assertEquals(expF[countC], f);
        assertEquals(expOutClassification[countC++], l);
    }
    assertEquals(3, countC);
    assertEquals(3, iteratorClassification.totalOutcomes());
    int countF = 0;
    while (iteratorRegression.hasNext()) {
        DataSet ds = iteratorRegression.next();
        INDArray f = ds.getFeatures();
        INDArray l = ds.getLabels();
        assertNull(ds.getFeaturesMaskArray());
        assertNull(ds.getLabelsMaskArray());
        assertArrayEquals(new int[] { 1, 2, 4 }, f.shape());
        //Regression (single output)
        assertArrayEquals(new int[] { 1, 1, 4 }, l.shape());
        assertEquals(expF[countF], f);
        assertEquals(expOutRegression[countF++], l);
    }
    assertEquals(3, countF);
    assertEquals(1, iteratorRegression.totalOutcomes());
}
Also used : CSVSequenceRecordReader(org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader) SequenceRecordReader(org.datavec.api.records.reader.SequenceRecordReader) CollectionSequenceRecordReader(org.datavec.api.records.reader.impl.collection.CollectionSequenceRecordReader) NumberedFileInputSplit(org.datavec.api.split.NumberedFileInputSplit) ClassPathResource(org.nd4j.linalg.io.ClassPathResource) DataSet(org.nd4j.linalg.dataset.DataSet) INDArray(org.nd4j.linalg.api.ndarray.INDArray) Test(org.junit.Test)
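The single-reader constructor used in this test takes the mini-batch size, the number of possible label classes, the index of the label column, and a regression flag: with regression = false the column at the label index is one-hot encoded into the given number of classes, and with regression = true it is kept as a single raw value, which matches the [1, 3, 4] versus [1, 1, 4] label shapes asserted above. A restatement of the two constructor calls from the test, with the inferred parameter meanings as comments (the parameter names are assumptions drawn from how the test uses the values, and reader/reader2 are the readers constructed above):

//new SequenceRecordReaderDataSetIterator(reader, miniBatchSize, numPossibleLabels, labelIndex, regression)
SequenceRecordReaderDataSetIterator iteratorClassification =
        new SequenceRecordReaderDataSetIterator(reader, /*miniBatchSize*/ 1, /*numPossibleLabels*/ 3,
                /*labelIndex*/ 0, /*regression*/ false);   //label column one-hot encoded -> labels shape [1, 3, seqLength]
SequenceRecordReaderDataSetIterator iteratorRegression =
        new SequenceRecordReaderDataSetIterator(reader2, 1, 3, 0, /*regression*/ true);   //raw label value -> labels shape [1, 1, seqLength]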

Example 15 with CSVSequenceRecordReader

Use of org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader in project deeplearning4j by deeplearning4j.

From the class RecordReaderDataSetiteratorTest, method testSequenceRecordReaderSingleReaderWithEmptySequenceThrows.

@Test(expected = ZeroLengthSequenceException.class)
public void testSequenceRecordReaderSingleReaderWithEmptySequenceThrows() throws Exception {
    SequenceRecordReader reader = new CSVSequenceRecordReader(1, ",");
    reader.initialize(new FileSplit(new ClassPathResource("empty.txt").getTempFileFromArchive()));
    new SequenceRecordReaderDataSetIterator(reader, 1, -1, 1, true).next();
}
Also used : CSVSequenceRecordReader(org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader) SequenceRecordReader(org.datavec.api.records.reader.SequenceRecordReader) CollectionSequenceRecordReader(org.datavec.api.records.reader.impl.collection.CollectionSequenceRecordReader) FileSplit(org.datavec.api.split.FileSplit) ClassPathResource(org.nd4j.linalg.io.ClassPathResource) Test(org.junit.Test)

Aggregations

SequenceRecordReader (org.datavec.api.records.reader.SequenceRecordReader): 18 usages
CSVSequenceRecordReader (org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader): 18 usages
Test (org.junit.Test): 18 usages
NumberedFileInputSplit (org.datavec.api.split.NumberedFileInputSplit): 15 usages
ClassPathResource (org.nd4j.linalg.io.ClassPathResource): 15 usages
DataSet (org.nd4j.linalg.dataset.DataSet): 13 usages
CollectionSequenceRecordReader (org.datavec.api.records.reader.impl.collection.CollectionSequenceRecordReader): 10 usages
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 9 usages
FileSplit (org.datavec.api.split.FileSplit): 5 usages
MultiDataSet (org.nd4j.linalg.dataset.api.MultiDataSet): 5 usages
RecordMetaData (org.datavec.api.records.metadata.RecordMetaData): 4 usages
File (java.io.File): 3 usages
ArrayList (java.util.ArrayList): 3 usages
List (java.util.List): 3 usages
SequenceRecordReaderDataSetIterator (org.deeplearning4j.datasets.datavec.SequenceRecordReaderDataSetIterator): 3 usages
BaseSparkTest (org.deeplearning4j.spark.BaseSparkTest): 3 usages
Path (java.nio.file.Path): 2 usages
Text (org.apache.hadoop.io.Text): 2 usages
JavaSparkContext (org.apache.spark.api.java.JavaSparkContext): 2 usages
MultiDataSetIterator (org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator): 2 usages