Use of org.datavec.api.records.reader.SequenceRecordReader in project deeplearning4j by deeplearning4j.
In the class RecordReaderMultiDataSetIteratorTest, the method testVariableLengthTS:
@Test
public void testVariableLengthTS() throws Exception {
    //need to manually extract
    for (int i = 0; i < 3; i++) {
        new ClassPathResource(String.format("csvsequence_%d.txt", i)).getTempFileFromArchive();
        new ClassPathResource(String.format("csvsequencelabels_%d.txt", i)).getTempFileFromArchive();
        new ClassPathResource(String.format("csvsequencelabelsShort_%d.txt", i)).getTempFileFromArchive();
    }
    //Set up SequenceRecordReaderDataSetIterators for comparison
    ClassPathResource resource = new ClassPathResource("csvsequence_0.txt");
    String featuresPath = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    resource = new ClassPathResource("csvsequencelabelsShort_0.txt");
    String labelsPath = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    SequenceRecordReader featureReader = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader = new CSVSequenceRecordReader(1, ",");
    featureReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    SequenceRecordReader featureReader2 = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader2 = new CSVSequenceRecordReader(1, ",");
    featureReader2.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader2.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    SequenceRecordReaderDataSetIterator iterAlignStart = new SequenceRecordReaderDataSetIterator(featureReader, labelReader, 1, 4,
                    false, SequenceRecordReaderDataSetIterator.AlignmentMode.ALIGN_START);
    SequenceRecordReaderDataSetIterator iterAlignEnd = new SequenceRecordReaderDataSetIterator(featureReader2, labelReader2, 1, 4,
                    false, SequenceRecordReaderDataSetIterator.AlignmentMode.ALIGN_END);
    //Set up the corresponding RecordReaderMultiDataSetIterators
    SequenceRecordReader featureReader3 = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader3 = new CSVSequenceRecordReader(1, ",");
    featureReader3.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader3.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    SequenceRecordReader featureReader4 = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader4 = new CSVSequenceRecordReader(1, ",");
    featureReader4.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader4.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    RecordReaderMultiDataSetIterator rrmdsiStart = new RecordReaderMultiDataSetIterator.Builder(1)
                    .addSequenceReader("in", featureReader3)
                    .addSequenceReader("out", labelReader3)
                    .addInput("in")
                    .addOutputOneHot("out", 0, 4)
                    .sequenceAlignmentMode(RecordReaderMultiDataSetIterator.AlignmentMode.ALIGN_START)
                    .build();
    RecordReaderMultiDataSetIterator rrmdsiEnd = new RecordReaderMultiDataSetIterator.Builder(1)
                    .addSequenceReader("in", featureReader4)
                    .addSequenceReader("out", labelReader4)
                    .addInput("in")
                    .addOutputOneHot("out", 0, 4)
                    .sequenceAlignmentMode(RecordReaderMultiDataSetIterator.AlignmentMode.ALIGN_END)
                    .build();
    while (iterAlignStart.hasNext()) {
        DataSet dsStart = iterAlignStart.next();
        DataSet dsEnd = iterAlignEnd.next();
        MultiDataSet mdsStart = rrmdsiStart.next();
        MultiDataSet mdsEnd = rrmdsiEnd.next();
        assertEquals(1, mdsStart.getFeatures().length);
        assertEquals(1, mdsStart.getLabels().length);
        //assertEquals(1, mdsStart.getFeaturesMaskArrays().length); //Features data is always longer -> don't need mask arrays for it
        assertEquals(1, mdsStart.getLabelsMaskArrays().length);
        assertEquals(1, mdsEnd.getFeatures().length);
        assertEquals(1, mdsEnd.getLabels().length);
        //assertEquals(1, mdsEnd.getFeaturesMaskArrays().length);
        assertEquals(1, mdsEnd.getLabelsMaskArrays().length);
        assertEquals(dsStart.getFeatureMatrix(), mdsStart.getFeatures(0));
        assertEquals(dsStart.getLabels(), mdsStart.getLabels(0));
        assertEquals(dsStart.getLabelsMaskArray(), mdsStart.getLabelsMaskArray(0));
        assertEquals(dsEnd.getFeatureMatrix(), mdsEnd.getFeatures(0));
        assertEquals(dsEnd.getLabels(), mdsEnd.getLabels(0));
        assertEquals(dsEnd.getLabelsMaskArray(), mdsEnd.getLabelsMaskArray(0));
    }
    assertFalse(rrmdsiStart.hasNext());
    assertFalse(rrmdsiEnd.hasNext());
}
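As a point of reference for the two alignment modes used above, here is a minimal, self-contained sketch of the label mask patterns they would produce, assuming a features sequence of length 4 paired with a labels sequence of length 2 (the lengths and the class name AlignmentMaskSketch are illustrative, not taken from the test resources):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class AlignmentMaskSketch {
    public static void main(String[] args) {
        //ALIGN_START pads the shorter labels sequence at the end, so its mask is 1 for the
        //first two time steps and 0 for the padded ones.
        INDArray labelsMaskAlignStart = Nd4j.create(new double[][] { { 1, 1, 0, 0 } });
        //ALIGN_END pads the shorter labels sequence at the start instead.
        INDArray labelsMaskAlignEnd = Nd4j.create(new double[][] { { 0, 0, 1, 1 } });
        System.out.println("ALIGN_START labels mask: " + labelsMaskAlignStart);
        System.out.println("ALIGN_END labels mask:   " + labelsMaskAlignEnd);
    }
}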
Use of org.datavec.api.records.reader.SequenceRecordReader in project deeplearning4j by deeplearning4j.
In the class RecordReaderDataSetiteratorTest, the method testSequenceRecordReader:
@Test
public void testSequenceRecordReader() throws Exception {
    //need to manually extract
    for (int i = 0; i < 3; i++) {
        new ClassPathResource(String.format("csvsequence_%d.txt", i)).getTempFileFromArchive();
        new ClassPathResource(String.format("csvsequencelabels_%d.txt", i)).getTempFileFromArchive();
    }
    ClassPathResource resource = new ClassPathResource("csvsequence_0.txt");
    String featuresPath = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    resource = new ClassPathResource("csvsequencelabels_0.txt");
    String labelsPath = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    SequenceRecordReader featureReader = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader = new CSVSequenceRecordReader(1, ",");
    featureReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
    SequenceRecordReaderDataSetIterator iter = new SequenceRecordReaderDataSetIterator(featureReader, labelReader, 1, 4, false);
    assertEquals(3, iter.inputColumns());
    assertEquals(4, iter.totalOutcomes());
    List<DataSet> dsList = new ArrayList<>();
    while (iter.hasNext()) {
        dsList.add(iter.next());
    }
    //3 files
    assertEquals(3, dsList.size());
    for (int i = 0; i < 3; i++) {
        DataSet ds = dsList.get(i);
        INDArray features = ds.getFeatureMatrix();
        INDArray labels = ds.getLabels();
        //1 example in mini-batch
        assertEquals(1, features.size(0));
        assertEquals(1, labels.size(0));
        //3 values per line/time step
        assertEquals(3, features.size(1));
        //1 value per line, but 4 possible values -> one-hot vector
        assertEquals(4, labels.size(1));
        //sequence length = 4
        assertEquals(4, features.size(2));
        assertEquals(4, labels.size(2));
    }
    //Check features vs. expected:
    INDArray expF0 = Nd4j.create(1, 3, 4);
    expF0.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0, 1, 2 }));
    expF0.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 10, 11, 12 }));
    expF0.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 20, 21, 22 }));
    expF0.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 30, 31, 32 }));
    assertEquals(dsList.get(0).getFeatureMatrix(), expF0);
    INDArray expF1 = Nd4j.create(1, 3, 4);
    expF1.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 100, 101, 102 }));
    expF1.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 110, 111, 112 }));
    expF1.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 120, 121, 122 }));
    expF1.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 130, 131, 132 }));
    assertEquals(dsList.get(1).getFeatureMatrix(), expF1);
    INDArray expF2 = Nd4j.create(1, 3, 4);
    expF2.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 200, 201, 202 }));
    expF2.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 210, 211, 212 }));
    expF2.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 220, 221, 222 }));
    expF2.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 230, 231, 232 }));
    assertEquals(dsList.get(2).getFeatureMatrix(), expF2);
    //Check labels vs. expected:
    INDArray expL0 = Nd4j.create(1, 4, 4);
    expL0.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 1, 0, 0, 0 }));
    expL0.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 0, 1, 0, 0 }));
    expL0.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0, 0, 1, 0 }));
    expL0.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 0, 0, 0, 1 }));
    assertEquals(dsList.get(0).getLabels(), expL0);
    INDArray expL1 = Nd4j.create(1, 4, 4);
    expL1.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0, 0, 0, 1 }));
    expL1.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 0, 0, 1, 0 }));
    expL1.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0, 1, 0, 0 }));
    expL1.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 1, 0, 0, 0 }));
    assertEquals(dsList.get(1).getLabels(), expL1);
    INDArray expL2 = Nd4j.create(1, 4, 4);
    expL2.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0, 1, 0, 0 }));
    expL2.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 1, 0, 0, 0 }));
    expL2.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0, 0, 0, 1 }));
    expL2.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 0, 0, 1, 0 }));
    assertEquals(dsList.get(2).getLabels(), expL2);
}
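For orientation, the first pair of resource files can be reconstructed from the assertions above (this is an inferred sketch, not the actual resource contents; note that both readers are built with numLinesToSkip = 1, so each real file would also start with a header line that is skipped). csvsequence_0.txt would hold one time step of three comma-separated values per line, and csvsequencelabels_0.txt one class index per line:

csvsequence_0.txt:
0,1,2
10,11,12
20,21,22
30,31,32

csvsequencelabels_0.txt:
0
1
2
3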
Use of org.datavec.api.records.reader.SequenceRecordReader in project deeplearning4j by deeplearning4j.
In the class RecordReaderDataSetiteratorTest, the method testSeqRRDSIArrayWritableOneReaderRegression:
@Test
public void testSeqRRDSIArrayWritableOneReaderRegression() {
    //Regression, where the output is an array writable
    List<List<Writable>> sequence1 = new ArrayList<>();
    sequence1.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] { 1, 2, 3 })),
                    new NDArrayWritable(Nd4j.create(new double[] { 100, 200, 300 }))));
    sequence1.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] { 4, 5, 6 })),
                    new NDArrayWritable(Nd4j.create(new double[] { 400, 500, 600 }))));
    List<List<Writable>> sequence2 = new ArrayList<>();
    sequence2.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] { 7, 8, 9 })),
                    new NDArrayWritable(Nd4j.create(new double[] { 700, 800, 900 }))));
    sequence2.add(Arrays.asList((Writable) new NDArrayWritable(Nd4j.create(new double[] { 10, 11, 12 })),
                    new NDArrayWritable(Nd4j.create(new double[] { 1000, 1100, 1200 }))));
    SequenceRecordReader rr = new CollectionSequenceRecordReader(Arrays.asList(sequence1, sequence2));
    SequenceRecordReaderDataSetIterator iter = new SequenceRecordReaderDataSetIterator(rr, 2, -1, 1, true);
    DataSet ds = iter.next();
    //2 examples, 3 values per time step, 2 time steps
    INDArray expFeatures = Nd4j.create(2, 3, 2);
    expFeatures.tensorAlongDimension(0, 1, 2).assign(Nd4j.create(new double[][] { { 1, 4 }, { 2, 5 }, { 3, 6 } }));
    expFeatures.tensorAlongDimension(1, 1, 2).assign(Nd4j.create(new double[][] { { 7, 10 }, { 8, 11 }, { 9, 12 } }));
    INDArray expLabels = Nd4j.create(2, 3, 2);
    expLabels.tensorAlongDimension(0, 1, 2).assign(Nd4j.create(new double[][] { { 100, 400 }, { 200, 500 }, { 300, 600 } }));
    expLabels.tensorAlongDimension(1, 1, 2).assign(Nd4j.create(new double[][] { { 700, 1000 }, { 800, 1100 }, { 900, 1200 } }));
    assertEquals(expFeatures, ds.getFeatureMatrix());
    assertEquals(expLabels, ds.getLabels());
}
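Since the test above relies on the [miniBatchSize, valuesPerTimeStep, timeSteps] layout, a small optional addition (using the same ds and JUnit's assertArrayEquals, as elsewhere in this test class) would be to assert the shapes directly:

    //2 examples, 3 values per time step, 2 time steps, for both features and labels
    assertArrayEquals(new int[] { 2, 3, 2 }, ds.getFeatureMatrix().shape());
    assertArrayEquals(new int[] { 2, 3, 2 }, ds.getLabels().shape());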
Use of org.datavec.api.records.reader.SequenceRecordReader in project deeplearning4j by deeplearning4j.
In the class RecordReaderDataSetiteratorTest, the method testSequenceRecordReaderSingleReader:
@Test
public void testSequenceRecordReaderSingleReader() throws Exception {
    //need to manually extract
    for (int i = 0; i < 3; i++) {
        new ClassPathResource(String.format("csvsequenceSingle_%d.txt", i)).getTempFileFromArchive();
    }
    ClassPathResource resource = new ClassPathResource("csvsequenceSingle_0.txt");
    String path = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");
    SequenceRecordReader reader = new CSVSequenceRecordReader(1, ",");
    reader.initialize(new NumberedFileInputSplit(path, 0, 2));
    SequenceRecordReaderDataSetIterator iteratorClassification = new SequenceRecordReaderDataSetIterator(reader, 1, 3, 0, false);
    SequenceRecordReader reader2 = new CSVSequenceRecordReader(1, ",");
    reader2.initialize(new NumberedFileInputSplit(path, 0, 2));
    SequenceRecordReaderDataSetIterator iteratorRegression = new SequenceRecordReaderDataSetIterator(reader2, 1, 3, 0, true);
    INDArray expF0 = Nd4j.create(1, 2, 4);
    expF0.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 1, 2 }));
    expF0.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 11, 12 }));
    expF0.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 21, 22 }));
    expF0.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 31, 32 }));
    INDArray expF1 = Nd4j.create(1, 2, 4);
    expF1.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 101, 102 }));
    expF1.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 111, 112 }));
    expF1.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 121, 122 }));
    expF1.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 131, 132 }));
    INDArray expF2 = Nd4j.create(1, 2, 4);
    expF2.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 201, 202 }));
    expF2.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 211, 212 }));
    expF2.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 221, 222 }));
    expF2.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 231, 232 }));
    INDArray[] expF = new INDArray[] { expF0, expF1, expF2 };
    //Expected out for classification:
    INDArray expOut0 = Nd4j.create(1, 3, 4);
    expOut0.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 1, 0, 0 }));
    expOut0.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 0, 1, 0 }));
    expOut0.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0, 0, 1 }));
    expOut0.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 1, 0, 0 }));
    INDArray expOut1 = Nd4j.create(1, 3, 4);
    expOut1.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0, 1, 0 }));
    expOut1.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 0, 0, 1 }));
    expOut1.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 1, 0, 0 }));
    expOut1.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 0, 0, 1 }));
    INDArray expOut2 = Nd4j.create(1, 3, 4);
    expOut2.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0, 1, 0 }));
    expOut2.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 1, 0, 0 }));
    expOut2.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0, 1, 0 }));
    expOut2.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 0, 0, 1 }));
    INDArray[] expOutClassification = new INDArray[] { expOut0, expOut1, expOut2 };
    //Expected out for regression:
    INDArray expOutR0 = Nd4j.create(1, 1, 4);
    expOutR0.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 0 }));
    expOutR0.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 1 }));
    expOutR0.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 2 }));
    expOutR0.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 0 }));
    INDArray expOutR1 = Nd4j.create(1, 1, 4);
    expOutR1.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 1 }));
    expOutR1.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 2 }));
    expOutR1.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 0 }));
    expOutR1.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 2 }));
    INDArray expOutR2 = Nd4j.create(1, 1, 4);
    expOutR2.tensorAlongDimension(0, 1).assign(Nd4j.create(new double[] { 1 }));
    expOutR2.tensorAlongDimension(1, 1).assign(Nd4j.create(new double[] { 0 }));
    expOutR2.tensorAlongDimension(2, 1).assign(Nd4j.create(new double[] { 1 }));
    expOutR2.tensorAlongDimension(3, 1).assign(Nd4j.create(new double[] { 2 }));
    INDArray[] expOutRegression = new INDArray[] { expOutR0, expOutR1, expOutR2 };
    int countC = 0;
    while (iteratorClassification.hasNext()) {
        DataSet ds = iteratorClassification.next();
        INDArray f = ds.getFeatures();
        INDArray l = ds.getLabels();
        assertNull(ds.getFeaturesMaskArray());
        assertNull(ds.getLabelsMaskArray());
        assertArrayEquals(new int[] { 1, 2, 4 }, f.shape());
        //One-hot representation
        assertArrayEquals(new int[] { 1, 3, 4 }, l.shape());
        assertEquals(expF[countC], f);
        assertEquals(expOutClassification[countC++], l);
    }
    assertEquals(3, countC);
    assertEquals(3, iteratorClassification.totalOutcomes());
    int countF = 0;
    while (iteratorRegression.hasNext()) {
        DataSet ds = iteratorRegression.next();
        INDArray f = ds.getFeatures();
        INDArray l = ds.getLabels();
        assertNull(ds.getFeaturesMaskArray());
        assertNull(ds.getLabelsMaskArray());
        assertArrayEquals(new int[] { 1, 2, 4 }, f.shape());
        //Regression (single output)
        assertArrayEquals(new int[] { 1, 1, 4 }, l.shape());
        assertEquals(expF[countF], f);
        assertEquals(expOutRegression[countF++], l);
    }
    assertEquals(3, countF);
    assertEquals(1, iteratorRegression.totalOutcomes());
}
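Again reconstructing from the assertions (an inferred sketch, not the actual resource; the reader skips one header line), csvsequenceSingle_0.txt would have the label in column 0, as declared by labelIndex = 0 above, followed by the two feature values for each time step:

csvsequenceSingle_0.txt:
0,1,2
1,11,12
2,21,22
0,31,32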
Use of org.datavec.api.records.reader.SequenceRecordReader in project deeplearning4j by deeplearning4j.
In the class RecordReaderDataSetiteratorTest, the method testSequenceRecordReaderSingleReaderWithEmptySequenceThrows:
@Test(expected = ZeroLengthSequenceException.class)
public void testSequenceRecordReaderSingleReaderWithEmptySequenceThrows() throws Exception {
    SequenceRecordReader reader = new CSVSequenceRecordReader(1, ",");
    reader.initialize(new FileSplit(new ClassPathResource("empty.txt").getTempFileFromArchive()));
    new SequenceRecordReaderDataSetIterator(reader, 1, -1, 1, true).next();
}