Use of org.apache.hadoop.io.Writable in the project SQLWindowing by hbutani.
The class ByteBasedListTest, method test1.
@Test
public void test1() throws Exception {
    // Populate the byte-based list with every row of the test fixture.
    for (String r : table) {
        bl.append(new Text(r));
    }
    Class<? extends Writable> cls = sd.getSerializedClass();
    // Class.newInstance() is deprecated since Java 9: it rethrows checked
    // constructor exceptions unchecked. Use the Constructor-based replacement.
    Writable wObj = cls.getDeclaredConstructor().newInstance();
    // Drain the iterator once to exercise sequential deserialize/serialize.
    Iterator<Object> it = bl.iterator(sd, wObj);
    while (it.hasNext()) {
        sd.serialize(it.next(), sd.getObjectInspector());
    }
    // Random access: each stored row must round-trip back to its source string.
    for (int i = 0; i < table.length; i++) {
        Object o = sd.serialize(bl.get(i, sd, wObj), sd.getObjectInspector());
        Assert.assertEquals(table[i], o.toString());
    }
}
Use of org.apache.hadoop.io.Writable in the project SQLWindowing by hbutani.
The class PartitionedByteBasedListTest, method test2.
@Test
public void test2() throws Exception {
    // Seed the list with the shared fixture rows.
    for (String r : ByteBasedListTest.table) {
        bl.append(new Text(r));
    }
    Class<? extends Writable> cls = sd.getSerializedClass();
    // Class.newInstance() is deprecated since Java 9: it rethrows checked
    // constructor exceptions unchecked. Use the Constructor-based replacement.
    Writable wObj = cls.getDeclaredConstructor().newInstance();
    // Re-append each deserialized row, doubling the list's length.
    for (int i = 0; i < ByteBasedListTest.table.length; i++) {
        Object o = bl.get(i, sd, wObj);
        bl.append(o, sd.getObjectInspector(), sd);
    }
    // The second half must round-trip back to the original fixture strings.
    for (int i = ByteBasedListTest.table.length; i < 2 * ByteBasedListTest.table.length; i++) {
        Object o = sd.serialize(bl.get(i, sd, wObj), sd.getObjectInspector());
        Assert.assertEquals(ByteBasedListTest.table[i - ByteBasedListTest.table.length], o.toString());
    }
}
Use of org.apache.hadoop.io.Writable in the project SQLWindowing by hbutani.
The class PersistentByteBasedListTest, method test.
@Test
public void test() throws Exception {
    // Seed the in-memory list with the shared fixture rows.
    for (String r : ByteBasedListTest.table) {
        bl.append(new Text(r));
    }
    // Persist the list to a temp file, then reload it as a persistent list.
    File f = File.createTempFile("wdw", null);
    f.deleteOnExit();
    PersistentByteBasedList.store(bl, f);
    bl = null;
    PersistentByteBasedList pl = new PersistentByteBasedList(f);
    Class<? extends Writable> cls = sd.getSerializedClass();
    // Class.newInstance() is deprecated since Java 9: it rethrows checked
    // constructor exceptions unchecked. Use the Constructor-based replacement.
    Writable wObj = cls.getDeclaredConstructor().newInstance();
    // Drain the iterator once to exercise sequential access on the reloaded list.
    Iterator<Object> it = pl.iterator(sd, wObj);
    while (it.hasNext()) {
        sd.serialize(it.next(), sd.getObjectInspector());
    }
    // Random access on the reloaded list must round-trip the original strings.
    for (int i = 0; i < ByteBasedListTest.table.length; i++) {
        Object o = sd.serialize(pl.get(i, sd, wObj), sd.getObjectInspector());
        Assert.assertEquals(ByteBasedListTest.table[i], o.toString());
    }
}
Use of org.apache.hadoop.io.Writable in the project SQLWindowing by hbutani.
The class PersistentByteBasedSortedMapTest, method testKeyItr.
@Test
public void testKeyItr() throws Exception {
    int i = 0;
    // Insert in reverse order so iteration order proves the map sorts its keys.
    for (i = table.length - 1; i >= 0; i--) {
        bm.put(new Text(table[i]), new VIntWritable(i));
    }
    // Persist the sorted map to a temp file, then reload it.
    File f = File.createTempFile("wdw", null);
    f.deleteOnExit();
    PersistentByteBasedSortedMap.store(bm, f);
    bm = null;
    PersistentByteBasedSortedMap pm = new PersistentByteBasedSortedMap(f, comparator);
    Assert.assertEquals(table.length, pm.size());
    // Keys must come back in sorted (fixture) order, each mapped to its index.
    Iterator<Writable> it = pm.keyIterator(wObj);
    i = 0;
    while (it.hasNext()) {
        Writable we = it.next();
        Assert.assertEquals(table[i], we.toString());
        pm.getValue(we, value);
        // JUnit's assertEquals takes the EXPECTED value first; the original
        // call had the arguments swapped, which garbles failure messages.
        Assert.assertEquals(i, value.get());
        i++;
    }
}
Use of org.apache.hadoop.io.Writable in the project asterixdb by apache.
The class RecordColumnarIndexer, method reset.
@Override
public void reset(IIndexingDatasource reader) throws HyracksDataException {
    try {
        // Only HDFS-backed datasources are indexed today, so the downcast is
        // safe in practice. TODO: make this more generic.
        @SuppressWarnings("unchecked")
        HDFSRecordReader<?, Writable> source = (HDFSRecordReader<?, Writable>) reader;
        // Record which file of the snapshot the current split belongs to.
        int splitIndex = source.getCurrentSplitIndex();
        fileNumber.setValue(source.getSnapshot().get(splitIndex).getFileNumber());
        // Capture the underlying reader and its current byte position.
        recordReader = source.getReader();
        offset.setValue(recordReader.getPos());
        nextOffset = offset.getLongValue();
        // Restart row numbering for the new split.
        rowNumber.setValue(0);
    } catch (IOException e) {
        throw new HyracksDataException(e);
    }
}
Aggregations