
Example 91 with Writable

Use of org.apache.hadoop.io.Writable in project SQLWindowing by hbutani.

From class ByteBasedListTest, method test1:

@Test
public void test1() throws Exception {
    // Append each test row to the byte-based list as a Text Writable.
    for (String r : table) {
        bl.append(new Text(r));
    }
    // Create a reusable Writable instance of the SerDe's serialized class.
    Class<? extends Writable> cls = sd.getSerializedClass();
    Writable wObj = cls.newInstance();
    // Walk the list once through its iterator, exercising serialization.
    Iterator<Object> it = bl.iterator(sd, wObj);
    while (it.hasNext()) {
        sd.serialize(it.next(), sd.getObjectInspector());
    }
    // Random-access each element and check it round-trips to the original row.
    for (int i = 0; i < table.length; i++) {
        Object o = sd.serialize(bl.get(i, sd, wObj), sd.getObjectInspector());
        Assert.assertEquals(table[i], o.toString());
    }
}
Also used: Writable(org.apache.hadoop.io.Writable) Text(org.apache.hadoop.io.Text) Test(org.junit.Test)
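Tests like this one lean on the Writable contract: write(DataOutput) emits an object's bytes, and readFields(DataInput) repopulates a reusable instance from them. A minimal stand-alone sketch of that round trip, using Hadoop's DataOutputBuffer and DataInputBuffer (the class name WritableRoundTrip is ours, not from the project):

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class WritableRoundTrip {
    public static void main(String[] args) throws Exception {
        Text original = new Text("hello");

        // write() serializes the Writable into an in-memory byte buffer.
        DataOutputBuffer out = new DataOutputBuffer();
        original.write(out);

        // readFields() repopulates a reusable instance from those bytes.
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        Text copy = new Text();
        copy.readFields(in);

        System.out.println(original.equals(copy)); // true
    }
}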

Example 92 with Writable

Use of org.apache.hadoop.io.Writable in project SQLWindowing by hbutani.

From class PartitionedByteBasedListTest, method test2:

@Test
public void test2() throws Exception {
    // Seed the list with the shared test table.
    for (String r : ByteBasedListTest.table) {
        bl.append(new Text(r));
    }
    Class<? extends Writable> cls = sd.getSerializedClass();
    Writable wObj = cls.newInstance();
    // Read each element back and re-append it, doubling the list.
    for (int i = 0; i < ByteBasedListTest.table.length; i++) {
        Object o = bl.get(i, sd, wObj);
        bl.append(o, sd.getObjectInspector(), sd);
    }
    // The appended second half of the list must match the original rows.
    for (int i = ByteBasedListTest.table.length; i < 2 * ByteBasedListTest.table.length; i++) {
        Object o = sd.serialize(bl.get(i, sd, wObj), sd.getObjectInspector());
        Assert.assertEquals(ByteBasedListTest.table[i - ByteBasedListTest.table.length], o.toString());
    }
}
Also used: Writable(org.apache.hadoop.io.Writable) Text(org.apache.hadoop.io.Text) Test(org.junit.Test)
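Both list tests instantiate the reusable Writable with cls.newInstance(). A more idiomatic Hadoop alternative, shown here as a sketch rather than as what SQLWindowing actually does, is ReflectionUtils.newInstance, which caches the constructor and configures Configurable writables:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;

public class NewInstanceDemo {
    public static void main(String[] args) {
        // Text.class stands in for sd.getSerializedClass().
        Class<? extends Writable> cls = Text.class;

        // Unlike bare cls.newInstance(), this caches the no-arg constructor,
        // makes it accessible, and injects the Configuration where applicable.
        Writable wObj = ReflectionUtils.newInstance(cls, new Configuration());
        System.out.println(wObj.getClass().getName());
    }
}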

Example 93 with Writable

Use of org.apache.hadoop.io.Writable in project SQLWindowing by hbutani.

From class PersistentByteBasedListTest, method test:

@Test
public void test() throws Exception {
    // Fill the in-memory list.
    for (String r : ByteBasedListTest.table) {
        bl.append(new Text(r));
    }
    // Persist the list to a temp file and drop the in-memory copy.
    File f = File.createTempFile("wdw", null);
    f.deleteOnExit();
    PersistentByteBasedList.store(bl, f);
    bl = null;
    // Reopen the list from disk.
    PersistentByteBasedList pl = new PersistentByteBasedList(f);
    Class<? extends Writable> cls = sd.getSerializedClass();
    Writable wObj = cls.newInstance();
    // Iterate once, then random-access and verify every element survived the round trip.
    Iterator<Object> it = pl.iterator(sd, wObj);
    while (it.hasNext()) {
        sd.serialize(it.next(), sd.getObjectInspector());
    }
    for (int i = 0; i < ByteBasedListTest.table.length; i++) {
        Object o = sd.serialize(pl.get(i, sd, wObj), sd.getObjectInspector());
        Assert.assertEquals(ByteBasedListTest.table[i], o.toString());
    }
}
Also used: Writable(org.apache.hadoop.io.Writable) Text(org.apache.hadoop.io.Text) File(java.io.File) Test(org.junit.Test)
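PersistentByteBasedList is SQLWindowing's own disk format. The stock Hadoop container for persisting Writable key/value pairs is SequenceFile, which appears 32 times in the aggregations below; a minimal sketch (the path and class name are ours):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class SequenceFileDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path path = new Path("/tmp/wdw-demo.seq"); // hypothetical location

        // Write Text keys with IntWritable values.
        try (SequenceFile.Writer w = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(path),
                SequenceFile.Writer.keyClass(Text.class),
                SequenceFile.Writer.valueClass(IntWritable.class))) {
            w.append(new Text("row-0"), new IntWritable(0));
            w.append(new Text("row-1"), new IntWritable(1));
        }

        // Read back, reusing one key and one value instance across records.
        Text key = new Text();
        IntWritable val = new IntWritable();
        try (SequenceFile.Reader r = new SequenceFile.Reader(conf,
                SequenceFile.Reader.file(path))) {
            while (r.next(key, val)) {
                System.out.println(key + " = " + val.get());
            }
        }
    }
}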

Example 94 with Writable

Use of org.apache.hadoop.io.Writable in project SQLWindowing by hbutani.

From class PersistentByteBasedSortedMapTest, method testKeyItr:

@Test
public void testKeyItr() throws Exception {
    int i = 0;
    // Insert keys in reverse order; the map must sort them.
    for (i = table.length - 1; i >= 0; i--) {
        bm.put(new Text(table[i]), new VIntWritable(i));
    }
    // Persist the map and reopen it from disk with the same comparator.
    File f = File.createTempFile("wdw", null);
    f.deleteOnExit();
    PersistentByteBasedSortedMap.store(bm, f);
    bm = null;
    PersistentByteBasedSortedMap pm = new PersistentByteBasedSortedMap(f, comparator);
    Assert.assertEquals(table.length, pm.size());
    // The key iterator must yield keys in table order, each mapped to its stored value.
    Iterator<Writable> it = pm.keyIterator(wObj);
    i = 0;
    while (it.hasNext()) {
        Writable we = it.next();
        Assert.assertEquals(table[i], we.toString());
        pm.getValue(we, value);
        Assert.assertEquals(value.get(), i);
        i++;
    }
}
Also used: VIntWritable(org.apache.hadoop.io.VIntWritable) Writable(org.apache.hadoop.io.Writable) Text(org.apache.hadoop.io.Text) File(java.io.File) Test(org.junit.Test) ByteBasedSortedMapTest(com.sap.hadoop.ds.sortedmap.ByteBasedSortedMapTest)
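PersistentByteBasedSortedMap orders its keys with a caller-supplied comparator. Hadoop's general-purpose key comparator is WritableComparator, which can compare Writable keys either as objects or directly over their serialized bytes; an illustrative sketch (the class and helper names are ours):

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;

public class RawCompareDemo {
    public static void main(String[] args) throws Exception {
        // Hadoop registers an optimized raw comparator for Text.
        WritableComparator cmp = WritableComparator.get(Text.class);

        // Object-level comparison.
        System.out.println(cmp.compare(new Text("apple"), new Text("pear"))); // negative

        // Byte-level comparison: neither key is deserialized.
        byte[] a = bytesOf(new Text("apple"));
        byte[] b = bytesOf(new Text("pear"));
        System.out.println(cmp.compare(a, 0, a.length, b, 0, b.length)); // negative
    }

    // Serialize a Text into a standalone byte array.
    private static byte[] bytesOf(Text t) throws Exception {
        DataOutputBuffer out = new DataOutputBuffer();
        t.write(out);
        return java.util.Arrays.copyOf(out.getData(), out.getLength());
    }
}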

Example 95 with Writable

Use of org.apache.hadoop.io.Writable in project asterixdb by apache.

From class RecordColumnarIndexer, method reset:

@Override
public void reset(IIndexingDatasource reader) throws HyracksDataException {
    try {
        //TODO: Make this more generic. Right now it works because we only index HDFS files.
        @SuppressWarnings("unchecked")
        HDFSRecordReader<?, Writable> hdfsReader = (HDFSRecordReader<?, Writable>) reader;
        // Record which file of the snapshot we are currently positioned in.
        fileNumber.setValue(hdfsReader.getSnapshot().get(hdfsReader.getCurrentSplitIndex()).getFileNumber());
        recordReader = hdfsReader.getReader();
        // Capture the reader's current byte offset and restart row counting.
        offset.setValue(recordReader.getPos());
        nextOffset = offset.getLongValue();
        rowNumber.setValue(0);
    } catch (IOException e) {
        throw new HyracksDataException(e);
    }
}
Also used: Writable(org.apache.hadoop.io.Writable) IOException(java.io.IOException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) HDFSRecordReader(org.apache.asterix.external.input.record.reader.hdfs.HDFSRecordReader)

Aggregations

Writable (org.apache.hadoop.io.Writable): 221
IntWritable (org.apache.hadoop.io.IntWritable): 103
LongWritable (org.apache.hadoop.io.LongWritable): 91
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 75
BytesWritable (org.apache.hadoop.io.BytesWritable): 74
FloatWritable (org.apache.hadoop.io.FloatWritable): 73
Test (org.junit.Test): 68
IOException (java.io.IOException): 43
Path (org.apache.hadoop.fs.Path): 43
Text (org.apache.hadoop.io.Text): 40
ArrayWritable (org.apache.hadoop.io.ArrayWritable): 37
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 34
SequenceFile (org.apache.hadoop.io.SequenceFile): 32
Configuration (org.apache.hadoop.conf.Configuration): 31
DoubleWritable (org.apache.hadoop.io.DoubleWritable): 30
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 29
ByteWritable (org.apache.hadoop.io.ByteWritable): 28
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 25
FileSystem (org.apache.hadoop.fs.FileSystem): 24
ArrayList (java.util.ArrayList): 23
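Most entries above are Hadoop's stock box types. A composite record is usually expressed as a custom Writable that delegates to them field by field; a minimal sketch (NamedCount is a hypothetical type, not from any project above):

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

// A hypothetical two-field record: a name and a count.
public class NamedCount implements Writable {
    private final Text name = new Text();
    private final IntWritable count = new IntWritable();

    public void set(String n, int c) {
        name.set(n);
        count.set(c);
    }

    @Override
    public void write(DataOutput out) throws IOException {
        // Serialize fields in a fixed order.
        name.write(out);
        count.write(out);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        // Deserialize in exactly the order they were written.
        name.readFields(in);
        count.readFields(in);
    }

    @Override
    public String toString() {
        return name + "=" + count.get();
    }
}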