
Example 11 with ArrayReader

Use of org.apache.drill.exec.vector.accessor.ArrayReader in project drill by apache.

From the class AbstractArrayWriter, method copy.

@Override
public void copy(ColumnReader from) {
    ArrayReader source = (ArrayReader) from;
    // Inefficient initial implementation
    ObjectReader entryReader = source.entry();
    while (source.next()) {
        elementObjWriter.writer().copy(entryReader.reader());
        save();
    }
}
Also used : ArrayReader(org.apache.drill.exec.vector.accessor.ArrayReader) ObjectReader(org.apache.drill.exec.vector.accessor.ObjectReader)
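
The copy() method above lets an array writer pull the current array value straight from a reader of the same shape. As a rough illustration, a caller might loop over a source row set and copy the array column row by row. The snippet below is a hedged sketch only: it assumes copy() is exposed through the writer interface that AbstractArrayWriter overrides, that inputRowSet and outputRowSet (hypothetical names) share a schema whose only column is the VARCHAR array "b", and it is not code from the Drill project.

// Hypothetical sketch: copy array column "b" from a source row set into a
// destination row set with the same single-column schema. Calling copy() on
// the array writer is assumed to dispatch to AbstractArrayWriter.copy() above.
RowSetReader source = inputRowSet.reader();
RowSetWriter dest = outputRowSet.writer();
ArrayReader fromArray = source.array("b");
ArrayWriter toArray = dest.array("b");
while (source.next()) {
    toArray.copy(fromArray);   // copies the whole array for the current row
    dest.save();
}
SingleRowSet copied = dest.done();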

Example 12 with ArrayReader

Use of org.apache.drill.exec.vector.accessor.ArrayReader in project drill by apache.

From the class TestVariantAccessors, method testUnionWithList.

/**
 * Test a variant (AKA "union vector") at the top level which includes
 * a list.
 */
@Test
public void testUnionWithList() {
    final TupleMetadata schema = new SchemaBuilder()
        .addUnion("u")
          .addType(MinorType.INT)
          .addList()
            .addType(MinorType.VARCHAR)
            .resumeUnion()
          .resumeSchema()
        .buildSchema();
    SingleRowSet result;
    // Write values
    {
        final ExtendableRowSet rs = fixture.rowSet(schema);
        final RowSetWriter writer = rs.writer();
        final VariantWriter vw = writer.variant("u");
        assertTrue(vw.hasType(MinorType.INT));
        final ScalarWriter intWriter = vw.scalar(MinorType.INT);
        assertTrue(vw.hasType(MinorType.LIST));
        final ArrayWriter aWriter = vw.array();
        final ScalarWriter strWriter = aWriter.scalar();
        // Row 1: 1, ["fred", "barney"]
        intWriter.setInt(1);
        strWriter.setString("fred");
        aWriter.save();
        strWriter.setString("barney");
        aWriter.save();
        writer.save();
        // Row 2: 2, ["wilma", "betty"]
        intWriter.setInt(2);
        strWriter.setString("wilma");
        aWriter.save();
        strWriter.setString("betty");
        aWriter.save();
        writer.save();
        result = writer.done();
        assertEquals(2, result.rowCount());
    }
    // Read the values.
    {
        final RowSetReader reader = result.reader();
        final VariantReader vr = reader.variant("u");
        assertTrue(vr.hasType(MinorType.INT));
        final ScalarReader intReader = vr.scalar(MinorType.INT);
        assertTrue(vr.hasType(MinorType.LIST));
        final ArrayReader aReader = vr.array();
        final ScalarReader strReader = aReader.scalar();
        assertTrue(reader.next());
        assertEquals(1, intReader.getInt());
        assertEquals(2, aReader.size());
        assertTrue(aReader.next());
        assertEquals("fred", strReader.getString());
        assertTrue(aReader.next());
        assertEquals("barney", strReader.getString());
        assertFalse(aReader.next());
        assertTrue(reader.next());
        assertEquals(2, intReader.getInt());
        assertEquals(2, aReader.size());
        assertTrue(aReader.next());
        assertEquals("wilma", strReader.getString());
        assertTrue(aReader.next());
        assertEquals("betty", strReader.getString());
        assertFalse(aReader.next());
        assertFalse(reader.next());
    }
    result.clear();
}
Also used : ScalarReader(org.apache.drill.exec.vector.accessor.ScalarReader) ArrayReader(org.apache.drill.exec.vector.accessor.ArrayReader) SingleRowSet(org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet) VariantWriter(org.apache.drill.exec.vector.accessor.VariantWriter) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) ArrayWriter(org.apache.drill.exec.vector.accessor.ArrayWriter) ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter) ExtendableRowSet(org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet) VariantReader(org.apache.drill.exec.vector.accessor.VariantReader) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
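
The test writes both union members for every row; in more typical use, an application reading a union column branches on whichever member type the current row actually holds. The snippet below is a hedged sketch of that pattern, not part of this test: it assumes VariantReader.isNull() and dataType() report the active member for the current row, as exercised in other TestVariantAccessors cases.

// Hedged sketch: dispatch on the active member type of union column "u".
// The dataType()/isNull() behavior described above is an assumption here.
final RowSetReader reader = result.reader();
final VariantReader vr = reader.variant("u");
while (reader.next()) {
    if (vr.isNull()) {
        continue;                  // no member written for this row
    }
    switch (vr.dataType()) {
    case INT: {
        int n = vr.scalar(MinorType.INT).getInt();
        // ... use n
        break;
    }
    case LIST: {
        final ArrayReader list = vr.array();
        final ScalarReader item = list.scalar();
        while (list.next()) {
            // ... use item.getString()
        }
        break;
    }
    default:
        break;
    }
}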

Example 13 with ArrayReader

Use of org.apache.drill.exec.vector.accessor.ArrayReader in project drill by apache.

From the class TestVariantAccessors, method testScalarList.

/**
 * Test a scalar list. Should act just like a repeated type, with the
 * addition that the list for a row can be null. But a list writer does
 * not auto-increment, so we must call save() explicitly after each
 * element is written.
 */
@Test
public void testScalarList() {
    final TupleMetadata schema = new SchemaBuilder()
        .addList("list")
          .addType(MinorType.VARCHAR)
          .resumeSchema()
        .buildSchema();
    final ExtendableRowSet rowSet = fixture.rowSet(schema);
    final RowSetWriter writer = rowSet.writer();
    {
        final ObjectWriter listObj = writer.column(0);
        assertEquals(ObjectType.ARRAY, listObj.type());
        final ArrayWriter listArray = listObj.array();
        // The list contains only a scalar. But, because lists can,
        // in general, contain multiple contents, the list requires
        // an explicit save after each entry.
        final ObjectWriter itemObj = listArray.entry();
        assertEquals(ObjectType.SCALAR, itemObj.type());
        final ScalarWriter strWriter = itemObj.scalar();
        // First row: two strings and a null
        // Unlike a repeated type, a list can mark individual elements
        // as null.
        // List will automatically detect that data was written.
        strWriter.setString("fred");
        listArray.save();
        strWriter.setNull();
        listArray.save();
        strWriter.setString("wilma");
        listArray.save();
        writer.save();
        // Second row: null
        writer.save();
        // Third row: one string
        strWriter.setString("dino");
        listArray.save();
        writer.save();
        // Fourth row: empty array. Note that there is no trigger
        // to say that the column is not null, so we have to do it
        // explicitly.
        listArray.setNull(false);
        writer.save();
        // Last row: a null string and non-null
        strWriter.setNull();
        listArray.save();
        strWriter.setString("pebbles");
        listArray.save();
        writer.save();
    }
    final SingleRowSet result = writer.done();
    assertEquals(5, result.rowCount());
    {
        final RowSetReader reader = result.reader();
        final ObjectReader listObj = reader.column(0);
        assertEquals(ObjectType.ARRAY, listObj.type());
        final ArrayReader listArray = listObj.array();
        // The list is a repeated scalar
        assertEquals(ObjectType.SCALAR, listArray.entry().type());
        final ScalarReader strReader = listArray.scalar();
        // First row: two strings and a null
        assertTrue(reader.next());
        assertFalse(listArray.isNull());
        assertEquals(3, listArray.size());
        assertTrue(listArray.next());
        assertFalse(strReader.isNull());
        assertEquals("fred", strReader.getString());
        assertTrue(listArray.next());
        assertTrue(strReader.isNull());
        assertTrue(listArray.next());
        assertFalse(strReader.isNull());
        assertEquals("wilma", strReader.getString());
        assertFalse(listArray.next());
        // Second row: null
        assertTrue(reader.next());
        assertTrue(listArray.isNull());
        assertEquals(0, listArray.size());
        // Third row: one string
        assertTrue(reader.next());
        assertFalse(listArray.isNull());
        assertEquals(1, listArray.size());
        assertTrue(listArray.next());
        assertEquals("dino", strReader.getString());
        assertFalse(listArray.next());
        // Fourth row: empty array.
        assertTrue(reader.next());
        assertFalse(listArray.isNull());
        assertEquals(0, listArray.size());
        assertFalse(listArray.next());
        // Last row: a null string and non-null
        assertTrue(reader.next());
        assertFalse(listArray.isNull());
        assertEquals(2, listArray.size());
        assertTrue(listArray.next());
        assertTrue(strReader.isNull());
        assertTrue(listArray.next());
        assertFalse(strReader.isNull());
        assertEquals("pebbles", strReader.getString());
        assertFalse(listArray.next());
        assertFalse(reader.next());
    }
    result.clear();
}
Also used : ScalarReader(org.apache.drill.exec.vector.accessor.ScalarReader) ArrayReader(org.apache.drill.exec.vector.accessor.ArrayReader) SingleRowSet(org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) ObjectWriter(org.apache.drill.exec.vector.accessor.ObjectWriter) ObjectReader(org.apache.drill.exec.vector.accessor.ObjectReader) ArrayWriter(org.apache.drill.exec.vector.accessor.ArrayWriter) ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter) ExtendableRowSet(org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
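
The javadoc's point about auto-increment is easiest to see side by side: a plain repeated column advances to the next element on every scalar write, while a list column needs an explicit save() per element. The snippet below is a minimal sketch of that contrast using the same fixture and builder calls as the tests here; the column names "rep" and "list" are illustrative only.

// Minimal sketch: repeated VARCHAR vs. list-of-VARCHAR on the write side.
final TupleMetadata schema = new SchemaBuilder()
    .addArray("rep", MinorType.VARCHAR)
    .addList("list")
      .addType(MinorType.VARCHAR)
      .resumeSchema()
    .buildSchema();
final RowSetWriter writer = fixture.rowSet(schema).writer();

// Repeated column: each setString() implicitly moves to the next element.
final ArrayWriter rep = writer.array("rep");
rep.scalar().setString("fred");
rep.scalar().setString("wilma");

// List column: an explicit save() is required after each element.
final ArrayWriter list = writer.array("list");
list.scalar().setString("fred");
list.save();
list.scalar().setString("wilma");
list.save();

writer.save();
final SingleRowSet result = writer.done();
result.clear();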

Example 14 with ArrayReader

Use of org.apache.drill.exec.vector.accessor.ArrayReader in project drill by apache.

From the class TestIndirectReaders, method testArray.

/**
 * A more complex case with two levels of offset vectors (one for the
 * array, another for the Varchar values). Only the top level goes
 * through the indirection.
 */
@Test
public void testArray() {
    TupleMetadata schema = new SchemaBuilder().addArray("a", MinorType.VARCHAR).buildSchema();
    ExtendableRowSet rowSet = fixture.rowSet(schema);
    RowSetWriter writer = rowSet.writer();
    ArrayWriter aWriter = writer.array(0);
    ScalarWriter strWriter = aWriter.scalar();
    for (int i = 0; i < 10; i++) {
        for (int j = 0; j < 5; j++) {
            strWriter.setString("value" + i + "." + j);
        }
        writer.save();
    }
    SingleRowSet result = writer.done().toIndirect();
    SelectionVector2 sv2 = result.getSv2();
    for (int i = 0; i < 10; i++) {
        sv2.setIndex(i, 9 - i);
    }
    RowSetReader reader = result.reader();
    ArrayReader aReader = reader.array(0);
    ScalarReader strReader = aReader.scalar();
    for (int i = 9; i >= 0; i--) {
        assertTrue(reader.next());
        for (int j = 0; j < 5; j++) {
            assertTrue(aReader.next());
            assertEquals("value" + i + "." + j, strReader.getString());
        }
    }
    result.clear();
}
Also used : ScalarReader(org.apache.drill.exec.vector.accessor.ScalarReader) ArrayReader(org.apache.drill.exec.vector.accessor.ArrayReader) SingleRowSet(org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) SelectionVector2(org.apache.drill.exec.record.selection.SelectionVector2) ArrayWriter(org.apache.drill.exec.vector.accessor.ArrayWriter) ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter) ExtendableRowSet(org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
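
The indirection means the reader's row positions are remapped through the SV2: reader position i resolves to underlying batch row sv2.getIndex(i), which the reversal loop above sets to 9 - i. A small check of that mapping, which could sit inside the same test right after the SV2 is populated, would be:

// The SV2 remaps logical reader positions onto physical batch rows.
for (int i = 0; i < result.rowCount(); i++) {
    assertEquals(9 - i, sv2.getIndex(i));
}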

Example 15 with ArrayReader

Use of org.apache.drill.exec.vector.accessor.ArrayReader in project drill by apache.

From the class TestRowSet, method example.

/**
 * The code below is not a test. Rather, it is a simple example of
 * how to write a batch of data using writers, then read it using
 * readers.
 */
@Test
public void example() {
    // Step 1: Define a schema. In a real app, this
    // will be provided by a reader, by an incoming batch,
    // etc.
    final TupleMetadata schema = new SchemaBuilder()
        .add("a", MinorType.VARCHAR)
        .addArray("b", MinorType.INT)
        .addMap("c")
          .add("c1", MinorType.INT)
          .add("c2", MinorType.VARCHAR)
          .resumeSchema()
        .buildSchema();
    // Step 2: Create a batch. Done here because this is
    // a batch-oriented test. Done automatically in the
    // result set loader.
    final DirectRowSet drs = DirectRowSet.fromSchema(fixture.allocator(), schema);
    // Step 3: Create the writer.
    final RowSetWriter writer = drs.writer();
    // Step 4: Populate data. Here we do it the way an app would:
    // using the individual accessors. See tests above for the many
    // ways this can be done depending on the need of the app.
    // 
    // Write two rows:
    // ("fred", [10, 11], {12, "wilma"})
    // ("barney", [20, 21], {22, "betty"})
    // 
    // This example uses Java strings for Varchar. Real code might
    // use byte arrays.
    writer.scalar("a").setString("fred");
    final ArrayWriter bWriter = writer.array("b");
    bWriter.scalar().setInt(10);
    bWriter.scalar().setInt(11);
    final TupleWriter cWriter = writer.tuple("c");
    cWriter.scalar("c1").setInt(12);
    cWriter.scalar("c2").setString("wilma");
    writer.save();
    writer.scalar("a").setString("barney");
    bWriter.scalar().setInt(20);
    bWriter.scalar().setInt(21);
    cWriter.scalar("c1").setInt(22);
    cWriter.scalar("c2").setString("betty");
    writer.save();
    // Step 5: "Harvest" the batch. Done differently in the
    // result set loader.
    final SingleRowSet rowSet = writer.done();
    // Step 6: Create a reader.
    final RowSetReader reader = rowSet.reader();
    while (reader.next()) {
        final StringBuilder sb = new StringBuilder();
        sb.append(print(reader.scalar("a").getString()));
        final ArrayReader bReader = reader.array("b");
        while (bReader.next()) {
            sb.append(print(bReader.scalar().getInt()));
        }
        final TupleReader cReader = reader.tuple("c");
        sb.append(print(cReader.scalar("c1").getInt()));
        sb.append(print(cReader.scalar("c2").getString()));
        logger.debug(sb.toString());
    }
    // Step 7: Free memory.
    rowSet.clear();
}
Also used : ArrayReader(org.apache.drill.exec.vector.accessor.ArrayReader) TupleReader(org.apache.drill.exec.vector.accessor.TupleReader) SingleRowSet(org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet) TupleWriter(org.apache.drill.exec.vector.accessor.TupleWriter) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) ArrayWriter(org.apache.drill.exec.vector.accessor.ArrayWriter) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
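
For tests, the same two rows can usually be built more compactly with a row-at-a-time builder instead of the individual accessors. The snippet below is a hedged alternative, not part of the example above: it assumes fixture.rowSetBuilder() and the RowSetUtilities helpers intArray() and mapValue() are available, as they are in other row-set tests.

// Hedged sketch: compact construction of the same two rows.
// Assumes static imports of RowSetUtilities.intArray and RowSetUtilities.mapValue.
final SingleRowSet rows = fixture.rowSetBuilder(schema)
    .addRow("fred", intArray(10, 11), mapValue(12, "wilma"))
    .addRow("barney", intArray(20, 21), mapValue(22, "betty"))
    .build();
rows.clear();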

Aggregations

ArrayReader (org.apache.drill.exec.vector.accessor.ArrayReader): 40 usages
TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata): 36 usages
Test (org.junit.Test): 33 usages
SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder): 31 usages
SubOperatorTest (org.apache.drill.test.SubOperatorTest): 30 usages
ScalarReader (org.apache.drill.exec.vector.accessor.ScalarReader): 29 usages
SingleRowSet (org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet): 27 usages
ScalarWriter (org.apache.drill.exec.vector.accessor.ScalarWriter): 22 usages
ArrayWriter (org.apache.drill.exec.vector.accessor.ArrayWriter): 19 usages
TupleReader (org.apache.drill.exec.vector.accessor.TupleReader): 12 usages
TupleWriter (org.apache.drill.exec.vector.accessor.TupleWriter): 12 usages
ExtendableRowSet (org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet): 11 usages
RowSet (org.apache.drill.exec.physical.rowSet.RowSet): 9 usages
RowSetReader (org.apache.drill.exec.physical.rowSet.RowSetReader): 9 usages
ResultSetLoader (org.apache.drill.exec.physical.resultSet.ResultSetLoader): 7 usages
RowSetLoader (org.apache.drill.exec.physical.resultSet.RowSetLoader): 7 usages
ObjectReader (org.apache.drill.exec.vector.accessor.ObjectReader): 7 usages
SingleRowSet (org.apache.drill.test.rowSet.RowSet.SingleRowSet): 5 usages
VectorContainer (org.apache.drill.exec.record.VectorContainer): 4 usages
ObjectWriter (org.apache.drill.exec.vector.accessor.ObjectWriter): 4 usages