
Example 16 with ExtendableRowSet

Use of org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet in the Apache Drill project.

From the class TestRowSet, method testDictStructure.

@Test
public void testDictStructure() {
    final String dictName = "d";
    final TupleMetadata schema = new SchemaBuilder()
        .add("id", MinorType.INT)
        .addDict(dictName, MinorType.INT) // required int
            .value(MinorType.VARCHAR)
            .resumeSchema()
        .buildSchema();
    final ExtendableRowSet rowSet = fixture.rowSet(schema);
    final RowSetWriter writer = rowSet.writer();
    // Dict
    // Pick out components and lightly test. (Assumes structure
    // tested earlier is still valid, so no need to exhaustively
    // test again.)
    assertEquals(ObjectType.ARRAY, writer.column(dictName).type());
    assertTrue(writer.column(dictName).schema().isDict());
    final ScalarWriter idWriter = writer.column(0).scalar();
    final DictWriter dictWriter = writer.column(1).dict();
    assertEquals(ValueType.INTEGER, dictWriter.keyType());
    assertEquals(ObjectType.SCALAR, dictWriter.valueType());
    final ScalarWriter keyWriter = dictWriter.keyWriter();
    final ScalarWriter valueWriter = dictWriter.valueWriter().scalar();
    assertEquals(ValueType.INTEGER, keyWriter.valueType());
    assertEquals(ValueType.STRING, valueWriter.valueType());
    // Write data
    idWriter.setInt(1);
    keyWriter.setInt(11);
    valueWriter.setString("a");
    // Advance to next entry position
    dictWriter.save();
    keyWriter.setInt(12);
    valueWriter.setString("b");
    dictWriter.save();
    writer.save();
    idWriter.setInt(2);
    keyWriter.setInt(21);
    valueWriter.setString("c");
    dictWriter.save();
    writer.save();
    idWriter.setInt(3);
    keyWriter.setInt(31);
    valueWriter.setString("d");
    dictWriter.save();
    keyWriter.setInt(32);
    valueWriter.setString("e");
    dictWriter.save();
    writer.save();
    // Finish the row set and get a reader.
    final SingleRowSet actual = writer.done();
    final RowSetReader reader = actual.reader();
    // Verify reader structure
    assertEquals(ObjectType.ARRAY, reader.column(dictName).type());
    final DictReader dictReader = reader.dict(1);
    assertEquals(ObjectType.ARRAY, dictReader.type());
    assertEquals(ValueType.INTEGER, dictReader.keyColumnType());
    assertEquals(ObjectType.SCALAR, dictReader.valueColumnType());
    // Row 1: get value reader with its position set to entry corresponding to a key
    assertTrue(reader.next());
    // dict itself is not null
    assertFalse(dictReader.isNull());
    // Exercise the dict's string rendering; the result is not asserted here.
    dictReader.getAsString();
    final KeyAccessor keyAccessor = dictReader.keyAccessor();
    final ScalarReader valueReader = dictReader.valueReader().scalar();
    assertTrue(keyAccessor.find(12));
    assertEquals("b", valueReader.getString());
    assertTrue(keyAccessor.find(11));
    assertEquals("a", valueReader.getString());
    // compare entire dict
    Map<Object, Object> map = map(11, "a", 12, "b");
    assertEquals(map, dictReader.getObject());
    // Row 2
    assertTrue(reader.next());
    // the dict does not contain an entry with the key
    assertFalse(keyAccessor.find(22));
    assertTrue(keyAccessor.find(21));
    assertEquals("c", valueReader.getString());
    map = map(21, "c");
    assertEquals(map, dictReader.getObject());
    // Row 3
    assertTrue(reader.next());
    assertTrue(keyAccessor.find(31));
    assertEquals("d", valueReader.getString());
    assertFalse(keyAccessor.find(33));
    assertTrue(keyAccessor.find(32));
    assertEquals("e", valueReader.getString());
    map = map(31, "d", 32, "e");
    assertEquals(map, dictReader.getObject());
    assertFalse(reader.next());
    // Verify that the dict accessor's value count was set.
    final DictVector dictVector = (DictVector) actual.container().getValueVector(1).getValueVector();
    assertEquals(3, dictVector.getAccessor().getValueCount());
    final SingleRowSet expected = fixture.rowSetBuilder(schema)
        .addRow(1, map(11, "a", 12, "b"))
        .addRow(2, map(21, "c"))
        .addRow(3, map(31, "d", 32, "e"))
        .build();
    RowSetUtilities.verify(expected, actual);
}
Also used : DictVector(org.apache.drill.exec.vector.complex.DictVector) RepeatedDictVector(org.apache.drill.exec.vector.complex.RepeatedDictVector) DictWriter(org.apache.drill.exec.vector.accessor.DictWriter) SingleRowSet(org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet) ScalarReader(org.apache.drill.exec.vector.accessor.ScalarReader) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) KeyAccessor(org.apache.drill.exec.vector.accessor.KeyAccessor) DictReader(org.apache.drill.exec.vector.accessor.DictReader) ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter) ExtendableRowSet(org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
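
Every example in this series follows the same ExtendableRowSet lifecycle: build a TupleMetadata schema, allocate a row set from the test fixture, write rows through a RowSetWriter, call done() to obtain a SingleRowSet, then read it back or compare it with RowSetUtilities. A minimal, hedged sketch of that lifecycle (the fixture field comes from the SubOperatorTest base class; the test name and column are illustrative only):

@Test
public void testLifecycleSketch() {
    // Illustrative only: the smallest round trip through an ExtendableRowSet.
    final TupleMetadata schema = new SchemaBuilder().add("id", MinorType.INT).buildSchema();
    final ExtendableRowSet rowSet = fixture.rowSet(schema);
    final RowSetWriter writer = rowSet.writer();
    writer.scalar("id").setInt(1);
    writer.save();
    // done() freezes the batch and hands ownership to a SingleRowSet.
    final SingleRowSet actual = writer.done();
    final RowSetReader reader = actual.reader();
    assertTrue(reader.next());
    assertEquals(1, reader.scalar("id").getInt());
    assertFalse(reader.next());
    // Release the underlying buffers when finished.
    actual.clear();
}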

Example 17 with ExtendableRowSet

Use of org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet in the Apache Drill project.

From the class TestRowSet, method testRowBounds.

/**
 * Test filling a row set up to the maximum number of rows.
 * Values are small enough to prevent filling to the
 * maximum buffer size.
 */
@Test
public void testRowBounds() {
    final TupleMetadata schema = new SchemaBuilder().add("a", MinorType.INT).buildSchema();
    final ExtendableRowSet rs = fixture.rowSet(schema);
    final RowSetWriter writer = rs.writer();
    int count = 0;
    while (!writer.isFull()) {
        writer.scalar(0).setInt(count++);
        writer.save();
    }
    writer.done();
    assertEquals(ValueVector.MAX_ROW_COUNT, count);
    // The writer index points past the writable area.
    // That is fine: the valid() method reports that we cannot
    // write at this location.
    assertEquals(ValueVector.MAX_ROW_COUNT, writer.rowIndex());
    assertEquals(ValueVector.MAX_ROW_COUNT, rs.rowCount());
    rs.clear();
}
Also used : TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) ExtendableRowSet(org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
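
The loop above runs until the writer itself reports that it is full (ValueVector.MAX_ROW_COUNT rows for this narrow schema). An application can also stop earlier with its own cap; a hedged sketch of that variant, reusing the same single-INT schema (the limit of 100 is an arbitrary illustration):

    final ExtendableRowSet rs = fixture.rowSet(schema);
    final RowSetWriter writer = rs.writer();
    final int limit = 100; // arbitrary application-chosen cap, well below the vector bound
    int count = 0;
    while (count < limit && !writer.isFull()) {
        writer.scalar(0).setInt(count++);
        writer.save();
    }
    final SingleRowSet result = writer.done();
    assertEquals(limit, result.rowCount());
    result.clear();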

Example 18 with ExtendableRowSet

Use of org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet in the Apache Drill project.

From the class TestHyperVectorReaders, method testRequired.

/**
 * Test the simplest case: a top-level required vector. Has no contained vectors.
 * This test focuses on the SV4 indirection mechanism itself.
 */
@Test
public void testRequired() {
    TupleMetadata schema = new SchemaBuilder().add("a", MinorType.INT).buildSchema();
    SingleRowSet rowSet1;
    {
        ExtendableRowSet rowSet = fixture.rowSet(schema);
        RowSetWriter writer = rowSet.writer();
        for (int i = 0; i < 10; i++) {
            writer.scalar(0).setInt(i * 10);
            writer.save();
        }
        rowSet1 = writer.done();
    }
    SingleRowSet rowSet2;
    {
        ExtendableRowSet rowSet = fixture.rowSet(schema);
        RowSetWriter writer = rowSet.writer();
        for (int i = 10; i < 20; i++) {
            writer.scalar(0).setInt(i * 10);
            writer.save();
        }
        rowSet2 = writer.done();
    }
    // Build the hyper batch
    // [0, 10, 20, ... 190]
    HyperRowSet hyperSet = HyperRowSetImpl.fromRowSets(fixture.allocator(), rowSet1, rowSet2);
    assertEquals(20, hyperSet.rowCount());
    // Populate the indirection vector:
    // (1, 9), (0, 9), (1, 8), (0, 8), ... (0, 0)
    SelectionVector4 sv4 = hyperSet.getSv4();
    for (int i = 0; i < 20; i++) {
        int batch = i % 2;
        int offset = 9 - i / 2;
        sv4.set(i, batch, offset);
    }
    // Sanity check.
    for (int i = 0; i < 20; i++) {
        int batch = i % 2;
        int offset = 9 - i / 2;
        int encoded = sv4.get(i);
        assertEquals(batch, SelectionVector4.getBatchIndex(encoded));
        assertEquals(offset, SelectionVector4.getRecordIndex(encoded));
    }
    // Verify reader
    // Expected: [190, 90, 180, 80, ... 0]
    RowSetReader reader = hyperSet.reader();
    for (int i = 0; i < 20; i++) {
        assertTrue(reader.next());
        int batch = i % 2;
        int offset = 9 - i / 2;
        int expected = batch * 100 + offset * 10;
        assertEquals(expected, reader.scalar(0).getInt());
    }
    assertFalse(reader.next());
    // Validate using an expected result set.
    RowSetBuilder rsBuilder = fixture.rowSetBuilder(schema);
    for (int i = 0; i < 20; i++) {
        int batch = i % 2;
        int offset = 9 - i / 2;
        int expected = batch * 100 + offset * 10;
        rsBuilder.addRow(expected);
    }
    RowSetUtilities.verify(rsBuilder.build(), hyperSet);
}
Also used : SingleRowSet(org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet) HyperRowSet(org.apache.drill.exec.physical.rowSet.RowSet.HyperRowSet) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) ExtendableRowSet(org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet) SelectionVector4(org.apache.drill.exec.record.selection.SelectionVector4) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
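
The sv4.set(i, batch, offset) and SelectionVector4.getBatchIndex()/getRecordIndex() calls above hide how each entry is encoded. Conventionally an SV4 entry packs the batch index into the upper 16 bits and the in-batch record offset into the lower 16 bits; a hedged sketch of that arithmetic, written with plain bit operations rather than the SelectionVector4 helpers in case the internal layout differs:

    // Illustrative packing of a (batch, offset) pair into a single SV4-style int.
    final int batch = 1;
    final int offset = 9;
    final int encoded = (batch << 16) | (offset & 0xFFFF); // upper 16 bits: batch, lower 16: offset
    assertEquals(batch, encoded >>> 16);
    assertEquals(offset, encoded & 0xFFFF);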

Example 19 with ExtendableRowSet

Use of org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet in the Apache Drill project.

From the class TestRowSet, method testTopFixedWidthArray.

/**
 * Test an array of ints (as an example fixed-width type)
 * at the top level of a schema.
 */
@Test
public void testTopFixedWidthArray() {
    final TupleMetadata schema = new SchemaBuilder().add("c", MinorType.INT).addArray("a", MinorType.INT).buildSchema();
    final ExtendableRowSet rs1 = fixture.rowSet(schema);
    final RowSetWriter writer = rs1.writer();
    writer.scalar(0).setInt(10);
    final ScalarWriter array = writer.array(1).scalar();
    array.setInt(100);
    array.setInt(110);
    writer.save();
    writer.scalar(0).setInt(20);
    array.setInt(200);
    array.setInt(120);
    array.setInt(220);
    writer.save();
    writer.scalar(0).setInt(30);
    writer.save();
    final SingleRowSet result = writer.done();
    final RowSetReader reader = result.reader();
    final ArrayReader arrayReader = reader.array(1);
    final ScalarReader elementReader = arrayReader.scalar();
    assertTrue(reader.next());
    assertEquals(10, reader.scalar(0).getInt());
    assertEquals(2, arrayReader.size());
    assertTrue(arrayReader.next());
    assertEquals(100, elementReader.getInt());
    assertTrue(arrayReader.next());
    assertEquals(110, elementReader.getInt());
    assertTrue(reader.next());
    assertEquals(20, reader.scalar(0).getInt());
    assertEquals(3, arrayReader.size());
    assertTrue(arrayReader.next());
    assertEquals(200, elementReader.getInt());
    assertTrue(arrayReader.next());
    assertEquals(120, elementReader.getInt());
    assertTrue(arrayReader.next());
    assertEquals(220, elementReader.getInt());
    assertTrue(reader.next());
    assertEquals(30, reader.scalar(0).getInt());
    assertEquals(0, arrayReader.size());
    assertFalse(reader.next());
    final SingleRowSet rs2 = fixture.rowSetBuilder(schema)
        .addRow(10, intArray(100, 110))
        .addRow(20, intArray(200, 120, 220))
        .addRow(30, null)
        .build();
    RowSetUtilities.verify(rs1, rs2);
}
Also used : ScalarReader(org.apache.drill.exec.vector.accessor.ScalarReader) ArrayReader(org.apache.drill.exec.vector.accessor.ArrayReader) SingleRowSet(org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter) ExtendableRowSet(org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
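
The element-by-element asserts above can also be written as a generic read loop that drains each row's array with next(). A hedged sketch against the same data, assuming a fresh reader is taken from result before the row sets are verified and cleared:

    final RowSetReader r = result.reader();
    int total = 0;
    while (r.next()) {
        final ArrayReader a = r.array("a");
        final ScalarReader element = a.scalar();
        int elementCount = 0;
        while (a.next()) {
            total += element.getInt();
            elementCount++;
        }
        // Each row's declared size matches the number of elements actually read.
        assertEquals(a.size(), elementCount);
    }
    // Sum of every element written above: 100 + 110 + 200 + 120 + 220.
    assertEquals(750, total);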

Example 20 with ExtendableRowSet

Use of org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet in the Apache Drill project.

From the class TestRowSet, method testScalarArrayStructure.

/**
 * Test a record with a top level array. The focus here is on the
 * scalar array structure.
 *
 * @throws VectorOverflowException should never occur
 */
@Test
public void testScalarArrayStructure() {
    final TupleMetadata schema = new SchemaBuilder().addArray("a", MinorType.INT).buildSchema();
    final ExtendableRowSet rowSet = fixture.rowSet(schema);
    final RowSetWriter writer = rowSet.writer();
    // Repeated Int
    // Verify the invariants of the "full" and "simple" access paths
    assertEquals(ObjectType.ARRAY, writer.column("a").type());
    assertSame(writer.column("a"), writer.column(0));
    assertSame(writer.array("a"), writer.array(0));
    assertSame(writer.column("a").array(), writer.array("a"));
    assertSame(writer.column(0).array(), writer.array(0));
    assertEquals(ObjectType.SCALAR, writer.column("a").array().entry().type());
    assertEquals(ObjectType.SCALAR, writer.column("a").array().entryType());
    assertSame(writer.array(0).entry().scalar(), writer.array(0).scalar());
    assertEquals(ValueType.INTEGER, writer.array(0).scalar().valueType());
    try {
        writer.column(0).scalar();
        fail();
    } catch (final UnsupportedOperationException e) {
        // Expected
    }
    try {
        writer.column(0).tuple();
        fail();
    } catch (final UnsupportedOperationException e) {
        // Expected
    }
    // Write some data
    final ScalarWriter intWriter = writer.array("a").scalar();
    intWriter.setInt(10);
    intWriter.setInt(11);
    writer.save();
    intWriter.setInt(20);
    intWriter.setInt(21);
    intWriter.setInt(22);
    writer.save();
    intWriter.setInt(30);
    writer.save();
    intWriter.setInt(40);
    intWriter.setInt(41);
    writer.save();
    // Finish the row set and get a reader.
    final SingleRowSet actual = writer.done();
    final RowSetReader reader = actual.reader();
    // Verify the invariants of the "full" and "simple" access paths
    assertEquals(ObjectType.ARRAY, reader.column("a").type());
    assertSame(reader.column("a"), reader.column(0));
    assertSame(reader.array("a"), reader.array(0));
    assertSame(reader.column("a").array(), reader.array("a"));
    assertSame(reader.column(0).array(), reader.array(0));
    assertEquals(ObjectType.SCALAR, reader.column("a").array().entryType());
    assertEquals(ValueType.INTEGER, reader.array(0).scalar().valueType());
    // Read and verify the rows
    final ArrayReader arrayReader = reader.array(0);
    final ScalarReader intReader = arrayReader.scalar();
    assertTrue(reader.next());
    assertFalse(arrayReader.isNull());
    assertEquals(2, arrayReader.size());
    assertTrue(arrayReader.next());
    assertEquals(10, intReader.getInt());
    assertTrue(arrayReader.next());
    assertEquals(11, intReader.getInt());
    assertFalse(arrayReader.next());
    assertTrue(reader.next());
    assertFalse(arrayReader.isNull());
    assertEquals(3, arrayReader.size());
    assertTrue(arrayReader.next());
    assertEquals(20, intReader.getInt());
    assertTrue(arrayReader.next());
    assertEquals(21, intReader.getInt());
    assertTrue(arrayReader.next());
    assertEquals(22, intReader.getInt());
    assertFalse(arrayReader.next());
    assertTrue(reader.next());
    assertFalse(arrayReader.isNull());
    assertEquals(1, arrayReader.size());
    assertTrue(arrayReader.next());
    assertEquals(30, intReader.getInt());
    assertFalse(arrayReader.next());
    assertTrue(reader.next());
    assertFalse(arrayReader.isNull());
    assertEquals(2, arrayReader.size());
    assertTrue(arrayReader.next());
    assertEquals(40, intReader.getInt());
    assertTrue(arrayReader.next());
    assertEquals(41, intReader.getInt());
    assertFalse(arrayReader.next());
    assertFalse(reader.next());
    // Test the above again via the writer and reader
    // utility classes.
    final SingleRowSet expected = fixture.rowSetBuilder(schema)
        .addSingleCol(intArray(10, 11))
        .addSingleCol(intArray(20, 21, 22))
        .addSingleCol(intArray(30))
        .addSingleCol(intArray(40, 41))
        .build();
    RowSetUtilities.verify(expected, actual);
}
Also used : ScalarReader(org.apache.drill.exec.vector.accessor.ScalarReader) ArrayReader(org.apache.drill.exec.vector.accessor.ArrayReader) SingleRowSet(org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter) ExtendableRowSet(org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
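
The invariant checks at the top of this test (column(name).array() versus array(name), entry().scalar() versus scalar(), and so on) repeat for every array column. A hedged sketch of a small helper that bundles those checks, using only the accessor calls exercised above (the helper name is illustrative):

    // Illustrative helper: asserts the "full" and "simple" access paths agree for an array column.
    private static void assertArrayAccessPaths(RowSetWriter writer, String colName, int colIndex) {
        assertEquals(ObjectType.ARRAY, writer.column(colName).type());
        assertSame(writer.column(colName), writer.column(colIndex));
        assertSame(writer.array(colName), writer.array(colIndex));
        assertSame(writer.column(colName).array(), writer.array(colName));
        assertSame(writer.array(colIndex).entry().scalar(), writer.array(colIndex).scalar());
    }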

Aggregations

ExtendableRowSet (org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet) - 34
SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder) - 31
TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata) - 31
SubOperatorTest (org.apache.drill.test.SubOperatorTest) - 27
Test (org.junit.Test) - 27
SingleRowSet (org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet) - 24
ScalarReader (org.apache.drill.exec.vector.accessor.ScalarReader) - 19
ScalarWriter (org.apache.drill.exec.vector.accessor.ScalarWriter) - 19
ArrayReader (org.apache.drill.exec.vector.accessor.ArrayReader) - 11
ArrayWriter (org.apache.drill.exec.vector.accessor.ArrayWriter) - 7
ObjectReader (org.apache.drill.exec.vector.accessor.ObjectReader) - 6
ObjectWriter (org.apache.drill.exec.vector.accessor.ObjectWriter) - 6
TupleReader (org.apache.drill.exec.vector.accessor.TupleReader) - 5
TupleWriter (org.apache.drill.exec.vector.accessor.TupleWriter) - 5
VariantReader (org.apache.drill.exec.vector.accessor.VariantReader) - 5
VariantWriter (org.apache.drill.exec.vector.accessor.VariantWriter) - 5
MinorType (org.apache.drill.common.types.TypeProtos.MinorType) - 4
RowSetWriter (org.apache.drill.exec.physical.rowSet.RowSetWriter) - 4
ValueType (org.apache.drill.exec.vector.accessor.ValueType) - 4
VectorContainer (org.apache.drill.exec.record.VectorContainer) - 3