
Example 6 with RowSetComparison

use of org.apache.drill.test.rowSet.RowSetComparison in project drill by axbaretto.

the class TestResultSetLoaderMapArray method testBasics.

@Test
public void testBasics() {
    TupleMetadata schema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .addMapArray("m")
          .add("c", MinorType.INT)
          .add("d", MinorType.VARCHAR)
          .resumeSchema()
        .buildSchema();
    ResultSetLoaderImpl.ResultSetOptions options = new OptionBuilder().setSchema(schema).build();
    ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options);
    RowSetLoader rootWriter = rsLoader.writer();
    // Verify structure and schema
    TupleMetadata actualSchema = rootWriter.schema();
    assertEquals(2, actualSchema.size());
    assertTrue(actualSchema.metadata(1).isArray());
    assertTrue(actualSchema.metadata(1).isMap());
    assertEquals(2, actualSchema.metadata("m").mapSchema().size());
    assertEquals(2, actualSchema.column("m").getChildren().size());
    // Write a couple of rows with arrays.
    rsLoader.startBatch();
    rootWriter
        .addRow(10, objArray(objArray(110, "d1.1"), objArray(120, "d2.2")))
        .addRow(20, objArray())
        .addRow(30, objArray(objArray(310, "d3.1"), objArray(320, "d3.2"), objArray(330, "d3.3")));
    // Verify the first batch
    RowSet actual = fixture.wrap(rsLoader.harvest());
    SingleRowSet expected = fixture.rowSetBuilder(schema)
        .addRow(10, objArray(objArray(110, "d1.1"), objArray(120, "d2.2")))
        .addRow(20, objArray())
        .addRow(30, objArray(objArray(310, "d3.1"), objArray(320, "d3.2"), objArray(330, "d3.3")))
        .build();
    new RowSetComparison(expected).verifyAndClearAll(actual);
    // In the second batch, write a row, then add a map member column.
    // The new column should be back-filled to empty for the row already written.
    rsLoader.startBatch();
    rootWriter.addRow(40, objArray(objArray(410, "d4.1"), objArray(420, "d4.2")));
    TupleWriter mapWriter = rootWriter.array("m").tuple();
    mapWriter.addColumn(SchemaBuilder.columnSchema("e", MinorType.VARCHAR, DataMode.OPTIONAL));
    rootWriter
        .addRow(50, objArray(objArray(510, "d5.1", "e5.1"), objArray(520, "d5.2", null)))
        .addRow(60, objArray(objArray(610, "d6.1", "e6.1"), objArray(620, "d6.2", null), objArray(630, "d6.3", "e6.3")));
    // Verify the second batch
    actual = fixture.wrap(rsLoader.harvest());
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .addMapArray("m")
          .add("c", MinorType.INT)
          .add("d", MinorType.VARCHAR)
          .addNullable("e", MinorType.VARCHAR)
          .resumeSchema()
        .buildSchema();
    expected = fixture.rowSetBuilder(expectedSchema)
        .addRow(40, objArray(objArray(410, "d4.1", null), objArray(420, "d4.2", null)))
        .addRow(50, objArray(objArray(510, "d5.1", "e5.1"), objArray(520, "d5.2", null)))
        .addRow(60, objArray(objArray(610, "d6.1", "e6.1"), objArray(620, "d6.2", null), objArray(630, "d6.3", "e6.3")))
        .build();
    new RowSetComparison(expected).verifyAndClearAll(actual);
    rsLoader.close();
}
Also used : SingleRowSet(org.apache.drill.test.rowSet.RowSet.SingleRowSet) RowSetComparison(org.apache.drill.test.rowSet.RowSetComparison) ResultSetLoader(org.apache.drill.exec.physical.rowSet.ResultSetLoader) TupleWriter(org.apache.drill.exec.vector.accessor.TupleWriter) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.test.rowSet.schema.SchemaBuilder) RowSet(org.apache.drill.test.rowSet.RowSet) RowSetLoader(org.apache.drill.exec.physical.rowSet.RowSetLoader) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
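
The objArray and strArray calls in these examples are static test helpers that build the Object[] and String[] cells expected by addRow. A minimal sketch of how they fit together, assuming the helpers come from the Drill test framework's RowSetUtilities class (the listing above does not show its imports, so the package is an assumption):

// Assumed static imports for the helpers used throughout these examples.
import static org.apache.drill.test.rowSet.RowSetUtilities.objArray;
import static org.apache.drill.test.rowSet.RowSetUtilities.strArray;

// objArray(...) builds one Object[]: a row, a map value, or a map-array cell.
Object[] mapValue = objArray(110, "d1.1");        // one {c, d} map value
Object[] mapArrayCell = objArray(                 // the "m" cell for a row:
    mapValue,                                     // an array of two map values
    objArray(120, "d2.2"));
// strArray(...) builds a String[] for a repeated VARCHAR column; with no
// arguments it denotes an empty array cell.
String[] emptyStrings = strArray();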

Example 7 with RowSetComparison

use of org.apache.drill.test.rowSet.RowSetComparison in project drill by axbaretto.

the class TestResultSetLoaderMapArray method testNestedArray.

@Test
public void testNestedArray() {
    TupleMetadata schema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .addMapArray("m")
          .add("c", MinorType.INT)
          .addArray("d", MinorType.VARCHAR)
          .resumeSchema()
        .buildSchema();
    ResultSetLoaderImpl.ResultSetOptions options = new OptionBuilder().setSchema(schema).build();
    ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options);
    RowSetLoader rootWriter = rsLoader.writer();
    // Write a couple of rows with arrays within arrays.
    // (And, of course, the Varchar is actually an array of
    // bytes, so that's three array levels.)
    rsLoader.startBatch();
    rootWriter
        .addRow(10, objArray(
            objArray(110, strArray("d1.1.1", "d1.1.2")),
            objArray(120, strArray("d1.2.1", "d1.2.2"))))
        .addRow(20, objArray())
        .addRow(30, objArray(
            objArray(310, strArray("d3.1.1", "d3.2.2")),
            objArray(320, strArray()),
            objArray(330, strArray("d3.3.1", "d1.2.2"))));
    // Verify the batch
    RowSet actual = fixture.wrap(rsLoader.harvest());
    SingleRowSet expected = fixture.rowSetBuilder(schema)
        .addRow(10, objArray(
            objArray(110, strArray("d1.1.1", "d1.1.2")),
            objArray(120, strArray("d1.2.1", "d1.2.2"))))
        .addRow(20, objArray())
        .addRow(30, objArray(
            objArray(310, strArray("d3.1.1", "d3.2.2")),
            objArray(320, strArray()),
            objArray(330, strArray("d3.3.1", "d1.2.2"))))
        .build();
    new RowSetComparison(expected).verifyAndClearAll(actual);
    rsLoader.close();
}
Also used : SingleRowSet(org.apache.drill.test.rowSet.RowSet.SingleRowSet) RowSetComparison(org.apache.drill.test.rowSet.RowSetComparison) ResultSetLoader(org.apache.drill.exec.physical.rowSet.ResultSetLoader) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.test.rowSet.schema.SchemaBuilder) RowSet(org.apache.drill.test.rowSet.RowSet) RowSetLoader(org.apache.drill.exec.physical.rowSet.RowSetLoader) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
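
For deeply nested structures the same rows can also be written with explicit column writers instead of the addRow shorthand. The following is a hedged sketch of that style, reusing rsLoader and rootWriter from the test above and assuming ArrayWriter and ScalarWriter from org.apache.drill.exec.vector.accessor; the writer methods shown (tuple(), scalar(), save()) are taken from Drill's column accessor API rather than from this listing:

// Hedged sketch: write the first row of the nested-array schema with explicit writers.
rsLoader.startBatch();
rootWriter.start();
rootWriter.scalar("a").setInt(10);

ArrayWriter mapArray = rootWriter.array("m");     // repeated map "m"
TupleWriter map = mapArray.tuple();               // writer for one map element
ScalarWriter c = map.scalar("c");
ScalarWriter d = map.array("d").scalar();         // repeated VARCHAR inside the map

c.setInt(110);
d.setString("d1.1.1");                            // each call appends one array element
d.setString("d1.1.2");
mapArray.save();                                  // close the first map element

c.setInt(120);
d.setString("d1.2.1");
d.setString("d1.2.2");
mapArray.save();                                  // close the second map element

rootWriter.save();                                // close the row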

Example 8 with RowSetComparison

use of org.apache.drill.test.rowSet.RowSetComparison in project drill by axbaretto.

the class TestResultSetLoaderMaps method testEmptyMapAddition.

/**
 * Test adding an empty map to a loader after writing the first row.
 * Then add columns in another batch. Yes, this is a bizarre condition,
 * but we must check it anyway for robustness.
 */
@Test
public void testEmptyMapAddition() {
    TupleMetadata schema = new SchemaBuilder().add("a", MinorType.INT).buildSchema();
    ResultSetLoaderImpl.ResultSetOptions options = new OptionBuilder().setSchema(schema).build();
    ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options);
    assertEquals(1, rsLoader.schemaVersion());
    RowSetLoader rootWriter = rsLoader.writer();
    // Start without the map. Add a map after the first row.
    rsLoader.startBatch();
    rootWriter.addRow(10);
    int mapIndex = rootWriter.addColumn(SchemaBuilder.columnSchema("m", MinorType.MAP, DataMode.REQUIRED));
    TupleWriter mapWriter = rootWriter.tuple(mapIndex);
    rootWriter.addRow(20, objArray()).addRow(30, objArray());
    RowSet actual = fixture.wrap(rsLoader.harvest());
    assertEquals(2, rsLoader.schemaVersion());
    assertEquals(3, actual.rowCount());
    // Validate first batch
    TupleMetadata expectedSchema = new SchemaBuilder().add("a", MinorType.INT).addMap("m").resumeSchema().buildSchema();
    SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
        .addRow(10, objArray())
        .addRow(20, objArray())
        .addRow(30, objArray())
        .build();
    new RowSetComparison(expected).verifyAndClearAll(actual);
    // Now add another column to the map
    rsLoader.startBatch();
    mapWriter.addColumn(SchemaBuilder.columnSchema("a", MinorType.VARCHAR, DataMode.REQUIRED));
    rootWriter.addRow(40, objArray("fred")).addRow(50, objArray("barney"));
    actual = fixture.wrap(rsLoader.harvest());
    assertEquals(3, rsLoader.schemaVersion());
    assertEquals(2, actual.rowCount());
    // Validate the second batch
    expectedSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .addMap("m")
          .add("a", MinorType.VARCHAR)
          .resumeSchema()
        .buildSchema();
    expected = fixture.rowSetBuilder(expectedSchema)
        .addRow(40, objArray("fred"))
        .addRow(50, objArray("barney"))
        .build();
    new RowSetComparison(expected).verifyAndClearAll(actual);
    rsLoader.close();
}
Also used : SingleRowSet(org.apache.drill.test.rowSet.RowSet.SingleRowSet) RowSetComparison(org.apache.drill.test.rowSet.RowSetComparison) ResultSetLoader(org.apache.drill.exec.physical.rowSet.ResultSetLoader) TupleWriter(org.apache.drill.exec.vector.accessor.TupleWriter) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.test.rowSet.schema.SchemaBuilder) RowSet(org.apache.drill.test.rowSet.RowSet) RowSetLoader(org.apache.drill.exec.physical.rowSet.RowSetLoader) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
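
The schemaVersion() checks above are what let downstream code notice that columns were added mid-stream. A short hedged sketch of that idea, built from the calls used in these tests plus RowSet.clear() to release the batch (an assumption not shown in this listing):

// Hedged sketch: detect a widened schema between harvested batches.
int lastVersion = rsLoader.schemaVersion();
rsLoader.startBatch();
// ... write rows, possibly adding columns via addColumn(...) ...
RowSet batch = fixture.wrap(rsLoader.harvest());
if (rsLoader.schemaVersion() > lastVersion) {
    // The batch carries newly added columns, back-filled for earlier rows.
    lastVersion = rsLoader.schemaVersion();
}
batch.clear();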

Example 9 with RowSetComparison

use of org.apache.drill.test.rowSet.RowSetComparison in project drill by axbaretto.

the class TestResultSetLoaderProtocol method testCaseInsensitiveSchema.

/**
 * Schemas are case insensitive by default. Verify that
 * the schema mechanism works, with emphasis on
 * case insensitivity.
 * <p>
 * The tests here and elsewhere build columns from a
 * <tt>MaterializedField</tt>. Doing so is rather old-school;
 * it is better to use the newer <tt>ColumnMetadata</tt>, which provides
 * additional information. The code here simply uses the <tt>MaterializedField</tt>
 * to create a <tt>ColumnMetadata</tt> implicitly.
 */
@Test
public void testCaseInsensitiveSchema() {
    ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator());
    RowSetLoader rootWriter = rsLoader.writer();
    TupleMetadata schema = rootWriter.schema();
    assertEquals(0, rsLoader.schemaVersion());
    // No columns defined in schema
    assertNull(schema.metadata("a"));
    try {
        schema.column(0);
        fail();
    } catch (IndexOutOfBoundsException e) {
        // Expected
    }
    try {
        rootWriter.column("a");
        fail();
    } catch (UndefinedColumnException e) {
        // Expected
    }
    try {
        rootWriter.column(0);
        fail();
    } catch (IndexOutOfBoundsException e) {
        // Expected
    }
    // Define a column
    assertEquals(0, rsLoader.schemaVersion());
    MaterializedField colSchema = SchemaBuilder.columnSchema("a", MinorType.VARCHAR, DataMode.REQUIRED);
    rootWriter.addColumn(colSchema);
    assertEquals(1, rsLoader.schemaVersion());
    // Can now be found, case insensitive
    assertTrue(colSchema.isEquivalent(schema.column(0)));
    ColumnMetadata colMetadata = schema.metadata(0);
    assertSame(colMetadata, schema.metadata("a"));
    assertSame(colMetadata, schema.metadata("A"));
    assertNotNull(rootWriter.column(0));
    assertNotNull(rootWriter.column("a"));
    assertNotNull(rootWriter.column("A"));
    assertEquals(1, schema.size());
    assertEquals(0, schema.index("a"));
    assertEquals(0, schema.index("A"));
    try {
        rootWriter.addColumn(colSchema);
        fail();
    } catch (IllegalArgumentException e) {
        // Expected
    }
    try {
        MaterializedField testCol = SchemaBuilder.columnSchema("A", MinorType.VARCHAR, DataMode.REQUIRED);
        rootWriter.addColumn(testCol);
        fail();
    } catch (IllegalArgumentException e) {
        // Expected
        assertTrue(e.getMessage().contains("Duplicate"));
    }
    // Can still add required fields while writing the first row.
    rsLoader.startBatch();
    rootWriter.start();
    rootWriter.scalar(0).setString("foo");
    MaterializedField col2 = SchemaBuilder.columnSchema("b", MinorType.VARCHAR, DataMode.REQUIRED);
    rootWriter.addColumn(col2);
    assertTrue(col2.isEquivalent(schema.column(1)));
    ColumnMetadata col2Metadata = schema.metadata(1);
    assertSame(col2Metadata, schema.metadata("b"));
    assertSame(col2Metadata, schema.metadata("B"));
    assertEquals(2, schema.size());
    assertEquals(1, schema.index("b"));
    assertEquals(1, schema.index("B"));
    rootWriter.scalar(1).setString("second");
    // After first row, can add an optional or repeated.
    // Also allows a required field: values will be back-filled.
    rootWriter.save();
    rootWriter.start();
    rootWriter.scalar(0).setString("bar");
    rootWriter.scalar(1).setString("");
    MaterializedField col3 = SchemaBuilder.columnSchema("c", MinorType.VARCHAR, DataMode.REQUIRED);
    rootWriter.addColumn(col3);
    assertTrue(col3.isEquivalent(schema.column(2)));
    ColumnMetadata col3Metadata = schema.metadata(2);
    assertSame(col3Metadata, schema.metadata("c"));
    assertSame(col3Metadata, schema.metadata("C"));
    assertEquals(3, schema.size());
    assertEquals(2, schema.index("c"));
    assertEquals(2, schema.index("C"));
    rootWriter.scalar("c").setString("c.2");
    MaterializedField col4 = SchemaBuilder.columnSchema("d", MinorType.VARCHAR, DataMode.OPTIONAL);
    rootWriter.addColumn(col4);
    assertTrue(col4.isEquivalent(schema.column(3)));
    ColumnMetadata col4Metadata = schema.metadata(3);
    assertSame(col4Metadata, schema.metadata("d"));
    assertSame(col4Metadata, schema.metadata("D"));
    assertEquals(4, schema.size());
    assertEquals(3, schema.index("d"));
    assertEquals(3, schema.index("D"));
    rootWriter.scalar("d").setString("d.2");
    MaterializedField col5 = SchemaBuilder.columnSchema("e", MinorType.VARCHAR, DataMode.REPEATED);
    rootWriter.addColumn(col5);
    assertTrue(col5.isEquivalent(schema.column(4)));
    ColumnMetadata col5Metadata = schema.metadata(4);
    assertSame(col5Metadata, schema.metadata("e"));
    assertSame(col5Metadata, schema.metadata("E"));
    assertEquals(5, schema.size());
    assertEquals(4, schema.index("e"));
    assertEquals(4, schema.index("E"));
    rootWriter.array(4).set("e1", "e2", "e3");
    rootWriter.save();
    // Verify. No reason to expect problems, but might as well check.
    RowSet result = fixture.wrap(rsLoader.harvest());
    assertEquals(5, rsLoader.schemaVersion());
    SingleRowSet expected = fixture.rowSetBuilder(result.batchSchema())
        .addRow("foo", "second", "", null, strArray())
        .addRow("bar", "", "c.2", "d.2", strArray("e1", "e2", "e3"))
        .build();
    new RowSetComparison(expected).verifyAndClearAll(result);
    // Handy way to test that close works to abort an in-flight batch
    // and clean up.
    rsLoader.close();
}
Also used : ColumnMetadata(org.apache.drill.exec.record.metadata.ColumnMetadata) SingleRowSet(org.apache.drill.test.rowSet.RowSet.SingleRowSet) RowSetComparison(org.apache.drill.test.rowSet.RowSetComparison) ResultSetLoader(org.apache.drill.exec.physical.rowSet.ResultSetLoader) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) RowSet(org.apache.drill.test.rowSet.RowSet) MaterializedField(org.apache.drill.exec.record.MaterializedField) RowSetLoader(org.apache.drill.exec.physical.rowSet.RowSetLoader) UndefinedColumnException(org.apache.drill.exec.vector.accessor.TupleWriter.UndefinedColumnException) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
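
The Javadoc above recommends the newer ColumnMetadata over MaterializedField for defining columns. A minimal sketch of that alternative, assuming TupleWriter.addColumn also accepts a ColumnMetadata (this listing only exercises the MaterializedField overload) and using a hypothetical column "f":

// Hedged sketch: define a column through ColumnMetadata instead of MaterializedField.
// The metadata is taken from a throwaway SchemaBuilder; a dedicated metadata
// factory could be used instead where available.
TupleMetadata tmp = new SchemaBuilder()
    .addNullable("f", MinorType.VARCHAR)
    .buildSchema();
ColumnMetadata fCol = tmp.metadata("f");          // carries type, mode, and extra hints
rootWriter.addColumn(fCol);                       // assumed ColumnMetadata overload
rootWriter.scalar("f").setString("f.1");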

Example 10 with RowSetComparison

use of org.apache.drill.test.rowSet.RowSetComparison in project drill by axbaretto.

the class TestResultSetLoaderProtocol method testInitialSchema.

/**
 * Provide a schema to the loader up front; the schema is built before
 * the first row.
 * <p>
 * Also verifies the test-time shortcut that sets an entire row of
 * values with a single addRow call.
 */
@Test
public void testInitialSchema() {
    TupleMetadata schema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .addNullable("b", MinorType.INT)
        .add("c", MinorType.VARCHAR)
        .buildSchema();
    ResultSetLoaderImpl.ResultSetOptions options = new OptionBuilder().setSchema(schema).build();
    ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options);
    RowSetLoader rootWriter = rsLoader.writer();
    rsLoader.startBatch();
    rootWriter.addRow(10, 100, "fred").addRow(20, null, "barney").addRow(30, 300, "wilma");
    RowSet actual = fixture.wrap(rsLoader.harvest());
    RowSet expected = fixture.rowSetBuilder(schema)
        .addRow(10, 100, "fred")
        .addRow(20, null, "barney")
        .addRow(30, 300, "wilma")
        .build();
    new RowSetComparison(expected).verifyAndClearAll(actual);
    rsLoader.close();
}
Also used : RowSetComparison(org.apache.drill.test.rowSet.RowSetComparison) ResultSetLoader(org.apache.drill.exec.physical.rowSet.ResultSetLoader) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.test.rowSet.schema.SchemaBuilder) SingleRowSet(org.apache.drill.test.rowSet.RowSet.SingleRowSet) RowSet(org.apache.drill.test.rowSet.RowSet) RowSetLoader(org.apache.drill.exec.physical.rowSet.RowSetLoader) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
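
Taken together, the examples share one verification skeleton: write a batch through the ResultSetLoader, harvest it, build an expected SingleRowSet with the matching schema, and let RowSetComparison.verifyAndClearAll() both compare and release the row sets. A condensed restatement of that pattern, assuming the same SubOperatorTest fixture used above:

// Common skeleton behind the RowSetComparison examples above.
TupleMetadata schema = new SchemaBuilder()
    .add("a", MinorType.INT)
    .buildSchema();
ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(),
    new OptionBuilder().setSchema(schema).build());
RowSetLoader rootWriter = rsLoader.writer();

rsLoader.startBatch();
rootWriter.addRow(10).addRow(20);

RowSet actual = fixture.wrap(rsLoader.harvest());
SingleRowSet expected = fixture.rowSetBuilder(schema)
    .addRow(10)
    .addRow(20)
    .build();

// Verifies row-by-row equality, then frees both the expected and actual batches.
new RowSetComparison(expected).verifyAndClearAll(actual);
rsLoader.close();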

Aggregations

RowSetComparison (org.apache.drill.test.rowSet.RowSetComparison): 289
Test (org.junit.Test): 271
RowSet (org.apache.drill.exec.physical.rowSet.RowSet): 232
TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata): 211
SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder): 191
RowSetBuilder (org.apache.drill.exec.physical.rowSet.RowSetBuilder): 142
ClusterTest (org.apache.drill.test.ClusterTest): 138
MockRecordBatch (org.apache.drill.exec.physical.impl.MockRecordBatch): 54
SubOperatorTest (org.apache.drill.test.SubOperatorTest): 53
QueryBuilder (org.apache.drill.test.QueryBuilder): 48
DirectRowSet (org.apache.drill.exec.physical.rowSet.DirectRowSet): 42
OperatorTest (org.apache.drill.categories.OperatorTest): 38
SingleRowSet (org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet): 29
RowSet (org.apache.drill.test.rowSet.RowSet): 26
SingleRowSet (org.apache.drill.test.rowSet.RowSet.SingleRowSet): 26
SchemaBuilder (org.apache.drill.test.rowSet.schema.SchemaBuilder): 25
StreamingAggregate (org.apache.drill.exec.physical.config.StreamingAggregate): 19
StreamingAggBatch (org.apache.drill.exec.physical.impl.aggregate.StreamingAggBatch): 19
RowSetLoader (org.apache.drill.exec.physical.rowSet.RowSetLoader): 18
BatchSchema (org.apache.drill.exec.record.BatchSchema): 18