Search in sources :

Example 6 with SchemaBuilder

use of org.apache.drill.test.rowSet.schema.SchemaBuilder in project drill by axbaretto.

the class TestEmptyInputSql method testQueryEmptyCsv.

/**
 * Verifies {@code select *} against an empty CSV file. The wildcard is
 * expanded into "columns : repeated-varchar", which is the default column
 * produced when reading a CSV file, so that is the expected schema even
 * when the file holds no data.
 * @throws Exception
 */
@Test
public void testQueryEmptyCsv() throws Exception {
    final BatchSchema expectedSchema = new SchemaBuilder()
        .addArray("columns", TypeProtos.MinorType.VARCHAR)
        .build();
    testBuilder()
        .sqlQuery("select * from cp.`%s`", SINGLE_EMPTY_CSV)
        .schemaBaseLine(expectedSchema)
        .build()
        .run();
}
Also used : BatchSchema(org.apache.drill.exec.record.BatchSchema) SchemaBuilder(org.apache.drill.test.rowSet.schema.SchemaBuilder) Test(org.junit.Test) UnlikelyTest(org.apache.drill.categories.UnlikelyTest)

Example 7 with SchemaBuilder

use of org.apache.drill.test.rowSet.schema.SchemaBuilder in project drill by axbaretto.

the class TestStarQueries method testSchemaForStarOrderByLimit.

/**
 * DRILL-5845: checks the schema reported for a star query with
 * ORDER BY and LIMIT over the TPC-H nation table — all four nation
 * columns must survive, in table order.
 */
@Test
public void testSchemaForStarOrderByLimit() throws Exception {
    final String query = "select * from cp.`tpch/nation.parquet` order by n_name limit 1";
    final BatchSchema expectedSchema = new SchemaBuilder()
        .add("n_nationkey", TypeProtos.MinorType.INT)
        .add("n_name", TypeProtos.MinorType.VARCHAR)
        .add("n_regionkey", TypeProtos.MinorType.INT)
        .add("n_comment", TypeProtos.MinorType.VARCHAR)
        .build();
    testBuilder()
        .sqlQuery(query)
        .schemaBaseLine(expectedSchema)
        .build()
        .run();
}
Also used : BatchSchema(org.apache.drill.exec.record.BatchSchema) SchemaBuilder(org.apache.drill.test.rowSet.schema.SchemaBuilder) Test(org.junit.Test) UnlikelyTest(org.apache.drill.categories.UnlikelyTest) PlannerTest(org.apache.drill.categories.PlannerTest) SqlTest(org.apache.drill.categories.SqlTest)

Example 8 with SchemaBuilder

use of org.apache.drill.test.rowSet.schema.SchemaBuilder in project drill by axbaretto.

the class TestExternalSort method testNumericTypes.

/**
 * Exercises union type support in the sort using numeric types.
 * Drill does not support union types fully; the sort was adapted to handle
 * them, and this test simply verifies that it does. Two JSON tables share
 * column "a": one holds the even values as required INT, the other the odd
 * values as required FLOAT4. The descending sort over both is checked row
 * by row — the baselines expect alternating long and double values (the
 * types the columns arrive as when read back from JSON).
 *
 * @param testLegacy
 *          true to test the old (pre-1.11) sort, false to test the new (1.11
 *          and later) sort
 * @throws Exception
 */
private void testNumericTypes(boolean testLegacy) throws Exception {
    final int rowCount = 10000;
    final String tableDirName = "numericTypes";
    // a.json: even values 0, 2, ..., rowCount as a required INT column.
    {
        final BatchSchema schema = new SchemaBuilder()
            .add("a", Types.required(TypeProtos.MinorType.INT))
            .build();
        final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
        for (int value = 0; value <= rowCount; value += 2) {
            rowSetBuilder.addRow(value);
        }
        final RowSet rowSet = rowSetBuilder.build();
        final File tableFile = createTableFile(tableDirName, "a.json");
        new JsonFileBuilder(rowSet).build(tableFile);
        rowSet.clear();
    }
    // b.json: odd values 1, 3, ..., rowCount - 1 as a required FLOAT4 column,
    // written with two decimal places so they parse as floating point.
    {
        final BatchSchema schema = new SchemaBuilder()
            .add("a", Types.required(TypeProtos.MinorType.FLOAT4))
            .build();
        final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
        for (int value = 1; value <= rowCount; value += 2) {
            rowSetBuilder.addRow((float) value);
        }
        final RowSet rowSet = rowSetBuilder.build();
        final File tableFile = createTableFile(tableDirName, "b.json");
        new JsonFileBuilder(rowSet).setCustomFormatter("a", "%.2f").build(tableFile);
        rowSet.clear();
    }
    TestBuilder builder = testBuilder()
        .sqlQuery("select * from dfs.`%s` order by a desc", tableDirName)
        .optionSettingQueriesForTestQuery(getOptions(testLegacy))
        .ordered()
        .baselineColumns("a");
    // Expected order: rowCount down to 0, starting with a long (even value)
    // and strictly alternating long / double thereafter.
    boolean expectLong = true;
    for (int value = rowCount; value >= 0; value--) {
        if (expectLong) {
            builder.baselineValues((long) value);
        } else {
            builder.baselineValues((double) value);
        }
        expectLong = !expectLong;
    }
    builder.go();
}
Also used : RowSetBuilder(org.apache.drill.test.rowSet.RowSetBuilder) BatchSchema(org.apache.drill.exec.record.BatchSchema) SchemaBuilder(org.apache.drill.test.rowSet.schema.SchemaBuilder) RowSet(org.apache.drill.test.rowSet.RowSet) JsonFileBuilder(org.apache.drill.test.rowSet.file.JsonFileBuilder) File(java.io.File) TestBuilder(org.apache.drill.test.TestBuilder)

Example 9 with SchemaBuilder

use of org.apache.drill.test.rowSet.schema.SchemaBuilder in project drill by axbaretto.

the class TestOperatorRecordBatch method testBatchAccessor.

/**
 * The record batch abstraction has a bunch of methods to work with a vector container.
 * Rather than simply exposing the container itself, the batch instead exposes various
 * container operations. Probably an artifact of its history. In any event, make
 * sure those methods are passed through to the container accessor.
 * <p>
 * Pass-throughs covered: schema, record count, outgoing container, vector
 * iterator, {@code getValueVectorId()}, {@code getValueAccessorById()}, and
 * the (unsupported here) selection-vector getters.
 */
@Test
public void testBatchAccessor() {
    // Two-column schema (INT, VARCHAR) with two sample rows to drive the accessors.
    BatchSchema schema = new SchemaBuilder().add("a", MinorType.INT).add("b", MinorType.VARCHAR).build();
    SingleRowSet rs = fixture.rowSetBuilder(schema).addRow(10, "fred").addRow(20, "wilma").build();
    MockOperatorExec opExec = new MockOperatorExec(rs.container());
    opExec.nextCalls = 1;
    try (OperatorRecordBatch opBatch = makeOpBatch(opExec)) {
        assertEquals(IterOutcome.OK_NEW_SCHEMA, opBatch.next());
        // Schema, row count and container must be passed through unchanged.
        assertEquals(schema, opBatch.getSchema());
        assertEquals(2, opBatch.getRecordCount());
        assertSame(rs.container(), opBatch.getOutgoingContainer());
        // The iterator should visit the vectors in schema order: "a" then "b".
        Iterator<VectorWrapper<?>> iter = opBatch.iterator();
        assertEquals("a", iter.next().getValueVector().getField().getName());
        assertEquals("b", iter.next().getValueVector().getField().getName());
        // Not a full test of the schema path; just make sure that the
        // pass-through to the Vector Container works.
        SchemaPath path = SchemaPath.create(NamePart.newBuilder().setName("a").build());
        TypedFieldId id = opBatch.getValueVectorId(path);
        assertEquals(MinorType.INT, id.getFinalType().getMinorType());
        assertEquals(1, id.getFieldIds().length);
        assertEquals(0, id.getFieldIds()[0]);
        // Same lookup for the second column, which sits at field index 1.
        path = SchemaPath.create(NamePart.newBuilder().setName("b").build());
        id = opBatch.getValueVectorId(path);
        assertEquals(MinorType.VARCHAR, id.getFinalType().getMinorType());
        assertEquals(1, id.getFieldIds().length);
        assertEquals(1, id.getFieldIds()[0]);
        // Sanity check of getValueAccessorById()
        VectorWrapper<?> w = opBatch.getValueAccessorById(IntVector.class, 0);
        assertNotNull(w);
        assertEquals("a", w.getValueVector().getField().getName());
        w = opBatch.getValueAccessorById(VarCharVector.class, 1);
        assertNotNull(w);
        assertEquals("b", w.getValueVector().getField().getName());
        // This batch carries no selection vectors, so both SV getters
        // are expected to reject the call.
        try {
            opBatch.getSelectionVector2();
            fail();
        } catch (UnsupportedOperationException e) {
        // Expected
        }
        try {
            opBatch.getSelectionVector4();
            fail();
        } catch (UnsupportedOperationException e) {
        // Expected
        }
    } catch (Exception e) {
        fail(e.getMessage());
    }
    // Closing the batch must close the wrapped operator executor.
    assertTrue(opExec.closeCalled);
}
Also used : SingleRowSet(org.apache.drill.test.rowSet.RowSet.SingleRowSet) VectorWrapper(org.apache.drill.exec.record.VectorWrapper) VarCharVector(org.apache.drill.exec.vector.VarCharVector) UserException(org.apache.drill.common.exceptions.UserException) BatchSchema(org.apache.drill.exec.record.BatchSchema) SchemaPath(org.apache.drill.common.expression.SchemaPath) TypedFieldId(org.apache.drill.exec.record.TypedFieldId) SchemaBuilder(org.apache.drill.test.rowSet.schema.SchemaBuilder) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)

Example 10 with SchemaBuilder

use of org.apache.drill.test.rowSet.schema.SchemaBuilder in project drill by axbaretto.

the class TestOperatorRecordBatch method testSv2.

/**
 * Verifies that a selection vector 2 is passed through to the caller when
 * the container accessor supports one.
 */
@Test
public void testSv2() {
    BatchSchema schema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("b", MinorType.VARCHAR)
        .build();
    SingleRowSet rowSet = fixture.rowSetBuilder(schema)
        .addRow(10, "fred")
        .addRow(20, "wilma")
        .withSv2()
        .build();
    // Wire both the container and its SV2 into the accessor.
    ContainerAndSv2Accessor accessor = new ContainerAndSv2Accessor();
    accessor.setContainer(rowSet.container());
    accessor.setSelectionVector(rowSet.getSv2());
    MockOperatorExec opExec = new MockOperatorExec(accessor);
    opExec.nextCalls = 1;
    try (OperatorRecordBatch opBatch = makeOpBatch(opExec)) {
        assertEquals(IterOutcome.OK_NEW_SCHEMA, opBatch.next());
        // The batch must hand back the very same SV2 instance.
        assertSame(rowSet.getSv2(), opBatch.getSelectionVector2());
    } catch (Exception e) {
        fail();
    }
    assertTrue(opExec.closeCalled);
    // Must release SV2
    rowSet.clear();
}
Also used : SingleRowSet(org.apache.drill.test.rowSet.RowSet.SingleRowSet) ContainerAndSv2Accessor(org.apache.drill.exec.physical.impl.protocol.VectorContainerAccessor.ContainerAndSv2Accessor) BatchSchema(org.apache.drill.exec.record.BatchSchema) SchemaBuilder(org.apache.drill.test.rowSet.schema.SchemaBuilder) UserException(org.apache.drill.common.exceptions.UserException) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)

Aggregations

SchemaBuilder (org.apache.drill.test.rowSet.schema.SchemaBuilder)175 Test (org.junit.Test)154 BatchSchema (org.apache.drill.exec.record.BatchSchema)102 SingleRowSet (org.apache.drill.test.rowSet.RowSet.SingleRowSet)91 SubOperatorTest (org.apache.drill.test.SubOperatorTest)86 TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata)65 RowSet (org.apache.drill.test.rowSet.RowSet)52 RowSetReader (org.apache.drill.test.rowSet.RowSetReader)50 ResultSetLoader (org.apache.drill.exec.physical.rowSet.ResultSetLoader)38 RowSetLoader (org.apache.drill.exec.physical.rowSet.RowSetLoader)38 ScalarReader (org.apache.drill.exec.vector.accessor.ScalarReader)26 RowSetComparison (org.apache.drill.test.rowSet.RowSetComparison)25 DrillTest (org.apache.drill.test.DrillTest)21 ColumnMetadata (org.apache.drill.exec.record.metadata.ColumnMetadata)20 ScalarWriter (org.apache.drill.exec.vector.accessor.ScalarWriter)18 RowSetBuilder (org.apache.drill.test.rowSet.RowSetBuilder)18 ScalarElementReader (org.apache.drill.exec.vector.accessor.ScalarElementReader)17 AbstractColumnMetadata (org.apache.drill.exec.record.metadata.AbstractColumnMetadata)16 TupleWriter (org.apache.drill.exec.vector.accessor.TupleWriter)16 RecordBatch (org.apache.drill.exec.record.RecordBatch)14