
Example 11 with BatchSchemaBuilder

Use of org.apache.drill.exec.record.BatchSchemaBuilder in project drill by apache.

From class TestEmptyInputSql, method testQueryEmptyCsvH.

/**
 * Test select * against an empty CSV with an empty header. The * is expanded into an empty list of fields.
 */
@Test
public void testQueryEmptyCsvH() throws Exception {
    BatchSchema expectedSchema = new BatchSchemaBuilder().withSchemaBuilder(new SchemaBuilder()).build();
    testBuilder().sqlQuery("select * from cp.`%s`", SINGLE_EMPTY_CSVH).schemaBaseLine(expectedSchema).build().run();
}
Also used: BatchSchema(org.apache.drill.exec.record.BatchSchema), BatchSchemaBuilder(org.apache.drill.exec.record.BatchSchemaBuilder), SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder), Test(org.junit.Test), UnlikelyTest(org.apache.drill.categories.UnlikelyTest)
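
For readers new to the builder pair, here is a minimal standalone sketch of the same construction outside the Drill test harness. The class name and the assertion on getFieldCount() are our additions, on the assumption that BatchSchema reports its column count that way; the example above relies on schemaBaseLine() instead.

import static org.junit.Assert.assertEquals;

import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.BatchSchemaBuilder;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.junit.Test;

public class EmptySchemaSketchTest {

    @Test
    public void emptySchemaHasNoColumns() {
        // An empty SchemaBuilder contributes no columns, so the resulting
        // BatchSchema describes a batch with zero fields -- the shape that
        // "select *" over a CSV with an empty header is expected to produce.
        BatchSchema schema = new BatchSchemaBuilder()
            .withSchemaBuilder(new SchemaBuilder())
            .build();
        assertEquals(0, schema.getFieldCount());
    }
}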

Example 12 with BatchSchemaBuilder

Use of org.apache.drill.exec.record.BatchSchemaBuilder in project drill by apache.

From class TestEmptyInputSql, method testQueryEmptyCsv.

/**
 * Test select * against an empty CSV file. The * is expanded into "columns : repeated-varchar",
 * which is the default column when reading a CSV file.
 */
@Test
public void testQueryEmptyCsv() throws Exception {
    SchemaBuilder schemaBuilder = new SchemaBuilder().addArray("columns", TypeProtos.MinorType.VARCHAR);
    BatchSchema expectedSchema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
    testBuilder().sqlQuery("select * from cp.`%s`", SINGLE_EMPTY_CSV).schemaBaseLine(expectedSchema).build().run();
}
Also used: BatchSchema(org.apache.drill.exec.record.BatchSchema), SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder), BatchSchemaBuilder(org.apache.drill.exec.record.BatchSchemaBuilder), Test(org.junit.Test), UnlikelyTest(org.apache.drill.categories.UnlikelyTest)
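
A brief side sketch of why this baseline looks the way it does: addArray() declares a repeated (array) column, so the single expected field is a REPEATED VARCHAR named "columns". The getColumn/getName/getType accessors used below are standard Drill getters, applied here under the assumption that they behave as in the project's other tests.

import static org.junit.Assert.assertEquals;

import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.BatchSchemaBuilder;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.junit.Test;

public class ColumnsArraySketchTest {

    @Test
    public void defaultCsvColumnIsRepeatedVarchar() {
        BatchSchema schema = new BatchSchemaBuilder()
            .withSchemaBuilder(new SchemaBuilder()
                .addArray("columns", TypeProtos.MinorType.VARCHAR))
            .build();

        // The default CSV column is a repeated VARCHAR named "columns".
        MaterializedField columns = schema.getColumn(0);
        assertEquals("columns", columns.getName());
        assertEquals(TypeProtos.MinorType.VARCHAR, columns.getType().getMinorType());
        assertEquals(TypeProtos.DataMode.REPEATED, columns.getType().getMode());
    }
}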

Example 13 with BatchSchemaBuilder

Use of org.apache.drill.exec.record.BatchSchemaBuilder in project drill by apache.

From class TestLoad, method testSchemaChange.

@Test
public void testSchemaChange() throws SchemaChangeException {
    final BufferAllocator allocator = RootAllocatorFactory.newRoot(drillConfig);
    final RecordBatchLoader batchLoader = new RecordBatchLoader(allocator);
    // Initial schema: a: INT, b: VARCHAR
    // Schema change: N/A
    SchemaBuilder schemaBuilder1 = new SchemaBuilder().add("a", MinorType.INT).add("b", MinorType.VARCHAR);
    BatchSchema schema1 = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder1).build();
    {
        assertTrue(loadBatch(allocator, batchLoader, schema1));
        assertTrue(schema1.isEquivalent(batchLoader.getSchema()));
        batchLoader.getContainer().zeroVectors();
    }
    // Same schema
    // Schema change: No
    {
        assertFalse(loadBatch(allocator, batchLoader, schema1));
        assertTrue(schema1.isEquivalent(batchLoader.getSchema()));
        batchLoader.getContainer().zeroVectors();
    }
    // Reverse columns: b: VARCHAR, a: INT
    // Schema change: No
    {
        SchemaBuilder schemaBuilder = new SchemaBuilder().add("b", MinorType.VARCHAR).add("a", MinorType.INT);
        BatchSchema schema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
        assertFalse(loadBatch(allocator, batchLoader, schema));
        // Potential bug: see DRILL-5828
        assertTrue(schema.isEquivalent(batchLoader.getSchema()));
        batchLoader.getContainer().zeroVectors();
    }
    // Drop a column: a: INT
    // Schema change: Yes
    {
        SchemaBuilder schemaBuilder = new SchemaBuilder().add("a", MinorType.INT);
        BatchSchema schema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
        assertTrue(loadBatch(allocator, batchLoader, schema));
        assertTrue(schema.isEquivalent(batchLoader.getSchema()));
        batchLoader.getContainer().zeroVectors();
    }
    // Add a column: a: INT, b: VARCHAR, c: INT
    // Schema change: Yes
    {
        assertTrue(loadBatch(allocator, batchLoader, schema1));
        assertTrue(schema1.isEquivalent(batchLoader.getSchema()));
        batchLoader.getContainer().zeroVectors();
        SchemaBuilder schemaBuilder = new SchemaBuilder()
            .add("a", MinorType.INT).add("b", MinorType.VARCHAR).add("c", MinorType.INT);
        BatchSchema schema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
        assertTrue(loadBatch(allocator, batchLoader, schema));
        assertTrue(schema.isEquivalent(batchLoader.getSchema()));
        batchLoader.getContainer().zeroVectors();
    }
    // Change a column type: a: INT, b: VARCHAR, c: VARCHAR
    // Schema change: Yes
    {
        SchemaBuilder schemaBuilder = new SchemaBuilder()
            .add("a", MinorType.INT).add("b", MinorType.VARCHAR).add("c", MinorType.VARCHAR);
        BatchSchema schema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
        assertTrue(loadBatch(allocator, batchLoader, schema));
        assertTrue(schema.isEquivalent(batchLoader.getSchema()));
        batchLoader.getContainer().zeroVectors();
    }
    // Empty schema
    // Schema change: Yes
    {
        BatchSchema schema = new BatchSchemaBuilder().withSchemaBuilder(new SchemaBuilder()).build();
        assertTrue(loadBatch(allocator, batchLoader, schema));
        assertTrue(schema.isEquivalent(batchLoader.getSchema()));
        batchLoader.getContainer().zeroVectors();
    }
    batchLoader.clear();
    allocator.close();
}
Also used: BatchSchema(org.apache.drill.exec.record.BatchSchema), RecordBatchLoader(org.apache.drill.exec.record.RecordBatchLoader), SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder), BatchSchemaBuilder(org.apache.drill.exec.record.BatchSchemaBuilder), BufferAllocator(org.apache.drill.exec.memory.BufferAllocator), ExecTest(org.apache.drill.exec.ExecTest), Test(org.junit.Test), VectorTest(org.apache.drill.categories.VectorTest)
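
The loadBatch() helper is not part of the excerpt. The sketch below is a hypothetical reconstruction that only illustrates the contract the assertions rely on: RecordBatchLoader.load() returns true when the incoming batch's schema differs from the loader's current schema, and false when it is unchanged. The createEmptyVectors() and serializeBuffers() helpers are placeholders, not the real TestLoad code.

// Hypothetical reconstruction -- not the actual TestLoad helper.
private boolean loadBatch(BufferAllocator allocator,
                          RecordBatchLoader batchLoader,
                          BatchSchema schema) throws SchemaChangeException {
    // Build zero-row value vectors for each field of the schema
    // (createEmptyVectors() is a placeholder helper).
    List<ValueVector> vectors = createEmptyVectors(allocator, schema);

    // Wrap the vectors in a WritableBatch to obtain a RecordBatchDef,
    // serialize the vector buffers into a single DrillBuf
    // (serializeBuffers() is likewise a placeholder), and hand both to
    // the loader. load() reports whether the schema changed.
    WritableBatch batch = WritableBatch.getBatchNoHV(0, vectors, false);
    try {
        DrillBuf buf = serializeBuffers(allocator, batch);
        try {
            return batchLoader.load(batch.getDef(), buf);
        } finally {
            buf.release();
        }
    } finally {
        batch.clear();
    }
}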

Example 14 with BatchSchemaBuilder

Use of org.apache.drill.exec.record.BatchSchemaBuilder in project drill by apache.

From class TestNullInputMiniPlan, method testJsonInputMixedWithEmptyFiles2.

/**
 * Test ScanBatch with mixed JSON files.
 * The input is: empty, empty, data_file, data_file.
 */
@Test
public void testJsonInputMixedWithEmptyFiles2() throws Exception {
    RecordBatch scanBatch = createScanBatchFromJson(SINGLE_EMPTY_JSON, SINGLE_EMPTY_JSON2, SINGLE_JSON, SINGLE_JSON2);
    SchemaBuilder schemaBuilder = new SchemaBuilder()
        .addNullable("id", TypeProtos.MinorType.BIGINT)
        .addNullable("name", TypeProtos.MinorType.VARCHAR);
    BatchSchema expectedSchema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
    new MiniPlanTestBuilder().root(scanBatch).expectSchema(expectedSchema)
        .baselineValues(100L, "John").baselineValues(1000L, "Joe")
        .expectBatchNum(2).go();
}
Also used: BatchSchema(org.apache.drill.exec.record.BatchSchema), RecordBatch(org.apache.drill.exec.record.RecordBatch), SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder), BatchSchemaBuilder(org.apache.drill.exec.record.BatchSchemaBuilder), Test(org.junit.Test)
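
As a cross-check on the builder shorthand, the same expected schema can be spelled out with explicit MaterializedFields: addNullable() corresponds to an OPTIONAL-mode column. The class below is our sketch, assuming Types.optional() and SchemaBuilder.add(MaterializedField) behave as in the other examples on this page.

import static org.junit.Assert.assertTrue;

import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.BatchSchemaBuilder;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.junit.Test;

public class NullableSchemaSketchTest {

    @Test
    public void builderAndExplicitFieldsAgree() {
        // addNullable(...) is shorthand for an OPTIONAL-mode column.
        BatchSchema viaBuilder = new BatchSchemaBuilder()
            .withSchemaBuilder(new SchemaBuilder()
                .addNullable("id", TypeProtos.MinorType.BIGINT)
                .addNullable("name", TypeProtos.MinorType.VARCHAR))
            .build();

        // The same schema spelled out with explicit MaterializedFields.
        BatchSchema viaFields = new BatchSchemaBuilder()
            .withSchemaBuilder(new SchemaBuilder()
                .add(MaterializedField.create("id", Types.optional(TypeProtos.MinorType.BIGINT)))
                .add(MaterializedField.create("name", Types.optional(TypeProtos.MinorType.VARCHAR))))
            .build();

        assertTrue(viaBuilder.isEquivalent(viaFields));
    }
}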

Example 15 with BatchSchemaBuilder

Use of org.apache.drill.exec.record.BatchSchemaBuilder in project drill by apache.

From class TestCastFunctions, method testCastUntypedNull.

@Test
public void testCastUntypedNull() throws Exception {
    String query = "select cast(coalesce(unk1, unk2) as %s) as coal from cp.`tpch/nation.parquet` limit 1";
    Map<String, TypeProtos.MajorType> typesMap = createCastTypeMap();
    for (Map.Entry<String, TypeProtos.MajorType> entry : typesMap.entrySet()) {
        String q = String.format(query, entry.getKey());
        MaterializedField field = MaterializedField.create("coal", entry.getValue());
        SchemaBuilder schemaBuilder = new SchemaBuilder().add(field);
        BatchSchema expectedSchema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
        // Validate schema
        testBuilder().sqlQuery(q).schemaBaseLine(expectedSchema).go();
        // Validate result
        testBuilder().sqlQuery(q).unOrdered().baselineColumns("coal").baselineValues(new Object[] { null }).go();
    }
}
Also used: BatchSchema(org.apache.drill.exec.record.BatchSchema), SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder), BatchSchemaBuilder(org.apache.drill.exec.record.BatchSchemaBuilder), MaterializedField(org.apache.drill.exec.record.MaterializedField), CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString), HashMap(java.util.HashMap), Map(java.util.Map), ClusterTest(org.apache.drill.test.ClusterTest), Test(org.junit.Test), SqlFunctionTest(org.apache.drill.categories.SqlFunctionTest), UnlikelyTest(org.apache.drill.categories.UnlikelyTest)
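
createCastTypeMap() is not shown in the excerpt. The sketch below is a hypothetical, trimmed-down stand-in that illustrates the expected shape of the map: SQL type names keyed to OPTIONAL MajorTypes, because casting an untyped null yields a nullable column. The concrete entries are illustrative and may differ from the real list in TestCastFunctions.

// Hypothetical sketch of the helper used above -- the real map in
// TestCastFunctions may contain different or additional entries.
private static Map<String, TypeProtos.MajorType> createCastTypeMap() {
    Map<String, TypeProtos.MajorType> typesMap = new HashMap<>();
    // Each key is the SQL type name substituted into CAST(... AS %s);
    // each value is the expected OPTIONAL (nullable) result type.
    typesMap.put("BOOLEAN", Types.optional(TypeProtos.MinorType.BIT));
    typesMap.put("INT", Types.optional(TypeProtos.MinorType.INT));
    typesMap.put("BIGINT", Types.optional(TypeProtos.MinorType.BIGINT));
    typesMap.put("FLOAT", Types.optional(TypeProtos.MinorType.FLOAT4));
    typesMap.put("DOUBLE", Types.optional(TypeProtos.MinorType.FLOAT8));
    typesMap.put("DATE", Types.optional(TypeProtos.MinorType.DATE));
    typesMap.put("TIME", Types.optional(TypeProtos.MinorType.TIME));
    typesMap.put("TIMESTAMP", Types.optional(TypeProtos.MinorType.TIMESTAMP));
    typesMap.put("VARCHAR", Types.optional(TypeProtos.MinorType.VARCHAR));
    return typesMap;
}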

Aggregations

BatchSchemaBuilder (org.apache.drill.exec.record.BatchSchemaBuilder): 58
SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder): 58
BatchSchema (org.apache.drill.exec.record.BatchSchema): 56
Test (org.junit.Test): 56
UnlikelyTest (org.apache.drill.categories.UnlikelyTest): 20
RecordBatch (org.apache.drill.exec.record.RecordBatch): 14
SubOperatorTest (org.apache.drill.test.SubOperatorTest): 10
SingleRowSet (org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet): 7
PlannerTest (org.apache.drill.categories.PlannerTest): 5
SqlFunctionTest (org.apache.drill.categories.SqlFunctionTest): 5
MaterializedField (org.apache.drill.exec.record.MaterializedField): 5
SqlTest (org.apache.drill.categories.SqlTest): 4
HashJoinPOP (org.apache.drill.exec.physical.config.HashJoinPOP): 4
ScanFixture (org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture): 4
RecordBatchLoader (org.apache.drill.exec.record.RecordBatchLoader): 4
ClusterTest (org.apache.drill.test.ClusterTest): 4
OperatorTest (org.apache.drill.categories.OperatorTest): 3
ParquetTest (org.apache.drill.categories.ParquetTest): 3
VectorTest (org.apache.drill.categories.VectorTest): 3
ExecTest (org.apache.drill.exec.ExecTest): 3