Example 31 with BatchSchemaBuilder

Use of org.apache.drill.exec.record.BatchSchemaBuilder in the Apache Drill project.

From class TestEmptyInputSql, method testRenameProjectEmptyJson.

@Test
public void testRenameProjectEmptyJson() throws Exception {
    SchemaBuilder schemaBuilder = new SchemaBuilder()
        .addNullable("WeekId", TypeProtos.MinorType.INT)
        .addNullable("ProductName", TypeProtos.MinorType.INT);
    final BatchSchema expectedSchema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
    testBuilder()
        .sqlQuery("select WeekId, Product as ProductName from (select CAST(`dir0` as INT) AS WeekId, " + "Product from cp.`%s`)", SINGLE_EMPTY_JSON)
        .schemaBaseLine(expectedSchema)
        .build().run();
}
Also used: BatchSchema (org.apache.drill.exec.record.BatchSchema), SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder), BatchSchemaBuilder (org.apache.drill.exec.record.BatchSchemaBuilder), Test (org.junit.Test), UnlikelyTest (org.apache.drill.categories.UnlikelyTest)

Example 32 with BatchSchemaBuilder

Use of org.apache.drill.exec.record.BatchSchemaBuilder in the Apache Drill project.

From class TestEmptyInputSql, method testQueryConstExprEmptyJson.

/**
 * Test a query against an empty file. The select clause has three expressions:
 * 1.0 + 100.0, a constant expression, resolves to required FLOAT8 (or VARDECIMAL when decimal types are enabled);
 * cast(100 as varchar(100)) resolves to required varchar(100);
 * cast(columns as varchar(100)) resolves to nullable varchar(100).
 */
@Test
public void testQueryConstExprEmptyJson() throws Exception {
    try {
        alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, false);
        SchemaBuilder schemaBuilder = new SchemaBuilder()
            .add("key", TypeProtos.MinorType.FLOAT8)
            .add("name", TypeProtos.MinorType.VARCHAR, 100)
            .addNullable("name2", TypeProtos.MinorType.VARCHAR, 100);
        BatchSchema expectedSchema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
        testBuilder()
            .sqlQuery("select 1.0 + 100.0 as key, " + " cast(100 as varchar(100)) as name, " +
                " cast(columns as varchar(100)) as name2 " + " from cp.`%s` ", SINGLE_EMPTY_JSON)
            .schemaBaseLine(expectedSchema)
            .build().run();
        alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
        schemaBuilder = new SchemaBuilder()
            .add("key", TypeProtos.MajorType.newBuilder()
                .setMinorType(TypeProtos.MinorType.VARDECIMAL)
                .setMode(TypeProtos.DataMode.REQUIRED)
                .setPrecision(5).setScale(1).build())
            .add("name", TypeProtos.MinorType.VARCHAR, 100)
            .addNullable("name2", TypeProtos.MinorType.VARCHAR, 100);
        expectedSchema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
        testBuilder()
            .sqlQuery("select 1.0 + 100.0 as key, " + " cast(100 as varchar(100)) as name, " +
                " cast(columns as varchar(100)) as name2 " + " from cp.`%s` ", SINGLE_EMPTY_JSON)
            .schemaBaseLine(expectedSchema)
            .build().run();
    } finally {
        resetSessionOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
    }
}
Also used: BatchSchema (org.apache.drill.exec.record.BatchSchema), SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder), BatchSchemaBuilder (org.apache.drill.exec.record.BatchSchemaBuilder), Test (org.junit.Test), UnlikelyTest (org.apache.drill.categories.UnlikelyTest)

Example 33 with BatchSchemaBuilder

Use of org.apache.drill.exec.record.BatchSchemaBuilder in the Apache Drill project.

From class TestEmptyInputSql, method testQueryMapArrayEmptyJson.

@Test
public void testQueryMapArrayEmptyJson() throws Exception {
    SchemaBuilder schemaBuilder = new SchemaBuilder()
        .addNullable("col1", TypeProtos.MinorType.INT)
        .addNullable("col2", TypeProtos.MinorType.INT)
        .addNullable("col3", TypeProtos.MinorType.INT);
    BatchSchema expectedSchema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
    testBuilder()
        .sqlQuery("select foo.a.b as col1, foo.columns[2] as col2, foo.bar.columns[3] as col3 from cp.`%s` as foo", SINGLE_EMPTY_JSON)
        .schemaBaseLine(expectedSchema)
        .build().run();
}
Also used: BatchSchema (org.apache.drill.exec.record.BatchSchema), SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder), BatchSchemaBuilder (org.apache.drill.exec.record.BatchSchemaBuilder), Test (org.junit.Test), UnlikelyTest (org.apache.drill.categories.UnlikelyTest)

Example 34 with BatchSchemaBuilder

Use of org.apache.drill.exec.record.BatchSchemaBuilder in the Apache Drill project.

From class TestUnionAll, method testUnionAllBothEmptyDirs.

@Test
public void testUnionAllBothEmptyDirs() throws Exception {
    SchemaBuilder schemaBuilder = new SchemaBuilder().addNullable("key", TypeProtos.MinorType.INT);
    BatchSchema expectedSchema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
    testBuilder()
        .sqlQuery("SELECT key FROM dfs.tmp.`%1$s` UNION ALL SELECT key FROM dfs.tmp.`%1$s`", EMPTY_DIR_NAME)
        .schemaBaseLine(expectedSchema)
        .build().run();
}
Also used: BatchSchema (org.apache.drill.exec.record.BatchSchema), SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder), BatchSchemaBuilder (org.apache.drill.exec.record.BatchSchemaBuilder), OperatorTest (org.apache.drill.categories.OperatorTest), Test (org.junit.Test), UnlikelyTest (org.apache.drill.categories.UnlikelyTest), SqlTest (org.apache.drill.categories.SqlTest)

Example 35 with BatchSchemaBuilder

Use of org.apache.drill.exec.record.BatchSchemaBuilder in the Apache Drill project.

From class TestStarQueries, method testSchemaForStarOrderByLimit.

// DRILL-5845
@Test
public void testSchemaForStarOrderByLimit() throws Exception {
    final String query = "select * from cp.`tpch/nation.parquet` order by n_name limit 1";
    SchemaBuilder schemaBuilder = new SchemaBuilder()
        .add("n_nationkey", TypeProtos.MinorType.INT)
        .add("n_name", TypeProtos.MinorType.VARCHAR)
        .add("n_regionkey", TypeProtos.MinorType.INT)
        .add("n_comment", TypeProtos.MinorType.VARCHAR);
    BatchSchema expectedSchema = new BatchSchemaBuilder().withSchemaBuilder(schemaBuilder).build();
    testBuilder().sqlQuery(query).schemaBaseLine(expectedSchema).build().run();
}
Also used: BatchSchema (org.apache.drill.exec.record.BatchSchema), SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder), BatchSchemaBuilder (org.apache.drill.exec.record.BatchSchemaBuilder), Test (org.junit.Test), UnlikelyTest (org.apache.drill.categories.UnlikelyTest), PlannerTest (org.apache.drill.categories.PlannerTest), SqlTest (org.apache.drill.categories.SqlTest)

Aggregations

BatchSchemaBuilder (org.apache.drill.exec.record.BatchSchemaBuilder): 58
SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder): 58
BatchSchema (org.apache.drill.exec.record.BatchSchema): 56
Test (org.junit.Test): 56
UnlikelyTest (org.apache.drill.categories.UnlikelyTest): 20
RecordBatch (org.apache.drill.exec.record.RecordBatch): 14
SubOperatorTest (org.apache.drill.test.SubOperatorTest): 10
SingleRowSet (org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet): 7
PlannerTest (org.apache.drill.categories.PlannerTest): 5
SqlFunctionTest (org.apache.drill.categories.SqlFunctionTest): 5
MaterializedField (org.apache.drill.exec.record.MaterializedField): 5
SqlTest (org.apache.drill.categories.SqlTest): 4
HashJoinPOP (org.apache.drill.exec.physical.config.HashJoinPOP): 4
ScanFixture (org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture): 4
RecordBatchLoader (org.apache.drill.exec.record.RecordBatchLoader): 4
ClusterTest (org.apache.drill.test.ClusterTest): 4
OperatorTest (org.apache.drill.categories.OperatorTest): 3
ParquetTest (org.apache.drill.categories.ParquetTest): 3
VectorTest (org.apache.drill.categories.VectorTest): 3
ExecTest (org.apache.drill.exec.ExecTest): 3
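
All of the examples above follow the same schema-baseline pattern: describe the expected column-level schema with SchemaBuilder, wrap it into a BatchSchema via BatchSchemaBuilder, then run the query and assert only on its schema with schemaBaseLine(). The condensed sketch below is not taken from the Drill sources: the class name ExpectedSchemaSketch, the queried classpath file, and the assumption that the test extends org.apache.drill.test.BaseTestQuery (taken here to be what supplies testBuilder() in tests like these) are illustrative only.

import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.BatchSchemaBuilder;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.test.BaseTestQuery; // assumed base class providing testBuilder()
import org.junit.Test;

public class ExpectedSchemaSketch extends BaseTestQuery { // hypothetical test class

  @Test
  public void testSchemaOnlyAssertion() throws Exception {
    // Column-level schema: a single nullable INT column, as in the empty-input tests above.
    SchemaBuilder schemaBuilder = new SchemaBuilder()
        .addNullable("key", TypeProtos.MinorType.INT);

    // Wrap the column schema into a batch-level schema.
    BatchSchema expectedSchema = new BatchSchemaBuilder()
        .withSchemaBuilder(schemaBuilder)
        .build();

    // Run the query and compare only the result schema against the expectation.
    testBuilder()
        .sqlQuery("select key from cp.`%s`", "some/empty.json") // hypothetical classpath file
        .schemaBaseLine(expectedSchema)
        .build()
        .run();
  }
}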