Use of org.apache.drill.exec.record.metadata.SchemaBuilder in project drill by apache.
From the class TestNullColumnLoader, method testNullColumnBuilderWithSchema:
/**
* Test using an output schema, along with a default value property,
* to define a default value for missing columns.
*/
@Test
public void testNullColumnBuilderWithSchema() {
// Note: upper case names in schema, lower case in "projection" list
final TupleMetadata outputSchema = new SchemaBuilder()
    .add("IntReq", MinorType.INT)
    .add("StrReq", MinorType.VARCHAR)
    .addNullable("IntOpt", MinorType.INT)
    .addNullable("StrOpt", MinorType.VARCHAR)
    .addNullable("DubOpt", MinorType.FLOAT8) // No default
    .buildSchema();
outputSchema.metadata("intReq").setDefaultValue("10");
outputSchema.metadata("strReq").setDefaultValue("foo");
outputSchema.metadata("intOpt").setDefaultValue("20");
outputSchema.metadata("strOpt").setDefaultValue("bar");
final ResultVectorCache cache = new NullResultVectorCacheImpl(fixture.allocator());
final NullColumnBuilder builder = new NullBuilderBuilder()
    .setNullType(Types.optional(MinorType.VARCHAR))
    .setOutputSchema(outputSchema)
    .build();
builder.add("strReq");
builder.add("strOpt");
builder.add("dubOpt");
builder.add("intReq");
builder.add("intOpt");
builder.add("extra");
builder.build(cache);
// Create a batch
builder.load(2);
// Verify values and types
final TupleMetadata expectedSchema = new SchemaBuilder()
    .add("strReq", MinorType.VARCHAR)
    .addNullable("strOpt", MinorType.VARCHAR)
    .addNullable("dubOpt", MinorType.FLOAT8)
    .add("intReq", MinorType.INT)
    .addNullable("intOpt", MinorType.INT)
    .addNullable("extra", MinorType.VARCHAR)
    .buildSchema();
final SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
    .addRow("foo", null, null, 10, null, null)
    .addRow("foo", null, null, 10, null, null)
    .build();
RowSetUtilities.verify(expected, fixture.wrap(builder.output()));
builder.close();
}
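For reference, the core pattern this test exercises (declare a schema with SchemaBuilder, then attach per-column defaults through the column metadata) can be isolated as in the sketch below. This is a minimal sketch rather than code from the test: the class and method names (DefaultValueSchemaSketch, schemaWithDefaults) are invented for illustration, and the import paths are the usual Drill locations for these types, so verify them against your Drill version.

import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.record.metadata.TupleMetadata;

public class DefaultValueSchemaSketch {
  public static TupleMetadata schemaWithDefaults() {
    // Build the schema first, then set defaults on individual columns.
    TupleMetadata schema = new SchemaBuilder()
        .add("IntReq", MinorType.INT)             // required, given a default below
        .addNullable("StrOpt", MinorType.VARCHAR) // nullable, given a default below
        .addNullable("DubOpt", MinorType.FLOAT8)  // nullable, no default
        .buildSchema();
    // Default values are supplied as strings and parsed to the column type.
    schema.metadata("IntReq").setDefaultValue("10");
    schema.metadata("StrOpt").setDefaultValue("bar");
    return schema;
  }
}

In the test above, the missing columns with defaults then materialize with those values ("foo", 10), while the nullable column without a default (dubOpt) comes back as null.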
Use of org.apache.drill.exec.record.metadata.SchemaBuilder in project drill by apache.
From the class TestColumnsArray, method testReqularCol:
/**
* Verify that if the `columns` column is not required, then `columns`
* is treated like any other column.
*/
@Test
public void testReqularCol() {
ScanSchemaOrchestrator scanner = buildScan(false, RowSetTestUtils.projectList(ColumnsScanFramework.COLUMNS_COL));
TupleMetadata tableSchema = new SchemaBuilder().add(ColumnsScanFramework.COLUMNS_COL, MinorType.VARCHAR).buildSchema();
ReaderSchemaOrchestrator reader = scanner.startReader();
ResultSetLoader rsLoader = reader.makeTableLoader(tableSchema);
reader.defineSchema();
reader.startBatch();
rsLoader.writer().addRow("fred");
reader.endBatch();
SingleRowSet expected = fixture.rowSetBuilder(tableSchema).addRow("fred").build();
RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
scanner.close();
}
Use of org.apache.drill.exec.record.metadata.SchemaBuilder in project drill by apache.
From the class TestScanOperExecOuputSchema, method testStrictProvidedSchemaWithWildcardAndSpecialCols:
@Test
public void testStrictProvidedSchemaWithWildcardAndSpecialCols() {
TupleMetadata providedSchema = new SchemaBuilder()
    .add("a", MinorType.INT)    // Projected, in reader
    .add("d", MinorType.BIGINT) // Projected, not in reader
    .add("e", MinorType.BIGINT) // Not projected, not in reader
    .buildSchema();
providedSchema.metadata("d").setDefaultValue("20");
providedSchema.metadata("e").setDefaultValue("30");
providedSchema.setProperty(TupleMetadata.IS_STRICT_SCHEMA_PROP, Boolean.TRUE.toString());
providedSchema.metadata("a").setBooleanProperty(ColumnMetadata.EXCLUDE_FROM_WILDCARD, true);
BaseScanFixtureBuilder builder = new BaseScanFixtureBuilder();
// Project schema only
builder.setProjection(RowSetTestUtils.projectAll());
builder.addReader(new MockSimpleReader());
builder.builder.providedSchema(providedSchema);
builder.builder.nullType(Types.optional(MinorType.VARCHAR));
ScanFixture scanFixture = builder.build();
ScanOperatorExec scan = scanFixture.scanOp;
TupleMetadata expectedSchema = new SchemaBuilder().add("d", MinorType.BIGINT).add("e", MinorType.BIGINT).buildSchema();
// Initial schema
assertTrue(scan.buildSchema());
{
SingleRowSet expected = fixture.rowSetBuilder(expectedSchema).build();
RowSetUtilities.verify(expected, fixture.wrap(scan.batchAccessor().container()));
}
// Batch with defaults and null types
assertTrue(scan.next());
{
SingleRowSet expected = fixture.rowSetBuilder(expectedSchema).addRow(20L, 30L).build();
RowSetUtilities.verify(expected, fixture.wrap(scan.batchAccessor().container()));
}
assertFalse(scan.next());
scanFixture.close();
}
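The two properties that drive this test's behavior, a strict schema and a column excluded from wildcard expansion, can be set on any provided schema. The sketch below isolates just that step; StrictSchemaSketch and strictProvidedSchema are invented names, and the import paths are assumed to be the standard Drill locations.

import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.record.metadata.ColumnMetadata;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.record.metadata.TupleMetadata;

public class StrictSchemaSketch {
  public static TupleMetadata strictProvidedSchema() {
    TupleMetadata schema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("d", MinorType.BIGINT)
        .buildSchema();
    // Strict: a wildcard (*) projection expands only to the columns named in
    // this schema, so extra reader columns do not appear in the scan output.
    schema.setProperty(TupleMetadata.IS_STRICT_SCHEMA_PROP, Boolean.TRUE.toString());
    // Also drop "a" from wildcard expansion, as the test above does, leaving
    // only "d" in the expanded projection.
    schema.metadata("a").setBooleanProperty(ColumnMetadata.EXCLUDE_FROM_WILDCARD, true);
    return schema;
  }
}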
Use of org.apache.drill.exec.record.metadata.SchemaBuilder in project drill by apache.
From the class TestScanOperExecOuputSchema, method testProvidedSchema:
/**
* Test an output schema.
* <ul>
* <li>Column a has an input type of VARCHAR, and output type of INT,
* and the framework will insert an implicit conversion.</li>
* <li>Column d has an output type of BIGINT, is projected, but is
* not provided by the reader. It will use its default value of 20L.</li>
* <li>Column f is not in the provided schema, is not provided by the
* reader, but is projected, so it will use the default null type
* of VARCHAR, with a null value.</li>
* </ul>
*/
@Test
public void testProvidedSchema() {
TupleMetadata providedSchema = new SchemaBuilder()
    .add("a", MinorType.INT)    // Projected, in reader
    .add("d", MinorType.BIGINT) // Projected, not in reader
    .add("e", MinorType.BIGINT) // Not projected, not in reader
    .buildSchema();
providedSchema.metadata("d").setDefaultValue("20");
providedSchema.metadata("e").setDefaultValue("30");
BaseScanFixtureBuilder builder = new BaseScanFixtureBuilder();
builder.setProjection(new String[] { "a", "b", "d", "f" });
builder.addReader(new MockSimpleReader());
builder.builder.providedSchema(providedSchema);
builder.builder.nullType(Types.optional(MinorType.VARCHAR));
ScanFixture scanFixture = builder.build();
ScanOperatorExec scan = scanFixture.scanOp;
TupleMetadata expectedSchema = new SchemaBuilder()
    .add("a", MinorType.INT)
    .add("b", MinorType.VARCHAR)
    .add("d", MinorType.BIGINT)
    .addNullable("f", MinorType.VARCHAR)
    .buildSchema();
// Initial schema
assertTrue(scan.buildSchema());
{
SingleRowSet expected = fixture.rowSetBuilder(expectedSchema).build();
RowSetUtilities.verify(expected, fixture.wrap(scan.batchAccessor().container()));
}
// Batch with defaults and null types
assertTrue(scan.next());
{
SingleRowSet expected = fixture.rowSetBuilder(expectedSchema).addRow(10, "foo", 20L, null).build();
RowSetUtilities.verify(expected, fixture.wrap(scan.batchAccessor().container()));
}
assertFalse(scan.next());
scanFixture.close();
}
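To make the implicit-conversion bullet concrete: per the Javadoc, the reader supplies column a as VARCHAR while the provided schema declares it as INT, so the framework converts the value ("10" becomes 10 in the expected batch). The sketch below merely contrasts the two declarations; the reader-side schema is an assumption taken from the Javadoc rather than from MockSimpleReader's source, and ConversionSchemaSketch is an invented name.

import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.record.metadata.TupleMetadata;

public class ConversionSchemaSketch {
  // What the reader is assumed to declare (from the Javadoc): "a" arrives as VARCHAR.
  static final TupleMetadata READER_SIDE = new SchemaBuilder()
      .add("a", MinorType.VARCHAR)
      .buildSchema();

  // What the provided schema declares: "a" is output as INT, so the scan
  // framework inserts a VARCHAR-to-INT conversion.
  static final TupleMetadata PROVIDED = new SchemaBuilder()
      .add("a", MinorType.INT)
      .buildSchema();
}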
Use of org.apache.drill.exec.record.metadata.SchemaBuilder in project drill by apache.
From the class TestScanOperExecOuputSchema, method testProvidedSchemaWithWildcard:
/**
* Test a non-strict provided schema with a wildcard projection and extra
* reader columns. The reader's extra columns are included in the output.
*/
@Test
public void testProvidedSchemaWithWildcard() {
TupleMetadata providedSchema = new SchemaBuilder()
    .add("a", MinorType.INT)    // Projected, in reader
    .add("d", MinorType.BIGINT) // Projected, not in reader
    .add("e", MinorType.BIGINT) // Not projected, not in reader
    .buildSchema();
providedSchema.metadata("d").setDefaultValue("20");
providedSchema.metadata("e").setDefaultValue("30");
BaseScanFixtureBuilder builder = new BaseScanFixtureBuilder();
builder.setProjection(RowSetTestUtils.projectAll());
builder.addReader(new MockSimpleReader());
builder.builder.providedSchema(providedSchema);
builder.builder.nullType(Types.optional(MinorType.VARCHAR));
ScanFixture scanFixture = builder.build();
ScanOperatorExec scan = scanFixture.scanOp;
TupleMetadata expectedSchema = new SchemaBuilder()
    .add("a", MinorType.INT)
    .add("d", MinorType.BIGINT)
    .add("e", MinorType.BIGINT)
    .add("b", MinorType.VARCHAR)
    .add("c", MinorType.VARCHAR)
    .buildSchema();
// Initial schema
assertTrue(scan.buildSchema());
{
SingleRowSet expected = fixture.rowSetBuilder(expectedSchema).build();
RowSetUtilities.verify(expected, fixture.wrap(scan.batchAccessor().container()));
}
// Batch with defaults and null types
assertTrue(scan.next());
{
SingleRowSet expected = fixture.rowSetBuilder(expectedSchema).addRow(10, 20L, 30L, "foo", "bar").build();
RowSetUtilities.verify(expected, fixture.wrap(scan.batchAccessor().container()));
}
assertFalse(scan.next());
scanFixture.close();
}