Usage example of org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder in the Apache Drill project, taken from class TestReaderLevelProjection, method testDisjoint.
/**
 * Drill is unique in that we can select (a, b) from a data source
 * that only offers (c, d). We get null columns as a result.
 */
@Test
public void testDisjoint() {

  // Simulate SELECT b ... against a table that offers no matching column.
  final ScanLevelProjection scanProj = ScanLevelProjection.build(
      RowSetTestUtils.projectList("b"),
      ScanTestUtils.parsers());
  assertEquals(1, scanProj.columns().size());

  // Early-schema data source exposing only column "A".
  final TupleMetadata tableSchema = new SchemaBuilder()
      .add("A", MinorType.VARCHAR)
      .buildSchema();

  // Resolve the projection against the table schema; the unmatched
  // column must be supplied by the null-column mechanism.
  final NullColumnBuilder nullColBuilder = new NullBuilderBuilder().build();
  final ResolvedRow resolvedRow = new ResolvedRow(nullColBuilder);
  new ExplicitSchemaProjection(scanProj, tableSchema, resolvedRow, ScanTestUtils.resolvers());

  final List<ResolvedColumn> resolved = resolvedRow.columns();
  assertEquals(1, resolved.size());

  // The lone projected column resolves to the null-column source.
  final VectorSource nullSource = resolvedRow.nullBuilder();
  final ResolvedColumn col = resolved.get(0);
  assertEquals("b", col.name());
  assertEquals(0, col.sourceIndex());
  assertSame(nullSource, col.source());
}
Usage example of org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder in the Apache Drill project, taken from class TestRowBatchMerger, method testFlatWithNulls.
@Test
public void testFlatWithNulls() {

  // Input batch to be merged.
  RowSetSource sourceBatch = makeFirst();

  // Resolved row: a projected column, two injected null columns,
  // then another projected column.
  NullColumnBuilder nullCols = new NullBuilderBuilder().build();
  ResolvedRow row = new ResolvedRow(nullCols);
  row.add(new TestProjection(row, 1));
  row.add(row.nullBuilder().add("null1"));
  row.add(row.nullBuilder().add("null2", Types.optional(MinorType.VARCHAR)));
  row.add(new TestProjection(row, 0));

  // Materialize the null-column vectors for the batch's row count.
  ResultVectorCache vectorCache = new NullResultVectorCacheImpl(fixture.allocator());
  nullCols.build(vectorCache);
  nullCols.load(sourceBatch.rowSet().rowCount());

  // Merge projected and null columns into one output container.
  VectorContainer merged = new VectorContainer(fixture.allocator());
  row.project(sourceBatch.rowSet().container(), merged);
  merged.setRecordCount(sourceBatch.rowSet().rowCount());
  RowSet actual = fixture.wrap(merged);

  // Expect (a, null1, null2, d) with nulls in the injected columns.
  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("a", MinorType.INT)
      .addNullable("null1", MinorType.INT)
      .addNullable("null2", MinorType.VARCHAR)
      .add("d", MinorType.VARCHAR)
      .buildSchema();
  SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
      .addRow(10, null, null, "barney")
      .addRow(20, null, null, "wilma")
      .build();
  new RowSetComparison(expected).verifyAndClearAll(actual);
  nullCols.close();
}
Usage example of org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder in the Apache Drill project, taken from class TestSchemaSmoothing, method testSmaller.
/**
 * Case in which the table schema is a superset of the prior
 * schema. Discard prior schema. Turn off auto expansion of
 * metadata for a simpler test.
 */
@Test
public void testSmaller() {
  final ScanLevelProjection scanProj = ScanLevelProjection.build(
      RowSetTestUtils.projectAll(), ScanTestUtils.parsers());
  final SchemaSmoother smoother = new SchemaSmoother(scanProj, ScanTestUtils.resolvers());

  // Prior schema (a) is narrower than the table schema (a, b).
  final TupleMetadata priorSchema = new SchemaBuilder()
      .add("a", MinorType.INT)
      .buildSchema();
  final TupleMetadata tableSchema = new SchemaBuilder()
      .add("a", MinorType.INT)
      .add("b", MinorType.VARCHAR)
      .buildSchema();

  // First batch: the prior schema resolves as-is at version 1.
  {
    final ResolvedRow priorRow = new ResolvedRow(new NullBuilderBuilder().build());
    smoother.resolve(priorSchema, priorRow);
    assertEquals(1, smoother.schemaVersion());
    assertTrue(ScanTestUtils.schema(priorRow).isEquivalent(priorSchema));
  }

  // Second batch: the wider table schema forces a new schema version.
  {
    final ResolvedRow tableRow = new ResolvedRow(new NullBuilderBuilder().build());
    smoother.resolve(tableSchema, tableRow);
    assertEquals(2, smoother.schemaVersion());
    assertTrue(ScanTestUtils.schema(tableRow).isEquivalent(tableSchema));
  }
}
Usage example of org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder in the Apache Drill project, taken from class TestSchemaSmoothing, method doResolve.
/**
 * Resolves the given schema through the smoother into a fresh
 * resolved row backed by a default null-column builder.
 */
private ResolvedRow doResolve(SchemaSmoother smoother, TupleMetadata schema) {
  final ResolvedRow resolvedRow = new ResolvedRow(new NullBuilderBuilder().build());
  smoother.resolve(schema, resolvedRow);
  return resolvedRow;
}
Usage example of org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder in the Apache Drill project, taken from class TestImplicitColumnProjection, method testFileMetadata.
/**
 * Test a query with explicit mention of file metadata columns.
 */
@Test
public void testFileMetadata() {
  Path filePath = new Path("hdfs:///w/x/y/z.csv");
  ImplicitColumnManager metadataManager = new ImplicitColumnManager(
      fixture.getOptionManager(), standardOptions(filePath));

  // Project a table column plus the implicit file-metadata columns.
  // "filEPath" is deliberately mixed-case to exercise case-insensitive matching.
  ScanLevelProjection scanProj = ScanLevelProjection.build(
      RowSetTestUtils.projectList("a",
          ScanTestUtils.FULLY_QUALIFIED_NAME_COL,
          "filEPath",
          ScanTestUtils.FILE_NAME_COL,
          ScanTestUtils.SUFFIX_COL),
      ScanTestUtils.parsers(metadataManager.projectionParser()));
  assertEquals(5, scanProj.columns().size());
  assertEquals(ScanTestUtils.FULLY_QUALIFIED_NAME_COL, scanProj.columns().get(1).name());
  assertEquals("filEPath", scanProj.columns().get(2).name());
  assertEquals(ScanTestUtils.FILE_NAME_COL, scanProj.columns().get(3).name());
  assertEquals(ScanTestUtils.SUFFIX_COL, scanProj.columns().get(4).name());

  // Schema-level projection fills in the metadata values for this file.
  TupleMetadata tableSchema = new SchemaBuilder()
      .add("a", MinorType.VARCHAR)
      .buildSchema();
  metadataManager.startFile(filePath);
  ResolvedRow rootTuple = new ResolvedRow(new NullBuilderBuilder().build());
  new ExplicitSchemaProjection(scanProj, tableSchema, rootTuple,
      ScanTestUtils.resolvers(metadataManager));

  List<ResolvedColumn> columns = rootTuple.columns();
  assertEquals(5, columns.size());
  assertEquals("/w/x/y/z.csv", ((MetadataColumn) columns.get(1)).value());
  assertEquals("/w/x/y", ((MetadataColumn) columns.get(2)).value());
  assertEquals("z.csv", ((MetadataColumn) columns.get(3)).value());
  assertEquals("csv", ((MetadataColumn) columns.get(4)).value());
}
Aggregations