Use of org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow in project drill by apache.
The class TestRowBatchMerger, method testImplicitFlat.
@Test
public void testImplicitFlat() {

  // Create the first batch
  RowSetSource first = makeFirst();

  // Create the second batch
  RowSetSource second = makeSecond();

  // Build the projection: output columns (a, b, c, d) drawn from the two sources
  ResolvedRow resolvedTuple = new ResolvedRow(null);
  resolvedTuple.add(new TestProjection(resolvedTuple, 1));  // a: column 1 of the incoming (first) batch
  resolvedTuple.add(new TestProjection(second, 0));         // b: column 0 of the second batch
  resolvedTuple.add(new TestProjection(second, 1));         // c: column 1 of the second batch
  resolvedTuple.add(new TestProjection(resolvedTuple, 0));  // d: column 0 of the incoming (first) batch

  // Do the merge
  VectorContainer output = new VectorContainer(fixture.allocator());
  resolvedTuple.project(first.rowSet().container(), output);
  output.setRecordCount(first.rowSet().rowCount());
  RowSet result = fixture.wrap(output);

  // Verify
  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("a", MinorType.INT)
      .add("b", MinorType.INT)
      .add("c", MinorType.VARCHAR)
      .add("d", MinorType.VARCHAR)
      .buildSchema();
  SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
      .addRow(10, 1, "foo.csv", "barney")
      .addRow(20, 2, "foo.csv", "wilma")
      .build();
  new RowSetComparison(expected).verifyAndClearAll(result);
}
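The makeFirst() and makeSecond() fixtures are not shown on this page. The following is a sketch of what they presumably build, inferred from the projections and expected rows above; the exact schemas, row values, and the RowSetSource wrapper are assumptions, not the verified Drill source.

private RowSetSource makeFirst() {
  // Presumed shape (d VARCHAR, a INT), so column 1 is "a" and column 0 is "d"
  TupleMetadata firstSchema = new SchemaBuilder()
      .add("d", MinorType.VARCHAR)
      .add("a", MinorType.INT)
      .buildSchema();
  return new RowSetSource(
      fixture.rowSetBuilder(firstSchema)
          .addRow("barney", 10)
          .addRow("wilma", 20)
          .build());
}

private RowSetSource makeSecond() {
  // Presumed shape (b INT, c VARCHAR) holding the "implicit" file columns
  TupleMetadata secondSchema = new SchemaBuilder()
      .add("b", MinorType.INT)
      .add("c", MinorType.VARCHAR)
      .buildSchema();
  return new RowSetSource(
      fixture.rowSetBuilder(secondSchema)
          .addRow(1, "foo.csv")
          .addRow(2, "foo.csv")
          .build());
}

With those inputs, the merge above interleaves the two sources column by column, which is exactly what the expected (a, b, c, d) rows check.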
Use of org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow in project drill by apache.
The class TestRowBatchMerger, method testNullMaps.
/**
 * Test the ability to create maps from whole cloth if requested in
 * the projection list when the map is not available from the data
 * source.
 */
@Test
public void testNullMaps() {

  // Create the first batch
  RowSetSource first = makeFirst();

  // Create null columns
  NullColumnBuilder builder = new NullBuilderBuilder().build();
  ResolvedRow resolvedTuple = new ResolvedRow(builder);
  resolvedTuple.add(new TestProjection(resolvedTuple, 1));  // a: column 1 of the incoming (first) batch

  // Build map1 entirely from null columns, with a nested null map2
  ResolvedMapColumn nullMapCol = new ResolvedMapColumn(resolvedTuple, "map1");
  ResolvedTuple nullMap = nullMapCol.members();
  nullMap.add(nullMap.nullBuilder().add("null1"));
  nullMap.add(nullMap.nullBuilder().add("null2", Types.optional(MinorType.VARCHAR)));
  ResolvedMapColumn nullMapCol2 = new ResolvedMapColumn(nullMap, "map2");
  ResolvedTuple nullMap2 = nullMapCol2.members();
  nullMap2.add(nullMap2.nullBuilder().add("null3"));
  nullMap.add(nullMapCol2);
  resolvedTuple.add(nullMapCol);
  resolvedTuple.add(new TestProjection(resolvedTuple, 0));  // d: column 0 of the incoming (first) batch

  // Build the null values
  ResultVectorCache cache = new NullResultVectorCacheImpl(fixture.allocator());
  resolvedTuple.buildNulls(cache);

  // Load nulls
  resolvedTuple.loadNulls(first.rowSet().rowCount());

  // Do the merge
  VectorContainer output = new VectorContainer(fixture.allocator());
  resolvedTuple.project(first.rowSet().container(), output);
  resolvedTuple.setRowCount(first.rowSet().rowCount());
  RowSet result = fixture.wrap(output);

  // Verify
  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("a", MinorType.INT)
      .addMap("map1")
        .addNullable("null1", MinorType.INT)
        .addNullable("null2", MinorType.VARCHAR)
        .addMap("map2")
          .addNullable("null3", MinorType.INT)
          .resumeMap()
        .resumeSchema()
      .add("d", MinorType.VARCHAR)
      .buildSchema();
  SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
      .addRow(10, mapValue(null, null, singleMap(null)), "barney")
      .addRow(20, mapValue(null, null, singleMap(null)), "wilma")
      .build();
  new RowSetComparison(expected).verifyAndClearAll(result);
  resolvedTuple.close();
}
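The expected rows use mapValue(...) and singleMap(...) helpers, statically imported from Drill's row-set test utilities and not shown on this page. The stand-ins below are hypothetical, written only to show how the expected values read; they are not the real utility implementations.

// Hypothetical stand-ins (assumption, for illustration only):
// a map value is the array of its member values, and singleMap wraps one member.
static Object[] mapValue(Object... members) { return members; }
static Object[] singleMap(Object member) { return new Object[] { member }; }

Read this way, mapValue(null, null, singleMap(null)) matches the expected schema above: map1 = {null1: null, null2: null, map2: {null3: null}}, i.e. every whole-cloth column is filled with nulls for each input row.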