use of org.apache.drill.exec.vector.complex.impl.VectorContainerWriter in project drill by axbaretto.
the class TestPromotableWriter method list.
@Test
public void list() throws Exception {
  BufferAllocator allocator = RootAllocatorFactory.newRoot(DrillConfig.create());
  TestOutputMutator output = new TestOutputMutator(allocator);
  ComplexWriter rootWriter = new VectorContainerWriter(output, true);
  MapWriter writer = rootWriter.rootAsMap();
  rootWriter.setPosition(0);
  {
    writer.map("map").bigInt("a").writeBigInt(1);
  }
  rootWriter.setPosition(1);
  {
    writer.map("map").float4("a").writeFloat4(2.0f);
  }
  rootWriter.setPosition(2);
  {
    writer.map("map").list("a").startList();
    writer.map("map").list("a").endList();
  }
  rootWriter.setPosition(3);
  {
    writer.map("map").list("a").startList();
    writer.map("map").list("a").bigInt().writeBigInt(3);
    writer.map("map").list("a").float4().writeFloat4(4);
    writer.map("map").list("a").endList();
  }
  rootWriter.setValueCount(4);
  BatchPrinter.printBatch(output.getContainer());
}
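The sequence above exercises field promotion: "a" inside the map is written first as BIGINT, then as FLOAT4, then as a LIST, so the promotable writer must widen the underlying vector to a union type rather than fail. A minimal sketch, assuming it runs right after the test body above, that inspects what the writer actually materialized:

for (VectorWrapper<?> w : output.getContainer()) {
  // Each MaterializedField shows the final (possibly promoted) type of "a"
  System.out.println(w.getField());
}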
use of org.apache.drill.exec.vector.complex.impl.VectorContainerWriter in project drill by apache.
the class TestScanBatchWriters method sanityTest.
@Test
public void sanityTest() throws Exception {
  Scan scanConfig = new AbstractSubScan("bob") {
    @Override
    public String getOperatorType() {
      return "";
    }
  };
  OperatorContext opContext = fixture.newOperatorContext(scanConfig);

  // Setup: normally done by ScanBatch
  VectorContainer container = new VectorContainer(fixture.allocator());
  OutputMutator output = new ScanBatch.Mutator(opContext, fixture.allocator(), container);
  DrillBuf buffer = opContext.getManagedBuffer();
  try (VectorContainerWriter writer = new VectorContainerWriter(output)) {
    // Per-batch
    writer.allocate();
    writer.reset();
    BaseWriter.MapWriter map = writer.rootAsMap();

    // Write one record: (10, "Fred", [100, 110, 120])
    map.integer("a").writeInt(10);
    byte[] bytes = "Fred".getBytes("UTF-8");
    buffer.setBytes(0, bytes, 0, bytes.length);
    map.varChar("b").writeVarChar(0, bytes.length, buffer);
    try (ListWriter list = map.list("c")) {
      list.startList();
      list.integer().writeInt(100);
      list.integer().writeInt(110);
      list.integer().writeInt(120);
      list.endList();

      // Write another record: (20, "Wilma", [])
      writer.setPosition(1);
      map.integer("a").writeInt(20);
      bytes = "Wilma".getBytes("UTF-8");
      buffer.setBytes(0, bytes, 0, bytes.length);
      map.varChar("b").writeVarChar(0, bytes.length, buffer);
      writer.setValueCount(2);

      // Wrap-up done by ScanBatch
      container.setRecordCount(2);
      container.buildSchema(SelectionVectorMode.NONE);
      RowSet rowSet = fixture.wrap(container);

      // Expected
      TupleMetadata schema = new SchemaBuilder()
          .addNullable("a", MinorType.INT)
          .addNullable("b", MinorType.VARCHAR)
          .addArray("c", MinorType.INT)
          .buildSchema();
      RowSet expected = fixture.rowSetBuilder(schema)
          .addRow(10, "Fred", new int[] { 100, 110, 120 })
          .addRow(20, "Wilma", null)
          .build();
      new RowSetComparison(expected).verifyAndClearAll(rowSet);
    }
  } finally {
    opContext.close();
  }
}
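The test follows the standard per-batch protocol for VectorContainerWriter: allocate, reset, write one position per record, then publish the count. A minimal sketch of that protocol factored into a hypothetical helper (the helper name and the field written are illustrative, not from the test):

private static void writeBatch(VectorContainerWriter writer, int rowCount) {
  writer.allocate();                 // size the underlying vectors
  writer.reset();                    // rewind to position 0
  BaseWriter.MapWriter map = writer.rootAsMap();
  for (int i = 0; i < rowCount; i++) {
    writer.setPosition(i);           // one position per record
    map.integer("a").writeInt(i);    // illustrative field write
  }
  writer.setValueCount(rowCount);    // publish the record count
}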
use of org.apache.drill.exec.vector.complex.impl.VectorContainerWriter in project drill by apache.
the class DruidRecordReader method setup.
@Override
public void setup(OperatorContext context, OutputMutator output) {
  this.writer = new VectorContainerWriter(output);
  this.jsonReader = new JsonReader.Builder(fragmentContext.getManagedBuffer())
      .schemaPathColumns(ImmutableList.copyOf(getColumns()))
      .skipOuterList(true)
      .build();
}
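setup() only wires the writer to the output mutator and builds the JsonReader; the row loop lives in next(). A minimal sketch of the loop that typically pairs with this setup in Drill record readers (MAX_RECORDS_PER_BATCH and the error handling are assumptions, not the Druid reader's actual code):

@Override
public int next() {
  writer.allocate();
  writer.reset();
  int count = 0;
  try {
    while (count < MAX_RECORDS_PER_BATCH) {
      writer.setPosition(count);
      if (jsonReader.write(writer) != JsonProcessor.ReadState.WRITE_SUCCEED) {
        break;  // end of stream
      }
      count++;
    }
  } catch (IOException e) {
    throw UserException.dataReadError(e).build(logger);
  }
  writer.setValueCount(count);
  return count;
}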
use of org.apache.drill.exec.vector.complex.impl.VectorContainerWriter in project drill by apache.
the class TestBsonRecordReader method setUp.
@Before
public void setUp() {
  allocator = new RootAllocator(Long.MAX_VALUE);
  TestOutputMutator mutator = new TestOutputMutator(allocator);
  writer = new VectorContainerWriter(mutator);
  bufferManager = new BufferManagerImpl(allocator);
  bsonReader = new BsonRecordReader(bufferManager.getManagedBuffer(1024), false, false);
}
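With the writer and reader wired up, the tests in this class feed individual BSON documents through them. A minimal sketch patterned on that usage (the field name and value are illustrative):

BsonDocument doc = new BsonDocument();
doc.append("seqNo", new BsonInt64(10));
writer.reset();
bsonReader.write(writer, new BsonDocumentReader(doc));  // parse one document into the writer
writer.setValueCount(1);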
use of org.apache.drill.exec.vector.complex.impl.VectorContainerWriter in project drill by apache.
the class AvroRecordReader method setup.
@Override
public void setup(final OperatorContext context, final OutputMutator output) throws ExecutionSetupException {
  writer = new VectorContainerWriter(output);
  try {
    reader = getReader(hadoop, fs);
    logger.debug("Processing file : {}, start position : {}, end position : {}", hadoop, start, end);
    reader.sync(this.start);
  } catch (IOException e) {
    throw new ExecutionSetupException(e);
  }
}
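Here setup() positions the Avro DataFileReader at the split's starting sync point; the per-record work again happens in next(). A minimal sketch of that pairing (the batch limit and the processRecord helper are assumptions for illustration):

writer.allocate();
writer.reset();
int count = 0;
while (count < 4096 && reader.hasNext() && !reader.pastSync(end)) {
  writer.setPosition(count);
  processRecord(reader.next(), writer);  // hypothetical per-record conversion
  count++;
}
writer.setValueCount(count);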