Use of org.apache.drill.exec.ops.OperatorContext in project drill by apache.
The class TestScanBatchWriters, method sanityTest.
@Test
public void sanityTest() throws Exception {
  Scan scanConfig = new AbstractSubScan("bob") {
    @Override
    public String getOperatorType() {
      return "";
    }
  };
  OperatorContext opContext = fixture.newOperatorContext(scanConfig);

  // Setup: normally done by ScanBatch
  VectorContainer container = new VectorContainer(fixture.allocator());
  OutputMutator output = new ScanBatch.Mutator(opContext, fixture.allocator(), container);
  DrillBuf buffer = opContext.getManagedBuffer();

  try (VectorContainerWriter writer = new VectorContainerWriter(output)) {
    // Per-batch
    writer.allocate();
    writer.reset();
    BaseWriter.MapWriter map = writer.rootAsMap();

    // Write one record: (10, "Fred", [100, 110, 120])
    map.integer("a").writeInt(10);
    byte[] bytes = "Fred".getBytes("UTF-8");
    buffer.setBytes(0, bytes, 0, bytes.length);
    map.varChar("b").writeVarChar(0, bytes.length, buffer);
    try (ListWriter list = map.list("c")) {
      list.startList();
      list.integer().writeInt(100);
      list.integer().writeInt(110);
      list.integer().writeInt(120);
      list.endList();

      // Write another record: (20, "Wilma", [])
      writer.setPosition(1);
      map.integer("a").writeInt(20);
      bytes = "Wilma".getBytes("UTF-8");
      buffer.setBytes(0, bytes, 0, bytes.length);
      map.varChar("b").writeVarChar(0, bytes.length, buffer);
      writer.setValueCount(2);

      // Wrap-up: normally done by ScanBatch
      container.setRecordCount(2);
      container.buildSchema(SelectionVectorMode.NONE);
      RowSet rowSet = fixture.wrap(container);

      // Expected result
      TupleMetadata schema = new SchemaBuilder()
          .addNullable("a", MinorType.INT)
          .addNullable("b", MinorType.VARCHAR)
          .addArray("c", MinorType.INT)
          .buildSchema();
      RowSet expected = fixture.rowSetBuilder(schema)
          .addRow(10, "Fred", new int[] { 100, 110, 120 })
          .addRow(20, "Wilma", null)
          .build();
      new RowSetComparison(expected).verifyAndClearAll(rowSet);
    }
  } finally {
    opContext.close();
  }
}
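Every snippet on this page follows the same acquire/use/close discipline around OperatorContext. A minimal sketch of that lifecycle, distilled from the test above ("fixture" and "popConfig" stand in for any OperatorFixture and physical-operator config):

  OperatorContext opContext = fixture.newOperatorContext(popConfig);
  try {
    // Managed buffers are tracked by the context and released when it closes
    DrillBuf buffer = opContext.getManagedBuffer();
    // ... drive the operator or writers under test ...
  } finally {
    opContext.close();
  }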
Use of org.apache.drill.exec.ops.OperatorContext in project drill by apache.
The class ParquetScanBatchCreator, method getBatch.
@Override
public ScanBatch getBatch(ExecutorFragmentContext context, ParquetRowGroupScan rowGroupScan, List<RecordBatch> children) throws ExecutionSetupException {
  // A row-group scan is a leaf operator: it accepts no child batches
  Preconditions.checkArgument(children.isEmpty());
  OperatorContext oContext = context.newOperatorContext(rowGroupScan);
  return getBatch(context, rowGroupScan, oContext);
}
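Unlike the test snippets, which obtain the context from an OperatorFixture and close it in a finally block, this production path creates it from the ExecutorFragmentContext and passes it straight into the getBatch overload that builds the ScanBatch.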
Use of org.apache.drill.exec.ops.OperatorContext in project drill by apache.
The class TestSorter, method runSorterTest.
public void runSorterTest(Sort popConfig, SingleRowSet rowSet, SingleRowSet expected) throws Exception {
  OperatorContext opContext = fixture.newOperatorContext(popConfig);
  SorterWrapper sorter = new SorterWrapper(opContext);
  try {
    sorter.sortBatch(rowSet.container(), rowSet.getSv2());
    RowSetUtilities.verify(expected, rowSet);
    sorter.close();
  } finally {
    opContext.close();
  }
}
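A caller supplies the Sort definition plus matching input and expected row sets. Schematically, with makeSortConfig a hypothetical helper standing in for however the test assembles its Sort pop config, and withSv2() assumed from Drill's RowSetBuilder to attach the selection vector that sortBatch() consumes:

  Sort popConfig = makeSortConfig(); // hypothetical: sorts column "a" ascending
  SingleRowSet input = fixture.rowSetBuilder(schema)
      .addRow(2, "second")
      .addRow(1, "first")
      .withSv2() // the sorter reorders rows through this SV2
      .build();
  SingleRowSet expected = fixture.rowSetBuilder(schema)
      .addRow(1, "first")
      .addRow(2, "second")
      .build();
  runSorterTest(popConfig, input, expected);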
Use of org.apache.drill.exec.ops.OperatorContext in project drill by apache.
The class TestSortEmitOutcome, method testSpillWithNoEmitOutcome.
/**
 * Verifies successful spilling in the absence of an EMIT outcome.
 *
 * @throws Exception
 */
@Test
public void testSpillWithNoEmitOutcome() throws Exception {
  final OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
  // Configuration that forces Sort to spill after buffering 2 incoming batches with data
  builder.configBuilder().put(ExecConstants.EXTERNAL_SORT_BATCH_LIMIT, 2);
  final OperatorFixture fixture_local = builder.build();

  final RowSet.SingleRowSet local_nonEmptyInputRowSet1 = fixture_local.rowSetBuilder(inputSchema)
      .addRow(3, 30, "item3")
      .addRow(2, 20, "item2")
      .build();
  final RowSet.SingleRowSet local_nonEmptyInputRowSet2 = fixture_local.rowSetBuilder(inputSchema)
      .addRow(1, 10, "item1")
      .build();
  final RowSet.SingleRowSet local_nonEmptyInputRowSet3 = fixture_local.rowSetBuilder(inputSchema)
      .addRow(4, 40, "item4")
      .build();

  inputContainer.add(local_nonEmptyInputRowSet1.container());
  inputContainer.add(local_nonEmptyInputRowSet2.container());
  inputContainer.add(local_nonEmptyInputRowSet3.container());
  inputOutcomes.add(OK_NEW_SCHEMA);
  inputOutcomes.add(OK);
  inputOutcomes.add(OK);

  final PhysicalOperator mockPopConfig_local = new MockStorePOP(null);
  final OperatorContext opContext_local = fixture_local.getFragmentContext().newOperatorContext(mockPopConfig_local);
  final MockRecordBatch mockInputBatch = new MockRecordBatch(fixture_local.getFragmentContext(), opContext_local,
      inputContainer, inputOutcomes, local_nonEmptyInputRowSet1.container().getSchema());
  final ExternalSortBatch sortBatch_local = new ExternalSortBatch(sortPopConfig, fixture_local.getFragmentContext(), mockInputBatch);

  assertTrue(sortBatch_local.next() == OK_NEW_SCHEMA);
  assertTrue(sortBatch_local.next() == OK_NEW_SCHEMA);
  assertTrue(sortBatch_local.getRecordCount() == 4);
  assertTrue(sortBatch_local.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.NONE);
  assertTrue(sortBatch_local.next() == NONE);

  // Release memory for row sets
  local_nonEmptyInputRowSet1.clear();
  local_nonEmptyInputRowSet2.clear();
  local_nonEmptyInputRowSet3.clear();
  sortBatch_local.close();
  fixture_local.close();
}
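The spill-specific part of that test is a single fixture setting. Condensed, and assuming OperatorFixture is auto-closeable (the close() call above suggests it implements AutoCloseable):

  OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
  // Spill once two incoming data batches have been buffered
  builder.configBuilder().put(ExecConstants.EXTERNAL_SORT_BATCH_LIMIT, 2);
  try (OperatorFixture fixture = builder.build()) {
    // ... feed MockRecordBatch into ExternalSortBatch as above ...
  }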
Use of org.apache.drill.exec.ops.OperatorContext in project drill by apache.
The class HashPartitionTest, method noSpillBuildSideTest.
@Test
public void noSpillBuildSideTest() throws Exception {
  new HashPartitionFixture().run(new HashPartitionTestCase() {

    private RowSet buildRowSet;
    private RowSet probeRowSet;

    @Override
    public CloseableRecordBatch createBuildBatch(BatchSchema schema, FragmentContext context) {
      buildRowSet = new RowSetBuilder(context.getAllocator(), schema)
          .addRow(1, "green")
          .addRow(3, "red")
          .addRow(2, "blue")
          .build();
      return new MockRecordBatch.Builder().sendData(buildRowSet).build(context);
    }

    @Override
    public void createResultBuildBatch(BatchSchema schema, FragmentContext context) {
    }

    @Override
    public CloseableRecordBatch createProbeBatch(BatchSchema schema, FragmentContext context) {
      probeRowSet = new RowSetBuilder(context.getAllocator(), schema)
          .addRow(.5f, "yellow")
          .addRow(1.5f, "blue")
          .addRow(2.5f, "black")
          .build();
      return new MockRecordBatch.Builder().sendData(probeRowSet).build(context);
    }

    @Override
    public void run(SpillSet spillSet, BatchSchema buildSchema, BatchSchema probeSchema,
        RecordBatch buildBatch, RecordBatch probeBatch, ChainedHashTable baseHashTable,
        FragmentContext context, OperatorContext operatorContext) throws Exception {
      final HashPartition hashPartition = new HashPartition(context, context.getAllocator(), baseHashTable,
          buildBatch, probeBatch, false, 10, spillSet, 0,
          0, // only '1' has a special treatment
          2);
      final HashJoinMemoryCalculator.BuildSidePartitioning noopCalc =
          new HashJoinMemoryCalculatorImpl.NoopBuildSidePartitioningImpl();
      hashPartition.appendInnerRow(buildBatch.getContainer(), 0, 10, noopCalc);
      hashPartition.appendInnerRow(buildBatch.getContainer(), 1, 11, noopCalc);
      hashPartition.appendInnerRow(buildBatch.getContainer(), 2, 12, noopCalc);
      hashPartition.completeAnInnerBatch(false, false);
      hashPartition.buildContainersHashTableAndHelper();

      {
        int compositeIndex = hashPartition.probeForKey(0, 16);
        Assert.assertEquals(-1, compositeIndex);
      }
      {
        int compositeIndex = hashPartition.probeForKey(1, 12);
        int startIndex = hashPartition.getStartIndex(compositeIndex).getLeft();
        int nextIndex = hashPartition.getNextIndex(startIndex);
        Assert.assertEquals(2, startIndex);
        Assert.assertEquals(-1, nextIndex);
      }
      {
        int compositeIndex = hashPartition.probeForKey(2, 15);
        Assert.assertEquals(-1, compositeIndex);
      }

      buildRowSet.clear();
      probeRowSet.clear();
      hashPartition.close();
    }
  });
}
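The three probe blocks encode the lookup contract these HashPartition calls rely on: probeForKey returns -1 on a miss, and on a hit the matching build-side rows form a chain walked via getStartIndex and getNextIndex until -1 terminates it. A generic sketch using only the methods shown above:

  int composite = hashPartition.probeForKey(probeRowIdx, hashCode);
  if (composite != -1) {
    // Follow the chain of build-side rows sharing this key
    int matchIdx = hashPartition.getStartIndex(composite).getLeft();
    while (matchIdx != -1) {
      // ... consume the matching build row at matchIdx ...
      matchIdx = hashPartition.getNextIndex(matchIdx);
    }
  }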