Use of org.apache.drill.exec.record.RecordBatch in project drill by apache.
From the class IteratorValidatorCreator, method getBatch.
@Override
public IteratorValidatorBatchIterator getBatch(FragmentContext context, IteratorValidator config,
    List<RecordBatch> children) throws ExecutionSetupException {
  Preconditions.checkArgument(children.size() == 1);
  RecordBatch child = children.iterator().next();
  IteratorValidatorBatchIterator iter = new IteratorValidatorBatchIterator(child);
  boolean validateBatches = context.getOptionSet().getOption(ExecConstants.ENABLE_VECTOR_VALIDATOR)
      || context.getConfig().getBoolean(ExecConstants.ENABLE_VECTOR_VALIDATION);
  iter.enableBatchValidation(validateBatches);
  logger.trace("Iterator validation enabled for " + child.getClass().getSimpleName()
      + (validateBatches ? " with vector validation" : ""));
  return iter;
}
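The creator does nothing beyond wrapping its single child and switching on per-batch vector validation when either the session option or the boot-time config flag is set. A minimal sketch of that wrap-and-enable step in isolation, assuming the caller already holds a child RecordBatch and a validation flag (wrapWithValidator is a hypothetical helper, not Drill API):

// Hypothetical helper, shown only to isolate the wrap-and-enable pattern above.
static IteratorValidatorBatchIterator wrapWithValidator(RecordBatch child, boolean validateBatches) {
  IteratorValidatorBatchIterator iter = new IteratorValidatorBatchIterator(child);
  iter.enableBatchValidation(validateBatches);
  return iter;
}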
Use of org.apache.drill.exec.record.RecordBatch in project drill by apache.
From the class OrderedPartitionSenderCreator, method getRoot.
@SuppressWarnings("resource")
@Override
public RootExec getRoot(FragmentContext context, OrderedPartitionSender config,
    List<RecordBatch> children) throws ExecutionSetupException {
  Preconditions.checkArgument(children.size() == 1);
  List<RecordBatch> ordered_children = Lists.newArrayList();
  ordered_children.add(new OrderedPartitionRecordBatch(config, children.iterator().next(), context));
  HashPartitionSender hpc = new HashPartitionSender(config.getOppositeMajorFragmentId(), config,
      config.getRef(), config.getDestinations());
  return new PartitionSenderRootExec(context, ordered_children.iterator().next(), hpc);
}
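Both creators above follow the same shape: require exactly one child, wrap it in an operator-specific RecordBatch, and hand the wrapped batch to the object that drives it. A stripped-down sketch of that shape, where SomeSender, SomeSenderRecordBatch, and SomeSenderRootExec are placeholder names rather than real Drill classes:

// Illustrative skeleton only; the operator-specific types are placeholders.
@Override
public RootExec getRoot(FragmentContext context, SomeSender config,
    List<RecordBatch> children) throws ExecutionSetupException {
  // The sender consumes a single upstream operator, so exactly one child is expected.
  Preconditions.checkArgument(children.size() == 1);
  RecordBatch wrapped = new SomeSenderRecordBatch(config, children.iterator().next(), context);
  return new SomeSenderRootExec(context, wrapped, config);
}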
Use of org.apache.drill.exec.record.RecordBatch in project drill by apache.
From the class TestMiniPlan, method testUnionFilterAll.
@Test
@Ignore("DRILL-5327: A bug in UnionAll handling empty inputs from both sides")
public void testUnionFilterAll() throws Exception {
  List<String> leftJsonBatches = Lists.newArrayList("[{\"a\": 5, \"b\" : 1 }]");
  List<String> rightJsonBatches = Lists.newArrayList("[{\"a\": 50, \"b\" : 10 }]");
  RecordBatch leftScan = new JsonScanBuilder().jsonBatches(leftJsonBatches).columnsToRead("a", "b").build();
  RecordBatch leftFilter = new PopBuilder().physicalOperator(new Filter(null, parseExpr("a < 0"), 1.0f))
      .addInput(leftScan).build();
  RecordBatch rightScan = new JsonScanBuilder().jsonBatches(rightJsonBatches).columnsToRead("a", "b").build();
  RecordBatch rightFilter = new PopBuilder().physicalOperator(new Filter(null, parseExpr("a < 0"), 1.0f))
      .addInput(rightScan).build();
  // The children are provided through the RecordBatch inputs (addInput), so UnionAll is given an empty list.
  RecordBatch batch = new PopBuilder().physicalOperator(new UnionAll(Collections.EMPTY_LIST))
      .addInput(leftFilter).addInput(rightFilter).build();
  BatchSchema expectedSchema = new SchemaBuilder()
      .addNullable("a", TypeProtos.MinorType.BIGINT)
      .addNullable("b", TypeProtos.MinorType.BIGINT)
      .withSVMode(BatchSchema.SelectionVectorMode.NONE)
      .build();
  new MiniPlanTestBuilder().root(batch).expectedSchema(expectedSchema).go();
}
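The test is ignored because both filters reject every row, hitting the DRILL-5327 empty-input case in UnionAll. A sketch of the same pipeline with predicates that keep the rows, so each side feeds a non-empty batch into the union; it reuses only the builder calls shown above, and the repeated baselineValues calls (one per expected row) are an assumption about MiniPlanTestBuilder, not verified against the framework:

// Sketch only, same fixture as the test above; flip the predicate so rows survive the filters.
@Test
public void testUnionFilterKeepsRows() throws Exception {
  RecordBatch leftScan = new JsonScanBuilder().jsonBatches(Lists.newArrayList("[{\"a\": 5, \"b\" : 1 }]"))
      .columnsToRead("a", "b").build();
  RecordBatch leftFilter = new PopBuilder().physicalOperator(new Filter(null, parseExpr("a > 0"), 1.0f))
      .addInput(leftScan).build();
  RecordBatch rightScan = new JsonScanBuilder().jsonBatches(Lists.newArrayList("[{\"a\": 50, \"b\" : 10 }]"))
      .columnsToRead("a", "b").build();
  RecordBatch rightFilter = new PopBuilder().physicalOperator(new Filter(null, parseExpr("a > 0"), 1.0f))
      .addInput(rightScan).build();
  RecordBatch union = new PopBuilder().physicalOperator(new UnionAll(Collections.EMPTY_LIST))
      .addInput(leftFilter).addInput(rightFilter).build();
  BatchSchema expectedSchema = new SchemaBuilder()
      .addNullable("a", TypeProtos.MinorType.BIGINT)
      .addNullable("b", TypeProtos.MinorType.BIGINT)
      .withSVMode(BatchSchema.SelectionVectorMode.NONE)
      .build();
  new MiniPlanTestBuilder().root(union).expectedSchema(expectedSchema)
      // Assumed: one baselineValues call per expected output row.
      .baselineValues(5L, 1L)
      .baselineValues(50L, 10L)
      .go();
}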
Use of org.apache.drill.exec.record.RecordBatch in project drill by apache.
From the class TestMiniPlan, method testEmptyJsonInput.
@Test
@Ignore("DRILL-5464: A bug in JsonRecordReader handling empty file")
public void testEmptyJsonInput() throws Exception {
  String emptyFile = FileUtils.getResourceAsFile("/project/pushdown/empty.json").toURI().toString();
  RecordBatch scanBatch = new JsonScanBuilder()
      .fileSystem(fs)
      .inputPaths(Lists.newArrayList(emptyFile))
      .build();
  new MiniPlanTestBuilder().root(scanBatch).expectZeroBatch(true).go();
}
Use of org.apache.drill.exec.record.RecordBatch in project drill by apache.
From the class TestMiniPlan, method testSimpleJson.
@Test
public void testSimpleJson() throws Exception {
  List<String> jsonBatches = Lists.newArrayList("{\"a\":100}");
  RecordBatch scanBatch = new JsonScanBuilder().jsonBatches(jsonBatches).build();
  BatchSchema expectedSchema = new SchemaBuilder().addNullable("a", TypeProtos.MinorType.BIGINT).build();
  new MiniPlanTestBuilder().root(scanBatch).expectedSchema(expectedSchema).baselineValues(100L).go();
}
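As the union test above shows, each element of the jsonBatches list becomes one incoming batch for the mock JSON scan. A sketch of the multi-batch variant of the simple test, again assuming (unverified) that MiniPlanTestBuilder accepts one baselineValues call per expected row:

// Sketch only: two entries in jsonBatches means the mock scan emits two record batches.
@Test
public void testTwoJsonBatches() throws Exception {
  List<String> jsonBatches = Lists.newArrayList("{\"a\":100}", "{\"a\":200}");
  RecordBatch scanBatch = new JsonScanBuilder().jsonBatches(jsonBatches).build();
  BatchSchema expectedSchema = new SchemaBuilder().addNullable("a", TypeProtos.MinorType.BIGINT).build();
  new MiniPlanTestBuilder().root(scanBatch).expectedSchema(expectedSchema)
      .baselineValues(100L)
      .baselineValues(200L)
      .go();
}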