Use of org.apache.drill.exec.physical.impl.ScanBatch in project drill by axbaretto.
The class MockScanBatchCreator, method getBatch.
@Override
public ScanBatch getBatch(ExecutorFragmentContext context, MockSubScanPOP config, List<RecordBatch> children) throws ExecutionSetupException {
  Preconditions.checkArgument(children.isEmpty());
  final List<MockScanEntry> entries = config.getReadEntries();
  final List<RecordReader> readers = new LinkedList<>();
  for (final MockTableDef.MockScanEntry e : entries) {
    if (e.isExtended()) {
      readers.add(new ExtendedMockRecordReader(e));
    } else {
      readers.add(new MockRecordReader(context, e));
    }
  }
  return new ScanBatch(config, context, readers);
}
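All of these creators follow the same contract: a BatchCreator turns a deserialized sub-scan into a ScanBatch by building one RecordReader per unit of work. A minimal sketch of that contract, assuming a hypothetical MySubScan operator with a getScanSpecs() accessor and a hypothetical MyRecordReader; the Drill types and the ScanBatch constructor are the ones used in the snippets here.

import java.util.LinkedList;
import java.util.List;

import com.google.common.base.Preconditions;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.exec.ops.ExecutorFragmentContext;
import org.apache.drill.exec.physical.impl.BatchCreator;
import org.apache.drill.exec.physical.impl.ScanBatch;
import org.apache.drill.exec.record.RecordBatch;
import org.apache.drill.exec.store.RecordReader;

public class MyScanBatchCreator implements BatchCreator<MySubScan> {
  @Override
  public ScanBatch getBatch(ExecutorFragmentContext context, MySubScan config,
      List<RecordBatch> children) throws ExecutionSetupException {
    // A scan is a leaf of the operator tree, so it never has incoming batches.
    Preconditions.checkArgument(children.isEmpty());
    List<RecordReader> readers = new LinkedList<>();
    for (MyScanSpec spec : config.getScanSpecs()) { // hypothetical accessor
      readers.add(new MyRecordReader(context, spec)); // hypothetical reader
    }
    // One ScanBatch drains all readers in sequence within the fragment.
    return new ScanBatch(config, context, readers);
  }
}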
Use of org.apache.drill.exec.physical.impl.ScanBatch in project drill by axbaretto.
The class EasyFormatPlugin, method getReaderBatch.
@SuppressWarnings("resource")
CloseableRecordBatch getReaderBatch(FragmentContext context, EasySubScan scan) throws ExecutionSetupException {
final ColumnExplorer columnExplorer = new ColumnExplorer(context.getOptions(), scan.getColumns());
if (!columnExplorer.isStarQuery()) {
scan = new EasySubScan(scan.getUserName(), scan.getWorkUnits(), scan.getFormatPlugin(), columnExplorer.getTableColumns(), scan.getSelectionRoot());
scan.setOperatorId(scan.getOperatorId());
}
OperatorContext oContext = context.newOperatorContext(scan);
final DrillFileSystem dfs;
try {
dfs = oContext.newFileSystem(fsConf);
} catch (IOException e) {
throw new ExecutionSetupException(String.format("Failed to create FileSystem: %s", e.getMessage()), e);
}
List<RecordReader> readers = new LinkedList<>();
List<Map<String, String>> implicitColumns = Lists.newArrayList();
Map<String, String> mapWithMaxColumns = Maps.newLinkedHashMap();
for (FileWork work : scan.getWorkUnits()) {
RecordReader recordReader = getRecordReader(context, dfs, work, scan.getColumns(), scan.getUserName());
readers.add(recordReader);
Map<String, String> implicitValues = columnExplorer.populateImplicitColumns(work, scan.getSelectionRoot());
implicitColumns.add(implicitValues);
if (implicitValues.size() > mapWithMaxColumns.size()) {
mapWithMaxColumns = implicitValues;
}
}
// all readers should have the same number of implicit columns, add missing ones with value null
Map<String, String> diff = Maps.transformValues(mapWithMaxColumns, Functions.constant((String) null));
for (Map<String, String> map : implicitColumns) {
map.putAll(Maps.difference(map, diff).entriesOnlyOnRight());
}
return new ScanBatch(context, oContext, readers, implicitColumns);
}
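The last loop is the interesting part: because partition depth can differ per file, each reader's implicit-column map is padded to the widest key set with null values. The Guava idiom works as in this self-contained sketch (the file names and values are made up for the demo):

import java.util.List;
import java.util.Map;

import com.google.common.base.Functions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

public class ImplicitColumnPadding {
  public static void main(String[] args) {
    List<Map<String, String>> implicitColumns = Lists.newArrayList();

    Map<String, String> a = Maps.newLinkedHashMap();
    a.put("filename", "a.csv");
    implicitColumns.add(a);

    Map<String, String> b = Maps.newLinkedHashMap();
    b.put("filename", "b.csv");
    b.put("dir0", "2018");
    implicitColumns.add(b);

    // Track the widest map, exactly as the loop in getReaderBatch does.
    Map<String, String> mapWithMaxColumns = Maps.newLinkedHashMap();
    for (Map<String, String> m : implicitColumns) {
      if (m.size() > mapWithMaxColumns.size()) {
        mapWithMaxColumns = m;
      }
    }

    // Build a template with the same keys but all-null values, then copy into
    // each map only the entries it is missing.
    Map<String, String> diff = Maps.transformValues(mapWithMaxColumns, Functions.constant((String) null));
    for (Map<String, String> map : implicitColumns) {
      map.putAll(Maps.difference(map, diff).entriesOnlyOnRight());
    }

    // Prints [{filename=a.csv, dir0=null}, {filename=b.csv, dir0=2018}]
    System.out.println(implicitColumns);
  }
}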
Use of org.apache.drill.exec.physical.impl.ScanBatch in project drill by axbaretto.
The class MapRDBScanBatchCreator, method getBatch.
@Override
public ScanBatch getBatch(ExecutorFragmentContext context, MapRDBSubScan subScan, List<RecordBatch> children) throws ExecutionSetupException {
  Preconditions.checkArgument(children.isEmpty());
  List<RecordReader> readers = new LinkedList<>();
  for (MapRDBSubScanSpec scanSpec : subScan.getRegionScanSpecList()) {
    try {
      if (BinaryTableGroupScan.TABLE_BINARY.equals(subScan.getTableType())) {
        readers.add(new HBaseRecordReader(subScan.getFormatPlugin().getConnection(), getHBaseSubScanSpec(scanSpec), subScan.getColumns()));
      } else {
        readers.add(new MaprDBJsonRecordReader(scanSpec, subScan.getFormatPluginConfig(), subScan.getColumns(), context));
      }
    } catch (Exception e) {
      throw new ExecutionSetupException(e);
    }
  }
  return new ScanBatch(subScan, context, readers);
}
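This creator picks the reader implementation per region from the table type and wraps any construction failure in ExecutionSetupException, so the fragment fails during setup rather than mid-execution. The shape of that dispatch, reduced to plain Java with hypothetical stand-in types:

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class ReaderDispatchSketch {
  static class SetupException extends Exception {
    SetupException(Throwable cause) { super(cause); }
  }

  interface Reader {}
  static class BinaryReader implements Reader {} // stands in for HBaseRecordReader
  static class JsonReader implements Reader {}   // stands in for MaprDBJsonRecordReader

  static List<Reader> buildReaders(String tableType, List<String> regions) throws SetupException {
    List<Reader> readers = new LinkedList<>();
    for (String region : regions) {
      try {
        // One reader per region; the implementation is chosen by table type.
        readers.add("binary".equals(tableType) ? new BinaryReader() : new JsonReader());
      } catch (Exception e) {
        // Surface any per-reader failure as a setup error.
        throw new SetupException(e);
      }
    }
    return readers;
  }

  public static void main(String[] args) throws SetupException {
    System.out.println(buildReaders("binary", Arrays.asList("r1", "r2")).size()); // prints 2
  }
}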
Use of org.apache.drill.exec.physical.impl.ScanBatch in project drill by apache.
The class LegacyOperatorTestBuilder, method go.
@SuppressWarnings("unchecked")
public void go() {
  BatchCreator<PhysicalOperator> opCreator;
  RecordBatch testOperator;
  try {
    physicalOpUnitTestBase.mockOpContext(popConfig, initReservation, maxAllocation);
    opCreator = (BatchCreator<PhysicalOperator>) physicalOpUnitTestBase.opCreatorReg.getOperatorCreator(popConfig.getClass());
    List<RecordBatch> incomingStreams = Lists.newArrayList();
    if (inputStreamsJSON != null) {
      for (List<String> batchesJson : inputStreamsJSON) {
        incomingStreams.add(new ScanBatch(popConfig, physicalOpUnitTestBase.fragContext, physicalOpUnitTestBase.getReaderListForJsonBatches(batchesJson, physicalOpUnitTestBase.fragContext)));
      }
    }
    testOperator = opCreator.getBatch(physicalOpUnitTestBase.fragContext, popConfig, incomingStreams);
    Map<String, List<Object>> actualSuperVectors = DrillTestWrapper.addToCombinedVectorResults(new PhysicalOpUnitTestBase.BatchIterator(testOperator), expectedBatchSize, expectedNumBatches, expectedTotalRows);
    // When checking total rows, don't compare actual results.
    if (expectedTotalRows != null) {
      return;
    }
    Map<String, List<Object>> expectedSuperVectors;
    if (expectNoRows) {
      expectedSuperVectors = new TreeMap<>();
      for (String column : baselineColumns) {
        expectedSuperVectors.put(column, new ArrayList<>());
      }
    } else {
      expectedSuperVectors = DrillTestWrapper.translateRecordListToHeapVectors(baselineRecords);
    }
    DrillTestWrapper.compareMergedVectors(expectedSuperVectors, actualSuperVectors);
  } catch (Exception e) {
    // Wrap any checked failure so the test fails loudly.
    throw new RuntimeException(e);
  }
}
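go() reads the builder's fields (popConfig, inputStreamsJSON, baselineColumns, baselineRecords), feeds each JSON input stream through a ScanBatch into the operator under test, and compares the merged result vectors against the baseline. A typical invocation from a PhysicalOpUnitTestBase subclass looks roughly like the sketch below; the fluent method names are assumptions mirroring those fields, so check the builder's actual API before copying.

// Hypothetical test sketch; projectConf is a Project physical-operator config
// built elsewhere in the test.
@Test
public void projectPassesSingleColumnThrough() {
  legacyOpTestBuilder()
      .physicalOperator(projectConf)                           // sets popConfig
      .inputDataStreamJson(Lists.newArrayList("[{\"x\": 5}]")) // sets inputStreamsJSON (one stream)
      .baselineColumns("x")                                    // sets baselineColumns
      .baselineValues(5L)                                      // sets baselineRecords
      .go();
}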
Use of org.apache.drill.exec.physical.impl.ScanBatch in project drill by apache.
The class SystemTableBatchCreator, method getBatch.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public ScanBatch getBatch(final ExecutorFragmentContext context, final SystemTableScan scan, final List<RecordBatch> children) throws ExecutionSetupException {
  final SystemTable table = scan.getTable();
  final Iterator<Object> iterator = table.getIterator(context, scan.getMaxRecordsToRead());
  final RecordReader reader = new PojoRecordReader(table.getPojoClass(), ImmutableList.copyOf(iterator), scan.getMaxRecordsToRead());
  return new ScanBatch(scan, context, Collections.singletonList(reader));
}
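PojoRecordReader carries the whole system-table path: it derives a schema from the POJO class's public fields and emits the pre-materialized list as records. A minimal sketch with a hypothetical row class, using the same constructor shape as above:

import com.google.common.collect.ImmutableList;
import org.apache.drill.exec.store.pojo.PojoRecordReader;

public class PojoReaderSketch {
  // Hypothetical system-table row; PojoRecordReader reads its public fields.
  public static class Row {
    public String name;
    public long value;
    public Row(String name, long value) {
      this.name = name;
      this.value = value;
    }
  }

  public static PojoRecordReader<Row> reader(int maxRecordsToRead) {
    ImmutableList<Row> rows = ImmutableList.of(new Row("a", 1L), new Row("b", 2L));
    return new PojoRecordReader<>(Row.class, rows, maxRecordsToRead);
  }
}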