Use of org.apache.drill.exec.record.HyperVectorWrapper in project drill by apache.
The addToHyperVectorMap method of the DrillTestWrapper class.
private Map<String, HyperVectorValueIterator> addToHyperVectorMap(final List<QueryDataBatch> records,
    final RecordBatchLoader loader) throws SchemaChangeException, UnsupportedEncodingException {
  // TODO - this does not handle schema changes
  Map<String, HyperVectorValueIterator> combinedVectors = new TreeMap<>();
  long totalRecords = 0;
  QueryDataBatch batch;
  int size = records.size();
  for (int i = 0; i < size; i++) {
    batch = records.get(i);
    loader.load(batch.getHeader().getDef(), batch.getData());
    logger.debug("reading batch with " + loader.getRecordCount() + " rows, total read so far " + totalRecords);
    totalRecords += loader.getRecordCount();
    for (VectorWrapper<?> w : loader) {
      String field = SchemaPath.getSimplePath(w.getField().getPath()).toExpr();
      if (!combinedVectors.containsKey(field)) {
        // First time this column is seen: seed a HyperVectorWrapper with this
        // batch's vector so later batches can be appended to it.
        MaterializedField mf = w.getField();
        ValueVector[] vvList = (ValueVector[]) Array.newInstance(mf.getValueClass(), 1);
        vvList[0] = w.getValueVector();
        combinedVectors.put(field, new HyperVectorValueIterator(mf, new HyperVectorWrapper<>(mf, vvList)));
      } else {
        // Column already seen: append this batch's vector to the existing hyper vector.
        combinedVectors.get(field).getHyperVector().addVector(w.getValueVector());
      }
    }
  }
  // With all batches loaded, let each iterator compute its total value count.
  for (HyperVectorValueIterator hvi : combinedVectors.values()) {
    hvi.determineTotalSize();
  }
  return combinedVectors;
}
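For reference, below is a minimal sketch (not taken from the Drill source) of the accumulation pattern the method relies on: the first batch's vector for a column seeds a HyperVectorWrapper, and every later batch's vector for the same column is appended with addVector(). The class name HyperVectorAccumulationSketch and the accumulate helper are hypothetical; only the HyperVectorWrapper constructor, addVector(), and MaterializedField.getValueClass() calls mirror what the method above uses.

import java.lang.reflect.Array;

import org.apache.drill.exec.record.HyperVectorWrapper;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.vector.ValueVector;

// Hypothetical helper, for illustration only.
class HyperVectorAccumulationSketch {
  static HyperVectorWrapper<ValueVector> accumulate(MaterializedField field,
      ValueVector firstBatchVector, ValueVector... laterBatchVectors) {
    // Seed the hyper vector with a one-element array of the column's concrete
    // vector type, mirroring the Array.newInstance(...) call in addToHyperVectorMap.
    ValueVector[] vectors = (ValueVector[]) Array.newInstance(field.getValueClass(), 1);
    vectors[0] = firstBatchVector;
    HyperVectorWrapper<ValueVector> hyper = new HyperVectorWrapper<>(field, vectors);
    // Each subsequent batch contributes one more vector for the same column.
    for (ValueVector v : laterBatchVectors) {
      hyper.addVector(v);
    }
    return hyper;
  }
}

This reflects what a hyper vector is in Drill: a stack of ValueVectors, one per record batch, so a column's values can be addressed across batches without copying them into a single contiguous vector.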