Example usage of org.apache.drill.exec.HyperVectorValueIterator in the Apache Drill project:
the compareHyperVectors method of the DrillTestWrapper class.
/**
 * Compares two column-name-to-value-iterator maps, asserting that every expected
 * column exists in the actual results, that record counts match, and that each
 * value pair compares equal via {@code compareValuesErrorOnMismatch}.
 *
 * @param expectedRecords baseline columns, keyed by field name
 * @param actualRecords   columns produced by the query under test, keyed by field name
 * @throws Exception if a value mismatch is detected or an assertion fails
 */
private void compareHyperVectors(Map<String, HyperVectorValueIterator> expectedRecords, Map<String, HyperVectorValueIterator> actualRecords) throws Exception {
  try {
    // Iterate the entry set to avoid repeated map lookups per column.
    for (Map.Entry<String, HyperVectorValueIterator> entry : expectedRecords.entrySet()) {
      String field = entry.getKey();
      HyperVectorValueIterator expectedValues = entry.getValue();
      HyperVectorValueIterator actualValues = actualRecords.get(field);
      assertNotNull("Expected column '" + field + "' not found.", actualValues);
      assertEquals("Record count mismatch for column '" + field + "'.",
          expectedValues.getTotalRecords(), actualValues.getTotalRecords());
      int i = 0;
      while (expectedValues.hasNext()) {
        compareValuesErrorOnMismatch(expectedValues.next(), actualValues.next(), i, field);
        i++;
      }
    }
  } finally {
    // Release the underlying value vectors even when a comparison fails;
    // otherwise the allocator reports a memory leak on top of the test failure.
    cleanupHyperValueIterators(expectedRecords.values());
    cleanupHyperValueIterators(actualRecords.values());
  }
}
Example usage of org.apache.drill.exec.HyperVectorValueIterator in the Apache Drill project:
the compareResultsHyperVector method of the DrillTestWrapper class.
/**
 * Runs the test query and the baseline query, accumulates each result set into
 * hyper-vector maps, and compares them column by column.
 *
 * @throws Exception if the queries fail or the results do not match
 */
public void compareResultsHyperVector() throws Exception {
  RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
  test(testOptionSettingQueries);
  List<QueryDataBatch> results = testRunAndReturn(queryType, query);
  checkNumBatches(results);
  // To avoid extra work for test writers, types can optionally be inferred from the test query
  addTypeInfoIfMissing(results.get(0), testBuilder);
  Map<String, HyperVectorValueIterator> actualSuperVectors = addToHyperVectorMap(results, loader);
  test(baselineOptionSettingQueries);
  List<QueryDataBatch> expected = testRunAndReturn(baselineQueryType, testBuilder.getValidationQuery());
  try {
    Map<String, HyperVectorValueIterator> expectedSuperVectors = addToHyperVectorMap(expected, loader);
    compareHyperVectors(expectedSuperVectors, actualSuperVectors);
  } finally {
    // Always release the query batches, even when the comparison throws,
    // so a failing test does not also leak batch buffers.
    cleanupBatches(results, expected);
  }
}
Example usage of org.apache.drill.exec.HyperVectorValueIterator in the Apache Drill project:
the addToHyperVectorMap method of the DrillTestWrapper class.
/**
 * Loads each query batch into the given loader and folds every column into a
 * per-field {@link HyperVectorValueIterator}, combining the same column across
 * batches into one hyper vector. Returns a sorted map keyed by field expression.
 *
 * @param records query result batches to combine
 * @param loader  batch loader used to materialize each batch's vectors
 * @return map from field expression to an iterator over all of that field's values
 * @throws SchemaChangeException        if a batch cannot be loaded
 * @throws UnsupportedEncodingException if field-path decoding fails
 */
private Map<String, HyperVectorValueIterator> addToHyperVectorMap(final List<QueryDataBatch> records, final RecordBatchLoader loader) throws SchemaChangeException, UnsupportedEncodingException {
  // TODO - this does not handle schema changes
  Map<String, HyperVectorValueIterator> combinedVectors = new TreeMap<>();
  long totalRecords = 0;
  for (QueryDataBatch batch : records) {
    loader.load(batch.getHeader().getDef(), batch.getData());
    // Parameterized logging avoids building the message string when debug is disabled.
    logger.debug("reading batch with {} rows, total read so far {}", loader.getRecordCount(), totalRecords);
    totalRecords += loader.getRecordCount();
    for (VectorWrapper<?> w : loader) {
      String field = SchemaPath.getSimplePath(w.getField().getPath()).toExpr();
      // Single lookup instead of containsKey + get.
      HyperVectorValueIterator existing = combinedVectors.get(field);
      if (existing == null) {
        MaterializedField mf = w.getField();
        // First occurrence of this column: seed a one-element hyper vector.
        ValueVector[] vvList = (ValueVector[]) Array.newInstance(mf.getValueClass(), 1);
        vvList[0] = w.getValueVector();
        combinedVectors.put(field, new HyperVectorValueIterator(mf, new HyperVectorWrapper<>(mf, vvList)));
      } else {
        // Subsequent batches: append this batch's vector for the column.
        existing.getHyperVector().addVector(w.getValueVector());
      }
    }
  }
  for (HyperVectorValueIterator hvi : combinedVectors.values()) {
    hvi.determineTotalSize();
  }
  return combinedVectors;
}
Aggregations