Use of org.apache.drill.exec.rpc.user.QueryDataBatch in project drill by axbaretto —
class ParquetRecordReaderTest, method testNullableAgg:
@Test
public void testNullableAgg() throws Exception {
  // sum(a) over 100k rows (nulls first) must still produce the exact total: sum(0..99999) = 4,999,950,000.
  final List<QueryDataBatch> result = testSqlWithResults("select sum(a) as total_sum from dfs.`tmp/parquet_with_nulls_should_sum_100000_nulls_first.parquet`");
  assertEquals("Only expected one batch with data, and then the empty finishing batch.", 2, result.size());
  final RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
  final QueryDataBatch b = result.get(0);
  try {
    loader.load(b.getHeader().getDef(), b.getData());
    // Wildcard instead of raw VectorWrapper; we only need the untyped accessor here.
    final VectorWrapper<?> vw = loader.getValueAccessorById(NullableBigIntVector.class, loader.getValueVectorId(SchemaPath.getCompoundPath("total_sum")).getFieldIds());
    // Uppercase 'L' suffix — the original lowercase 'l' is easily misread as the digit 1.
    assertEquals(4999950000L, vw.getValueVector().getAccessor().getObject(0));
  } finally {
    // Release buffers even when an assertion above fails, so the allocator does not report a leak.
    b.release();
    loader.clear();
  }
}
Use of org.apache.drill.exec.rpc.user.QueryDataBatch in project drill by axbaretto —
class TestJsonReaderWithSparseFiles, method query:
// Runs the given SQL, loads the first returned batch into a RecordBatchLoader, and
// hands that loader to the supplied test body for assertions. All batches are
// released (and the loader cleared) regardless of test outcome.
protected void query(final String query, final Function<RecordBatchLoader> testBody) throws Exception {
final List<QueryDataBatch> batches = testSqlWithResults(query);
final RecordBatchLoader loader = new RecordBatchLoader(client.getAllocator());
try {
// NOTE(review): an earlier comment here claimed batch 0 is an empty fast-schema batch
// and that the *second* batch should be loaded, but the code loads index 0 — confirm
// which is intended before relying on this for data-bearing assertions.
final QueryDataBatch batch = batches.get(0);
loader.load(batch.getHeader().getDef(), batch.getData());
testBody.apply(loader);
} finally {
// Release every batch, not just the one that was loaded, then free the loader's vectors.
for (final QueryDataBatch batch : batches) {
batch.release();
}
loader.clear();
}
}
Use of org.apache.drill.exec.rpc.user.QueryDataBatch in project drill by axbaretto —
class TestParquetPhysicalPlan, method testParseParquetPhysicalPlan:
@Test
@Ignore
public void testParseParquetPhysicalPlan() throws Exception {
  // Spins up a single local Drillbit, submits the physical plan in `fileName`,
  // and dumps every value of every vector in the results to stdout.
  RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
  DrillConfig config = DrillConfig.create();
  try (Drillbit bit1 = new Drillbit(config, serviceSet);
      DrillClient client = new DrillClient(config, serviceSet.getCoordinator())) {
    bit1.run();
    client.connect();
    List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL, Resources.toString(Resources.getResource(fileName), Charsets.UTF_8));
    RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
    int count = 0;
    for (QueryDataBatch b : results) {
      System.out.println(String.format("Got %d results", b.getHeader().getRowCount()));
      count += b.getHeader().getRowCount();
      loader.load(b.getHeader().getDef(), b.getData());
      for (VectorWrapper<?> vw : loader) {
        System.out.print(vw.getValueVector().getField().getName() + ": ");
        ValueVector vv = vw.getValueVector();
        for (int i = 0; i < vv.getAccessor().getValueCount(); i++) {
          Object o = vv.getAccessor().getObject(i);
          if (o instanceof byte[]) {
            // Decode with an explicit charset: the no-arg String(byte[]) constructor
            // uses the platform default, which varies between machines.
            System.out.print(" [" + new String((byte[]) o, Charsets.UTF_8) + "]");
          } else {
            // Reuse `o` instead of calling getObject(i) a second time.
            System.out.print(" [" + o + "]");
          }
        }
        System.out.println();
      }
      loader.clear();
      b.release();
    }
    // No explicit client.close(): the try-with-resources block closes it.
    System.out.println(String.format("Got %d total results", count));
  }
}
Use of org.apache.drill.exec.rpc.user.QueryDataBatch in project drill by axbaretto —
class TextRecordReaderTest, method testFullExecution:
@Test
public void testFullExecution() throws Exception {
  // Runs the text-reader physical plan against regions.csv on a local Drillbit
  // and checks that exactly 5 records come back.
  RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
  try (Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
      DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
    bit1.run();
    client.connect();
    // The plan template references its data file via the #{DATA_FILE} placeholder.
    List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL, Files.toString(DrillFileUtils.getResourceAsFile("/store/text/test.json"), Charsets.UTF_8).replace("#{DATA_FILE}", DrillFileUtils.getResourceAsFile("/store/text/data/regions.csv").toURI().toString()));
    int count = 0;
    RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
    for (QueryDataBatch b : results) {
      // Adding a zero row count is a no-op, so the original `!= 0` guard was redundant.
      count += b.getHeader().getRowCount();
      loader.load(b.getHeader().getDef(), b.getData());
      VectorUtil.showVectorAccessibleContent(loader);
      loader.clear();
      b.release();
    }
    assertEquals(5, count);
  }
}
Use of org.apache.drill.exec.rpc.user.QueryDataBatch in project drill by axbaretto —
class DrillTestWrapper, method compareMergedOnHeapVectors:
// Runs the test query, merges all of its batches into per-column on-heap value lists,
// builds the expected ("baseline") result the same way — from a baseline query, from
// baselineRecords, or skipped entirely when only ordering is checked — and compares
// the two merged representations column by column.
public void compareMergedOnHeapVectors() throws Exception {
RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
// Start with empty lists so the finally-block cleanup is safe even if an early step throws.
List<QueryDataBatch> actual = Collections.emptyList();
List<QueryDataBatch> expected = Collections.emptyList();
Map<String, List<Object>> actualSuperVectors;
Map<String, List<Object>> expectedSuperVectors = null;
try {
test(testOptionSettingQueries);
actual = testRunAndReturn(queryType, query);
checkNumBatches(actual);
// To avoid extra work for test writers, types can optionally be inferred from the test query
addTypeInfoIfMissing(actual.get(0), testBuilder);
// Collapse all actual batches into one column-name -> values map for comparison.
BatchIterator batchIter = new BatchIterator(actual, loader);
actualSuperVectors = addToCombinedVectorResults(batchIter, null, null);
batchIter.close();
// the cases where the baseline is stored in a file.
if (baselineRecords == null) {
if (baselineQueryType == null && baselineColumns != null) {
// No baseline data at all: only verify the actual results are in ascending order.
checkAscendingOrdering(actualSuperVectors);
return;
} else {
// Baseline comes from a validation query; merge its batches the same way.
test(baselineOptionSettingQueries);
expected = testRunAndReturn(baselineQueryType, testBuilder.getValidationQuery());
BatchIterator exBatchIter = new BatchIterator(expected, loader);
expectedSuperVectors = addToCombinedVectorResults(exBatchIter, null, null);
exBatchIter.close();
}
} else {
// data is built in the TestBuilder in a row major format as it is provided by the user
// translate it here to vectorized, the representation expected by the ordered comparison
expectedSuperVectors = translateRecordListToHeapVectors(baselineRecords);
}
compareMergedVectors(expectedSuperVectors, actualSuperVectors);
} catch (Exception e) {
// Re-wrap with the failing query for context; original exception kept as the cause.
throw new Exception(e.getMessage() + "\nFor query: " + query, e);
} finally {
// Releases both batch lists even on the early `return` / exception paths.
cleanupBatches(expected, actual);
}
}
Aggregations