Use of org.apache.drill.exec.record.RecordBatchLoader in project drill by apache.
From the class ParquetRecordReaderTest, method testNullableAgg.
@Test
public void testNullableAgg() throws Exception {
  final List<QueryDataBatch> result = testSqlWithResults("select sum(a) as total_sum from dfs.`/tmp/parquet_with_nulls_should_sum_100000_nulls_first.parquet`");
  assertEquals("Only expected one batch with data, and then the empty finishing batch.", 2, result.size());
  final RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
  final QueryDataBatch b = result.get(0);
  loader.load(b.getHeader().getDef(), b.getData());
  final VectorWrapper<?> vw = loader.getValueAccessorById(NullableBigIntVector.class, loader.getValueVectorId(SchemaPath.getCompoundPath("total_sum")).getFieldIds());
  assertEquals(4999950000L, vw.getValueVector().getAccessor().getObject(0));
  b.release();
  loader.clear();
}
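Note: the expected total, 4999950000, equals the sum of the integers 0 through 99999 (100000 × 99999 / 2), so the file presumably contains 100,000 non-null values 0..99999 alongside the nulls that the SUM aggregate must skip.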
Use of org.apache.drill.exec.record.RecordBatchLoader in project drill by apache.
From the class TestHiveUDFs, method testGenericUDF.
@Test
public void testGenericUDF() throws Throwable {
  int numRecords = 0;
  String planString = Resources.toString(Resources.getResource("functions/hive/GenericUDF.json"), Charsets.UTF_8);
  List<QueryDataBatch> results = testPhysicalWithResults(planString);
  RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
  for (QueryDataBatch result : results) {
    batchLoader.load(result.getHeader().getDef(), result.getData());
    if (batchLoader.getRecordCount() <= 0) {
      result.release();
      batchLoader.clear();
      continue;
    }
    // Output columns and types
    // 1. str1 : VarChar
    // 2. upperStr1 : NullableVarChar
    // 3. concat : NullableVarChar
    // 4. flt1 : Float4
    // 5. format_number : NullableVarChar
    // 6. nullableStr1 : NullableVarChar
    // 7. upperNullableStr1 : NullableVarChar
    VarCharVector str1V = (VarCharVector) batchLoader.getValueAccessorById(VarCharVector.class, 0).getValueVector();
    NullableVarCharVector upperStr1V = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 1).getValueVector();
    NullableVarCharVector concatV = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 2).getValueVector();
    Float4Vector flt1V = (Float4Vector) batchLoader.getValueAccessorById(Float4Vector.class, 3).getValueVector();
    NullableVarCharVector format_numberV = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 4).getValueVector();
    NullableVarCharVector nullableStr1V = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 5).getValueVector();
    NullableVarCharVector upperNullableStr1V = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 6).getValueVector();
    for (int i = 0; i < batchLoader.getRecordCount(); i++) {
      String in = new String(str1V.getAccessor().get(i), Charsets.UTF_8);
      String upper = new String(upperStr1V.getAccessor().get(i), Charsets.UTF_8);
      assertTrue(in.toUpperCase().equals(upper));
      String concat = new String(concatV.getAccessor().get(i), Charsets.UTF_8);
      assertTrue(concat.equals(in + "-" + in));
      float flt1 = flt1V.getAccessor().get(i);
      String format_number = new String(format_numberV.getAccessor().get(i), Charsets.UTF_8);
      String nullableStr1 = null;
      if (!nullableStr1V.getAccessor().isNull(i)) {
        nullableStr1 = new String(nullableStr1V.getAccessor().get(i), Charsets.UTF_8);
      }
      String upperNullableStr1 = null;
      if (!upperNullableStr1V.getAccessor().isNull(i)) {
        upperNullableStr1 = new String(upperNullableStr1V.getAccessor().get(i), Charsets.UTF_8);
      }
      assertEquals(nullableStr1 != null, upperNullableStr1 != null);
      if (nullableStr1 != null) {
        assertEquals(nullableStr1.toUpperCase(), upperNullableStr1);
      }
      System.out.println(in + ", " + upper + ", " + concat + ", " + flt1 + ", " + format_number + ", " + nullableStr1 + ", " + upperNullableStr1);
      numRecords++;
    }
    result.release();
    batchLoader.clear();
  }
  System.out.println("Processed " + numRecords + " records");
}
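The null-safe VarChar read appears several times in the loop above; a small helper like the following (hypothetical, not part of the Drill test) captures the pattern:

private static String readNullableVarChar(NullableVarCharVector vector, int index) {
  // Return null when the slot is null; otherwise decode the UTF-8 bytes.
  return vector.getAccessor().isNull(index)
      ? null
      : new String(vector.getAccessor().get(index), Charsets.UTF_8);
}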
Use of org.apache.drill.exec.record.RecordBatchLoader in project drill by apache.
From the class TestHashJoin, method simpleEqualityJoin.
@Test
public void simpleEqualityJoin() throws Throwable {
  // Function checks hash join with single equality condition
  try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
       Drillbit bit = new Drillbit(CONFIG, serviceSet);
       DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
    // run query.
    bit.run();
    client.connect();
    List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
        Files.toString(FileUtils.getResourceAsFile("/join/hash_join.json"), Charsets.UTF_8)
            .replace("#{TEST_FILE_1}", FileUtils.getResourceAsFile("/build_side_input.json").toURI().toString())
            .replace("#{TEST_FILE_2}", FileUtils.getResourceAsFile("/probe_side_input.json").toURI().toString()));
    RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
    QueryDataBatch batch = results.get(1);
    assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
    Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
    // Just test the join key
    long[] colA = { 1, 1, 2, 2, 1, 1 };
    // Check the values of the join key column
    ValueVector.Accessor intAccessor1 = itr.next().getValueVector().getAccessor();
    for (int i = 0; i < intAccessor1.getValueCount(); i++) {
      assertEquals(intAccessor1.getObject(i), colA[i]);
    }
    assertEquals(6, intAccessor1.getValueCount());
    batchLoader.clear();
    for (QueryDataBatch result : results) {
      result.release();
    }
  }
}
Use of org.apache.drill.exec.record.RecordBatchLoader in project drill by apache.
From the class TestHashJoin, method multipleConditionJoin.
@Test
public void multipleConditionJoin(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Throwable {
  // Function tests hash join with multiple join conditions
  try (final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
       final Drillbit bit = new Drillbit(CONFIG, serviceSet);
       final DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
    // run query.
    bit.run();
    client.connect();
    final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
        Files.toString(FileUtils.getResourceAsFile("/join/hj_multi_condition_join.json"), Charsets.UTF_8)
            .replace("#{TEST_FILE_1}", FileUtils.getResourceAsFile("/build_side_input.json").toURI().toString())
            .replace("#{TEST_FILE_2}", FileUtils.getResourceAsFile("/probe_side_input.json").toURI().toString()));
    final RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
    final QueryDataBatch batch = results.get(1);
    assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
    final Iterator<VectorWrapper<?>> itr = batchLoader.iterator();
    // Just test the join keys
    final long[] colA = { 1, 2, 1 };
    final long[] colC = { 100, 200, 500 };
    // Check the values of the two join key columns
    final ValueVector.Accessor intAccessor1 = itr.next().getValueVector().getAccessor();
    final ValueVector.Accessor intAccessor2 = itr.next().getValueVector().getAccessor();
    for (int i = 0; i < intAccessor1.getValueCount(); i++) {
      assertEquals(intAccessor1.getObject(i), colA[i]);
      assertEquals(intAccessor2.getObject(i), colC[i]);
    }
    assertEquals(3, intAccessor1.getValueCount());
    batchLoader.clear();
    for (final QueryDataBatch result : results) {
      result.release();
    }
  }
}
Use of org.apache.drill.exec.record.RecordBatchLoader in project drill by apache.
From the class TestMergingReceiver, method handleEmptyBatch.
@Test
public void handleEmptyBatch() throws Exception {
  @SuppressWarnings("resource")
  final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
  try (final Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
       final Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
       final DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
    bit1.run();
    bit2.run();
    client.connect();
    final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
        Files.toString(FileUtils.getResourceAsFile("/mergerecv/empty_batch.json"), Charsets.UTF_8));
    int count = 0;
    final RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
    // count the rows across all returned batches
    for (final QueryDataBatch b : results) {
      final QueryData queryData = b.getHeader();
      // loaded but not used, for testing
      batchLoader.load(queryData.getDef(), b.getData());
      count += queryData.getRowCount();
      b.release();
      batchLoader.clear();
    }
    assertEquals(100000, count);
  }
}
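Taken together, these examples share the same RecordBatchLoader lifecycle: load a batch's definition and data, read the value vectors, then release the batch and clear the loader. A minimal sketch, assuming a connected DrillClient named client and a query string named query (both placeholders):

final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.SQL, query);
final RecordBatchLoader loader = new RecordBatchLoader(client.getAllocator());
for (QueryDataBatch batch : results) {
  // Deserialize the batch into value vectors owned by the loader.
  loader.load(batch.getHeader().getDef(), batch.getData());
  for (VectorWrapper<?> wrapper : loader) {
    ValueVector.Accessor accessor = wrapper.getValueVector().getAccessor();
    for (int i = 0; i < loader.getRecordCount(); i++) {
      Object value = accessor.getObject(i);
      // ... assert on value ...
    }
  }
  // Release the batch's buffers and the loader's vectors before the next iteration.
  batch.release();
  loader.clear();
}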