Use of org.apache.drill.exec.rpc.user.QueryDataBatch in project drill by axbaretto: class TestHiveUDFs, method testGenericUDF.
@Test
public void testGenericUDF() throws Throwable {
  int numRecords = 0;
  String planString = Resources.toString(Resources.getResource("functions/hive/GenericUDF.json"), Charsets.UTF_8);
  List<QueryDataBatch> results = testPhysicalWithResults(planString);
  RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
  for (QueryDataBatch result : results) {
    batchLoader.load(result.getHeader().getDef(), result.getData());
    // Skip empty batches, but still release their buffers.
    if (batchLoader.getRecordCount() <= 0) {
      result.release();
      batchLoader.clear();
      continue;
    }
    // Output columns and types
    //  1. str1 : VarChar
    //  2. upperStr1 : NullableVarChar
    //  3. concat : NullableVarChar
    //  4. flt1 : Float4
    //  5. format_number : NullableVarChar
    //  6. nullableStr1 : NullableVarChar
    //  7. upperNullableStr1 : NullableVarChar
    VarCharVector str1V = (VarCharVector) batchLoader.getValueAccessorById(VarCharVector.class, 0).getValueVector();
    NullableVarCharVector upperStr1V = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 1).getValueVector();
    NullableVarCharVector concatV = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 2).getValueVector();
    Float4Vector flt1V = (Float4Vector) batchLoader.getValueAccessorById(Float4Vector.class, 3).getValueVector();
    NullableVarCharVector format_numberV = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 4).getValueVector();
    NullableVarCharVector nullableStr1V = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 5).getValueVector();
    NullableVarCharVector upperNullableStr1V = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 6).getValueVector();
    for (int i = 0; i < batchLoader.getRecordCount(); i++) {
      String in = new String(str1V.getAccessor().get(i), Charsets.UTF_8);
      String upper = new String(upperStr1V.getAccessor().get(i), Charsets.UTF_8);
      assertEquals(in.toUpperCase(), upper);
      String concat = new String(concatV.getAccessor().get(i), Charsets.UTF_8);
      assertEquals(in + "-" + in, concat);
      float flt1 = flt1V.getAccessor().get(i);
      String format_number = new String(format_numberV.getAccessor().get(i), Charsets.UTF_8);
      String nullableStr1 = null;
      if (!nullableStr1V.getAccessor().isNull(i)) {
        nullableStr1 = new String(nullableStr1V.getAccessor().get(i), Charsets.UTF_8);
      }
      String upperNullableStr1 = null;
      if (!upperNullableStr1V.getAccessor().isNull(i)) {
        upperNullableStr1 = new String(upperNullableStr1V.getAccessor().get(i), Charsets.UTF_8);
      }
      // The nullable column and its upper-cased counterpart must be null (or non-null) together.
      assertEquals(nullableStr1 != null, upperNullableStr1 != null);
      if (nullableStr1 != null) {
        assertEquals(nullableStr1.toUpperCase(), upperNullableStr1);
      }
      System.out.println(in + ", " + upper + ", " + concat + ", " + flt1 + ", " + format_number + ", " + nullableStr1 + ", " + upperNullableStr1);
      numRecords++;
    }
    result.release();
    batchLoader.clear();
  }
  System.out.println("Processed " + numRecords + " records");
}
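The test above follows the usual pattern for consuming client results: load each QueryDataBatch into a RecordBatchLoader, read the value vectors, then release the batch and clear the loader. A minimal sketch of that pattern in isolation, assuming only a BufferAllocator such as the one returned by getAllocator(); countRecords is a hypothetical helper, not part of the test:

// Minimal sketch: drain a list of QueryDataBatch results and count the records.
// Every batch owns direct memory, so it must be released; the loader must be
// cleared before it is reused for the next batch.
private static int countRecords(List<QueryDataBatch> results, BufferAllocator allocator) throws Exception {
  int count = 0;
  RecordBatchLoader loader = new RecordBatchLoader(allocator);
  for (QueryDataBatch batch : results) {
    loader.load(batch.getHeader().getDef(), batch.getData());
    count += loader.getRecordCount();
    batch.release();
    loader.clear();
  }
  return count;
}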
Use of org.apache.drill.exec.rpc.user.QueryDataBatch in project drill by axbaretto: class BaseTestQuery, method testNoResult.
protected static void testNoResult(int iteration, String query, Object... args) throws Exception {
  query = String.format(query, args);
  logger.debug("Running query:\n--------------\n" + query);
  for (int i = 0; i < iteration; i++) {
    final List<QueryDataBatch> results = client.runQuery(QueryType.SQL, query);
    for (final QueryDataBatch queryDataBatch : results) {
      queryDataBatch.release();
    }
  }
}
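A hypothetical usage sketch for a statement whose returned rows are irrelevant, such as a session option change (the option and value here are only an example):

// Run the statement once; testNoResult releases every returned batch.
testNoResult(1, "ALTER SESSION SET `planner.slice_target` = %d", 10);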
Use of org.apache.drill.exec.rpc.user.QueryDataBatch in project drill by axbaretto: class DrillTestWrapper, method compareSchemaOnly.
protected void compareSchemaOnly() throws Exception {
  RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
  List<QueryDataBatch> actual = null;
  QueryDataBatch batch = null;
  try {
    test(testOptionSettingQueries);
    actual = testRunAndReturn(queryType, query);
    batch = actual.get(0);
    loader.load(batch.getHeader().getDef(), batch.getData());
    final BatchSchema schema = loader.getSchema();
    final List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema = testBuilder.getExpectedSchema();
    if (schema.getFieldCount() != expectedSchema.size()) {
      throw new Exception("Expected and actual numbers of columns do not match.");
    }
    for (int i = 0; i < schema.getFieldCount(); ++i) {
      final String actualSchemaPath = schema.getColumn(i).getName();
      final TypeProtos.MajorType actualMajorType = schema.getColumn(i).getType();
      final String expectedSchemaPath = expectedSchema.get(i).getLeft().getRootSegmentPath();
      final TypeProtos.MajorType expectedMajorType = expectedSchema.get(i).getValue();
      if (!actualSchemaPath.equals(expectedSchemaPath) || !actualMajorType.equals(expectedMajorType)) {
        throw new Exception(String.format("Schema path or type mismatch for column #%d:\n" + "Expected schema path: %s\nActual schema path: %s\nExpected type: %s\nActual type: %s", i, expectedSchemaPath, actualSchemaPath, Types.toString(expectedMajorType), Types.toString(actualMajorType)));
      }
    }
  } finally {
    if (actual != null) {
      for (QueryDataBatch b : actual) {
        b.release();
      }
    }
    loader.clear();
  }
}
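compareSchemaOnly() consumes the expected schema as a list of (SchemaPath, MajorType) pairs. A hedged sketch of building such a list by hand; the column names are made up, and the TestBuilder call that would accept the list is deliberately not shown:

// Illustrative expected-schema list in the Pair<SchemaPath, MajorType> shape checked above.
List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema = new ArrayList<>();
expectedSchema.add(Pair.of(SchemaPath.getSimplePath("employee_id"),
    Types.optional(TypeProtos.MinorType.BIGINT)));
expectedSchema.add(Pair.of(SchemaPath.getSimplePath("full_name"),
    Types.optional(TypeProtos.MinorType.VARCHAR)));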
Use of org.apache.drill.exec.rpc.user.QueryDataBatch in project drill by axbaretto: class DrillTestWrapper, method compareResultsHyperVector.
public void compareResultsHyperVector() throws Exception {
  RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
  test(testOptionSettingQueries);
  List<QueryDataBatch> results = testRunAndReturn(queryType, query);
  checkNumBatches(results);
  // To avoid extra work for test writers, types can optionally be inferred from the test query
  addTypeInfoIfMissing(results.get(0), testBuilder);
  Map<String, HyperVectorValueIterator> actualSuperVectors = addToHyperVectorMap(results, loader);
  test(baselineOptionSettingQueries);
  List<QueryDataBatch> expected = testRunAndReturn(baselineQueryType, testBuilder.getValidationQuery());
  Map<String, HyperVectorValueIterator> expectedSuperVectors = addToHyperVectorMap(expected, loader);
  compareHyperVectors(expectedSuperVectors, actualSuperVectors);
  cleanupBatches(results, expected);
}
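Both the actual and the baseline result lists hold direct memory until they are released, which is what the final cleanupBatches(results, expected) call takes care of. A minimal sketch of what such a cleanup step has to do; releaseAll is a hypothetical name, not the real DrillTestWrapper helper:

// Release every batch in every supplied list so the allocator gets its memory back.
@SafeVarargs
private static void releaseAll(List<QueryDataBatch>... batchLists) {
  for (List<QueryDataBatch> batches : batchLists) {
    for (QueryDataBatch batch : batches) {
      batch.release();
    }
  }
}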
Use of org.apache.drill.exec.rpc.user.QueryDataBatch in project drill by axbaretto: class DrillTestWrapper, method compareMergedOnHeapVectors.
public void compareMergedOnHeapVectors() throws Exception {
  RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
  List<QueryDataBatch> actual = Collections.emptyList();
  List<QueryDataBatch> expected = Collections.emptyList();
  Map<String, List<Object>> actualSuperVectors;
  Map<String, List<Object>> expectedSuperVectors;
  try {
    test(testOptionSettingQueries);
    actual = testRunAndReturn(queryType, query);
    checkNumBatches(actual);
    // To avoid extra work for test writers, types can optionally be inferred from the test query
    addTypeInfoIfMissing(actual.get(0), testBuilder);
    BatchIterator batchIter = new BatchIterator(actual, loader);
    actualSuperVectors = addToCombinedVectorResults(batchIter);
    batchIter.close();
    // Run the baseline query when no in-memory baseline records were supplied;
    // this includes the cases where the baseline is stored in a file.
    if (baselineRecords == null) {
      test(baselineOptionSettingQueries);
      expected = testRunAndReturn(baselineQueryType, testBuilder.getValidationQuery());
      BatchIterator exBatchIter = new BatchIterator(expected, loader);
      expectedSuperVectors = addToCombinedVectorResults(exBatchIter);
      exBatchIter.close();
    } else {
      // Data is built in the TestBuilder in a row-major format as it is provided by the user.
      // Translate it here to the vectorized representation expected by the ordered comparison.
      expectedSuperVectors = translateRecordListToHeapVectors(baselineRecords);
    }
    compareMergedVectors(expectedSuperVectors, actualSuperVectors);
  } catch (Exception e) {
    throw new Exception(e.getMessage() + "\nFor query: " + query, e);
  } finally {
    cleanupBatches(expected, actual);
  }
}
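When baselineRecords is supplied directly, it arrives row-major (one Map per record) and must be pivoted into the column-major Map<String, List<Object>> layout that compareMergedVectors() expects. A hedged sketch of that translation; toColumnMajor is only a stand-in for the real translateRecordListToHeapVectors:

// Pivot row-major baseline records into per-column value lists.
private static Map<String, List<Object>> toColumnMajor(List<Map<String, Object>> rows) {
  Map<String, List<Object>> columns = new LinkedHashMap<>();
  for (Map<String, Object> row : rows) {
    for (Map.Entry<String, Object> cell : row.entrySet()) {
      columns.computeIfAbsent(cell.getKey(), k -> new ArrayList<>()).add(cell.getValue());
    }
  }
  return columns;
}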