Use of org.apache.drill.exec.vector.BigIntVector in project drill by apache.
The class TestSimpleLimit, method verifySum: executes a limit plan and verifies both the record count and the sum of the values in the returned BigIntVector.
private void verifySum(DrillbitContext bitContext, UserClientConnection connection, String testPlan, int expectedCount, long expectedSum) throws Throwable {
  final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
  final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/limit/" + testPlan), Charsets.UTF_8));
  final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
  final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
  final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
  int recordCount = 0;
  long sum = 0;
  while (exec.next()) {
    recordCount += exec.getRecordCount();
    final BigIntVector v = (BigIntVector) exec.iterator().next();
    for (int i = 0; i < v.getAccessor().getValueCount(); i++) {
      sum += v.getAccessor().get(i);
    }
  }
  assertEquals(expectedCount, recordCount);
  assertEquals(expectedSum, sum);
  if (context.getFailureCause() != null) {
    throw context.getFailureCause();
  }
  assertFalse(context.isFailed());
}
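
The accessor/mutator split shown in verifySum is the general pattern for Drill value vectors: mutators write, accessors read. The following fragment is a sketch in the same style (not taken from the Drill tests) showing both sides for a BigIntVector; field and allocator stand for a MaterializedField and BufferAllocator assumed to be in scope.

  // Sketch only: populate a BigIntVector through its Mutator, then read it
  // back through its Accessor. "field" and "allocator" are assumed to exist.
  final BigIntVector v = new BigIntVector(field, allocator);
  v.allocateNew(4);                      // reserve space for four values
  for (int i = 0; i < 4; i++) {
    v.getMutator().setSafe(i, i * 10L);  // setSafe grows the buffer if needed
  }
  v.getMutator().setValueCount(4);       // mark how many slots hold valid data
  long sum = 0;
  for (int i = 0; i < v.getAccessor().getValueCount(); i++) {
    sum += v.getAccessor().get(i);       // same read pattern as verifySum above
  }
  v.clear();                             // release the underlying buffers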
Use of org.apache.drill.exec.vector.BigIntVector in project drill by apache.
The class TestAgg, method twoKeyAgg: runs a two-key aggregation plan and checks each batch's keys, counts, and totals against expected arrays.
@Test
public void twoKeyAgg(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Throwable {
  SimpleRootExec exec = doTest(bitContext, connection, "/agg/twokey.json");
  while (exec.next()) {
    final IntVector key1 = exec.getValueVectorById(SchemaPath.getSimplePath("key1"), IntVector.class);
    final BigIntVector key2 = exec.getValueVectorById(SchemaPath.getSimplePath("key2"), BigIntVector.class);
    final BigIntVector cnt = exec.getValueVectorById(SchemaPath.getSimplePath("cnt"), BigIntVector.class);
    final NullableBigIntVector total = exec.getValueVectorById(SchemaPath.getSimplePath("total"), NullableBigIntVector.class);
    final Integer[] keyArr1 = { Integer.MIN_VALUE, Integer.MIN_VALUE, Integer.MIN_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE };
    final long[] keyArr2 = { 0, 1, 2, 0, 1, 2 };
    final long[] cntArr = { 34, 34, 34, 34, 34, 34 };
    final long[] totalArr = { 0, 34, 68, 0, 34, 68 };
    for (int i = 0; i < exec.getRecordCount(); i++) {
      assertEquals((Long) cntArr[i], cnt.getAccessor().getObject(i));
      assertEquals(keyArr1[i], key1.getAccessor().getObject(i));
      assertEquals((Long) keyArr2[i], key2.getAccessor().getObject(i));
      assertEquals((Long) totalArr[i], total.getAccessor().getObject(i));
    }
  }
  if (exec.getContext().getFailureCause() != null) {
    throw exec.getContext().getFailureCause();
  }
  assertFalse(exec.getContext().isFailed());
}
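
twoKeyAgg reads the nullable total column with getObject(i), which returns a boxed Long (or null for a null slot). When nulls are possible and boxing is unwanted, the nullable accessor's isNull check can be used instead; a minimal sketch, assuming total is a populated NullableBigIntVector as above:

  // Sketch only: get(i) on a nullable accessor is valid only when isNull(i)
  // is false; getObject(i) would return null for those slots instead.
  final NullableBigIntVector.Accessor a = total.getAccessor();
  long observed = 0;
  for (int i = 0; i < a.getValueCount(); i++) {
    if (a.isNull(i)) {
      continue; // skip null aggregate values
    }
    observed += a.get(i);
  }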
Use of org.apache.drill.exec.vector.BigIntVector in project drill by apache.
The class TestSimpleExternalSort, method validateResults: walks the result batches and asserts that the BigInt column "blue" is in non-increasing (descending) order.
private void validateResults(BufferAllocator allocator, List<QueryDataBatch> results) throws SchemaChangeException {
  long previousBigInt = Long.MAX_VALUE;
  int recordCount = 0;
  int batchCount = 0;
  for (QueryDataBatch b : results) {
    RecordBatchLoader loader = new RecordBatchLoader(allocator);
    if (b.getHeader().getRowCount() > 0) {
      batchCount++;
      loader.load(b.getHeader().getDef(), b.getData());
      @SuppressWarnings("resource")
      BigIntVector c1 = (BigIntVector) loader.getValueAccessorById(BigIntVector.class,
          loader.getValueVectorId(new SchemaPath("blue", ExpressionPosition.UNKNOWN)).getFieldIds()).getValueVector();
      BigIntVector.Accessor a1 = c1.getAccessor();
      for (int i = 0; i < a1.getValueCount(); i++) {
        recordCount++;
        assertTrue(String.format("%d > %d", previousBigInt, a1.get(i)), previousBigInt >= a1.get(i));
        previousBigInt = a1.get(i);
      }
    }
    loader.clear();
    b.release();
  }
  System.out.println(String.format("Sorted %,d records in %d batches.", recordCount, batchCount));
}
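
The loop in validateResults follows the standard client-side lifecycle for consuming query results: load each QueryDataBatch into a RecordBatchLoader, read the vectors, then clear the loader and release the batch so the direct-memory buffers are freed. Condensed to a sketch (allocator and results assumed in scope, per-batch work elided):

  // Sketch of the batch-consumption lifecycle used above.
  final RecordBatchLoader loader = new RecordBatchLoader(allocator);
  for (QueryDataBatch batch : results) {
    loader.load(batch.getHeader().getDef(), batch.getData()); // deserialize into vectors
    // ... read columns via loader.getValueAccessorById(...) ...
    loader.clear();  // drop the loader's references to the buffers
    batch.release(); // release the batch's own buffers
  }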
Use of org.apache.drill.exec.vector.BigIntVector in project drill by apache.
The class TestWriter, method simpleCsv: runs a CSV writer plan, checks the one-row writer summary (fragment id and records written), and verifies the files on disk.
@Test
public void simpleCsv() throws Exception {
  // Delete any existing CSV files under /tmp/csvtest before running the test.
  Path path = new Path("/tmp/csvtest");
  if (fs.exists(path)) {
    fs.delete(path, true);
  }
  String plan = Files.toString(FileUtils.getResourceAsFile("/writer/simple_csv_writer.json"), Charsets.UTF_8);
  List<QueryDataBatch> results = testPhysicalWithResults(plan);
  RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
  QueryDataBatch batch = results.get(0);
  assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
  VarCharVector fragmentIdV = (VarCharVector) batchLoader.getValueAccessorById(VarCharVector.class, 0).getValueVector();
  BigIntVector recordWrittenV = (BigIntVector) batchLoader.getValueAccessorById(BigIntVector.class, 1).getValueVector();
  // The writer summary should contain exactly one row.
  assertEquals(1, batchLoader.getRecordCount());
  assertEquals("0_0", fragmentIdV.getAccessor().getObject(0).toString());
  assertEquals(132000, recordWrittenV.getAccessor().get(0));
  // Verify the CSV files were actually written to disk; two output files are expected.
  assertTrue(fs.exists(path));
  FileStatus[] fileStatuses = fs.globStatus(new Path(path.toString(), "*.csv"));
  assertEquals(2, fileStatuses.length);
  for (QueryDataBatch b : results) {
    b.release();
  }
  batchLoader.clear();
}
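
simpleCsv extracts the fragment id with getObject(0).toString(), which goes through a Text holder. The VarCharVector accessor also exposes the raw bytes directly; a small sketch of both read paths, assuming fragmentIdV is populated as in the test:

  // Sketch only: two ways to read the same VarCharVector slot.
  final String asString = fragmentIdV.getAccessor().getObject(0).toString(); // via Text holder
  final byte[] asBytes = fragmentIdV.getAccessor().get(0);                   // raw UTF-8 bytes
  assertEquals(asString, new String(asBytes, StandardCharsets.UTF_8));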
Use of org.apache.drill.exec.vector.BigIntVector in project drill by apache.
The class TestWriter, method testCTASQueryHelper: runs a CTAS query, sums the per-batch record-written counts from a BigIntVector, and drops the table in a finally block.
private void testCTASQueryHelper(String tableName, String testQuery, int expectedOutputCount) throws Exception {
  try {
    List<QueryDataBatch> results = testSqlWithResults(testQuery);
    RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
    int recordsWritten = 0;
    for (QueryDataBatch batch : results) {
      batchLoader.load(batch.getHeader().getDef(), batch.getData());
      if (batchLoader.getRecordCount() <= 0) {
        // Release empty batches too, so their buffers are not leaked.
        batchLoader.clear();
        batch.release();
        continue;
      }
      BigIntVector recordWrittenV = (BigIntVector) batchLoader.getValueAccessorById(BigIntVector.class, 1).getValueVector();
      for (int i = 0; i < batchLoader.getRecordCount(); i++) {
        recordsWritten += recordWrittenV.getAccessor().get(i);
      }
      batchLoader.clear();
      batch.release();
    }
    assertEquals(expectedOutputCount, recordsWritten);
  } finally {
    try {
      Path path = new Path(getDfsTestTmpSchemaLocation(), tableName);
      if (fs.exists(path)) {
        fs.delete(path, true);
      }
    } catch (Exception e) {
      // Best-effort cleanup; log and ignore failures.
      logger.warn("Failed to delete the table [{}, {}] created as part of the test", getDfsTestTmpSchemaLocation(), tableName, e);
    }
  }
}
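
testCTASQueryHelper addresses the count vector by hard-coded column index 1. A possible variant, sketched here rather than taken from the test, resolves it by name with the same SchemaPath lookup validateResults uses; the column name "Number of records written" is an assumption about the writer's summary schema, not something this excerpt confirms.

  // Sketch only: look up the writer's count column by (assumed) name instead
  // of by position. TypedFieldId carries the resolved vector ids.
  final TypedFieldId countId =
      batchLoader.getValueVectorId(SchemaPath.getSimplePath("Number of records written"));
  final BigIntVector recordWrittenV = (BigIntVector) batchLoader
      .getValueAccessorById(BigIntVector.class, countId.getFieldIds())
      .getValueVector();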