Search in sources :

Example 6 with BigIntVector

use of org.apache.drill.exec.vector.BigIntVector in project drill by apache.

Source: class TestSimpleLimit, method verifySum.

/**
 * Executes the given limit test plan and verifies both the total record count and
 * the sum of the values in the first (BigInt) output vector.
 *
 * @param bitContext    drillbit context used to construct the fragment context
 * @param connection    user connection passed to the fragment context
 * @param testPlan      resource name of the physical plan file under /limit/
 * @param expectedCount expected total number of records across all batches
 * @param expectedSum   expected sum of all BigInt values produced
 * @throws Throwable rethrows the fragment's failure cause, if any
 */
private void verifySum(DrillbitContext bitContext, UserClientConnection connection, String testPlan, int expectedCount, long expectedSum) throws Throwable {
    final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/limit/" + testPlan), Charsets.UTF_8));
    final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
    final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
    final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
    int recordCount = 0;
    long sum = 0;
    while (exec.next()) {
        recordCount += exec.getRecordCount();
        final BigIntVector v = (BigIntVector) exec.iterator().next();
        // Fetch the accessor once per batch instead of on every loop-condition check.
        final BigIntVector.Accessor accessor = v.getAccessor();
        for (int i = 0; i < accessor.getValueCount(); i++) {
            sum += accessor.get(i);
        }
    }
    // Surface any execution failure BEFORE comparing results, so a failed fragment
    // reports its root cause instead of a misleading count/sum assertion failure.
    if (context.getFailureCause() != null) {
        throw context.getFailureCause();
    }
    assertFalse(context.isFailed());
    assertEquals(expectedCount, recordCount);
    assertEquals(expectedSum, sum);
}
Also used : SimpleRootExec(org.apache.drill.exec.physical.impl.SimpleRootExec) PhysicalPlan(org.apache.drill.exec.physical.PhysicalPlan) FragmentContext(org.apache.drill.exec.ops.FragmentContext) PhysicalPlanReader(org.apache.drill.exec.planner.PhysicalPlanReader) FragmentRoot(org.apache.drill.exec.physical.base.FragmentRoot) FunctionImplementationRegistry(org.apache.drill.exec.expr.fn.FunctionImplementationRegistry) BigIntVector(org.apache.drill.exec.vector.BigIntVector)

Example 7 with BigIntVector

use of org.apache.drill.exec.vector.BigIntVector in project drill by apache.

Source: class TestAgg, method twoKeyAgg.

/**
 * Verifies a two-key aggregation plan (/agg/twokey.json): for every output batch,
 * checks the grouping keys (key1, key2), per-group counts (cnt) and per-group
 * sums (total) against the expected values.
 */
@Test
public void twoKeyAgg(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Throwable {
    SimpleRootExec exec = doTest(bitContext, connection, "/agg/twokey.json");
    // Expected values are loop-invariant: build them once, not on every batch.
    final Integer[] keyArr1 = { Integer.MIN_VALUE, Integer.MIN_VALUE, Integer.MIN_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE };
    final long[] keyArr2 = { 0, 1, 2, 0, 1, 2 };
    final long[] cntArr = { 34, 34, 34, 34, 34, 34 };
    final long[] totalArr = { 0, 34, 68, 0, 34, 68 };
    while (exec.next()) {
        final IntVector key1 = exec.getValueVectorById(SchemaPath.getSimplePath("key1"), IntVector.class);
        final BigIntVector key2 = exec.getValueVectorById(SchemaPath.getSimplePath("key2"), BigIntVector.class);
        final BigIntVector cnt = exec.getValueVectorById(SchemaPath.getSimplePath("cnt"), BigIntVector.class);
        final NullableBigIntVector total = exec.getValueVectorById(SchemaPath.getSimplePath("total"), NullableBigIntVector.class);
        for (int i = 0; i < exec.getRecordCount(); i++) {
            // Boxed casts match the Object values returned by getObject(i).
            assertEquals((Long) cntArr[i], cnt.getAccessor().getObject(i));
            assertEquals(keyArr1[i], key1.getAccessor().getObject(i));
            assertEquals((Long) keyArr2[i], key2.getAccessor().getObject(i));
            assertEquals((Long) totalArr[i], total.getAccessor().getObject(i));
        }
    }
    // Rethrow the fragment's failure cause (if any) so the test reports the root cause.
    if (exec.getContext().getFailureCause() != null) {
        throw exec.getContext().getFailureCause();
    }
    assertFalse(exec.getContext().isFailed());
}
Also used : SimpleRootExec(org.apache.drill.exec.physical.impl.SimpleRootExec) BigIntVector(org.apache.drill.exec.vector.BigIntVector) IntVector(org.apache.drill.exec.vector.IntVector) NullableBigIntVector(org.apache.drill.exec.vector.NullableBigIntVector) NullableBigIntVector(org.apache.drill.exec.vector.NullableBigIntVector) BigIntVector(org.apache.drill.exec.vector.BigIntVector) NullableBigIntVector(org.apache.drill.exec.vector.NullableBigIntVector) Test(org.junit.Test) ExecTest(org.apache.drill.exec.ExecTest)

Example 8 with BigIntVector

use of org.apache.drill.exec.vector.BigIntVector in project drill by apache.

Source: class TestSimpleExternalSort, method validateResults.

/**
 * Asserts that the "blue" BigInt column is sorted in non-increasing order across
 * all result batches, then reports how many records and batches were validated.
 * All batches are released before returning.
 *
 * @param allocator allocator used by the record batch loader
 * @param results   query result batches to validate (released by this method)
 * @throws SchemaChangeException if a batch cannot be loaded
 */
private void validateResults(BufferAllocator allocator, List<QueryDataBatch> results) throws SchemaChangeException {
    long previousBigInt = Long.MAX_VALUE;
    int recordCount = 0;
    int batchCount = 0;
    for (QueryDataBatch b : results) {
        RecordBatchLoader loader = new RecordBatchLoader(allocator);
        if (b.getHeader().getRowCount() > 0) {
            batchCount++;
            loader.load(b.getHeader().getDef(), b.getData());
            @SuppressWarnings("resource") BigIntVector c1 = (BigIntVector) loader.getValueAccessorById(BigIntVector.class, loader.getValueVectorId(new SchemaPath("blue", ExpressionPosition.UNKNOWN)).getFieldIds()).getValueVector();
            BigIntVector.Accessor a1 = c1.getAccessor();
            // Use the captured accessor consistently (the original mixed a1 with
            // repeated c1.getAccessor() calls) and read each value exactly once.
            for (int i = 0; i < a1.getValueCount(); i++) {
                recordCount++;
                final long current = a1.get(i);
                // The message describes the violated ordering: on failure,
                // previous < current (the original message claimed the opposite).
                assertTrue(String.format("%d < %d", previousBigInt, current), previousBigInt >= current);
                previousBigInt = current;
            }
        }
        loader.clear();
        b.release();
    }
    System.out.println(String.format("Sorted %,d records in %d batches.", recordCount, batchCount));
}
Also used : QueryDataBatch(org.apache.drill.exec.rpc.user.QueryDataBatch) SchemaPath(org.apache.drill.common.expression.SchemaPath) RecordBatchLoader(org.apache.drill.exec.record.RecordBatchLoader) BigIntVector(org.apache.drill.exec.vector.BigIntVector)

Example 9 with BigIntVector

use of org.apache.drill.exec.vector.BigIntVector in project drill by apache.

Source: class TestWriter, method simpleCsv.

/**
 * Runs the simple CSV writer plan and verifies the summary batch (fragment id
 * "0_0", 132000 records written) and that exactly two CSV files appear in
 * /tmp/csvtest on the test filesystem.
 */
@Test
public void simpleCsv() throws Exception {
    // Delete any CSV files left in /tmp/csvtest by a previous run.
    Path path = new Path("/tmp/csvtest");
    if (fs.exists(path)) {
        fs.delete(path, true);
    }
    String plan = Files.toString(FileUtils.getResourceAsFile("/writer/simple_csv_writer.json"), Charsets.UTF_8);
    List<QueryDataBatch> results = testPhysicalWithResults(plan);
    RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
    QueryDataBatch batch = results.get(0);
    assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
    // Writer output schema: column 0 = fragment id (varchar), column 1 = records written (bigint).
    VarCharVector fragmentIdV = (VarCharVector) batchLoader.getValueAccessorById(VarCharVector.class, 0).getValueVector();
    BigIntVector recordWrittenV = (BigIntVector) batchLoader.getValueAccessorById(BigIntVector.class, 1).getValueVector();
    // Expect exactly one summary row in the output.
    assertEquals(1, batchLoader.getRecordCount());
    assertEquals("0_0", fragmentIdV.getAccessor().getObject(0).toString());
    assertEquals(132000, recordWrittenV.getAccessor().get(0));
    // Now verify the CSV files were written to disk.
    assertTrue(fs.exists(path));
    // Expect two files; assertEquals reports the actual count on failure
    // (the original assertTrue(2 == ...) would not).
    FileStatus[] fileStatuses = fs.globStatus(new Path(path.toString(), "*.csv"));
    assertEquals(2, fileStatuses.length);
    for (QueryDataBatch b : results) {
        b.release();
    }
    batchLoader.clear();
}
Also used : Path(org.apache.hadoop.fs.Path) QueryDataBatch(org.apache.drill.exec.rpc.user.QueryDataBatch) FileStatus(org.apache.hadoop.fs.FileStatus) RecordBatchLoader(org.apache.drill.exec.record.RecordBatchLoader) VarCharVector(org.apache.drill.exec.vector.VarCharVector) BigIntVector(org.apache.drill.exec.vector.BigIntVector) Test(org.junit.Test)

Example 10 with BigIntVector

use of org.apache.drill.exec.vector.BigIntVector in project drill by apache.

Source: class TestWriter, method testCTASQueryHelper.

/**
 * Runs a CTAS query, sums the "records written" column across all result batches
 * and asserts it matches the expected count. The created table directory is
 * removed afterwards (best effort).
 *
 * @param tableName           name of the table created by the query
 * @param testQuery           CTAS SQL to execute
 * @param expectedOutputCount expected total number of records written
 */
private void testCTASQueryHelper(String tableName, String testQuery, int expectedOutputCount) throws Exception {
    try {
        List<QueryDataBatch> results = testSqlWithResults(testQuery);
        RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
        int recordsWritten = 0;
        for (QueryDataBatch batch : results) {
            try {
                batchLoader.load(batch.getHeader().getDef(), batch.getData());
                if (batchLoader.getRecordCount() <= 0) {
                    continue;
                }
                BigIntVector recordWrittenV = (BigIntVector) batchLoader.getValueAccessorById(BigIntVector.class, 1).getValueVector();
                for (int i = 0; i < batchLoader.getRecordCount(); i++) {
                    recordsWritten += recordWrittenV.getAccessor().get(i);
                }
                batchLoader.clear();
            } finally {
                // Always release the batch; the original leaked batches whose
                // record count was <= 0 because 'continue' skipped release().
                batch.release();
            }
        }
        assertEquals(expectedOutputCount, recordsWritten);
    } finally {
        try {
            Path path = new Path(getDfsTestTmpSchemaLocation(), tableName);
            if (fs.exists(path)) {
                fs.delete(path, true);
            }
        } catch (Exception e) {
            // Cleanup is best effort, but keep the exception in the log so
            // failures are diagnosable (the original dropped it entirely).
            logger.warn("Failed to delete the table [{}, {}] created as part of the test", getDfsTestTmpSchemaLocation(), tableName, e);
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) QueryDataBatch(org.apache.drill.exec.rpc.user.QueryDataBatch) RecordBatchLoader(org.apache.drill.exec.record.RecordBatchLoader) BigIntVector(org.apache.drill.exec.vector.BigIntVector)

Aggregations

BigIntVector (org.apache.drill.exec.vector.BigIntVector)14 Test (org.junit.Test)9 RecordBatchLoader (org.apache.drill.exec.record.RecordBatchLoader)8 QueryDataBatch (org.apache.drill.exec.rpc.user.QueryDataBatch)8 SchemaPath (org.apache.drill.common.expression.SchemaPath)6 IntVector (org.apache.drill.exec.vector.IntVector)5 SimpleRootExec (org.apache.drill.exec.physical.impl.SimpleRootExec)4 ExecTest (org.apache.drill.exec.ExecTest)3 FunctionImplementationRegistry (org.apache.drill.exec.expr.fn.FunctionImplementationRegistry)3 FragmentContext (org.apache.drill.exec.ops.FragmentContext)3 PhysicalPlan (org.apache.drill.exec.physical.PhysicalPlan)3 FragmentRoot (org.apache.drill.exec.physical.base.FragmentRoot)3 PhysicalPlanReader (org.apache.drill.exec.planner.PhysicalPlanReader)3 VarCharVector (org.apache.drill.exec.vector.VarCharVector)3 DrillClient (org.apache.drill.exec.client.DrillClient)2 Drillbit (org.apache.drill.exec.server.Drillbit)2 RemoteServiceSet (org.apache.drill.exec.server.RemoteServiceSet)2 NullableBigIntVector (org.apache.drill.exec.vector.NullableBigIntVector)2 Path (org.apache.hadoop.fs.Path)2 Mean (org.apache.commons.math.stat.descriptive.moment.Mean)1