Example 6 with ValueVector

Use of org.apache.drill.exec.vector.ValueVector in the Apache Drill project.

From the class TestOptiqPlans, method testFilterString.

@Test
public void testFilterString() throws Exception {
    final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
    try (final Drillbit bit1 = new Drillbit(config, serviceSet);
        final DrillClient client = new DrillClient(config, serviceSet.getCoordinator())) {
        bit1.run();
        client.connect();
        final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.LOGICAL, Resources.toString(Resources.getResource("logical_string_filter.json"), Charsets.UTF_8));
        final RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
        for (final QueryDataBatch b : results) {
            System.out.println(String.format("Got %d results", b.getHeader().getRowCount()));
            loader.load(b.getHeader().getDef(), b.getData());
            for (final VectorWrapper<?> vw : loader) {
                System.out.println(vw.getValueVector().getField().getPath());
                final ValueVector vv = vw.getValueVector();
                for (int i = 0; i < vv.getAccessor().getValueCount(); i++) {
                    final Object o = vv.getAccessor().getObject(i);
                    if (vv instanceof VarBinaryVector) {
                        final VarBinaryVector.Accessor x = ((VarBinaryVector) vv).getAccessor();
                        final VarBinaryHolder vbh = new VarBinaryHolder();
                        x.get(i, vbh);
                        System.out.printf("%d..%d", vbh.start, vbh.end);
                        System.out.println("[" + new String((byte[]) vv.getAccessor().getObject(i)) + "]");
                    } else {
                        System.out.println(vv.getAccessor().getObject(i));
                    }
                }
            }
            loader.clear();
            b.release();
        }
        client.close();
    }
}
Also used: RecordBatchLoader (org.apache.drill.exec.record.RecordBatchLoader), VarBinaryHolder (org.apache.drill.exec.expr.holders.VarBinaryHolder), DrillbitEndpoint (org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint), VarBinaryVector (org.apache.drill.exec.vector.VarBinaryVector), ValueVector (org.apache.drill.exec.vector.ValueVector), QueryDataBatch (org.apache.drill.exec.rpc.user.QueryDataBatch), Drillbit (org.apache.drill.exec.server.Drillbit), RemoteServiceSet (org.apache.drill.exec.server.RemoteServiceSet), DrillClient (org.apache.drill.exec.client.DrillClient), ExecTest (org.apache.drill.exec.ExecTest), Test (org.junit.Test)
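
As a side note, the inner loop above follows the generic Accessor read pattern: every ValueVector exposes an Accessor that reports the value count and returns each entry as a Java object. A minimal sketch of that pattern in isolation (illustrative only; the helper name printVector is not part of Drill):

// Generic dump of a ValueVector's contents via its Accessor.
static void printVector(final ValueVector vv) {
    final ValueVector.Accessor accessor = vv.getAccessor();
    for (int i = 0; i < accessor.getValueCount(); i++) {
        final Object value = accessor.getObject(i);
        // VarBinary entries come back as byte[]; render them as text for readability.
        System.out.println(value instanceof byte[] ? new String((byte[]) value) : value);
    }
}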

Example 7 with ValueVector

Use of org.apache.drill.exec.vector.ValueVector in the Apache Drill project.

From the class HiveAbstractReader, method populatePartitionVectors.

protected void populatePartitionVectors(int recordCount) {
    for (int i = 0; i < pVectors.size(); i++) {
        final ValueVector vector = pVectors.get(i);
        final Object val = selectedPartitionValues.get(i);
        AllocationHelper.allocateNew(vector, recordCount);
        if (val != null) {
            HiveUtilities.populateVector(vector, managedBuffer, val, 0, recordCount);
        }
        vector.getMutator().setValueCount(recordCount);
    }
}
Also used: ValueVector (org.apache.drill.exec.vector.ValueVector)
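
For context, the method above follows the standard allocate, write, finalize lifecycle for a ValueVector. A minimal sketch of the same lifecycle on a concrete vector type (illustrative only; the helper name, field, and values are assumed, not taken from the Drill sources):

// Allocate an IntVector, write one value per row, then fix the value count.
static void fillSequential(final IntVector vector, final int recordCount) {
    AllocationHelper.allocateNew(vector, recordCount);
    for (int i = 0; i < recordCount; i++) {
        vector.getMutator().setSafe(i, i * 10);  // setSafe grows the underlying buffer if needed
    }
    vector.getMutator().setValueCount(recordCount);  // declare how many entries are valid
}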

Example 8 with ValueVector

Use of org.apache.drill.exec.vector.ValueVector in the Apache Drill project.

From the class ChainedHashTable, method createAndSetupHashTable.

public HashTable createAndSetupHashTable(TypedFieldId[] outKeyFieldIds) throws ClassTransformationException, IOException, SchemaChangeException {
    CodeGenerator<HashTable> top = CodeGenerator.get(HashTable.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
    top.plainJavaCapable(true);
    // Uncomment this line to debug the generated code.
    // This code is called from generated code, so to step into this code,
    // persist the code generated in HashAggBatch also.
    //  top.saveCodeForDebugging(true);
    ClassGenerator<HashTable> cg = top.getRoot();
    ClassGenerator<HashTable> cgInner = cg.getInnerGenerator("BatchHolder");
    LogicalExpression[] keyExprsBuild = new LogicalExpression[htConfig.getKeyExprsBuild().size()];
    LogicalExpression[] keyExprsProbe = null;
    boolean isProbe = (htConfig.getKeyExprsProbe() != null);
    if (isProbe) {
        keyExprsProbe = new LogicalExpression[htConfig.getKeyExprsProbe().size()];
    }
    ErrorCollector collector = new ErrorCollectorImpl();
    // original ht container from which others may be cloned
    VectorContainer htContainerOrig = new VectorContainer();
    LogicalExpression[] htKeyExprs = new LogicalExpression[htConfig.getKeyExprsBuild().size()];
    TypedFieldId[] htKeyFieldIds = new TypedFieldId[htConfig.getKeyExprsBuild().size()];
    int i = 0;
    for (NamedExpression ne : htConfig.getKeyExprsBuild()) {
        final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incomingBuild, collector, context.getFunctionRegistry());
        if (collector.hasErrors()) {
            throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
        }
        if (expr == null) {
            continue;
        }
        keyExprsBuild[i] = expr;
        i++;
    }
    if (isProbe) {
        i = 0;
        for (NamedExpression ne : htConfig.getKeyExprsProbe()) {
            final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incomingProbe, collector, context.getFunctionRegistry());
            if (collector.hasErrors()) {
                throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
            }
            if (expr == null) {
                continue;
            }
            keyExprsProbe[i] = expr;
            i++;
        }
        JoinUtils.addLeastRestrictiveCasts(keyExprsProbe, incomingProbe, keyExprsBuild, incomingBuild, context);
    }
    i = 0;
    /*
     * Once the implicit casts have been added, create the value vectors for the corresponding
     * types and add them to the hash table's container.
     * Note: Adding implicit casts may have a minor impact on the memory footprint. For example,
     * if we have a join condition with bigint on the probe side and int on the build side, then
     * after this change we will be allocating a bigint vector in the hashtable instead of an int
     * vector.
     */
    for (NamedExpression ne : htConfig.getKeyExprsBuild()) {
        LogicalExpression expr = keyExprsBuild[i];
        final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
        @SuppressWarnings("resource") ValueVector vv = TypeHelper.getNewVector(outputField, allocator);
        htKeyFieldIds[i] = htContainerOrig.add(vv);
        i++;
    }
    // generate code for isKeyMatch(), setValue(), getHash() and outputRecordKeys()
    setupIsKeyMatchInternal(cgInner, KeyMatchIncomingBuildMapping, KeyMatchHtableMapping, keyExprsBuild, htConfig.getComparators(), htKeyFieldIds);
    setupIsKeyMatchInternal(cgInner, KeyMatchIncomingProbeMapping, KeyMatchHtableProbeMapping, keyExprsProbe, htConfig.getComparators(), htKeyFieldIds);
    setupSetValue(cgInner, keyExprsBuild, htKeyFieldIds);
    if (outgoing != null) {
        if (outKeyFieldIds.length > htConfig.getKeyExprsBuild().size()) {
            throw new IllegalArgumentException("Mismatched number of output key fields.");
        }
    }
    setupOutputRecordKeys(cgInner, htKeyFieldIds, outKeyFieldIds);
    // use the top-level code generator for getHash
    setupGetHash(cg, GetHashIncomingBuildMapping, incomingBuild, keyExprsBuild, false);
    setupGetHash(cg, GetHashIncomingProbeMapping, incomingProbe, keyExprsProbe, true);
    HashTable ht = context.getImplementationClass(top);
    ht.setup(htConfig, context, allocator, incomingBuild, incomingProbe, outgoing, htContainerOrig);
    return ht;
}
Also used: ErrorCollector (org.apache.drill.common.expression.ErrorCollector), MaterializedField (org.apache.drill.exec.record.MaterializedField), VectorContainer (org.apache.drill.exec.record.VectorContainer), ErrorCollectorImpl (org.apache.drill.common.expression.ErrorCollectorImpl), ValueVector (org.apache.drill.exec.vector.ValueVector), LogicalExpression (org.apache.drill.common.expression.LogicalExpression), SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException), NamedExpression (org.apache.drill.common.logical.data.NamedExpression), TypedFieldId (org.apache.drill.exec.record.TypedFieldId)
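
To make the comment about implicit casts concrete: with a BIGINT key on the probe side and an INT key on the build side, the least-restrictive cast leaves the materialized key expression typed as BIGINT, so the hash table container ends up holding the wider vector. A minimal sketch of that allocation step (illustrative only; the field name and allocator variable are assumed):

// After JoinUtils.addLeastRestrictiveCasts, the key expression carries the widened type,
// and TypeHelper creates the matching (larger) vector for the hash table container.
final MajorType widened = Types.required(MinorType.BIGINT);
final MaterializedField keyField = MaterializedField.create("joinKey", widened);  // "joinKey" is a placeholder name
final ValueVector keyVector = TypeHelper.getNewVector(keyField, allocator);       // a BigIntVector rather than an IntVector
final TypedFieldId keyId = htContainerOrig.add(keyVector);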

Example 9 with ValueVector

Use of org.apache.drill.exec.vector.ValueVector in the Apache Drill project.

From the class StreamingAggBatch, method createAggregatorInternal.

private StreamingAggregator createAggregatorInternal() throws SchemaChangeException, ClassTransformationException, IOException {
    ClassGenerator<StreamingAggregator> cg = CodeGenerator.getRoot(StreamingAggTemplate.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
    cg.getCodeGenerator().plainJavaCapable(true);
    // Uncomment this line to debug the generated code.
    //    cg.getCodeGenerator().saveCodeForDebugging(true);
    container.clear();
    LogicalExpression[] keyExprs = new LogicalExpression[popConfig.getKeys().size()];
    LogicalExpression[] valueExprs = new LogicalExpression[popConfig.getExprs().size()];
    TypedFieldId[] keyOutputIds = new TypedFieldId[popConfig.getKeys().size()];
    ErrorCollector collector = new ErrorCollectorImpl();
    for (int i = 0; i < keyExprs.length; i++) {
        final NamedExpression ne = popConfig.getKeys().get(i);
        final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
        if (expr == null) {
            continue;
        }
        keyExprs[i] = expr;
        final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
        @SuppressWarnings("resource") final ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
        keyOutputIds[i] = container.add(vector);
    }
    for (int i = 0; i < valueExprs.length; i++) {
        final NamedExpression ne = popConfig.getExprs().get(i);
        final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
        if (expr instanceof IfExpression) {
            throw UserException.unsupportedError(new UnsupportedOperationException("Union type not supported in aggregate functions")).build(logger);
        }
        if (expr == null) {
            continue;
        }
        final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
        @SuppressWarnings("resource") ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
        TypedFieldId id = container.add(vector);
        valueExprs[i] = new ValueVectorWriteExpression(id, expr, true);
    }
    if (collector.hasErrors()) {
        throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
    }
    setupIsSame(cg, keyExprs);
    setupIsSameApart(cg, keyExprs);
    addRecordValues(cg, valueExprs);
    outputRecordKeys(cg, keyOutputIds, keyExprs);
    outputRecordKeysPrev(cg, keyOutputIds, keyExprs);
    cg.getBlock("resetValues")._return(JExpr.TRUE);
    getIndex(cg);
    container.buildSchema(SelectionVectorMode.NONE);
    StreamingAggregator agg = context.getImplementationClass(cg);
    agg.setup(oContext, incoming, this);
    return agg;
}
Also used: IfExpression (org.apache.drill.common.expression.IfExpression), ErrorCollector (org.apache.drill.common.expression.ErrorCollector), MaterializedField (org.apache.drill.exec.record.MaterializedField), ErrorCollectorImpl (org.apache.drill.common.expression.ErrorCollectorImpl), ValueVector (org.apache.drill.exec.vector.ValueVector), LogicalExpression (org.apache.drill.common.expression.LogicalExpression), SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException), NamedExpression (org.apache.drill.common.logical.data.NamedExpression), TypedFieldId (org.apache.drill.exec.record.TypedFieldId), ValueVectorWriteExpression (org.apache.drill.exec.expr.ValueVectorWriteExpression)
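
The distinctive ValueVector step here is the wrapping at the end of the value loop: each aggregate expression is turned into a ValueVectorWriteExpression so that the generated aggregator writes its result into the vector just added to the container. A minimal sketch of that pattern in isolation (illustrative only; the field name and allocator variable are assumed):

// Create the output vector for an aggregate value, register it in the container,
// and wrap the expression so that evaluating it writes into that vector.
final MaterializedField valueField = MaterializedField.create("aggValue", expr.getMajorType());  // "aggValue" is a placeholder name
final ValueVector valueVector = TypeHelper.getNewVector(valueField, allocator);
final TypedFieldId valueId = container.add(valueVector);
final LogicalExpression writeExpr = new ValueVectorWriteExpression(valueId, expr, true);  // true selects the safe write path, as in the method above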

Example 10 with ValueVector

Use of org.apache.drill.exec.vector.ValueVector in the Apache Drill project.

From the class ScanBatch, method addImplicitVectors.

private void addImplicitVectors() {
    try {
        if (implicitVectors != null) {
            for (ValueVector v : implicitVectors.values()) {
                v.clear();
            }
        }
        implicitVectors = Maps.newHashMap();
        if (implicitValues != null) {
            for (String column : implicitValues.keySet()) {
                final MaterializedField field = MaterializedField.create(column, Types.optional(MinorType.VARCHAR));
                @SuppressWarnings("resource") final ValueVector v = mutator.addField(field, NullableVarCharVector.class);
                implicitVectors.put(column, v);
            }
        }
    } catch (SchemaChangeException e) {
        // No exception should be thrown here.
        throw UserException.systemError(e).addContext("Failure while allocating implicit vectors").build(logger);
    }
}
Also used: ValueVector (org.apache.drill.exec.vector.ValueVector), SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException), MaterializedField (org.apache.drill.exec.record.MaterializedField)
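
For reference, NullableVarCharVector appears here because each implicit column is declared as an optional VARCHAR field, and the OPTIONAL data mode maps to the nullable vector classes. A minimal sketch of that mapping (illustrative only; the column name and allocator variable are assumed):

// An optional VARCHAR field materializes as a NullableVarCharVector.
final MaterializedField field = MaterializedField.create("filename", Types.optional(MinorType.VARCHAR));  // "filename" is a placeholder column name
final ValueVector v = TypeHelper.getNewVector(field, allocator);  // concrete class: NullableVarCharVector
v.allocateNew();
v.getMutator().setValueCount(0);  // no rows written yet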

Aggregations

ValueVector (org.apache.drill.exec.vector.ValueVector): 130
MaterializedField (org.apache.drill.exec.record.MaterializedField): 29
Test (org.junit.Test): 21
SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException): 18
RecordBatchLoader (org.apache.drill.exec.record.RecordBatchLoader): 13
VectorWrapper (org.apache.drill.exec.record.VectorWrapper): 13
QueryDataBatch (org.apache.drill.exec.rpc.user.QueryDataBatch): 12
ExecTest (org.apache.drill.exec.ExecTest): 11
IOException (java.io.IOException): 10
LogicalExpression (org.apache.drill.common.expression.LogicalExpression): 10
VectorContainer (org.apache.drill.exec.record.VectorContainer): 10
Drillbit (org.apache.drill.exec.server.Drillbit): 10
ErrorCollector (org.apache.drill.common.expression.ErrorCollector): 9
ErrorCollectorImpl (org.apache.drill.common.expression.ErrorCollectorImpl): 9
RemoteServiceSet (org.apache.drill.exec.server.RemoteServiceSet): 9
SchemaPath (org.apache.drill.common.expression.SchemaPath): 8
MajorType (org.apache.drill.common.types.TypeProtos.MajorType): 8
FunctionImplementationRegistry (org.apache.drill.exec.expr.fn.FunctionImplementationRegistry): 8
FragmentContext (org.apache.drill.exec.ops.FragmentContext): 8
PhysicalPlan (org.apache.drill.exec.physical.PhysicalPlan): 8