
Example 1 with VectorizationContext

Use of org.apache.hadoop.hive.ql.exec.vector.VectorizationContext in the apache/hive project.

From class VectorSelectOperatorBench, method setup:

@Setup
public void setup(Blackhole bh) throws HiveException {
    HiveConf hconf = new HiveConf();
    List<String> columns = new ArrayList<String>();
    columns.add("a");
    columns.add("b");
    columns.add("c");
    VectorizationContext vc = new VectorizationContext("name", columns);
    selDesc = new SelectDesc(false);
    List<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();
    ExprNodeColumnDesc colDesc1 = new ExprNodeColumnDesc(Long.class, "a", "table", false);
    ExprNodeColumnDesc colDesc2 = new ExprNodeColumnDesc(Long.class, "b", "table", false);
    ExprNodeColumnDesc colDesc3 = new ExprNodeColumnDesc(Long.class, "c", "table", false);
    ExprNodeGenericFuncDesc plusDesc = new ExprNodeGenericFuncDesc();
    GenericUDF gudf = new GenericUDFOPPlus();
    plusDesc.setGenericUDF(gudf);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(colDesc1);
    children.add(colDesc2);
    plusDesc.setChildren(children);
    plusDesc.setTypeInfo(TypeInfoFactory.longTypeInfo);
    colList.add(plusDesc);
    colList.add(colDesc3);
    selDesc.setColList(colList);
    List<String> outputColNames = new ArrayList<String>();
    outputColNames.add("_col0");
    outputColNames.add("_col1");
    selDesc.setOutputColumnNames(outputColNames);
    VectorSelectDesc vectorSelectDesc = new VectorSelectDesc();
    selDesc.setVectorDesc(vectorSelectDesc);
    List<ExprNodeDesc> selectColList = selDesc.getColList();
    VectorExpression[] vectorSelectExprs = new VectorExpression[selectColList.size()];
    for (int i = 0; i < selectColList.size(); i++) {
        ExprNodeDesc expr = selectColList.get(i);
        VectorExpression ve = vc.getVectorExpression(expr);
        vectorSelectExprs[i] = ve;
    }
    vectorSelectDesc.setSelectExpressions(vectorSelectExprs);
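    // Column 3 is the scratch column that receives (a + b); column 2 passes "c" through.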
    vectorSelectDesc.setProjectedOutputColumns(new int[] { 3, 2 });
    CompilationOpContext opContext = new CompilationOpContext();
    vso = new VectorSelectOperator(opContext, selDesc, vc, vectorSelectDesc);
    // Child operators backed by a JMH Blackhole, to trigger vectorForward when batches are forwarded.
    child = new ArrayList<>();
    child.add(new BlackholeOperator(opContext, bh));
    child.add(new BlackholeOperator(opContext, bh));
    vso.initialize(hconf, null);
    vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(VectorizedRowBatch.DEFAULT_SIZE, 4, 17);
}
Also used: VectorSelectOperator(org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator), ArrayList(java.util.ArrayList), ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), VectorizationContext(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext), GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF), CompilationOpContext(org.apache.hadoop.hive.ql.CompilationOpContext), BlackholeOperator(org.apache.hive.benchmark.vectorization.BlackholeOperator), GenericUDFOPPlus(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus), ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), HiveConf(org.apache.hadoop.hive.conf.HiveConf), VectorSelectDesc(org.apache.hadoop.hive.ql.plan.VectorSelectDesc), VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression), SelectDesc(org.apache.hadoop.hive.ql.plan.SelectDesc), ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc), Setup(org.openjdk.jmh.annotations.Setup)
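
The JMH benchmark body that drives this setup is not shown above; a minimal sketch, assuming standard JMH usage of the fields initialized in setup (the method name is hypothetical):

@Benchmark
public void selectAndProject() throws HiveException {
    // Push the pre-generated batch through the vectorized SELECT operator;
    // the BlackholeOperator children consume whatever is forwarded.
    vso.process(vrg, 0);
}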

Example 2 with VectorizationContext

Use of org.apache.hadoop.hive.ql.exec.vector.VectorizationContext in the apache/hive project.

From class OperatorFactory, method getVectorOperator:

public static <T extends OperatorDesc> Operator<T> getVectorOperator(Class<? extends Operator<?>> opClass, CompilationOpContext cContext, T conf, VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException {
    Constructor<? extends Operator<?>> constructor;
    try {
        constructor = opClass.getDeclaredConstructor(CompilationOpContext.class, OperatorDesc.class, VectorizationContext.class, VectorDesc.class);
    } catch (Exception e) {
        throw new HiveException("Constructor " + opClass.getSimpleName() + "(CompilationOpContext, OperatorDesc, VectorizationContext, VectorDesc) not found", e);
    }
    try {
        vectorDesc.setVectorOp(opClass);
        Operator<T> op = (Operator<T>) constructor.newInstance(cContext, conf, vContext, vectorDesc);
        return op;
    } catch (Exception e) {
        throw new HiveException("Error encountered calling constructor " + opClass.getSimpleName() + "(CompilationOpContext, OperatorDesc, VectorizationContext, VectorDesc)", e);
    }
}
Also used: SparkPartitionPruningSinkOperator(org.apache.hadoop.hive.ql.parse.spark.SparkPartitionPruningSinkOperator), VectorSparkPartitionPruningSinkOperator(org.apache.hadoop.hive.ql.exec.vector.VectorSparkPartitionPruningSinkOperator), VectorFilterOperator(org.apache.hadoop.hive.ql.exec.vector.VectorFilterOperator), VectorReduceSinkOperator(org.apache.hadoop.hive.ql.exec.vector.VectorReduceSinkOperator), VectorTopNKeyOperator(org.apache.hadoop.hive.ql.exec.vector.VectorTopNKeyOperator), VectorGroupByOperator(org.apache.hadoop.hive.ql.exec.vector.VectorGroupByOperator), VectorMapJoinOperator(org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator), VectorAppMasterEventOperator(org.apache.hadoop.hive.ql.exec.vector.VectorAppMasterEventOperator), VectorSelectOperator(org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator), VectorSMBMapJoinOperator(org.apache.hadoop.hive.ql.exec.vector.VectorSMBMapJoinOperator), VectorPTFOperator(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFOperator), VectorFileSinkOperator(org.apache.hadoop.hive.ql.exec.vector.VectorFileSinkOperator), VectorSparkHashTableSinkOperator(org.apache.hadoop.hive.ql.exec.vector.VectorSparkHashTableSinkOperator), VectorLimitOperator(org.apache.hadoop.hive.ql.exec.vector.VectorLimitOperator), HiveException(org.apache.hadoop.hive.ql.metadata.HiveException), CompilationOpContext(org.apache.hadoop.hive.ql.CompilationOpContext), VectorDesc(org.apache.hadoop.hive.ql.plan.VectorDesc), VectorizationContext(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext), OperatorDesc(org.apache.hadoop.hive.ql.plan.OperatorDesc)
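
A typical call site, sketched under the assumption that a SelectDesc (selDesc), a VectorSelectDesc (vectorSelectDesc), and a VectorizationContext (vContext) are already in scope, as in Example 1:

Operator<SelectDesc> vectorOp = OperatorFactory.getVectorOperator(
    VectorSelectOperator.class,   // reflective target; must declare the 4-arg vector constructor
    new CompilationOpContext(),
    selDesc,                      // the plan-level operator descriptor
    vContext,                     // vectorization context for the operator's input
    vectorSelectDesc);            // vector-specific descriptor recorded on the plan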

Example 3 with VectorizationContext

Use of org.apache.hadoop.hive.ql.exec.vector.VectorizationContext in the apache/hive project.

From class Vectorizer, method doProcessChild:

private Operator<? extends OperatorDesc> doProcessChild(Operator<? extends OperatorDesc> child, Operator<? extends OperatorDesc> vectorParent, boolean isReduce, boolean isTezOrSpark, VectorTaskColumnInfo vectorTaskColumnInfo) throws VectorizerCannotVectorizeException {
    // Use vector parent to get VectorizationContext.
    final VectorizationContext vContext;
    if (vectorParent instanceof VectorizationContextRegion) {
        vContext = ((VectorizationContextRegion) vectorParent).getOutputVectorizationContext();
    } else {
        vContext = ((VectorizationOperator) vectorParent).getInputVectorizationContext();
    }
    Operator<? extends OperatorDesc> vectorChild;
    try {
        vectorChild = validateAndVectorizeOperator(child, vContext, isReduce, isTezOrSpark, vectorTaskColumnInfo);
    } catch (HiveException e) {
        String issue = "exception: " + VectorizationContext.getStackTraceAsSingleLine(e);
        setNodeIssue(issue);
        throw new VectorizerCannotVectorizeException();
    }
    return vectorChild;
}
Also used: HiveException(org.apache.hadoop.hive.ql.metadata.HiveException), VectorizationContext(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext), VectorizationContextRegion(org.apache.hadoop.hive.ql.exec.vector.VectorizationContextRegion)
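
The branch at the top is the heart of this method; restated as a standalone helper (hypothetical name, identical logic):

// A parent that defines a fresh output context (for example, one that
// re-maps column positions) implements VectorizationContextRegion; any
// other vectorized parent hands its own input context to the child.
private static VectorizationContext contextOf(Operator<? extends OperatorDesc> vectorParent) {
    return (vectorParent instanceof VectorizationContextRegion)
        ? ((VectorizationContextRegion) vectorParent).getOutputVectorizationContext()
        : ((VectorizationOperator) vectorParent).getInputVectorizationContext();
}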

Example 4 with VectorizationContext

Use of org.apache.hadoop.hive.ql.exec.vector.VectorizationContext in the apache/hive project.

From class AggregationBase, method doVectorTest:

protected static boolean doVectorTest(String aggregationName, TypeInfo typeInfo, GenericUDAFEvaluator evaluator, TypeInfo outputTypeInfo, GenericUDAFEvaluator.Mode udafEvaluatorMode, int maxKeyCount, List<String> columns, String[] columnNames, TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations, List<ExprNodeDesc> parameterList, VectorRandomBatchSource batchSource, Object[] results) throws Exception {
    HiveConf hiveConf = new HiveConf();
    VectorizationContext vectorizationContext = new VectorizationContext("name", columns, Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
    ImmutablePair<VectorAggregationDesc, String> pair = Vectorizer.getVectorAggregationDesc(aggregationName, parameterList, evaluator, outputTypeInfo, udafEvaluatorMode, vectorizationContext);
    VectorAggregationDesc vecAggrDesc = pair.left;
    if (vecAggrDesc == null) {
        Assert.fail("No vector aggregation expression found for aggregationName " + aggregationName + " udafEvaluatorMode " + udafEvaluatorMode + " parameterList " + parameterList + " outputTypeInfo " + outputTypeInfo);
    }
    Class<? extends VectorAggregateExpression> vecAggrClass = vecAggrDesc.getVecAggrClass();
    Constructor<? extends VectorAggregateExpression> ctor = null;
    try {
        ctor = vecAggrClass.getConstructor(VectorAggregationDesc.class);
    } catch (Exception e) {
        throw new HiveException("Constructor " + vecAggrClass.getSimpleName() + "(VectorAggregationDesc) not available");
    }
    VectorAggregateExpression vecAggrExpr = null;
    try {
        vecAggrExpr = ctor.newInstance(vecAggrDesc);
    } catch (Exception e) {
        throw new HiveException("Failed to create " + vecAggrClass.getSimpleName() + "(VectorAggregationDesc) object ", e);
    }
    VectorExpression.doTransientInit(vecAggrExpr.getInputExpression(), hiveConf);
    // System.out.println("*VECTOR AGGREGATION EXPRESSION* " + vecAggrExpr.getClass().getSimpleName());
    /*
    System.out.println(
        "*DEBUG* typeInfo " + typeInfo.toString() +
        " aggregationTestMode VECTOR_MODE" +
        " vecAggrExpr " + vecAggrExpr.getClass().getSimpleName());
    */
    VectorRandomRowSource rowSource = batchSource.getRowSource();
    VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
        columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
        /* dataColumnNums */ null,
        /* partitionColumnCount */ 0,
        /* virtualColumnCount */ 0,
        /* neededVirtualColumns */ null,
        vectorizationContext.getScratchColumnTypeNames(),
        vectorizationContext.getScratchDataTypePhysicalVariations());
    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
    // Last entry is for a NULL key.
    VectorAggregationBufferRow[] vectorAggregationBufferRows = new VectorAggregationBufferRow[maxKeyCount + 1];
    VectorAggregationBufferRow[] batchBufferRows;
    batchSource.resetBatchIteration();
    int rowIndex = 0;
    while (true) {
        if (!batchSource.fillNextBatch(batch)) {
            break;
        }
        LongColumnVector keyLongColVector = (LongColumnVector) batch.cols[0];
        batchBufferRows = new VectorAggregationBufferRow[VectorizedRowBatch.DEFAULT_SIZE];
        final int size = batch.size;
        boolean selectedInUse = batch.selectedInUse;
        int[] selected = batch.selected;
        for (int logical = 0; logical < size; logical++) {
            final int batchIndex = (selectedInUse ? selected[logical] : logical);
            final int keyAdjustedBatchIndex;
            if (keyLongColVector.isRepeating) {
                keyAdjustedBatchIndex = 0;
            } else {
                keyAdjustedBatchIndex = batchIndex;
            }
            final short key;
            if (keyLongColVector.noNulls || !keyLongColVector.isNull[keyAdjustedBatchIndex]) {
                key = (short) keyLongColVector.vector[keyAdjustedBatchIndex];
            } else {
                key = (short) maxKeyCount;
            }
            VectorAggregationBufferRow bufferRow = vectorAggregationBufferRows[key];
            if (bufferRow == null) {
                VectorAggregateExpression.AggregationBuffer aggregationBuffer = vecAggrExpr.getNewAggregationBuffer();
                aggregationBuffer.reset();
                VectorAggregateExpression.AggregationBuffer[] aggregationBuffers = new VectorAggregateExpression.AggregationBuffer[] { aggregationBuffer };
                bufferRow = new VectorAggregationBufferRow(aggregationBuffers);
                vectorAggregationBufferRows[key] = bufferRow;
            }
            batchBufferRows[logical] = bufferRow;
        }
        vecAggrExpr.aggregateInputSelection(batchBufferRows, 0, batch);
        rowIndex += batch.size;
    }
    String[] outputColumnNames = new String[] { "output" };
    TypeInfo[] outputTypeInfos = new TypeInfo[] { outputTypeInfo };
    VectorizedRowBatchCtx outputBatchContext = new VectorizedRowBatchCtx(
        outputColumnNames, outputTypeInfos,
        new DataTypePhysicalVariation[] { vecAggrExpr.getOutputDataTypePhysicalVariation() },
        /* dataColumnNums */ null,
        /* partitionColumnCount */ 0,
        /* virtualColumnCount */ 0,
        /* neededVirtualColumns */ null,
        new String[0], new DataTypePhysicalVariation[0]);
    VectorizedRowBatch outputBatch = outputBatchContext.createVectorizedRowBatch();
    short[] keys = new short[VectorizedRowBatch.DEFAULT_SIZE];
    VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
    resultVectorExtractRow.init(new TypeInfo[] { outputTypeInfo }, new int[] { 0 });
    Object[] scratchRow = new Object[1];
    for (short key = 0; key < maxKeyCount + 1; key++) {
        VectorAggregationBufferRow vectorAggregationBufferRow = vectorAggregationBufferRows[key];
        if (vectorAggregationBufferRow != null) {
            if (outputBatch.size == VectorizedRowBatch.DEFAULT_SIZE) {
                extractResultObjects(outputBatch, keys, resultVectorExtractRow, outputTypeInfo, scratchRow, results);
                outputBatch.reset();
            }
            keys[outputBatch.size] = key;
            VectorAggregateExpression.AggregationBuffer aggregationBuffer = vectorAggregationBufferRow.getAggregationBuffer(0);
            vecAggrExpr.assignRowColumn(outputBatch, outputBatch.size++, 0, aggregationBuffer);
        }
    }
    if (outputBatch.size > 0) {
        extractResultObjects(outputBatch, keys, resultVectorExtractRow, outputTypeInfo, scratchRow, results);
    }
    return true;
}
Also used: HiveException(org.apache.hadoop.hive.ql.metadata.HiveException), VectorizationContext(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext), VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), HiveConf(org.apache.hadoop.hive.conf.HiveConf), VectorAggregationBufferRow(org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow), LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector), VectorAggregateExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression), PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), VectorExtractRow(org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow), SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException), VectorAggregationDesc(org.apache.hadoop.hive.ql.exec.vector.VectorAggregationDesc), VectorizedRowBatchCtx(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx), AggregationBuffer(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer), VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
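
The per-key lookup in the middle of this method is a direct-addressed GROUP BY: the key value indexes straight into an array of buffer rows, with slot maxKeyCount reserved for NULL keys. The same logic as a standalone helper (hypothetical name):

// Fetch, or lazily create, the aggregation buffer row for one key.
private static VectorAggregationBufferRow bufferRowForKey(short key,
        VectorAggregationBufferRow[] bufferRows,
        VectorAggregateExpression vecAggrExpr) throws HiveException {
    VectorAggregationBufferRow bufferRow = bufferRows[key];
    if (bufferRow == null) {
        VectorAggregateExpression.AggregationBuffer buffer = vecAggrExpr.getNewAggregationBuffer();
        buffer.reset();
        bufferRow = new VectorAggregationBufferRow(
                new VectorAggregateExpression.AggregationBuffer[] { buffer });
        bufferRows[key] = bufferRow;
    }
    return bufferRow;
}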

Example 5 with VectorizationContext

Use of org.apache.hadoop.hive.ql.exec.vector.VectorizationContext in the apache/hive project.

From class TestVectorBetweenIn, method doVectorBetweenInTest:

private boolean doVectorBetweenInTest(TypeInfo typeInfo, BetweenInVariation betweenInVariation, List<Object> compareList, List<String> columns, String[] columnNames, TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations, List<ExprNodeDesc> children, GenericUDF udf, ExprNodeGenericFuncDesc exprDesc, BetweenInTestMode betweenInTestMode, VectorRandomBatchSource batchSource, ObjectInspector objectInspector, TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception {
    HiveConf hiveConf = new HiveConf();
    if (betweenInTestMode == BetweenInTestMode.ADAPTOR) {
        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
    }
    final boolean isFilter = betweenInVariation.isFilter;
    VectorizationContext vectorizationContext = new VectorizationContext("name", columns, Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
    VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc, (isFilter ? VectorExpressionDescriptor.Mode.FILTER : VectorExpressionDescriptor.Mode.PROJECTION));
    vectorExpression.transientInit(hiveConf);
    if (betweenInTestMode == BetweenInTestMode.VECTOR_EXPRESSION) {
        String vecExprString = vectorExpression.toString();
        if (vectorExpression instanceof VectorUDFAdaptor) {
            System.out.println("*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() + " betweenInTestMode " + betweenInTestMode + " betweenInVariation " + betweenInVariation + " vectorExpression " + vecExprString);
        } else if (dataTypePhysicalVariations[0] == DataTypePhysicalVariation.DECIMAL_64) {
            final String nameToCheck = vectorExpression.getClass().getSimpleName();
            if (!nameToCheck.contains("Decimal64")) {
                System.out.println("*EXPECTED DECIMAL_64 VECTOR EXPRESSION* typeInfo " + typeInfo.toString() + " betweenInTestMode " + betweenInTestMode + " betweenInVariation " + betweenInVariation + " vectorExpression " + vecExprString);
            }
        }
    }
    // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
    /*
    System.out.println(
        "*DEBUG* typeInfo " + typeInfo.toString() +
        " betweenInTestMode " + betweenInTestMode +
        " betweenInVariation " + betweenInVariation +
        " vectorExpression " + vectorExpression.toString());
    */
    VectorRandomRowSource rowSource = batchSource.getRowSource();
    VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
        columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
        /* dataColumnNums */ null,
        /* partitionColumnCount */ 0,
        /* virtualColumnCount */ 0,
        /* neededVirtualColumns */ null,
        vectorizationContext.getScratchColumnTypeNames(),
        vectorizationContext.getScratchDataTypePhysicalVariations());
    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
    VectorExtractRow resultVectorExtractRow = null;
    Object[] scratchRow = null;
    if (!isFilter) {
        resultVectorExtractRow = new VectorExtractRow();
        final int outputColumnNum = vectorExpression.getOutputColumnNum();
        resultVectorExtractRow.init(new TypeInfo[] { outputTypeInfo }, new int[] { outputColumnNum });
        scratchRow = new Object[1];
    }
    boolean copySelectedInUse = false;
    int[] copySelected = new int[VectorizedRowBatch.DEFAULT_SIZE];
    batchSource.resetBatchIteration();
    int rowIndex = 0;
    while (true) {
        if (!batchSource.fillNextBatch(batch)) {
            break;
        }
        final int originalBatchSize = batch.size;
        if (isFilter) {
            copySelectedInUse = batch.selectedInUse;
            if (batch.selectedInUse) {
                System.arraycopy(batch.selected, 0, copySelected, 0, originalBatchSize);
            }
        }
        // In filter mode, the batch size can be made smaller.
        vectorExpression.evaluate(batch);
        if (!isFilter) {
            extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow, objectInspector, resultObjects);
        } else {
            final int currentBatchSize = batch.size;
            if (copySelectedInUse && batch.selectedInUse) {
                int selectIndex = 0;
                for (int i = 0; i < originalBatchSize; i++) {
                    final int originalBatchIndex = copySelected[i];
                    final boolean booleanResult;
                    if (selectIndex < currentBatchSize && batch.selected[selectIndex] == originalBatchIndex) {
                        booleanResult = true;
                        selectIndex++;
                    } else {
                        booleanResult = false;
                    }
                    resultObjects[rowIndex + i] = new BooleanWritable(booleanResult);
                }
            } else if (batch.selectedInUse) {
                int selectIndex = 0;
                for (int i = 0; i < originalBatchSize; i++) {
                    final boolean booleanResult;
                    if (selectIndex < currentBatchSize && batch.selected[selectIndex] == i) {
                        booleanResult = true;
                        selectIndex++;
                    } else {
                        booleanResult = false;
                    }
                    resultObjects[rowIndex + i] = new BooleanWritable(booleanResult);
                }
            } else if (currentBatchSize == 0) {
                // Whole batch got zapped.
                for (int i = 0; i < originalBatchSize; i++) {
                    resultObjects[rowIndex + i] = new BooleanWritable(false);
                }
            } else {
                // Every row kept.
                for (int i = 0; i < originalBatchSize; i++) {
                    resultObjects[rowIndex + i] = new BooleanWritable(true);
                }
            }
        }
        rowIndex += originalBatchSize;
    }
    return true;
}
Also used: VectorizationContext(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext), VectorUDFAdaptor(org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor), VectorExtractRow(org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow), VectorizedRowBatchCtx(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx), VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), BooleanWritable(org.apache.hadoop.io.BooleanWritable), HiveConf(org.apache.hadoop.hive.conf.HiveConf), VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression), VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
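
The central decision in this test is whether the expression is compiled in FILTER or PROJECTION mode; condensed from the getVectorExpression call above:

// FILTER mode rewrites batch.selected and batch.size in place so that only
// matching rows survive; PROJECTION mode instead writes the boolean result
// into an output column, leaving the row population untouched.
VectorExpressionDescriptor.Mode mode = isFilter
    ? VectorExpressionDescriptor.Mode.FILTER
    : VectorExpressionDescriptor.Mode.PROJECTION;
VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc, mode);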

Aggregations

VectorizationContext (org.apache.hadoop.hive.ql.exec.vector.VectorizationContext): 36 usages
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression): 19 usages
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 18 usages
VectorExtractRow (org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow): 18 usages
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 18 usages
VectorUDFAdaptor (org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor): 16 usages
VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx): 12 usages
ArrayList (java.util.ArrayList): 8 usages
CompilationOpContext (org.apache.hadoop.hive.ql.CompilationOpContext): 8 usages
VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource): 7 usages
VectorMapJoinDesc (org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc): 7 usages
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation): 6 usages
MapJoinTableContainer (org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer): 6 usages
MapJoinTableContainerSerDe (org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe): 6 usages
VectorMapJoinOperator (org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator): 6 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 6 usages
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 6 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 6 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 6 usages
List (java.util.List): 5 usages