Example 1 with VectorReduceSinkInfo

Use of org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo in project hive by apache.

The class Vectorizer, method validateAndVectorizeOperator.

public Operator<? extends OperatorDesc> validateAndVectorizeOperator(Operator<? extends OperatorDesc> op, VectorizationContext vContext, boolean isReduce, boolean isTezOrSpark, VectorTaskColumnInfo vectorTaskColumnInfo) throws HiveException, VectorizerCannotVectorizeException {
    Operator<? extends OperatorDesc> vectorOp = null;
    // This "global" allows various validation methods to set the "not vectorized" reason.
    currentOperator = op;
    boolean isNative;
    try {
        switch(op.getType()) {
            case MAPJOIN:
                {
                    if (op instanceof MapJoinOperator) {
                        if (!validateMapJoinOperator((MapJoinOperator) op)) {
                            throw new VectorizerCannotVectorizeException();
                        }
                    } else if (op instanceof SMBMapJoinOperator) {
                        if (!validateSMBMapJoinOperator((SMBMapJoinOperator) op)) {
                            throw new VectorizerCannotVectorizeException();
                        }
                    } else {
                        setOperatorNotSupported(op);
                        throw new VectorizerCannotVectorizeException();
                    }
                    if (op instanceof MapJoinOperator) {
                        MapJoinDesc desc = (MapJoinDesc) op.getConf();
                        VectorMapJoinDesc vectorMapJoinDesc = new VectorMapJoinDesc();
                        boolean specialize = canSpecializeMapJoin(op, desc, isTezOrSpark, vContext, vectorMapJoinDesc);
                        if (!specialize) {
                            Class<? extends Operator<?>> opClass = null;
                            // *NON-NATIVE* vector map differences for LEFT OUTER JOIN and Filtered...
                            List<ExprNodeDesc> bigTableFilters = desc.getFilters().get((byte) desc.getPosBigTable());
                            boolean isOuterAndFiltered = (!desc.isNoOuterJoin() && bigTableFilters.size() > 0);
                            if (!isOuterAndFiltered) {
                                opClass = VectorMapJoinOperator.class;
                            } else {
                                opClass = VectorMapJoinOuterFilteredOperator.class;
                            }
                            vectorOp = OperatorFactory.getVectorOperator(opClass, op.getCompilationOpContext(), desc, vContext, vectorMapJoinDesc);
                            isNative = false;
                        } else {
                            // TEMPORARY Until Native Vector Map Join with Hybrid passes tests...
                            // HiveConf.setBoolVar(physicalContext.getConf(),
                            // HiveConf.ConfVars.HIVEUSEHYBRIDGRACEHASHJOIN, false);
                            vectorOp = specializeMapJoinOperator(op, vContext, desc, vectorMapJoinDesc);
                            isNative = true;
                            if (vectorTaskColumnInfo != null) {
                                VectorMapJoinInfo vectorMapJoinInfo = vectorMapJoinDesc.getVectorMapJoinInfo();
                                if (usesVectorUDFAdaptor(vectorMapJoinDesc.getAllBigTableKeyExpressions())) {
                                    vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                                }
                                if (usesVectorUDFAdaptor(vectorMapJoinDesc.getAllBigTableValueExpressions())) {
                                    vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                                }
                            }
                        }
                    } else {
                        Preconditions.checkState(op instanceof SMBMapJoinOperator);
                        SMBJoinDesc smbJoinSinkDesc = (SMBJoinDesc) op.getConf();
                        VectorSMBJoinDesc vectorSMBJoinDesc = new VectorSMBJoinDesc();
                        vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), smbJoinSinkDesc, vContext, vectorSMBJoinDesc);
                        isNative = false;
                    }
                }
                break;
            case REDUCESINK:
                {
                    if (!validateReduceSinkOperator((ReduceSinkOperator) op)) {
                        throw new VectorizerCannotVectorizeException();
                    }
                    ReduceSinkDesc reduceDesc = (ReduceSinkDesc) op.getConf();
                    VectorReduceSinkDesc vectorReduceSinkDesc = new VectorReduceSinkDesc();
                    boolean specialize = canSpecializeReduceSink(reduceDesc, isTezOrSpark, vContext, vectorReduceSinkDesc);
                    if (!specialize) {
                        vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), reduceDesc, vContext, vectorReduceSinkDesc);
                        isNative = false;
                    } else {
                        vectorOp = specializeReduceSinkOperator(op, vContext, reduceDesc, vectorReduceSinkDesc);
                        isNative = true;
                        if (vectorTaskColumnInfo != null) {
                            VectorReduceSinkInfo vectorReduceSinkInfo = vectorReduceSinkDesc.getVectorReduceSinkInfo();
                            if (usesVectorUDFAdaptor(vectorReduceSinkInfo.getReduceSinkKeyExpressions())) {
                                vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                            }
                            if (usesVectorUDFAdaptor(vectorReduceSinkInfo.getReduceSinkValueExpressions())) {
                                vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                            }
                        }
                    }
                }
                break;
            case FILTER:
                {
                    if (!validateFilterOperator((FilterOperator) op)) {
                        throw new VectorizerCannotVectorizeException();
                    }
                    VectorFilterDesc vectorFilterDesc = new VectorFilterDesc();
                    vectorOp = vectorizeFilterOperator(op, vContext, vectorFilterDesc);
                    isNative = true;
                    if (vectorTaskColumnInfo != null) {
                        VectorExpression vectorPredicateExpr = vectorFilterDesc.getPredicateExpression();
                        if (usesVectorUDFAdaptor(vectorPredicateExpr)) {
                            vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                        }
                    }
                }
                break;
            case SELECT:
                {
                    if (!validateSelectOperator((SelectOperator) op)) {
                        throw new VectorizerCannotVectorizeException();
                    }
                    VectorSelectDesc vectorSelectDesc = new VectorSelectDesc();
                    vectorOp = vectorizeSelectOperator(op, vContext, vectorSelectDesc);
                    isNative = true;
                    if (vectorTaskColumnInfo != null) {
                        VectorExpression[] vectorSelectExprs = vectorSelectDesc.getSelectExpressions();
                        if (usesVectorUDFAdaptor(vectorSelectExprs)) {
                            vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                        }
                    }
                }
                break;
            case GROUPBY:
                {
                    // The validateGroupByOperator method will update vectorGroupByDesc.
                    VectorGroupByDesc vectorGroupByDesc = new VectorGroupByDesc();
                    if (!validateGroupByOperator((GroupByOperator) op, isReduce, isTezOrSpark, vectorGroupByDesc)) {
                        throw new VectorizerCannotVectorizeException();
                    }
                    ImmutablePair<Operator<? extends OperatorDesc>, String> pair = doVectorizeGroupByOperator(op, vContext, vectorGroupByDesc);
                    if (pair.left == null) {
                        setOperatorIssue(pair.right);
                        throw new VectorizerCannotVectorizeException();
                    }
                    vectorOp = pair.left;
                    isNative = false;
                    if (vectorTaskColumnInfo != null) {
                        VectorExpression[] vecKeyExpressions = vectorGroupByDesc.getKeyExpressions();
                        if (usesVectorUDFAdaptor(vecKeyExpressions)) {
                            vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                        }
                        VectorAggregationDesc[] vecAggrDescs = vectorGroupByDesc.getVecAggrDescs();
                        for (VectorAggregationDesc vecAggrDesc : vecAggrDescs) {
                            if (usesVectorUDFAdaptor(vecAggrDesc.getInputExpression())) {
                                vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                            }
                        }
                    }
                }
                break;
            case FILESINK:
                {
                    if (!validateFileSinkOperator((FileSinkOperator) op)) {
                        throw new VectorizerCannotVectorizeException();
                    }
                    FileSinkDesc fileSinkDesc = (FileSinkDesc) op.getConf();
                    VectorFileSinkDesc vectorFileSinkDesc = new VectorFileSinkDesc();
                    vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), fileSinkDesc, vContext, vectorFileSinkDesc);
                    isNative = false;
                }
                break;
            case LIMIT:
                {
                    // No validation.
                    LimitDesc limitDesc = (LimitDesc) op.getConf();
                    VectorLimitDesc vectorLimitDesc = new VectorLimitDesc();
                    vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), limitDesc, vContext, vectorLimitDesc);
                    isNative = true;
                }
                break;
            case EVENT:
                {
                    // No validation.
                    AppMasterEventDesc eventDesc = (AppMasterEventDesc) op.getConf();
                    VectorAppMasterEventDesc vectorEventDesc = new VectorAppMasterEventDesc();
                    vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), eventDesc, vContext, vectorEventDesc);
                    isNative = true;
                }
                break;
            case PTF:
                {
                    // The validatePTFOperator method will update vectorPTFDesc.
                    VectorPTFDesc vectorPTFDesc = new VectorPTFDesc();
                    if (!validatePTFOperator((PTFOperator) op, vContext, vectorPTFDesc)) {
                        throw new VectorizerCannotVectorizeException();
                    }
                    vectorOp = vectorizePTFOperator(op, vContext, vectorPTFDesc);
                    isNative = true;
                }
                break;
            case HASHTABLESINK:
                {
                    // No validation.
                    SparkHashTableSinkDesc sparkHashTableSinkDesc = (SparkHashTableSinkDesc) op.getConf();
                    VectorSparkHashTableSinkDesc vectorSparkHashTableSinkDesc = new VectorSparkHashTableSinkDesc();
                    vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), sparkHashTableSinkDesc, vContext, vectorSparkHashTableSinkDesc);
                    isNative = true;
                }
                break;
            case SPARKPRUNINGSINK:
                {
                    // No validation.
                    SparkPartitionPruningSinkDesc sparkPartitionPruningSinkDesc = (SparkPartitionPruningSinkDesc) op.getConf();
                    VectorSparkPartitionPruningSinkDesc vectorSparkPartitionPruningSinkDesc = new VectorSparkPartitionPruningSinkDesc();
                    vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), sparkPartitionPruningSinkDesc, vContext, vectorSparkPartitionPruningSinkDesc);
                    // Need to maintain the unique ID so that the target map works
                    // can read the output.
                    ((SparkPartitionPruningSinkOperator) vectorOp).setUniqueId(((SparkPartitionPruningSinkOperator) op).getUniqueId());
                    isNative = true;
                }
                break;
            default:
                setOperatorNotSupported(op);
                throw new VectorizerCannotVectorizeException();
        }
    } catch (HiveException e) {
        setOperatorIssue(e.getMessage());
        throw new VectorizerCannotVectorizeException();
    }
    Preconditions.checkState(vectorOp != null);
    if (vectorTaskColumnInfo != null && !isNative) {
        vectorTaskColumnInfo.setAllNative(false);
    }
    LOG.debug("vectorizeOperator " + vectorOp.getClass().getName());
    LOG.debug("vectorizeOperator " + vectorOp.getConf().getClass().getName());
    return vectorOp;
}
Also used : VectorReduceSinkLongOperator(org.apache.hadoop.hive.ql.exec.vector.reducesink.VectorReduceSinkLongOperator) VectorReduceSinkStringOperator(org.apache.hadoop.hive.ql.exec.vector.reducesink.VectorReduceSinkStringOperator) VectorMapJoinInnerBigOnlyMultiKeyOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyMultiKeyOperator) VectorMapJoinLeftSemiMultiKeyOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiMultiKeyOperator) VectorReduceSinkObjectHashOperator(org.apache.hadoop.hive.ql.exec.vector.reducesink.VectorReduceSinkObjectHashOperator) VectorMapJoinOuterFilteredOperator(org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator) SparkPartitionPruningSinkOperator(org.apache.hadoop.hive.ql.parse.spark.SparkPartitionPruningSinkOperator) VectorizationOperator(org.apache.hadoop.hive.ql.exec.vector.VectorizationOperator) VectorMapJoinInnerMultiKeyOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerMultiKeyOperator) VectorMapJoinOperator(org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator) VectorPTFOperator(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFOperator) VectorReduceSinkEmptyKeyOperator(org.apache.hadoop.hive.ql.exec.vector.reducesink.VectorReduceSinkEmptyKeyOperator) VectorMapJoinInnerStringOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerStringOperator) VectorMapJoinOuterLongOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterLongOperator) VectorMapJoinLeftSemiStringOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiStringOperator) VectorMapJoinLeftSemiLongOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiLongOperator) VectorReduceSinkMultiKeyOperator(org.apache.hadoop.hive.ql.exec.vector.reducesink.VectorReduceSinkMultiKeyOperator) VectorMapJoinInnerBigOnlyLongOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyLongOperator) VectorMapJoinInnerBigOnlyStringOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyStringOperator) VectorMapJoinOuterStringOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterStringOperator) VectorMapJoinInnerLongOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerLongOperator) VectorMapJoinOuterMultiKeyOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterMultiKeyOperator) VectorAppMasterEventDesc(org.apache.hadoop.hive.ql.plan.VectorAppMasterEventDesc) AppMasterEventDesc(org.apache.hadoop.hive.ql.plan.AppMasterEventDesc) VectorMapJoinDesc(org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) SMBJoinDesc(org.apache.hadoop.hive.ql.plan.SMBJoinDesc) VectorSMBJoinDesc(org.apache.hadoop.hive.ql.plan.VectorSMBJoinDesc) VectorFileSinkDesc(org.apache.hadoop.hive.ql.plan.VectorFileSinkDesc) FileSinkDesc(org.apache.hadoop.hive.ql.plan.FileSinkDesc) VectorMapJoinOperator(org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator) VectorFileSinkDesc(org.apache.hadoop.hive.ql.plan.VectorFileSinkDesc) VectorReduceSinkInfo(org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo) VectorSparkPartitionPruningSinkDesc(org.apache.hadoop.hive.ql.plan.VectorSparkPartitionPruningSinkDesc) SparkPartitionPruningSinkDesc(org.apache.hadoop.hive.ql.optimizer.spark.SparkPartitionPruningSinkDesc) VectorAppMasterEventDesc(org.apache.hadoop.hive.ql.plan.VectorAppMasterEventDesc) ArrayList(java.util.ArrayList) 
List(java.util.List) VectorSelectDesc(org.apache.hadoop.hive.ql.plan.VectorSelectDesc) VectorReduceSinkDesc(org.apache.hadoop.hive.ql.plan.VectorReduceSinkDesc) ReduceSinkDesc(org.apache.hadoop.hive.ql.plan.ReduceSinkDesc) SparkPartitionPruningSinkOperator(org.apache.hadoop.hive.ql.parse.spark.SparkPartitionPruningSinkOperator) VectorMapJoinOperator(org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator) VectorFilterDesc(org.apache.hadoop.hive.ql.plan.VectorFilterDesc) SparkHashTableSinkDesc(org.apache.hadoop.hive.ql.plan.SparkHashTableSinkDesc) VectorSparkHashTableSinkDesc(org.apache.hadoop.hive.ql.plan.VectorSparkHashTableSinkDesc) VectorMapJoinDesc(org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc) MapJoinDesc(org.apache.hadoop.hive.ql.plan.MapJoinDesc) VectorSparkHashTableSinkDesc(org.apache.hadoop.hive.ql.plan.VectorSparkHashTableSinkDesc) VectorSparkPartitionPruningSinkDesc(org.apache.hadoop.hive.ql.plan.VectorSparkPartitionPruningSinkDesc) VectorMapJoinInfo(org.apache.hadoop.hive.ql.plan.VectorMapJoinInfo) VectorSMBJoinDesc(org.apache.hadoop.hive.ql.plan.VectorSMBJoinDesc) VectorReduceSinkDesc(org.apache.hadoop.hive.ql.plan.VectorReduceSinkDesc) VectorLimitDesc(org.apache.hadoop.hive.ql.plan.VectorLimitDesc) VectorAggregationDesc(org.apache.hadoop.hive.ql.exec.vector.VectorAggregationDesc) VectorLimitDesc(org.apache.hadoop.hive.ql.plan.VectorLimitDesc) LimitDesc(org.apache.hadoop.hive.ql.plan.LimitDesc) ImmutablePair(org.apache.commons.lang3.tuple.ImmutablePair) VectorMapJoinOuterFilteredOperator(org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator) VectorGroupByDesc(org.apache.hadoop.hive.ql.plan.VectorGroupByDesc) VectorPTFDesc(org.apache.hadoop.hive.ql.plan.VectorPTFDesc) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression)
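
Every case in validateAndVectorizeOperator follows the same pattern: validate the operator, then either build a specialized native operator or fall back to a generic vectorized one, and record whether the result is native. The sketch below mirrors just that control flow; all of its types (OpType, VectorizedOp, CannotVectorizeException) are hypothetical stand-ins, not Hive classes.

// A minimal, self-contained sketch of the validate -> specialize-or-fallback
// dispatch. None of these types are Hive API; they only mirror the control
// flow of the method above.
public class DispatchSketch {

    enum OpType { REDUCESINK, FILTER, LIMIT, UNSUPPORTED }

    static class CannotVectorizeException extends Exception {}

    static final class VectorizedOp {
        final boolean isNative;
        VectorizedOp(boolean isNative) { this.isNative = isNative; }
    }

    static VectorizedOp vectorize(OpType type, boolean validates, boolean canSpecialize)
            throws CannotVectorizeException {
        switch (type) {
            case REDUCESINK:
                if (!validates) {
                    // A validation failure aborts vectorization via an exception,
                    // just like validateReduceSinkOperator above.
                    throw new CannotVectorizeException();
                }
                // Specialized operators count as "native"; the generic fallback does not.
                return new VectorizedOp(canSpecialize);
            case FILTER:
                if (!validates) throw new CannotVectorizeException();
                return new VectorizedOp(true);   // native once validated
            case LIMIT:
                return new VectorizedOp(true);   // no validation needed
            default:
                // Unknown operator types cannot be vectorized at all.
                throw new CannotVectorizeException();
        }
    }
}

A single non-native operator anywhere in the task is enough to clear the task-level flag, which is what the vectorTaskColumnInfo.setAllNative(false) call near the bottom of the real method implements.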

Example 2 with VectorReduceSinkInfo

Use of org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo in project hive by apache.

The class Vectorizer, method vectorizeOperator.

public Operator<? extends OperatorDesc> vectorizeOperator(Operator<? extends OperatorDesc> op, VectorizationContext vContext, boolean isTezOrSpark, VectorTaskColumnInfo vectorTaskColumnInfo) throws HiveException {
    Operator<? extends OperatorDesc> vectorOp = null;
    boolean isNative;
    switch(op.getType()) {
        case TABLESCAN:
            vectorOp = vectorizeTableScanOperator(op, vContext);
            isNative = true;
            break;
        case MAPJOIN:
            {
                if (op instanceof MapJoinOperator) {
                    VectorMapJoinInfo vectorMapJoinInfo = new VectorMapJoinInfo();
                    MapJoinDesc desc = (MapJoinDesc) op.getConf();
                    boolean specialize = canSpecializeMapJoin(op, desc, isTezOrSpark, vContext, vectorMapJoinInfo);
                    if (!specialize) {
                        Class<? extends Operator<?>> opClass = null;
                        // *NON-NATIVE* vector map differences for LEFT OUTER JOIN and Filtered...
                        List<ExprNodeDesc> bigTableFilters = desc.getFilters().get((byte) desc.getPosBigTable());
                        boolean isOuterAndFiltered = (!desc.isNoOuterJoin() && bigTableFilters.size() > 0);
                        if (!isOuterAndFiltered) {
                            opClass = VectorMapJoinOperator.class;
                        } else {
                            opClass = VectorMapJoinOuterFilteredOperator.class;
                        }
                        vectorOp = OperatorFactory.getVectorOperator(opClass, op.getCompilationOpContext(), op.getConf(), vContext);
                        isNative = false;
                    } else {
                        // TEMPORARY Until Native Vector Map Join with Hybrid passes tests...
                        // HiveConf.setBoolVar(physicalContext.getConf(),
                        //    HiveConf.ConfVars.HIVEUSEHYBRIDGRACEHASHJOIN, false);
                        vectorOp = specializeMapJoinOperator(op, vContext, desc, vectorMapJoinInfo);
                        isNative = true;
                        if (vectorTaskColumnInfo != null) {
                            if (usesVectorUDFAdaptor(vectorMapJoinInfo.getBigTableKeyExpressions())) {
                                vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                            }
                            if (usesVectorUDFAdaptor(vectorMapJoinInfo.getBigTableValueExpressions())) {
                                vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                            }
                        }
                    }
                } else {
                    Preconditions.checkState(op instanceof SMBMapJoinOperator);
                    SMBJoinDesc smbJoinSinkDesc = (SMBJoinDesc) op.getConf();
                    VectorSMBJoinDesc vectorSMBJoinDesc = new VectorSMBJoinDesc();
                    smbJoinSinkDesc.setVectorDesc(vectorSMBJoinDesc);
                    vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), smbJoinSinkDesc, vContext);
                    isNative = false;
                }
            }
            break;
        case REDUCESINK:
            {
                VectorReduceSinkInfo vectorReduceSinkInfo = new VectorReduceSinkInfo();
                ReduceSinkDesc desc = (ReduceSinkDesc) op.getConf();
                boolean specialize = canSpecializeReduceSink(desc, isTezOrSpark, vContext, vectorReduceSinkInfo);
                if (!specialize) {
                    vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), op.getConf(), vContext);
                    isNative = false;
                } else {
                    vectorOp = specializeReduceSinkOperator(op, vContext, desc, vectorReduceSinkInfo);
                    isNative = true;
                    if (vectorTaskColumnInfo != null) {
                        if (usesVectorUDFAdaptor(vectorReduceSinkInfo.getReduceSinkKeyExpressions())) {
                            vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                        }
                        if (usesVectorUDFAdaptor(vectorReduceSinkInfo.getReduceSinkValueExpressions())) {
                            vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                        }
                    }
                }
            }
            break;
        case FILTER:
            {
                vectorOp = vectorizeFilterOperator(op, vContext);
                isNative = true;
                if (vectorTaskColumnInfo != null) {
                    VectorFilterDesc vectorFilterDesc = (VectorFilterDesc) ((AbstractOperatorDesc) vectorOp.getConf()).getVectorDesc();
                    VectorExpression vectorPredicateExpr = vectorFilterDesc.getPredicateExpression();
                    if (usesVectorUDFAdaptor(vectorPredicateExpr)) {
                        vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                    }
                }
            }
            break;
        case SELECT:
            {
                vectorOp = vectorizeSelectOperator(op, vContext);
                isNative = true;
                if (vectorTaskColumnInfo != null) {
                    VectorSelectDesc vectorSelectDesc = (VectorSelectDesc) ((AbstractOperatorDesc) vectorOp.getConf()).getVectorDesc();
                    VectorExpression[] vectorSelectExprs = vectorSelectDesc.getSelectExpressions();
                    if (usesVectorUDFAdaptor(vectorSelectExprs)) {
                        vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                    }
                }
            }
            break;
        case GROUPBY:
            {
                vectorOp = vectorizeGroupByOperator(op, vContext);
                isNative = false;
                if (vectorTaskColumnInfo != null) {
                    VectorGroupByDesc vectorGroupByDesc = (VectorGroupByDesc) ((AbstractOperatorDesc) vectorOp.getConf()).getVectorDesc();
                    if (!vectorGroupByDesc.isVectorOutput()) {
                        vectorTaskColumnInfo.setGroupByVectorOutput(false);
                    }
                    VectorExpression[] vecKeyExpressions = vectorGroupByDesc.getKeyExpressions();
                    if (usesVectorUDFAdaptor(vecKeyExpressions)) {
                        vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                    }
                    VectorAggregateExpression[] vecAggregators = vectorGroupByDesc.getAggregators();
                    for (VectorAggregateExpression vecAggr : vecAggregators) {
                        if (usesVectorUDFAdaptor(vecAggr.inputExpression())) {
                            vectorTaskColumnInfo.setUsesVectorUDFAdaptor(true);
                        }
                    }
                }
            }
            break;
        case FILESINK:
            {
                FileSinkDesc fileSinkDesc = (FileSinkDesc) op.getConf();
                VectorFileSinkDesc vectorFileSinkDesc = new VectorFileSinkDesc();
                fileSinkDesc.setVectorDesc(vectorFileSinkDesc);
                vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), fileSinkDesc, vContext);
                isNative = false;
            }
            break;
        case LIMIT:
            {
                LimitDesc limitDesc = (LimitDesc) op.getConf();
                VectorLimitDesc vectorLimitDesc = new VectorLimitDesc();
                limitDesc.setVectorDesc(vectorLimitDesc);
                vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), limitDesc, vContext);
                isNative = true;
            }
            break;
        case EVENT:
            {
                AppMasterEventDesc eventDesc = (AppMasterEventDesc) op.getConf();
                VectorAppMasterEventDesc vectorEventDesc = new VectorAppMasterEventDesc();
                eventDesc.setVectorDesc(vectorEventDesc);
                vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), eventDesc, vContext);
                isNative = true;
            }
            break;
        case HASHTABLESINK:
            {
                SparkHashTableSinkDesc sparkHashTableSinkDesc = (SparkHashTableSinkDesc) op.getConf();
                VectorSparkHashTableSinkDesc vectorSparkHashTableSinkDesc = new VectorSparkHashTableSinkDesc();
                sparkHashTableSinkDesc.setVectorDesc(vectorSparkHashTableSinkDesc);
                vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), sparkHashTableSinkDesc, vContext);
                isNative = true;
            }
            break;
        case SPARKPRUNINGSINK:
            {
                SparkPartitionPruningSinkDesc sparkPartitionPruningSinkDesc = (SparkPartitionPruningSinkDesc) op.getConf();
                VectorSparkPartitionPruningSinkDesc vectorSparkPartitionPruningSinkDesc = new VectorSparkPartitionPruningSinkDesc();
                sparkPartitionPruningSinkDesc.setVectorDesc(vectorSparkPartitionPruningSinkDesc);
                vectorOp = OperatorFactory.getVectorOperator(op.getCompilationOpContext(), sparkPartitionPruningSinkDesc, vContext);
                isNative = true;
            }
            break;
        default:
            // These are children of GROUP BY operators with non-vector outputs.
            isNative = false;
            vectorOp = op;
            break;
    }
    Preconditions.checkState(vectorOp != null);
    if (vectorTaskColumnInfo != null && !isNative) {
        vectorTaskColumnInfo.setAllNative(false);
    }
    LOG.debug("vectorizeOperator " + vectorOp.getClass().getName());
    LOG.debug("vectorizeOperator " + vectorOp.getConf().getClass().getName());
    if (vectorOp != op) {
        fixupParentChildOperators(op, vectorOp);
        ((AbstractOperatorDesc) vectorOp.getConf()).setVectorMode(true);
    }
    return vectorOp;
}
Also used : VectorMapJoinInnerStringOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerStringOperator) VectorReduceSinkLongOperator(org.apache.hadoop.hive.ql.exec.vector.reducesink.VectorReduceSinkLongOperator) VectorMapJoinOuterLongOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterLongOperator) VectorReduceSinkStringOperator(org.apache.hadoop.hive.ql.exec.vector.reducesink.VectorReduceSinkStringOperator) VectorMapJoinInnerBigOnlyMultiKeyOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyMultiKeyOperator) VectorMapJoinLeftSemiMultiKeyOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiMultiKeyOperator) VectorMapJoinLeftSemiStringOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiStringOperator) VectorMapJoinLeftSemiLongOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiLongOperator) VectorReduceSinkMultiKeyOperator(org.apache.hadoop.hive.ql.exec.vector.reducesink.VectorReduceSinkMultiKeyOperator) VectorMapJoinOuterFilteredOperator(org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator) VectorMapJoinInnerBigOnlyLongOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyLongOperator) VectorMapJoinInnerBigOnlyStringOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyStringOperator) VectorMapJoinInnerMultiKeyOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerMultiKeyOperator) VectorMapJoinOuterStringOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterStringOperator) VectorMapJoinOperator(org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator) VectorMapJoinInnerLongOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerLongOperator) VectorMapJoinOuterMultiKeyOperator(org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterMultiKeyOperator) AppMasterEventDesc(org.apache.hadoop.hive.ql.plan.AppMasterEventDesc) VectorAppMasterEventDesc(org.apache.hadoop.hive.ql.plan.VectorAppMasterEventDesc) SMBJoinDesc(org.apache.hadoop.hive.ql.plan.SMBJoinDesc) VectorSMBJoinDesc(org.apache.hadoop.hive.ql.plan.VectorSMBJoinDesc) VectorFileSinkDesc(org.apache.hadoop.hive.ql.plan.VectorFileSinkDesc) FileSinkDesc(org.apache.hadoop.hive.ql.plan.FileSinkDesc) VectorMapJoinOperator(org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator) VectorFileSinkDesc(org.apache.hadoop.hive.ql.plan.VectorFileSinkDesc) VectorReduceSinkInfo(org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo) VectorSparkPartitionPruningSinkDesc(org.apache.hadoop.hive.ql.plan.VectorSparkPartitionPruningSinkDesc) SparkPartitionPruningSinkDesc(org.apache.hadoop.hive.ql.optimizer.spark.SparkPartitionPruningSinkDesc) VectorAppMasterEventDesc(org.apache.hadoop.hive.ql.plan.VectorAppMasterEventDesc) ArrayList(java.util.ArrayList) List(java.util.List) VectorSelectDesc(org.apache.hadoop.hive.ql.plan.VectorSelectDesc) VectorReduceSinkDesc(org.apache.hadoop.hive.ql.plan.VectorReduceSinkDesc) ReduceSinkDesc(org.apache.hadoop.hive.ql.plan.ReduceSinkDesc) VectorMapJoinOperator(org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator) VectorFilterDesc(org.apache.hadoop.hive.ql.plan.VectorFilterDesc) SparkHashTableSinkDesc(org.apache.hadoop.hive.ql.plan.SparkHashTableSinkDesc) VectorSparkHashTableSinkDesc(org.apache.hadoop.hive.ql.plan.VectorSparkHashTableSinkDesc) AbstractOperatorDesc(org.apache.hadoop.hive.ql.plan.AbstractOperatorDesc) 
MapJoinDesc(org.apache.hadoop.hive.ql.plan.MapJoinDesc) VectorMapJoinDesc(org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc) VectorSparkHashTableSinkDesc(org.apache.hadoop.hive.ql.plan.VectorSparkHashTableSinkDesc) VectorSparkPartitionPruningSinkDesc(org.apache.hadoop.hive.ql.plan.VectorSparkPartitionPruningSinkDesc) VectorMapJoinInfo(org.apache.hadoop.hive.ql.plan.VectorMapJoinInfo) VectorSMBJoinDesc(org.apache.hadoop.hive.ql.plan.VectorSMBJoinDesc) VectorAggregateExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression) VectorLimitDesc(org.apache.hadoop.hive.ql.plan.VectorLimitDesc) LimitDesc(org.apache.hadoop.hive.ql.plan.LimitDesc) VectorLimitDesc(org.apache.hadoop.hive.ql.plan.VectorLimitDesc) VectorMapJoinOuterFilteredOperator(org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator) VectorGroupByDesc(org.apache.hadoop.hive.ql.plan.VectorGroupByDesc) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression)
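
Both generations of the method fold many expression arrays into one sticky usesVectorUDFAdaptor flag on the task info. Here is a minimal sketch of that accumulation, assuming hypothetical Expr and TaskInfo types in place of VectorExpression and VectorTaskColumnInfo:

// Hypothetical sketch: OR-accumulating an "uses adaptor" flag across
// expression arrays, as the method does for keys, values, predicates, etc.
public class AdaptorFlagSketch {

    interface Expr {
        boolean usesAdaptor();  // stand-in for the usesVectorUDFAdaptor check
    }

    static final class TaskInfo {
        boolean usesAdaptor = false;   // sticky: once true, stays true
        void markUsesAdaptor() { usesAdaptor = true; }
    }

    static void accumulate(TaskInfo info, Expr[]... expressionGroups) {
        for (Expr[] group : expressionGroups) {
            if (group == null) continue;          // absent group: nothing to check
            for (Expr e : group) {
                if (e != null && e.usesAdaptor()) {
                    info.markUsesAdaptor();       // only ever set, never cleared
                }
            }
        }
    }
}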

Example 3 with VectorReduceSinkInfo

Use of org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo in project hive by apache.

The class Vectorizer, method canSpecializeReduceSink.

private boolean canSpecializeReduceSink(ReduceSinkDesc desc, boolean isTezOrSpark, VectorizationContext vContext, VectorReduceSinkDesc vectorDesc) throws HiveException {
    VectorReduceSinkInfo vectorReduceSinkInfo = new VectorReduceSinkInfo();
    // Various restrictions.
    // Set this if we encounter a condition we were not expecting.
    boolean isUnexpectedCondition = false;
    boolean isVectorizationReduceSinkNativeEnabled = HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCESINK_NEW_ENABLED);
    String engine = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
    int limit = desc.getTopN();
    float memUsage = desc.getTopNMemoryUsage();
    boolean hasPTFTopN = (limit >= 0 && memUsage > 0 && desc.isPTFReduceSink());
    boolean hasDistinctColumns = (desc.getDistinctColumnIndices().size() > 0);
    TableDesc keyTableDesc = desc.getKeySerializeInfo();
    Class<? extends Deserializer> keyDeserializerClass = keyTableDesc.getDeserializerClass();
    boolean isKeyBinarySortable = (keyDeserializerClass == org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe.class);
    TableDesc valueTableDesc = desc.getValueSerializeInfo();
    Class<? extends Deserializer> valueDeserializerClass = valueTableDesc.getDeserializerClass();
    boolean isValueLazyBinary = (valueDeserializerClass == org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe.class);
    // We are doing work here we'd normally do in VectorReduceSinkCommonOperator's constructor.
    // So if we later decide not to specialize, we'll just waste any scratch columns allocated...
    List<ExprNodeDesc> keysDescs = desc.getKeyCols();
    final boolean isEmptyKey = (keysDescs.size() == 0);
    if (!isEmptyKey) {
        VectorExpression[] allKeyExpressions = vContext.getVectorExpressions(keysDescs);
        final int[] reduceSinkKeyColumnMap = new int[allKeyExpressions.length];
        final TypeInfo[] reduceSinkKeyTypeInfos = new TypeInfo[allKeyExpressions.length];
        final Type[] reduceSinkKeyColumnVectorTypes = new Type[allKeyExpressions.length];
        final VectorExpression[] reduceSinkKeyExpressions;
        // Since a key expression can be a calculation and the key will go into a scratch column,
        // we need the mapping and type information.
        ArrayList<VectorExpression> reduceSinkKeyExpressionsList = new ArrayList<VectorExpression>();
        for (int i = 0; i < reduceSinkKeyColumnMap.length; i++) {
            VectorExpression ve = allKeyExpressions[i];
            reduceSinkKeyColumnMap[i] = ve.getOutputColumnNum();
            reduceSinkKeyTypeInfos[i] = keysDescs.get(i).getTypeInfo();
            reduceSinkKeyColumnVectorTypes[i] = VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkKeyTypeInfos[i]);
            if (!IdentityExpression.isColumnOnly(ve)) {
                reduceSinkKeyExpressionsList.add(ve);
            }
        }
        if (reduceSinkKeyExpressionsList.size() == 0) {
            reduceSinkKeyExpressions = null;
        } else {
            reduceSinkKeyExpressions = reduceSinkKeyExpressionsList.toArray(new VectorExpression[0]);
        }
        vectorReduceSinkInfo.setReduceSinkKeyColumnMap(reduceSinkKeyColumnMap);
        vectorReduceSinkInfo.setReduceSinkKeyTypeInfos(reduceSinkKeyTypeInfos);
        vectorReduceSinkInfo.setReduceSinkKeyColumnVectorTypes(reduceSinkKeyColumnVectorTypes);
        vectorReduceSinkInfo.setReduceSinkKeyExpressions(reduceSinkKeyExpressions);
    }
    ArrayList<ExprNodeDesc> valueDescs = desc.getValueCols();
    final boolean isEmptyValue = (valueDescs.size() == 0);
    if (!isEmptyValue) {
        VectorExpression[] allValueExpressions = vContext.getVectorExpressions(valueDescs);
        final int[] reduceSinkValueColumnMap = new int[allValueExpressions.length];
        final TypeInfo[] reduceSinkValueTypeInfos = new TypeInfo[allValueExpressions.length];
        final Type[] reduceSinkValueColumnVectorTypes = new Type[allValueExpressions.length];
        VectorExpression[] reduceSinkValueExpressions;
        ArrayList<VectorExpression> reduceSinkValueExpressionsList = new ArrayList<VectorExpression>();
        for (int i = 0; i < valueDescs.size(); ++i) {
            VectorExpression ve = allValueExpressions[i];
            reduceSinkValueColumnMap[i] = ve.getOutputColumnNum();
            reduceSinkValueTypeInfos[i] = valueDescs.get(i).getTypeInfo();
            reduceSinkValueColumnVectorTypes[i] = VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkValueTypeInfos[i]);
            if (!IdentityExpression.isColumnOnly(ve)) {
                reduceSinkValueExpressionsList.add(ve);
            }
        }
        if (reduceSinkValueExpressionsList.size() == 0) {
            reduceSinkValueExpressions = null;
        } else {
            reduceSinkValueExpressions = reduceSinkValueExpressionsList.toArray(new VectorExpression[0]);
        }
        vectorReduceSinkInfo.setReduceSinkValueColumnMap(reduceSinkValueColumnMap);
        vectorReduceSinkInfo.setReduceSinkValueTypeInfos(reduceSinkValueTypeInfos);
        vectorReduceSinkInfo.setReduceSinkValueColumnVectorTypes(reduceSinkValueColumnVectorTypes);
        vectorReduceSinkInfo.setReduceSinkValueExpressions(reduceSinkValueExpressions);
    }
    boolean useUniformHash = desc.getReducerTraits().contains(UNIFORM);
    vectorReduceSinkInfo.setUseUniformHash(useUniformHash);
    List<ExprNodeDesc> bucketDescs = desc.getBucketCols();
    final boolean isEmptyBuckets = (bucketDescs == null || bucketDescs.size() == 0);
    List<ExprNodeDesc> partitionDescs = desc.getPartitionCols();
    final boolean isEmptyPartitions = (partitionDescs == null || partitionDescs.size() == 0);
    if (useUniformHash || (isEmptyKey && isEmptyBuckets && isEmptyPartitions)) {
    // NOTE: For Uniform Hash or no buckets/partitions, when the key is empty, we will use the VectorReduceSinkEmptyKeyOperator instead.
    } else {
        // Collect bucket and/or partition information for object hashing.
        int[] reduceSinkBucketColumnMap = null;
        TypeInfo[] reduceSinkBucketTypeInfos = null;
        Type[] reduceSinkBucketColumnVectorTypes = null;
        VectorExpression[] reduceSinkBucketExpressions = null;
        if (!isEmptyBuckets) {
            VectorExpression[] allBucketExpressions = vContext.getVectorExpressions(bucketDescs);
            reduceSinkBucketColumnMap = new int[bucketDescs.size()];
            reduceSinkBucketTypeInfos = new TypeInfo[bucketDescs.size()];
            reduceSinkBucketColumnVectorTypes = new Type[bucketDescs.size()];
            ArrayList<VectorExpression> reduceSinkBucketExpressionsList = new ArrayList<VectorExpression>();
            for (int i = 0; i < bucketDescs.size(); ++i) {
                VectorExpression ve = allBucketExpressions[i];
                reduceSinkBucketColumnMap[i] = ve.getOutputColumnNum();
                reduceSinkBucketTypeInfos[i] = bucketDescs.get(i).getTypeInfo();
                reduceSinkBucketColumnVectorTypes[i] = VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkBucketTypeInfos[i]);
                if (!IdentityExpression.isColumnOnly(ve)) {
                    reduceSinkBucketExpressionsList.add(ve);
                }
            }
            if (reduceSinkBucketExpressionsList.size() == 0) {
                reduceSinkBucketExpressions = null;
            } else {
                reduceSinkBucketExpressions = reduceSinkBucketExpressionsList.toArray(new VectorExpression[0]);
            }
        }
        int[] reduceSinkPartitionColumnMap = null;
        TypeInfo[] reduceSinkPartitionTypeInfos = null;
        Type[] reduceSinkPartitionColumnVectorTypes = null;
        VectorExpression[] reduceSinkPartitionExpressions = null;
        if (!isEmptyPartitions) {
            VectorExpression[] allPartitionExpressions = vContext.getVectorExpressions(partitionDescs);
            reduceSinkPartitionColumnMap = new int[partitionDescs.size()];
            reduceSinkPartitionTypeInfos = new TypeInfo[partitionDescs.size()];
            reduceSinkPartitionColumnVectorTypes = new Type[partitionDescs.size()];
            ArrayList<VectorExpression> reduceSinkPartitionExpressionsList = new ArrayList<VectorExpression>();
            for (int i = 0; i < partitionDescs.size(); ++i) {
                VectorExpression ve = allPartitionExpressions[i];
                reduceSinkPartitionColumnMap[i] = ve.getOutputColumnNum();
                reduceSinkPartitionTypeInfos[i] = partitionDescs.get(i).getTypeInfo();
                reduceSinkPartitionColumnVectorTypes[i] = VectorizationContext.getColumnVectorTypeFromTypeInfo(reduceSinkPartitionTypeInfos[i]);
                if (!IdentityExpression.isColumnOnly(ve)) {
                    reduceSinkPartitionExpressionsList.add(ve);
                }
            }
            if (reduceSinkPartitionExpressionsList.size() == 0) {
                reduceSinkPartitionExpressions = null;
            } else {
                reduceSinkPartitionExpressions = reduceSinkPartitionExpressionsList.toArray(new VectorExpression[0]);
            }
        }
        vectorReduceSinkInfo.setReduceSinkBucketColumnMap(reduceSinkBucketColumnMap);
        vectorReduceSinkInfo.setReduceSinkBucketTypeInfos(reduceSinkBucketTypeInfos);
        vectorReduceSinkInfo.setReduceSinkBucketColumnVectorTypes(reduceSinkBucketColumnVectorTypes);
        vectorReduceSinkInfo.setReduceSinkBucketExpressions(reduceSinkBucketExpressions);
        vectorReduceSinkInfo.setReduceSinkPartitionColumnMap(reduceSinkPartitionColumnMap);
        vectorReduceSinkInfo.setReduceSinkPartitionTypeInfos(reduceSinkPartitionTypeInfos);
        vectorReduceSinkInfo.setReduceSinkPartitionColumnVectorTypes(reduceSinkPartitionColumnVectorTypes);
        vectorReduceSinkInfo.setReduceSinkPartitionExpressions(reduceSinkPartitionExpressions);
    }
    // Remember the condition variables for EXPLAIN regardless.
    vectorDesc.setVectorReduceSinkInfo(vectorReduceSinkInfo);
    vectorDesc.setIsVectorizationReduceSinkNativeEnabled(isVectorizationReduceSinkNativeEnabled);
    vectorDesc.setEngine(engine);
    vectorDesc.setIsEmptyKey(isEmptyKey);
    vectorDesc.setIsEmptyValue(isEmptyValue);
    vectorDesc.setIsEmptyBuckets(isEmptyBuckets);
    vectorDesc.setIsEmptyPartitions(isEmptyPartitions);
    vectorDesc.setHasPTFTopN(hasPTFTopN);
    vectorDesc.setHasDistinctColumns(hasDistinctColumns);
    vectorDesc.setIsKeyBinarySortable(isKeyBinarySortable);
    vectorDesc.setIsValueLazyBinary(isValueLazyBinary);
    // This indicates we logged an inconsistency (from our point-of-view) and will not make this
    // operator native...
    vectorDesc.setIsUnexpectedCondition(isUnexpectedCondition);
    // Many restrictions.
    if (!isVectorizationReduceSinkNativeEnabled || !isTezOrSpark || hasPTFTopN || hasDistinctColumns || !isKeyBinarySortable || !isValueLazyBinary || isUnexpectedCondition) {
        return false;
    }
    return true;
}
Also used : ArrayList(java.util.ArrayList) LazyBinarySerDe(org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe) UDFToString(org.apache.hadoop.hive.ql.udf.UDFToString) VectorReduceSinkInfo(org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) InConstantType(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext.InConstantType) HashTableImplementationType(org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableImplementationType) HashTableKeyType(org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType) Type(org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type) VectorDeserializeType(org.apache.hadoop.hive.ql.plan.VectorPartitionDesc.VectorDeserializeType) SupportedFunctionType(org.apache.hadoop.hive.ql.plan.VectorPTFDesc.SupportedFunctionType) OperatorType(org.apache.hadoop.hive.ql.plan.api.OperatorType) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) TableDesc(org.apache.hadoop.hive.ql.plan.TableDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
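
canSpecializeReduceSink runs the same extraction loop four times, over keys, values, buckets, and partitions: record each expression's output column, and keep only the expressions that are genuine calculations rather than plain column reads. A self-contained sketch of one such loop, with a hypothetical Expr interface standing in for VectorExpression and the IdentityExpression.isColumnOnly check (the type-info bookkeeping is elided):

// Self-contained sketch of the mapping pattern used for keys, values,
// buckets, and partitions above. Expr and Mapping are hypothetical types.
import java.util.ArrayList;
import java.util.List;

public class ColumnMapSketch {

    interface Expr {
        int outputColumn();       // column the expression writes (may be scratch)
        boolean isColumnOnly();   // identity: just reads an existing column
    }

    static final class Mapping {
        final int[] columnMap;
        final Expr[] nonIdentityExprs;  // null when every expr is a plain column read
        Mapping(int[] m, Expr[] e) { columnMap = m; nonIdentityExprs = e; }
    }

    static Mapping build(List<Expr> exprs) {
        int[] map = new int[exprs.size()];
        List<Expr> computed = new ArrayList<>();
        for (int i = 0; i < exprs.size(); i++) {
            Expr e = exprs.get(i);
            map[i] = e.outputColumn();        // record where the value lands
            if (!e.isColumnOnly()) {
                computed.add(e);              // only calculations need evaluation
            }
        }
        Expr[] arr = computed.isEmpty() ? null : computed.toArray(new Expr[0]);
        return new Mapping(map, arr);
    }
}

Returning null rather than an empty array mirrors the original code: downstream operators treat null as "nothing to evaluate", so identity-only key or value lists skip expression evaluation entirely.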

Example 4 with VectorReduceSinkInfo

Use of org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo in project hive by apache.

The class Vectorizer, method specializeReduceSinkOperator.

private Operator<? extends OperatorDesc> specializeReduceSinkOperator(Operator<? extends OperatorDesc> op, VectorizationContext vContext, ReduceSinkDesc desc, VectorReduceSinkDesc vectorDesc) throws HiveException {
    VectorReduceSinkInfo vectorReduceSinkInfo = vectorDesc.getVectorReduceSinkInfo();
    Type[] reduceSinkKeyColumnVectorTypes = vectorReduceSinkInfo.getReduceSinkKeyColumnVectorTypes();
    // By default, we can always use the multi-key class.
    VectorReduceSinkDesc.ReduceSinkKeyType reduceSinkKeyType = VectorReduceSinkDesc.ReduceSinkKeyType.MULTI_KEY;
    // Look for single column optimization.
    if (reduceSinkKeyColumnVectorTypes != null && reduceSinkKeyColumnVectorTypes.length == 1) {
        LOG.info("Vectorizer vectorizeOperator groupby typeName " + vectorReduceSinkInfo.getReduceSinkKeyTypeInfos()[0]);
        Type columnVectorType = reduceSinkKeyColumnVectorTypes[0];
        switch(columnVectorType) {
            case LONG:
                {
                    PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) vectorReduceSinkInfo.getReduceSinkKeyTypeInfos()[0]).getPrimitiveCategory();
                    switch(primitiveCategory) {
                        case BOOLEAN:
                        case BYTE:
                        case SHORT:
                        case INT:
                        case LONG:
                            reduceSinkKeyType = VectorReduceSinkDesc.ReduceSinkKeyType.LONG;
                            break;
                        default:
                            // Other integer types not supported yet.
                            break;
                    }
                }
                break;
            case BYTES:
                reduceSinkKeyType = VectorReduceSinkDesc.ReduceSinkKeyType.STRING;
                break;
            default:
                // Stay with multi-key.
                break;
        }
    }
    Class<? extends Operator<?>> opClass = null;
    if (vectorReduceSinkInfo.getUseUniformHash()) {
        if (vectorDesc.getIsEmptyKey()) {
            opClass = VectorReduceSinkEmptyKeyOperator.class;
        } else {
            switch(reduceSinkKeyType) {
                case LONG:
                    opClass = VectorReduceSinkLongOperator.class;
                    break;
                case STRING:
                    opClass = VectorReduceSinkStringOperator.class;
                    break;
                case MULTI_KEY:
                    opClass = VectorReduceSinkMultiKeyOperator.class;
                    break;
                default:
                    throw new HiveException("Unknown reduce sink key type " + reduceSinkKeyType);
            }
        }
    } else {
        if (vectorDesc.getIsEmptyKey() && vectorDesc.getIsEmptyBuckets() && vectorDesc.getIsEmptyPartitions()) {
            opClass = VectorReduceSinkEmptyKeyOperator.class;
        } else {
            opClass = VectorReduceSinkObjectHashOperator.class;
        }
    }
    vectorDesc.setReduceSinkKeyType(reduceSinkKeyType);
    vectorDesc.setVectorReduceSinkInfo(vectorReduceSinkInfo);
    LOG.info("Vectorizer vectorizeOperator reduce sink class " + opClass.getSimpleName());
    Operator<? extends OperatorDesc> vectorOp = null;
    try {
        vectorOp = OperatorFactory.getVectorOperator(opClass, op.getCompilationOpContext(), op.getConf(), vContext, vectorDesc);
    } catch (Exception e) {
        LOG.info("Vectorizer vectorizeOperator reduce sink class exception " + opClass.getSimpleName() + " exception " + e);
        throw new HiveException(e);
    }
    return vectorOp;
}
Also used : InConstantType(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext.InConstantType) HashTableImplementationType(org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableImplementationType) HashTableKeyType(org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType) Type(org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type) VectorDeserializeType(org.apache.hadoop.hive.ql.plan.VectorPartitionDesc.VectorDeserializeType) SupportedFunctionType(org.apache.hadoop.hive.ql.plan.VectorPTFDesc.SupportedFunctionType) OperatorType(org.apache.hadoop.hive.ql.plan.api.OperatorType) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) VectorReduceSinkDesc(org.apache.hadoop.hive.ql.plan.VectorReduceSinkDesc) VectorReduceSinkInfo(org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)
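
The class selection in specializeReduceSinkOperator reduces to a small decision table over the uniform-hash flag, key emptiness, and the single-column key type. A hedged sketch of that table follows; it returns class names as plain strings, whereas the real code resolves actual operator classes and hands them to OperatorFactory.getVectorOperator:

// Hedged sketch of the operator-class selection above. Class names are
// returned as plain strings; the real code works with Class objects.
public class ReduceSinkClassSelection {

    enum KeyType { LONG, STRING, MULTI_KEY }

    static String chooseOperatorClass(boolean uniformHash, boolean emptyKey,
                                      boolean emptyBuckets, boolean emptyPartitions,
                                      KeyType keyType) {
        if (uniformHash) {
            if (emptyKey) {
                return "VectorReduceSinkEmptyKeyOperator";
            }
            switch (keyType) {
                case LONG:   return "VectorReduceSinkLongOperator";
                case STRING: return "VectorReduceSinkStringOperator";
                default:     return "VectorReduceSinkMultiKeyOperator";
            }
        }
        // Object hashing: only a fully empty key/bucket/partition set
        // still maps to the empty-key operator.
        return (emptyKey && emptyBuckets && emptyPartitions)
                ? "VectorReduceSinkEmptyKeyOperator"
                : "VectorReduceSinkObjectHashOperator";
    }
}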

Aggregations

ArrayList (java.util.ArrayList)3 VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression)3 VectorReduceSinkInfo (org.apache.hadoop.hive.ql.plan.VectorReduceSinkInfo)3 List (java.util.List)2 Type (org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type)2 VectorMapJoinOperator (org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator)2 VectorMapJoinOuterFilteredOperator (org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator)2 InConstantType (org.apache.hadoop.hive.ql.exec.vector.VectorizationContext.InConstantType)2 VectorMapJoinInnerBigOnlyLongOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyLongOperator)2 VectorMapJoinInnerBigOnlyMultiKeyOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyMultiKeyOperator)2 VectorMapJoinInnerBigOnlyStringOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerBigOnlyStringOperator)2 VectorMapJoinInnerLongOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerLongOperator)2 VectorMapJoinInnerMultiKeyOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerMultiKeyOperator)2 VectorMapJoinInnerStringOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinInnerStringOperator)2 VectorMapJoinLeftSemiLongOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiLongOperator)2 VectorMapJoinLeftSemiMultiKeyOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiMultiKeyOperator)2 VectorMapJoinLeftSemiStringOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinLeftSemiStringOperator)2 VectorMapJoinOuterLongOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterLongOperator)2 VectorMapJoinOuterMultiKeyOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterMultiKeyOperator)2 VectorMapJoinOuterStringOperator (org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterStringOperator)2