
Example 1 with ConstantVectorExpression

Use of org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression in project hive by apache.

From the class VectorizationContext, method getConstantVectorExpression.

private VectorExpression getConstantVectorExpression(Object constantValue, TypeInfo typeInfo, VectorExpressionDescriptor.Mode mode) throws HiveException {
    String typeName = typeInfo.getTypeName();
    VectorExpressionDescriptor.ArgumentType vectorArgType = VectorExpressionDescriptor.ArgumentType.fromHiveTypeName(typeName);
    if (vectorArgType == VectorExpressionDescriptor.ArgumentType.NONE) {
        throw new HiveException("No vector argument type for type name " + typeName);
    }
    int outCol = -1;
    if (mode == VectorExpressionDescriptor.Mode.PROJECTION) {
        outCol = ocm.allocateOutputColumn(typeInfo);
    }
    if (constantValue == null) {
        if (typeInfo.getCategory() != Category.PRIMITIVE) {
            throw new HiveException("Complex type constants (" + typeInfo.getCategory() + ") not supported for type name " + typeName);
        }
        if (mode == VectorExpressionDescriptor.Mode.FILTER) {
            return new FilterConstantBooleanVectorExpression(0);
        } else {
            return new ConstantVectorExpression(outCol, typeInfo, true);
        }
    }
    // Boolean is special case.
    if (typeName.equalsIgnoreCase("boolean")) {
        if (mode == VectorExpressionDescriptor.Mode.FILTER) {
            if ((Boolean) constantValue) {
                return new FilterConstantBooleanVectorExpression(1);
            } else {
                return new FilterConstantBooleanVectorExpression(0);
            }
        } else {
            if ((Boolean) constantValue) {
                return new ConstantVectorExpression(outCol, 1, typeInfo);
            } else {
                return new ConstantVectorExpression(outCol, 0, typeInfo);
            }
        }
    }
    return ConstantVectorExpression.create(outCol, constantValue, typeInfo);
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) CastDecimalToString(org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToString) CastLongToString(org.apache.hadoop.hive.ql.exec.vector.expressions.CastLongToString) CastFloatToString(org.apache.hadoop.hive.ql.exec.vector.expressions.CastFloatToString) CastDateToString(org.apache.hadoop.hive.ql.exec.vector.expressions.CastDateToString) CastTimestampToString(org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToString) CastDoubleToString(org.apache.hadoop.hive.ql.exec.vector.expressions.CastDoubleToString) CastBooleanToStringViaLongToString(org.apache.hadoop.hive.ql.exec.vector.expressions.CastBooleanToStringViaLongToString) FilterConstantBooleanVectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.FilterConstantBooleanVectorExpression) ConstantVectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression) CastStringToBoolean(org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToBoolean) ArgumentType(org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType)
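
For a boolean literal, the dispatch above collapses to just two constructors. Below is a minimal sketch (not taken from Hive) that restates those two branches; the pre-allocated output column number and the use of TypeInfoFactory.booleanTypeInfo are illustrative assumptions, not part of the excerpt.

import org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterConstantBooleanVectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class BooleanConstantSketch {

    // FILTER mode: a boolean literal becomes a pass-all (1) or pass-none (0) filter.
    static VectorExpression asFilter(boolean value) throws HiveException {
        return new FilterConstantBooleanVectorExpression(value ? 1 : 0);
    }

    // PROJECTION mode: the literal is materialized into the given output column as a long (1/0).
    static VectorExpression asProjection(boolean value, int outCol) throws HiveException {
        TypeInfo booleanType = TypeInfoFactory.booleanTypeInfo; // assumed way to obtain the boolean TypeInfo
        return new ConstantVectorExpression(outCol, value ? 1 : 0, booleanType);
    }
}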

Example 2 with ConstantVectorExpression

Use of org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression in project hive by apache.

From the class VectorizationContext, method getCoalesceExpression.

private VectorExpression getCoalesceExpression(List<ExprNodeDesc> childExpr, VectorExpressionDescriptor.Mode mode, TypeInfo returnType) throws HiveException {
    int[] inputColumns = new int[childExpr.size()];
    VectorExpression[] vectorChildren = getVectorExpressions(childExpr, VectorExpressionDescriptor.Mode.PROJECTION);
    final int size = vectorChildren.length;
    TypeInfo[] inputTypeInfos = new TypeInfo[size];
    DataTypePhysicalVariation[] inputDataTypePhysicalVariations = new DataTypePhysicalVariation[size];
    DataTypePhysicalVariation outputDataTypePhysicalVariation = DataTypePhysicalVariation.DECIMAL_64;
    boolean fixConstants = false;
    for (int i = 0; i < vectorChildren.length; ++i) {
        VectorExpression ve = vectorChildren[i];
        inputColumns[i] = ve.getOutputColumnNum();
        inputTypeInfos[i] = ve.getOutputTypeInfo();
        inputDataTypePhysicalVariations[i] = ve.getOutputDataTypePhysicalVariation();
        if (inputDataTypePhysicalVariations[i] == DataTypePhysicalVariation.NONE || inputDataTypePhysicalVariations[i] == null) {
            if (childExpr.get(i) instanceof ExprNodeConstantDesc && inputTypeInfos[i] instanceof DecimalTypeInfo && ((DecimalTypeInfo) inputTypeInfos[i]).precision() <= 18) {
                fixConstants = true;
            } else {
                outputDataTypePhysicalVariation = DataTypePhysicalVariation.NONE;
            }
        }
    }
    if (outputDataTypePhysicalVariation == DataTypePhysicalVariation.DECIMAL_64 && fixConstants) {
        for (int i = 0; i < vectorChildren.length; ++i) {
            if ((inputDataTypePhysicalVariations[i] == DataTypePhysicalVariation.NONE || inputDataTypePhysicalVariations[i] == null) && vectorChildren[i] instanceof ConstantVectorExpression) {
                ConstantVectorExpression cve = ((ConstantVectorExpression) vectorChildren[i]);
                HiveDecimal hd = cve.getDecimalValue();
                Long longValue = new HiveDecimalWritable(hd).serialize64(((DecimalTypeInfo) cve.getOutputTypeInfo()).getScale());
                ((ConstantVectorExpression) vectorChildren[i]).setLongValue(longValue);
                vectorChildren[i].setOutputDataTypePhysicalVariation(DataTypePhysicalVariation.DECIMAL_64);
                int scratchColIndex = vectorChildren[i].getOutputColumnNum() - ocm.initialOutputCol;
                ocm.scratchDataTypePhysicalVariations[scratchColIndex] = DataTypePhysicalVariation.DECIMAL_64;
            }
        }
    }
    final int outputColumnNum = ocm.allocateOutputColumn(returnType, outputDataTypePhysicalVariation);
    VectorCoalesce vectorCoalesce = new VectorCoalesce(inputColumns, outputColumnNum);
    vectorCoalesce.setChildExpressions(vectorChildren);
    vectorCoalesce.setInputTypeInfos(inputTypeInfos);
    vectorCoalesce.setInputDataTypePhysicalVariations(inputDataTypePhysicalVariations);
    vectorCoalesce.setOutputTypeInfo(returnType);
    vectorCoalesce.setOutputDataTypePhysicalVariation(outputDataTypePhysicalVariation);
    freeNonColumns(vectorChildren);
    // Assume a projection; this becomes a filter only when FILTER mode is requested on a boolean result.
    boolean isFilter = false;
    if (mode == VectorExpressionDescriptor.Mode.FILTER) {
        // Is output type a BOOLEAN?
        if (returnType.getCategory() == Category.PRIMITIVE && ((PrimitiveTypeInfo) returnType).getPrimitiveCategory() == PrimitiveCategory.BOOLEAN) {
            isFilter = true;
        } else {
            return null;
        }
    }
    if (isFilter) {
        // Wrap the PROJECTION COALESCE expression output with a filter.
        SelectColumnIsTrue filterVectorExpr = new SelectColumnIsTrue(vectorCoalesce.getOutputColumnNum());
        filterVectorExpr.setChildExpressions(new VectorExpression[] { vectorCoalesce });
        filterVectorExpr.setInputTypeInfos(vectorCoalesce.getOutputTypeInfo());
        filterVectorExpr.setInputDataTypePhysicalVariations(vectorCoalesce.getOutputDataTypePhysicalVariation());
        return filterVectorExpr;
    } else {
        return vectorCoalesce;
    }
}
Also used : ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) VectorCoalesce(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorCoalesce) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) BaseCharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) FilterConstantBooleanVectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.FilterConstantBooleanVectorExpression) ConstantVectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) DynamicValueVectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.DynamicValueVectorExpression) SelectColumnIsTrue(org.apache.hadoop.hive.ql.exec.vector.expressions.SelectColumnIsTrue)
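
The detail worth noting in this example is the constant fix-up: when every non-constant child already uses the DECIMAL_64 representation and each decimal constant fits in 18 digits of precision, the constants are rewritten in place so the whole COALESCE stays in the cheaper long-backed form. A minimal sketch of that rewrite, using only the calls visible above; the scratch-column bookkeeping against ocm is omitted because it depends on VectorizationContext internals.

import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;

public class Decimal64ConstantSketch {

    // Rewrite a small-precision decimal constant so it can be read as a DECIMAL_64 long.
    static void toDecimal64(ConstantVectorExpression cve) {
        HiveDecimal hd = cve.getDecimalValue();
        int scale = ((DecimalTypeInfo) cve.getOutputTypeInfo()).getScale();
        // serialize64 packs the decimal value into a scaled long at the constant's own scale.
        long longValue = new HiveDecimalWritable(hd).serialize64(scale);
        cve.setLongValue(longValue);
        cve.setOutputDataTypePhysicalVariation(DataTypePhysicalVariation.DECIMAL_64);
    }
}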

Example 3 with ConstantVectorExpression

Use of org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression in project hive by apache.

From the class VectorPTFDesc, method getEvaluator.

// We provide this public method to help EXPLAIN VECTORIZATION show the evaluator classes.
public static VectorPTFEvaluatorBase getEvaluator(SupportedFunctionType functionType, boolean isDistinct, WindowFrameDef windowFrameDef, Type[] columnVectorTypes, VectorExpression[] inputVectorExpressions, int outputColumnNum) {
    final boolean isRowEndCurrent = (windowFrameDef.getWindowType() == WindowType.ROWS && windowFrameDef.getEnd().isCurrentRow());
    /*
     * We should only stream when the window start is unbounded and the end row is the current
     * row, because that is how streaming evaluation works: it accumulates from the very first
     * row and produces the result for the current row on the fly. In other words, we currently
     * cannot force any other boundary onto a streaming evaluator.
     */
    final boolean canStream = windowFrameDef.getStart().isUnbounded() && isRowEndCurrent;
    // most of the evaluators will use only first argument
    VectorExpression inputVectorExpression = inputVectorExpressions[0];
    Type columnVectorType = columnVectorTypes[0];
    VectorPTFEvaluatorBase evaluator;
    switch(functionType) {
        case ROW_NUMBER:
            evaluator = new VectorPTFEvaluatorRowNumber(windowFrameDef, inputVectorExpression, outputColumnNum);
            break;
        case RANK:
            evaluator = new VectorPTFEvaluatorRank(windowFrameDef, outputColumnNum);
            break;
        case DENSE_RANK:
            evaluator = new VectorPTFEvaluatorDenseRank(windowFrameDef, outputColumnNum);
            break;
        case MIN:
            switch(columnVectorType) {
                case LONG:
                    evaluator = !canStream ? new VectorPTFEvaluatorLongMin(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingLongMin(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DOUBLE:
                    evaluator = !canStream ? new VectorPTFEvaluatorDoubleMin(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingDoubleMin(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DECIMAL:
                    evaluator = !canStream ? new VectorPTFEvaluatorDecimalMin(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingDecimalMin(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                default:
                    throw new RuntimeException("Unexpected column vector type " + columnVectorType + " for " + functionType);
            }
            break;
        case MAX:
            switch(columnVectorType) {
                case LONG:
                    evaluator = !canStream ? new VectorPTFEvaluatorLongMax(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingLongMax(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DOUBLE:
                    evaluator = !canStream ? new VectorPTFEvaluatorDoubleMax(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingDoubleMax(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DECIMAL:
                    evaluator = !canStream ? new VectorPTFEvaluatorDecimalMax(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingDecimalMax(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                default:
                    throw new RuntimeException("Unexpected column vector type " + columnVectorType + " for " + functionType);
            }
            break;
        case SUM:
            switch(columnVectorType) {
                case LONG:
                    evaluator = !canStream ? new VectorPTFEvaluatorLongSum(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingLongSum(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DOUBLE:
                    evaluator = !canStream ? new VectorPTFEvaluatorDoubleSum(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingDoubleSum(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DECIMAL:
                    evaluator = !canStream ? new VectorPTFEvaluatorDecimalSum(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingDecimalSum(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                default:
                    throw new RuntimeException("Unexpected column vector type " + columnVectorType + " for " + functionType);
            }
            break;
        case AVG:
            switch(columnVectorType) {
                case LONG:
                    evaluator = !canStream ? new VectorPTFEvaluatorLongAvg(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingLongAvg(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DOUBLE:
                    evaluator = !canStream ? new VectorPTFEvaluatorDoubleAvg(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingDoubleAvg(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DECIMAL:
                    evaluator = !canStream ? new VectorPTFEvaluatorDecimalAvg(windowFrameDef, inputVectorExpression, outputColumnNum) : new VectorPTFEvaluatorStreamingDecimalAvg(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                default:
                    throw new RuntimeException("Unexpected column vector type " + columnVectorType + " for " + functionType);
            }
            break;
        case FIRST_VALUE:
            switch(columnVectorType) {
                case LONG:
                    evaluator = new VectorPTFEvaluatorLongFirstValue(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DOUBLE:
                    evaluator = new VectorPTFEvaluatorDoubleFirstValue(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DECIMAL:
                    evaluator = new VectorPTFEvaluatorDecimalFirstValue(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                default:
                    throw new RuntimeException("Unexpected column vector type " + columnVectorType + " for " + functionType);
            }
            break;
        case LAST_VALUE:
            switch(columnVectorType) {
                case LONG:
                    evaluator = new VectorPTFEvaluatorLongLastValue(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DOUBLE:
                    evaluator = new VectorPTFEvaluatorDoubleLastValue(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                case DECIMAL:
                    evaluator = new VectorPTFEvaluatorDecimalLastValue(windowFrameDef, inputVectorExpression, outputColumnNum);
                    break;
                default:
                    throw new RuntimeException("Unexpected column vector type " + columnVectorType + " for " + functionType);
            }
            break;
        case COUNT:
            if (inputVectorExpression == null) {
                evaluator = new VectorPTFEvaluatorCountStar(windowFrameDef, inputVectorExpression, outputColumnNum);
            } else {
                if (isDistinct) {
                    switch(columnVectorType) {
                        case BYTES:
                            evaluator = new VectorPTFEvaluatorBytesCountDistinct(windowFrameDef, inputVectorExpression, outputColumnNum);
                            break;
                        // Decimal64ColumnVector is a LongColumnVector
                        case DECIMAL_64:
                        case LONG:
                            evaluator = new VectorPTFEvaluatorLongCountDistinct(windowFrameDef, inputVectorExpression, outputColumnNum);
                            break;
                        case DOUBLE:
                            evaluator = new VectorPTFEvaluatorDoubleCountDistinct(windowFrameDef, inputVectorExpression, outputColumnNum);
                            break;
                        case DECIMAL:
                            evaluator = new VectorPTFEvaluatorDecimalCountDistinct(windowFrameDef, inputVectorExpression, outputColumnNum);
                            break;
                        case TIMESTAMP:
                            evaluator = new VectorPTFEvaluatorTimestampCountDistinct(windowFrameDef, inputVectorExpression, outputColumnNum);
                            break;
                        default:
                            throw new RuntimeException("Unexpected column type for ptf count distinct: " + columnVectorType);
                    }
                } else {
                    evaluator = new VectorPTFEvaluatorCount(windowFrameDef, inputVectorExpression, outputColumnNum);
                }
            }
            break;
        case LAG:
            // lag(column, constant, ...)
            int amt = inputVectorExpressions.length > 1 ? (int) ((ConstantVectorExpression) inputVectorExpressions[1]).getLongValue() : 1;
            // lag(column, constant, constant/column)
            VectorExpression defaultValueExpression = inputVectorExpressions.length > 2 ? inputVectorExpressions[2] : null;
            switch(columnVectorType) {
                case LONG:
                case DOUBLE:
                case DECIMAL:
                    evaluator = new VectorPTFEvaluatorLag(windowFrameDef, inputVectorExpression, outputColumnNum, columnVectorType, amt, defaultValueExpression);
                    break;
                default:
                    throw new RuntimeException("Unexpected column vector type " + columnVectorType + " for " + functionType);
            }
            break;
        case LEAD:
            // lead(column, constant, ...)
            amt = inputVectorExpressions.length > 1 ? (int) ((ConstantVectorExpression) inputVectorExpressions[1]).getLongValue() : 1;
            // lead(column, constant, constant/column)
            defaultValueExpression = inputVectorExpressions.length > 2 ? inputVectorExpressions[2] : null;
            switch(columnVectorType) {
                case LONG:
                case DOUBLE:
                case DECIMAL:
                    evaluator = new VectorPTFEvaluatorLead(windowFrameDef, inputVectorExpression, outputColumnNum, columnVectorType, amt, defaultValueExpression);
                    break;
                default:
                    throw new RuntimeException("Unexpected column vector type " + columnVectorType + " for " + functionType);
            }
            break;
        default:
            throw new RuntimeException("Unexpected function type " + functionType);
    }
    return evaluator;
}
Also used : VectorPTFEvaluatorStreamingLongSum(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingLongSum) VectorPTFEvaluatorDecimalSum(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDecimalSum) VectorPTFEvaluatorDoubleSum(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDoubleSum) VectorPTFEvaluatorDoubleCountDistinct(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDoubleCountDistinct) VectorPTFEvaluatorLongCountDistinct(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorLongCountDistinct) VectorPTFEvaluatorCount(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorCount) VectorPTFEvaluatorDoubleMin(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDoubleMin) VectorPTFEvaluatorDoubleAvg(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDoubleAvg) VectorPTFEvaluatorStreamingDoubleSum(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingDoubleSum) VectorPTFEvaluatorBytesCountDistinct(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorBytesCountDistinct) ConstantVectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression) VectorPTFEvaluatorStreamingDoubleMin(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingDoubleMin) VectorPTFEvaluatorStreamingDoubleAvg(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingDoubleAvg) VectorPTFEvaluatorDecimalMin(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDecimalMin) VectorPTFEvaluatorDecimalAvg(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDecimalAvg) VectorPTFEvaluatorBase(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorBase) VectorPTFEvaluatorDoubleFirstValue(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDoubleFirstValue) VectorPTFEvaluatorLead(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorLead) VectorPTFEvaluatorDecimalMax(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDecimalMax) VectorPTFEvaluatorStreamingDoubleMax(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingDoubleMax) VectorPTFEvaluatorLongFirstValue(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorLongFirstValue) VectorPTFEvaluatorLongLastValue(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorLongLastValue) VectorPTFEvaluatorDecimalLastValue(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDecimalLastValue) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) VectorPTFEvaluatorRowNumber(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorRowNumber) VectorPTFEvaluatorStreamingDecimalMax(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingDecimalMax) VectorPTFEvaluatorStreamingDecimalMin(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingDecimalMin) VectorPTFEvaluatorStreamingDecimalAvg(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingDecimalAvg) VectorPTFEvaluatorTimestampCountDistinct(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorTimestampCountDistinct) VectorPTFEvaluatorLongMin(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorLongMin) VectorPTFEvaluatorLongAvg(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorLongAvg) VectorPTFEvaluatorCountStar(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorCountStar) VectorPTFEvaluatorLongMax(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorLongMax) VectorPTFEvaluatorDecimalFirstValue(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDecimalFirstValue) VectorPTFEvaluatorStreamingDecimalSum(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingDecimalSum) VectorPTFEvaluatorDecimalCountDistinct(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDecimalCountDistinct) VectorPTFEvaluatorDoubleLastValue(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDoubleLastValue) VectorPTFEvaluatorDoubleMax(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDoubleMax) VectorPTFEvaluatorStreamingLongMax(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingLongMax) VectorPTFEvaluatorLag(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorLag) WindowType(org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowType) Type(org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type) VectorPTFEvaluatorRank(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorRank) VectorPTFEvaluatorLongSum(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorLongSum) VectorPTFEvaluatorStreamingLongMin(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingLongMin) VectorPTFEvaluatorStreamingLongAvg(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorStreamingLongAvg) VectorPTFEvaluatorDenseRank(org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorDenseRank)
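
Two small decisions drive most of this method: whether the frame permits a streaming evaluator at all, and how LAG/LEAD read their offset from a ConstantVectorExpression argument. The helpers below restate both decisions; they are a sketch, not Hive code, and assume WindowFrameDef lives in org.apache.hadoop.hive.ql.plan.ptf.

import org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowType;
import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef;

public class PtfEvaluatorSketch {

    // Streaming is only possible for a ROWS frame of UNBOUNDED PRECEDING .. CURRENT ROW.
    static boolean canStream(WindowFrameDef windowFrameDef) {
        boolean isRowEndCurrent = windowFrameDef.getWindowType() == WindowType.ROWS
                && windowFrameDef.getEnd().isCurrentRow();
        return windowFrameDef.getStart().isUnbounded() && isRowEndCurrent;
    }

    // lag(col)/lead(col) default to an offset of 1; otherwise the second argument is a constant.
    static int offsetAmount(VectorExpression[] inputVectorExpressions) {
        return inputVectorExpressions.length > 1
                ? (int) ((ConstantVectorExpression) inputVectorExpressions[1]).getLongValue()
                : 1;
    }
}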

Example 4 with ConstantVectorExpression

Use of org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression in project hive by apache.

From the class VectorFilterOperator, method initializeOp.

@Override
protected void initializeOp(Configuration hconf) throws HiveException {
    super.initializeOp(hconf);
    VectorExpression.doTransientInit(predicateExpression, hconf);
    try {
        heartbeatInterval = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVESENDHEARTBEAT);
        predicateExpression.init(hconf);
    } catch (Throwable e) {
        throw new HiveException(e);
    }
    if (predicateExpression instanceof ConstantVectorExpression) {
        ConstantVectorExpression cve = (ConstantVectorExpression) this.predicateExpression;
        if (cve.getLongValue() == 1) {
            filterMode = 1;
        } else {
            filterMode = -1;
        }
    }
    temporarySelected = new int[VectorizedRowBatch.DEFAULT_SIZE];
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ConstantVectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression)
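
When the predicate folds to a constant, VectorFilterOperator never evaluates it per batch; it only remembers whether to pass every row or none. The sketch below restates that decision as a standalone helper; the meaning of the 0 result (evaluate the predicate normally) is an assumption, since that default is not shown in the excerpt.

import org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;

public class FilterModeSketch {

    // Mirrors what initializeOp stores in filterMode: 1 = constant true (pass all rows),
    // -1 = constant false (pass no rows), 0 = non-constant predicate (assumed: evaluate per batch).
    static int filterModeFor(VectorExpression predicateExpression) {
        if (predicateExpression instanceof ConstantVectorExpression) {
            ConstantVectorExpression cve = (ConstantVectorExpression) predicateExpression;
            // Boolean constants are stored as longs: 1 means true.
            return cve.getLongValue() == 1 ? 1 : -1;
        }
        return 0;
    }
}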

Aggregations

ConstantVectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression) 4
FilterConstantBooleanVectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.FilterConstantBooleanVectorExpression) 2
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) 2
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) 1
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal) 1
Type (org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type) 1
ArgumentType (org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.ArgumentType) 1
CastBooleanToStringViaLongToString (org.apache.hadoop.hive.ql.exec.vector.expressions.CastBooleanToStringViaLongToString) 1
CastDateToString (org.apache.hadoop.hive.ql.exec.vector.expressions.CastDateToString) 1
CastDecimalToString (org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToString) 1
CastDoubleToString (org.apache.hadoop.hive.ql.exec.vector.expressions.CastDoubleToString) 1
CastFloatToString (org.apache.hadoop.hive.ql.exec.vector.expressions.CastFloatToString) 1
CastLongToString (org.apache.hadoop.hive.ql.exec.vector.expressions.CastLongToString) 1
CastStringToBoolean (org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToBoolean) 1
CastTimestampToString (org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToString) 1
DynamicValueVectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.DynamicValueVectorExpression) 1
SelectColumnIsTrue (org.apache.hadoop.hive.ql.exec.vector.expressions.SelectColumnIsTrue) 1
VectorCoalesce (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorCoalesce) 1
VectorPTFEvaluatorBase (org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorBase) 1
VectorPTFEvaluatorBytesCountDistinct (org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFEvaluatorBytesCountDistinct) 1