Example 1 with GenericUDFIn

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn in project phoenix by apache.

From class IndexPredicateAnalyzer, method analyzeExpr:

private ExprNodeDesc analyzeExpr(ExprNodeGenericFuncDesc expr, List<IndexSearchCondition> searchConditions, Object... nodeOutputs) throws SemanticException {
    if (FunctionRegistry.isOpAnd(expr)) {
        assert (nodeOutputs.length == 2);
        ExprNodeDesc residual1 = (ExprNodeDesc) nodeOutputs[0];
        ExprNodeDesc residual2 = (ExprNodeDesc) nodeOutputs[1];
        if (residual1 == null) {
            return residual2;
        }
        if (residual2 == null) {
            return residual1;
        }
        List<ExprNodeDesc> residuals = new ArrayList<ExprNodeDesc>();
        residuals.add(residual1);
        residuals.add(residual2);
        return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getGenericUDFForAnd(), residuals);
    }
    GenericUDF genericUDF = expr.getGenericUDF();
    if (!(genericUDF instanceof GenericUDFBaseCompare)) {
        // 2015-10-22 Added by JeongMin Ju : Processing Between/In Operator
        if (genericUDF instanceof GenericUDFBetween) {
            // For NOT BETWEEN, the first element of nodeOutputs is true; otherwise it is false.
            processingBetweenOperator(expr, searchConditions, nodeOutputs);
            return expr;
        } else if (genericUDF instanceof GenericUDFIn) {
            // Plain IN operator; a NOT IN arrives as a NOT wrapping an IN (handled in the next branch).
            processingInOperator(expr, searchConditions, false, nodeOutputs);
            return expr;
        } else if (genericUDF instanceof GenericUDFOPNot && ((ExprNodeGenericFuncDesc) expr.getChildren().get(0)).getGenericUDF() instanceof GenericUDFIn) {
            // NOT IN: the IN operator appears as the child of the NOT operator.
            processingInOperator((ExprNodeGenericFuncDesc) expr.getChildren().get(0), searchConditions, true, ((ExprNodeGenericFuncDesc) nodeOutputs[0]).getChildren().toArray());
            return expr;
        } else if (genericUDF instanceof GenericUDFOPNull) {
            processingNullOperator(expr, searchConditions, nodeOutputs);
            return expr;
        } else if (genericUDF instanceof GenericUDFOPNotNull) {
            processingNotNullOperator(expr, searchConditions, nodeOutputs);
            return expr;
        } else {
            return expr;
        }
    }
    ExprNodeDesc expr1 = (ExprNodeDesc) nodeOutputs[0];
    ExprNodeDesc expr2 = (ExprNodeDesc) nodeOutputs[1];
    // If both sides have the same type, strip any wrapping casts to expose the
    // underlying column expressions.
    if (expr1.getTypeInfo().equals(expr2.getTypeInfo())) {
        expr1 = getColumnExpr(expr1);
        expr2 = getColumnExpr(expr2);
    }
    ExprNodeDesc[] extracted = ExprNodeDescUtils.extractComparePair(expr1, expr2);
    if (extracted == null || (extracted.length > 2 && !acceptsFields)) {
        return expr;
    }
    ExprNodeColumnDesc columnDesc;
    ExprNodeConstantDesc constantDesc;
    if (extracted[0] instanceof ExprNodeConstantDesc) {
        genericUDF = genericUDF.flip();
        columnDesc = (ExprNodeColumnDesc) extracted[1];
        constantDesc = (ExprNodeConstantDesc) extracted[0];
    } else {
        columnDesc = (ExprNodeColumnDesc) extracted[0];
        constantDesc = (ExprNodeConstantDesc) extracted[1];
    }
    Set<String> allowed = columnToUDFs.get(columnDesc.getColumn());
    if (allowed == null) {
        return expr;
    }
    String udfName = genericUDF.getUdfName();
    if (!allowed.contains(udfName)) {
        return expr;
    }
    String[] fields = null;
    if (extracted.length > 2) {
        ExprNodeFieldDesc fieldDesc = (ExprNodeFieldDesc) extracted[2];
        if (!isValidField(fieldDesc)) {
            return expr;
        }
        fields = ExprNodeDescUtils.extractFields(fieldDesc);
    }
    // We also need to update the expr so that the index query can be generated.
    // Note that Hive does not support UDFToDouble etc. in the query text.
    List<ExprNodeDesc> list = new ArrayList<ExprNodeDesc>();
    list.add(expr1);
    list.add(expr2);
    expr = new ExprNodeGenericFuncDesc(expr.getTypeInfo(), expr.getGenericUDF(), list);
    searchConditions.add(new IndexSearchCondition(columnDesc, udfName, constantDesc, expr, fields));
    // Converted to a search condition, so remove it from the residual predicate (unless it references struct fields).
    return fields == null ? null : expr;
}
Also used: java.util.ArrayList, org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc, org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc, org.apache.hadoop.hive.ql.plan.ExprNodeDesc, org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc, org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc, org.apache.hadoop.hive.ql.udf.generic.GenericUDF, org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare, org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween, org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn, org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot, org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull, org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull
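
For context, here is a minimal, self-contained sketch (not taken from the project; the class name, table alias, and column name are illustrative) of the expression shape analyzeExpr receives for a predicate like id IN (1, 2). A NOT IN predicate arrives instead as a GenericUDFOPNot whose only child is this GenericUDFIn call, which is exactly what the third branch above unwraps.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class InExprShapeSketch {
    public static void main(String[] args) {
        // Child 0 is the probed column; children 1..n are the IN-list constants.
        List<ExprNodeDesc> children = new ArrayList<>();
        children.add(new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "id", "t", false));
        children.add(new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 1));
        children.add(new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 2));
        // An IN predicate always has boolean type, as in the examples above.
        ExprNodeGenericFuncDesc inExpr = new ExprNodeGenericFuncDesc(
                TypeInfoFactory.booleanTypeInfo, new GenericUDFIn(), children);
        System.out.println(inExpr.getExprString());
    }
}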

Example 2 with GenericUDFIn

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn in project flink by apache.

From class HiveParserRexNodeConverter, method convertGenericFunc:

private RexNode convertGenericFunc(ExprNodeGenericFuncDesc func) throws SemanticException {
    ExprNodeDesc tmpExprNode;
    RexNode tmpRN;
    List<RexNode> childRexNodeLst = new ArrayList<>();
    List<RelDataType> argTypes = new ArrayList<>();
    // TODO: 1) Expand to other functions as needed 2) What about types other than primitive.
    TypeInfo tgtDT = null;
    GenericUDF tgtUdf = func.getGenericUDF();
    if (tgtUdf instanceof GenericUDFIn) {
        return convertIN(func);
    }
    boolean isNumeric = isNumericBinary(func);
    boolean isCompare = !isNumeric && tgtUdf instanceof GenericUDFBaseCompare;
    boolean isWhenCase = tgtUdf instanceof GenericUDFWhen || tgtUdf instanceof GenericUDFCase;
    boolean isTransformableTimeStamp = func.getGenericUDF() instanceof GenericUDFUnixTimeStamp && func.getChildren().size() != 0;
    if (isNumeric) {
        tgtDT = func.getTypeInfo();
        assert func.getChildren().size() == 2;
    // TODO: checking 2 children is useless, compare already does that.
    } else if (isCompare && (func.getChildren().size() == 2)) {
        tgtDT = FunctionRegistry.getCommonClassForComparison(func.getChildren().get(0).getTypeInfo(), func.getChildren().get(1).getTypeInfo());
    } else if (isWhenCase) {
        // Check the children for stateful functions, as they are not allowed in CASE/WHEN.
        if (checkForStatefulFunctions(func.getChildren())) {
            throw new SemanticException("Stateful expressions cannot be used inside of CASE");
        }
    } else if (isTransformableTimeStamp) {
        func = ExprNodeGenericFuncDesc.newInstance(new GenericUDFToUnixTimeStamp(), func.getChildren());
    }
    for (ExprNodeDesc childExpr : func.getChildren()) {
        tmpExprNode = childExpr;
        if (tgtDT != null && TypeInfoUtils.isConversionRequiredForComparison(tgtDT, childExpr.getTypeInfo())) {
            if (isCompare) {
                // For compare, we will convert requisite children
                tmpExprNode = HiveASTParseUtils.createConversionCast(childExpr, (PrimitiveTypeInfo) tgtDT);
            } else if (isNumeric) {
                // For numeric, we'll do minimum necessary cast - if we cast to the type
                // of expression, bad things will happen.
                PrimitiveTypeInfo minArgType = HiveParserExprNodeDescUtils.deriveMinArgumentCast(childExpr, tgtDT);
                tmpExprNode = HiveASTParseUtils.createConversionCast(childExpr, minArgType);
            } else {
                throw new AssertionError("Unexpected " + tgtDT + " - not a numeric op or compare");
            }
        }
        argTypes.add(HiveParserTypeConverter.convert(tmpExprNode.getTypeInfo(), cluster.getTypeFactory()));
        tmpRN = convert(tmpExprNode);
        childRexNodeLst.add(tmpRN);
    }
    // process the function
    RelDataType retType = HiveParserTypeConverter.convert(func.getTypeInfo(), cluster.getTypeFactory());
    SqlOperator calciteOp = HiveParserSqlFunctionConverter.getCalciteOperator(func.getFuncText(), func.getGenericUDF(), argTypes, retType);
    if (calciteOp.getKind() == SqlKind.CASE) {
        // If it is a case operator, we need to rewrite it
        childRexNodeLst = rewriteCaseChildren(func, childRexNodeLst);
    }
    RexNode expr = cluster.getRexBuilder().makeCall(calciteOp, childRexNodeLst);
    // check whether we need a calcite cast
    RexNode cast = handleExplicitCast(func, childRexNodeLst, ((RexCall) expr).getOperator());
    if (cast != null) {
        expr = cast;
        retType = cast.getType();
    }
    // Flatten nested calls of the same operator, except for casts.
    if (flattenExpr && expr instanceof RexCall && !(((RexCall) expr).getOperator() instanceof SqlCastFunction)) {
        RexCall call = (RexCall) expr;
        expr = cluster.getRexBuilder().makeCall(retType, call.getOperator(), RexUtil.flatten(call.getOperands(), call.getOperator()));
    }
    return expr;
}
Also used: java.util.ArrayList, org.apache.calcite.rel.type.RelDataType, org.apache.calcite.rex.RexCall, org.apache.calcite.rex.RexNode, org.apache.calcite.sql.SqlOperator, org.apache.calcite.sql.fun.SqlCastFunction, org.apache.hadoop.hive.ql.parse.SemanticException, org.apache.hadoop.hive.ql.plan.ExprNodeDesc, org.apache.hadoop.hive.ql.udf.generic.GenericUDF, org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare, org.apache.hadoop.hive.ql.udf.generic.GenericUDFCase, org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn, org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp, org.apache.hadoop.hive.ql.udf.generic.GenericUDFUnixTimeStamp, org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.TypeInfo
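
The cast-insertion loop above hinges on FunctionRegistry.getCommonClassForComparison and TypeInfoUtils.isConversionRequiredForComparison. Below is a minimal standalone sketch (not project code; the class name is illustrative) of that decision for an int-vs-double compare, with the results these Hive utilities are expected to produce.

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class CommonCompareTypeSketch {
    public static void main(String[] args) {
        // The common comparison type for int vs. double is double.
        TypeInfo common = FunctionRegistry.getCommonClassForComparison(
                TypeInfoFactory.intTypeInfo, TypeInfoFactory.doubleTypeInfo);
        System.out.println(common.getTypeName());
        // Only the int side requires a conversion cast; the double side does not.
        System.out.println(TypeInfoUtils.isConversionRequiredForComparison(
                common, TypeInfoFactory.intTypeInfo));    // true
        System.out.println(TypeInfoUtils.isConversionRequiredForComparison(
                common, TypeInfoFactory.doubleTypeInfo)); // false
    }
}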

Example 3 with GenericUDFIn

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn in project hive by apache.

From class TestVectorBetweenIn, method doBetweenInVariation:

private boolean doBetweenInVariation(Random random, String typeName, boolean tryDecimal64, BetweenInVariation betweenInVariation, int subVariation) throws Exception {
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    boolean isDecimal64 = checkDecimal64(tryDecimal64, typeInfo);
    DataTypePhysicalVariation dataTypePhysicalVariation = (isDecimal64 ? DataTypePhysicalVariation.DECIMAL_64 : DataTypePhysicalVariation.NONE);
    final int decimal64Scale = (isDecimal64 ? ((DecimalTypeInfo) typeInfo).getScale() : 0);
    // ----------------------------------------------------------------------------------------------
    ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    final int valueCount = 10 + random.nextInt(10);
    List<Object> valueList = new ArrayList<Object>(valueCount);
    for (int i = 0; i < valueCount; i++) {
        valueList.add(VectorRandomRowSource.randomWritable(random, typeInfo, objectInspector, dataTypePhysicalVariation, /* allowNull */ false));
    }
    final boolean isBetween = (betweenInVariation == BetweenInVariation.FILTER_BETWEEN || betweenInVariation == BetweenInVariation.FILTER_NOT_BETWEEN || betweenInVariation == BetweenInVariation.PROJECTION_BETWEEN || betweenInVariation == BetweenInVariation.PROJECTION_NOT_BETWEEN);
    List<Object> compareList = new ArrayList<Object>();
    List<Object> sortedList = new ArrayList<Object>(valueCount);
    sortedList.addAll(valueList);
    Object exampleObject = valueList.get(0);
    WritableComparator writableComparator = WritableComparator.get((Class<? extends WritableComparable>) exampleObject.getClass());
    sortedList.sort(writableComparator);
    final boolean isInvert;
    if (isBetween) {
        // Covers FILTER_BETWEEN, FILTER_NOT_BETWEEN, PROJECTION_BETWEEN and PROJECTION_NOT_BETWEEN.
        isInvert = (betweenInVariation == BetweenInVariation.FILTER_NOT_BETWEEN || betweenInVariation == BetweenInVariation.PROJECTION_NOT_BETWEEN);
        switch(subVariation) {
            case 0:
                // Range covers all values exactly.
                compareList.add(sortedList.get(0));
                compareList.add(sortedList.get(valueCount - 1));
                break;
            case 1:
                // Exclude the first and last sorted.
                compareList.add(sortedList.get(1));
                compareList.add(sortedList.get(valueCount - 2));
                break;
            case 2:
                // Only last 2 sorted.
                compareList.add(sortedList.get(valueCount - 2));
                compareList.add(sortedList.get(valueCount - 1));
                break;
            case 3:
            case 4:
            case 5:
            case 6:
                {
                    // Choose 2 adjacent in the middle.
                    Object min = sortedList.get(5);
                    Object max = sortedList.get(6);
                    compareList.add(min);
                    compareList.add(max);
                    if (subVariation == 4) {
                        removeValue(valueList, min);
                    } else if (subVariation == 5) {
                        removeValue(valueList, max);
                    } else if (subVariation == 6) {
                        removeValue(valueList, min);
                        removeValue(valueList, max);
                    }
                }
                break;
            default:
                return false;
        }
    } else {
        // Covers FILTER_IN and PROJECTION_IN.
        isInvert = false;
        switch(subVariation) {
            case 0:
                // All values.
                compareList.addAll(valueList);
                break;
            case 1:
                // Don't include the first and last sorted.
                for (int i = 1; i < valueCount - 1; i++) {
                    compareList.add(valueList.get(i));
                }
                break;
            case 2:
                // The even ones.
                for (int i = 2; i < valueCount; i += 2) {
                    compareList.add(valueList.get(i));
                }
                break;
            case 3:
                {
                    // Choose 2 adjacent values in the middle. subVariation is always 3
                    // in this branch, so no values are removed from valueList here.
                    Object min = sortedList.get(5);
                    Object max = sortedList.get(6);
                    compareList.add(min);
                    compareList.add(max);
                }
                break;
            default:
                return false;
        }
    }
    // ----------------------------------------------------------------------------------------------
    GenerationSpec generationSpec = GenerationSpec.createValueList(typeInfo, valueList);
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    generationSpecList.add(generationSpec);
    explicitDataTypePhysicalVariationList.add(dataTypePhysicalVariation);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList, /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true, explicitDataTypePhysicalVariationList);
    List<String> columns = new ArrayList<String>();
    String col1Name = rowSource.columnNames().get(0);
    columns.add(col1Name);
    final ExprNodeDesc col1Expr = new ExprNodeColumnDesc(typeInfo, col1Name, "table", false);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    if (isBetween) {
        children.add(new ExprNodeConstantDesc(Boolean.valueOf(isInvert)));
    }
    children.add(col1Expr);
    for (Object compareObject : compareList) {
        ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(typeInfo, VectorRandomRowSource.getNonWritableObject(compareObject, typeInfo, objectInspector));
        children.add(constDesc);
    }
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    final GenericUDF udf;
    final ObjectInspector outputObjectInspector;
    if (isBetween) {
        udf = new GenericUDFBetween();
        // First argument is boolean invert. Arguments 1..3 are inspectors for range limits...
        ObjectInspector[] argumentOIs = new ObjectInspector[4];
        argumentOIs[0] = PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
        argumentOIs[1] = objectInspector;
        argumentOIs[2] = objectInspector;
        argumentOIs[3] = objectInspector;
        outputObjectInspector = udf.initialize(argumentOIs);
    } else {
        final int compareCount = compareList.size();
        udf = new GenericUDFIn();
        ObjectInspector[] argumentOIs = new ObjectInspector[compareCount];
        ConstantObjectInspector constantObjectInspector = (ConstantObjectInspector) children.get(1).getWritableObjectInspector();
        for (int i = 0; i < compareCount; i++) {
            argumentOIs[i] = constantObjectInspector;
        }
        outputObjectInspector = udf.initialize(argumentOIs);
    }
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children);
    return executeTestModesAndVerify(typeInfo, betweenInVariation, compareList, columns, columnNames, children, udf, exprDesc, randomRows, rowSource, batchSource, outputTypeInfo, /* skipAdaptor */ false);
}
Also used: java.util.ArrayList, org.apache.hadoop.hive.common.type.DataTypePhysicalVariation, org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource, org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource, org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec, org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc, org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc, org.apache.hadoop.hive.ql.plan.ExprNodeDesc, org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc, org.apache.hadoop.hive.ql.udf.generic.GenericUDF, org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween, org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn, org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector, org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector, org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector, org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.TypeInfo, org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo, org.apache.hadoop.io.WritableComparator
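
Outside the vectorized harness, GenericUDFIn can also be exercised directly through the same initialize/evaluate lifecycle the test drives. A minimal sketch, assuming plain Java object inspectors for all arguments (the class name is illustrative):

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class InEvalSketch {
    public static void main(String[] args) throws HiveException {
        GenericUDFIn in = new GenericUDFIn();
        ObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
        // One inspector per argument: the probed value plus each IN-list value.
        in.initialize(new ObjectInspector[] { intOI, intOI, intOI });
        // Evaluates 2 IN (1, 2); expected to print true.
        Object result = in.evaluate(new GenericUDF.DeferredObject[] {
                new GenericUDF.DeferredJavaObject(2),
                new GenericUDF.DeferredJavaObject(1),
                new GenericUDF.DeferredJavaObject(2) });
        System.out.println(result);
    }
}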

Example 4 with GenericUDFIn

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn in project hive by apache.

From class TestVectorBetweenIn, method doBetweenStructInVariation:

private boolean doBetweenStructInVariation(Random random, String structTypeName, BetweenInVariation betweenInVariation) throws Exception {
    StructTypeInfo structTypeInfo = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(structTypeName);
    ObjectInspector structObjectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(structTypeInfo);
    final int valueCount = 10 + random.nextInt(10);
    List<Object> valueList = new ArrayList<Object>(valueCount);
    for (int i = 0; i < valueCount; i++) {
        valueList.add(VectorRandomRowSource.randomWritable(random, structTypeInfo, structObjectInspector, DataTypePhysicalVariation.NONE, /* allowNull */ false));
    }
    final boolean isInvert = false;
    // No convenient WritableComparator / WritableComparable available for STRUCT.
    List<Object> compareList = new ArrayList<Object>();
    Set<Integer> includedSet = new HashSet<Integer>();
    final int chooseLimit = 4 + random.nextInt(valueCount / 2);
    int chooseCount = 0;
    while (chooseCount < chooseLimit) {
        final int index = random.nextInt(valueCount);
        if (includedSet.contains(index)) {
            continue;
        }
        includedSet.add(index);
        compareList.add(valueList.get(index));
        chooseCount++;
    }
    // ----------------------------------------------------------------------------------------------
    GenerationSpec structGenerationSpec = GenerationSpec.createValueList(structTypeInfo, valueList);
    List<GenerationSpec> structGenerationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> structExplicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    structGenerationSpecList.add(structGenerationSpec);
    structExplicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    VectorRandomRowSource structRowSource = new VectorRandomRowSource();
    structRowSource.initGenerationSpecSchema(random, structGenerationSpecList, /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true, structExplicitDataTypePhysicalVariationList);
    Object[][] structRandomRows = structRowSource.randomRows(100000);
    // ---------------------------------------------------------------------------------------------
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<TypeInfo> fieldTypeInfoList = structTypeInfo.getAllStructFieldTypeInfos();
    final int fieldCount = fieldTypeInfoList.size();
    for (int i = 0; i < fieldCount; i++) {
        GenerationSpec generationSpec = GenerationSpec.createOmitGeneration(fieldTypeInfoList.get(i));
        generationSpecList.add(generationSpec);
        explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    }
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList, /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true, explicitDataTypePhysicalVariationList);
    Object[][] randomRows = rowSource.randomRows(100000);
    final int rowCount = randomRows.length;
    for (int r = 0; r < rowCount; r++) {
        List<Object> fieldValueList = (ArrayList) structRandomRows[r][0];
        for (int f = 0; f < fieldCount; f++) {
            randomRows[r][f] = fieldValueList.get(f);
        }
    }
    // ---------------------------------------------------------------------------------------------
    // Currently, STRUCT IN vectorization assumes a GenericUDFStruct.
    List<ObjectInspector> structUdfObjectInspectorList = new ArrayList<ObjectInspector>();
    List<ExprNodeDesc> structUdfChildren = new ArrayList<ExprNodeDesc>(fieldCount);
    List<String> rowColumnNameList = rowSource.columnNames();
    for (int i = 0; i < fieldCount; i++) {
        TypeInfo fieldTypeInfo = fieldTypeInfoList.get(i);
        ExprNodeColumnDesc fieldExpr = new ExprNodeColumnDesc(fieldTypeInfo, rowColumnNameList.get(i), "table", false);
        structUdfChildren.add(fieldExpr);
        ObjectInspector fieldObjectInspector = VectorRandomRowSource.getObjectInspector(fieldTypeInfo, DataTypePhysicalVariation.NONE);
        structUdfObjectInspectorList.add(fieldObjectInspector);
    }
    StandardStructObjectInspector structUdfObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(rowColumnNameList, structUdfObjectInspectorList);
    String structUdfTypeName = structUdfObjectInspector.getTypeName();
    TypeInfo structUdfTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(structUdfTypeName);
    String structFuncText = "struct";
    FunctionInfo fi = FunctionRegistry.getFunctionInfo(structFuncText);
    GenericUDF genericUDF = fi.getGenericUDF();
    ExprNodeDesc col1Expr = new ExprNodeGenericFuncDesc(structUdfObjectInspector, genericUDF, structFuncText, structUdfChildren);
    // ---------------------------------------------------------------------------------------------
    List<String> columns = new ArrayList<String>();
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    for (int i = 0; i < compareList.size(); i++) {
        Object compareObject = compareList.get(i);
        ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(structUdfTypeInfo, VectorRandomRowSource.getNonWritableObject(compareObject, structUdfTypeInfo, structUdfObjectInspector));
        children.add(constDesc);
    }
    for (int i = 0; i < fieldCount; i++) {
        columns.add(rowColumnNameList.get(i));
    }
    String[] columnNames = columns.toArray(new String[0]);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    // ---------------------------------------------------------------------------------------------
    final GenericUDF udf = new GenericUDFIn();
    final int compareCount = compareList.size();
    ObjectInspector[] argumentOIs = new ObjectInspector[compareCount];
    for (int i = 0; i < compareCount; i++) {
        argumentOIs[i] = structUdfObjectInspector;
    }
    final ObjectInspector outputObjectInspector = udf.initialize(argumentOIs);
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children);
    return executeTestModesAndVerify(structUdfTypeInfo, betweenInVariation, compareList, columns, columnNames, children, udf, exprDesc, randomRows, rowSource, batchSource, outputTypeInfo, /* skipAdaptor */ true);
}
Also used: java.util.ArrayList, java.util.HashSet, org.apache.hadoop.hive.common.type.DataTypePhysicalVariation, org.apache.hadoop.hive.ql.exec.FunctionInfo, org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource, org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource, org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec, org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc, org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc, org.apache.hadoop.hive.ql.plan.ExprNodeDesc, org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc, org.apache.hadoop.hive.ql.udf.generic.GenericUDF, org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn, org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector, org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector, org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector, org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.TypeInfo, org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo
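
As the comment in the test notes, the vectorized STRUCT IN path assumes the left-hand side is a call to the registered struct function (GenericUDFStruct), i.e. a query shape like struct(c1, c2) IN (...). A minimal sketch of the same registry lookup the test performs above (the class name is illustrative):

import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

public class StructFunctionLookupSketch {
    public static void main(String[] args) throws SemanticException {
        // Resolve the built-in "struct" function, as doBetweenStructInVariation does.
        FunctionInfo fi = FunctionRegistry.getFunctionInfo("struct");
        GenericUDF structUdf = fi.getGenericUDF();
        // Expected to print org.apache.hadoop.hive.ql.udf.generic.GenericUDFStruct.
        System.out.println(structUdf.getClass().getName());
    }
}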

Example 5 with GenericUDFIn

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn in project hive by apache.

From class HiveFunctionHelper, method convertInputs:

/**
 * {@inheritDoc}
 */
@Override
public List<RexNode> convertInputs(FunctionInfo fi, List<RexNode> inputs, RelDataType returnType) throws SemanticException {
    // 1) Obtain UDF
    final GenericUDF genericUDF = fi.getGenericUDF();
    final TypeInfo typeInfo = TypeConverter.convert(returnType);
    TypeInfo targetType = null;
    boolean isNumeric = genericUDF instanceof GenericUDFBaseBinary && typeInfo.getCategory() == Category.PRIMITIVE && PrimitiveGrouping.NUMERIC_GROUP == PrimitiveObjectInspectorUtils.getPrimitiveGrouping(((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory());
    boolean isCompare = !isNumeric && genericUDF instanceof GenericUDFBaseCompare;
    boolean isBetween = !isNumeric && genericUDF instanceof GenericUDFBetween;
    boolean isIN = !isNumeric && genericUDF instanceof GenericUDFIn;
    if (isNumeric) {
        targetType = typeInfo;
    } else if (genericUDF instanceof GenericUDFBaseCompare) {
        targetType = FunctionRegistry.getCommonClassForComparison(TypeConverter.convert(inputs.get(0).getType()), TypeConverter.convert(inputs.get(1).getType()));
    } else if (genericUDF instanceof GenericUDFBetween) {
        assert inputs.size() == 4;
        // We skip the first child since it is not involved (it is the invert boolean).
        // The target type needs to account for all 3 operands.
        targetType = FunctionRegistry.getCommonClassForComparison(TypeConverter.convert(inputs.get(1).getType()), FunctionRegistry.getCommonClassForComparison(TypeConverter.convert(inputs.get(2).getType()), TypeConverter.convert(inputs.get(3).getType())));
    } else if (genericUDF instanceof GenericUDFIn) {
        // We're only considering the first element of the IN list for the type
        assert inputs.size() > 1;
        targetType = FunctionRegistry.getCommonClassForComparison(TypeConverter.convert(inputs.get(0).getType()), TypeConverter.convert(inputs.get(1).getType()));
    }
    if (targetType != null) {
        List<RexNode> newInputs = new ArrayList<>();
        // Convert inputs if needed
        for (int i = 0; i < inputs.size(); ++i) {
            RexNode input = inputs.get(i);
            TypeInfo inputTypeInfo = TypeConverter.convert(input.getType());
            RexNode tmpExprNode = input;
            if (TypeInfoUtils.isConversionRequiredForComparison(targetType, inputTypeInfo)) {
                if (isIN || isCompare) {
                    // For IN and compare, we will convert requisite children
                    tmpExprNode = convert(targetType, input);
                } else if (isBetween) {
                    // For BETWEEN, skip the first child (the invert boolean).
                    if (i > 0) {
                        tmpExprNode = convert(targetType, input);
                    }
                } else if (isNumeric) {
                    // For numeric, we'll do minimum necessary cast - if we cast to the type
                    // of expression, bad things will happen.
                    PrimitiveTypeInfo minArgType = ExprNodeDescUtils.deriveMinArgumentCast(inputTypeInfo, targetType);
                    tmpExprNode = convert(minArgType, input);
                } else {
                    throw new AssertionError("Unexpected " + targetType + " - not a numeric op or compare");
                }
            }
            newInputs.add(tmpExprNode);
        }
        return newInputs;
    }
    return inputs;
}
Also used: java.util.ArrayList, org.apache.calcite.rex.RexNode, org.apache.hadoop.hive.ql.udf.generic.GenericUDF, org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseBinary, org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare, org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween, org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn, org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo, org.apache.hadoop.hive.serde2.typeinfo.TypeInfo
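
The BETWEEN branch above folds getCommonClassForComparison across the three typed operands, skipping the leading invert flag. A standalone sketch of that folding for a shape like int_col BETWEEN bigint_lo AND double_hi (not project code; double is the expected common type):

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class BetweenTargetTypeSketch {
    public static void main(String[] args) {
        // Fold the common comparison type across all three operands,
        // mirroring the nested calls in convertInputs above.
        TypeInfo target = FunctionRegistry.getCommonClassForComparison(
                TypeInfoFactory.intTypeInfo,
                FunctionRegistry.getCommonClassForComparison(
                        TypeInfoFactory.longTypeInfo, TypeInfoFactory.doubleTypeInfo));
        System.out.println(target.getTypeName()); // double
    }
}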

Aggregations

GenericUDFIn (org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn): 10 usages
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 8 usages
ArrayList (java.util.ArrayList): 7 usages
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 7 usages
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 6 usages
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 6 usages
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF): 6 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 6 usages
GenericUDFBaseCompare (org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare): 5 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 5 usages
GenericUDFBetween (org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween): 4 usages
RexNode (org.apache.calcite.rex.RexNode): 3 usages
StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo): 3 usages
RelDataType (org.apache.calcite.rel.type.RelDataType): 2 usages
RexCall (org.apache.calcite.rex.RexCall): 2 usages
SqlOperator (org.apache.calcite.sql.SqlOperator): 2 usages
SqlCastFunction (org.apache.calcite.sql.fun.SqlCastFunction): 2 usages
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation): 2 usages
VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource): 2 usages
VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource): 2 usages