Search in sources :

Example 1 with OrderExpression

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression in the Apache Flink project.

From the class HiveParserBaseSemanticAnalyzer, method processOrderSpec.

/**
 * Translates a sort AST node into an {@link OrderSpec}.
 *
 * <p>Each child of {@code sortNode} describes one order-by column: the child's own token
 * type encodes ASC vs. DESC, its first grandchild's token type encodes NULLS FIRST vs.
 * NULLS LAST, and the expression itself sits one level below that.
 *
 * @param sortNode AST node whose children are the individual sort-column specs
 * @return an {@link OrderSpec} with one {@link OrderExpression} per child of {@code sortNode}
 */
static OrderSpec processOrderSpec(HiveParserASTNode sortNode) {
    OrderSpec orderSpec = new OrderSpec();
    int childCount = sortNode.getChildCount();
    for (int idx = 0; idx < childCount; idx++) {
        HiveParserASTNode sortColNode = (HiveParserASTNode) sortNode.getChild(idx);
        HiveParserASTNode nullOrderNode = (HiveParserASTNode) sortColNode.getChild(0);

        OrderExpression expr = new OrderExpression();
        // The ordered expression lives under the null-order node.
        expr.setExpression((HiveParserASTNode) nullOrderNode.getChild(0));
        // Anything other than the explicit ASC token is treated as DESC.
        expr.setOrder(
                sortColNode.getType() == HiveASTParser.TOK_TABSORTCOLNAMEASC
                        ? Order.ASC
                        : Order.DESC);
        // Anything other than the explicit NULLS FIRST token is treated as NULLS LAST.
        expr.setNullOrder(
                nullOrderNode.getType() == HiveASTParser.TOK_NULLS_FIRST
                        ? NullOrder.NULLS_FIRST
                        : NullOrder.NULLS_LAST);
        orderSpec.addExpression(expr);
    }
    return orderSpec;
}
Also used : OrderSpec(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderSpec) OrderExpression(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression)

Example 2 with OrderExpression

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression in the Apache Flink project.

From the class HiveParserBaseSemanticAnalyzer, method getHiveAggInfo.

/**
 * Builds an {@link AggInfo} for a UDAF call found in the AST.
 *
 * <p>Parameters are taken from children 1..{@code aggFnLstArgIndx} of {@code aggAst}
 * (child 0 is the function name). The return type is resolved by, in order:
 * (1) hard-coded types for ranking functions, (2) looking up a UDAF evaluator, and
 * (3) falling back to translating the whole call as a GenericUDF expression.
 *
 * @param aggAst the TOK_FUNCTION / TOK_FUNCTIONDI / TOK_FUNCTIONSTAR AST node
 * @param aggFnLstArgIndx index of the last argument child to include as a UDAF parameter
 * @param inputRR row resolver used to resolve column references in the arguments
 * @param winFuncSpec windowing spec; its order-by expressions become extra arguments for
 *     ranking functions (NOTE(review): assumed non-null when {@code aggName} is a ranking
 *     function — confirm at call sites)
 * @param semanticAnalyzer used to convert argument ASTs to {@link ExprNodeDesc}
 * @param frameworkConfig supplies the operator table for evaluator lookup
 * @param cluster Calcite cluster, needed by the GenericUDF fallback's type-check context
 * @return the assembled {@link AggInfo}
 * @throws SemanticException if argument expression generation fails
 */
public static AggInfo getHiveAggInfo(HiveParserASTNode aggAst, int aggFnLstArgIndx, HiveParserRowResolver inputRR, HiveParserWindowingSpec.WindowFunctionSpec winFuncSpec, HiveParserSemanticAnalyzer semanticAnalyzer, FrameworkConfig frameworkConfig, RelOptCluster cluster) throws SemanticException {
    AggInfo aInfo;
    // 1 Convert UDAF Params to ExprNodeDesc
    // Children start at index 1; child 0 holds the function name.
    ArrayList<ExprNodeDesc> aggParameters = new ArrayList<>();
    for (int i = 1; i <= aggFnLstArgIndx; i++) {
        HiveParserASTNode paraExpr = (HiveParserASTNode) aggAst.getChild(i);
        ExprNodeDesc paraExprNode = semanticAnalyzer.genExprNodeDesc(paraExpr, inputRR);
        aggParameters.add(paraExprNode);
    }
    // 2. Is this distinct UDAF
    boolean isDistinct = aggAst.getType() == HiveASTParser.TOK_FUNCTIONDI;
    // 3. Determine type of UDAF
    TypeInfo udafRetType = null;
    // 3.1 Obtain UDAF name
    String aggName = unescapeIdentifier(aggAst.getChild(0).getText());
    boolean isAllColumns = false;
    // 3.2 Rank functions type is 'int'/'double'
    if (FunctionRegistry.isRankingFunction(aggName)) {
        if (aggName.equalsIgnoreCase("percent_rank")) {
            udafRetType = TypeInfoFactory.doubleTypeInfo;
        } else {
            udafRetType = TypeInfoFactory.intTypeInfo;
        }
        // set arguments for rank functions
        // Ranking functions take no explicit arguments; they operate on the window's
        // order-by expressions, which are appended here as implicit parameters.
        for (OrderExpression orderExpr : winFuncSpec.windowSpec.getOrder().getExpressions()) {
            aggParameters.add(semanticAnalyzer.genExprNodeDesc(orderExpr.getExpression(), inputRR));
        }
    } else {
        // 3.3 Try obtaining UDAF evaluators to determine the ret type
        try {
            isAllColumns = aggAst.getType() == HiveASTParser.TOK_FUNCTIONSTAR;
            // 3.3.1 Get UDAF Evaluator
            GenericUDAFEvaluator.Mode amode = HiveParserUtils.groupByDescModeToUDAFMode(GroupByDesc.Mode.COMPLETE, isDistinct);
            GenericUDAFEvaluator genericUDAFEvaluator;
            if (aggName.toLowerCase().equals(FunctionRegistry.LEAD_FUNC_NAME) || aggName.toLowerCase().equals(FunctionRegistry.LAG_FUNC_NAME)) {
                // lead/lag need the windowing-specific evaluator lookup; their declared
                // return type is a list, so unwrap the element type below.
                ArrayList<ObjectInspector> originalParameterTypeInfos = HiveParserUtils.getWritableObjectInspector(aggParameters);
                genericUDAFEvaluator = FunctionRegistry.getGenericWindowingEvaluator(aggName, originalParameterTypeInfos, isDistinct, isAllColumns);
                HiveParserBaseSemanticAnalyzer.GenericUDAFInfo udaf = HiveParserUtils.getGenericUDAFInfo(genericUDAFEvaluator, amode, aggParameters);
                udafRetType = ((ListTypeInfo) udaf.returnType).getListElementTypeInfo();
            } else {
                genericUDAFEvaluator = HiveParserUtils.getGenericUDAFEvaluator(aggName, aggParameters, aggAst, isDistinct, isAllColumns, frameworkConfig.getOperatorTable());
                // 3.3.2 Get UDAF Info using UDAF Evaluator
                HiveParserBaseSemanticAnalyzer.GenericUDAFInfo udaf = HiveParserUtils.getGenericUDAFInfo(genericUDAFEvaluator, amode, aggParameters);
                if (HiveParserUtils.pivotResult(aggName)) {
                    // Pivoting UDAFs report a list type; the per-row type is the element type.
                    udafRetType = ((ListTypeInfo) udaf.returnType).getListElementTypeInfo();
                } else {
                    udafRetType = udaf.returnType;
                }
            }
        } catch (Exception e) {
            // Deliberately swallowed: failure here just means the evaluator lookup did not
            // apply; step 3.4 retries the call as a plain GenericUDF expression.
            LOG.debug("CBO: Couldn't Obtain UDAF evaluators for " + aggName + ", trying to translate to GenericUDF");
        }
        // 3.4 Try GenericUDF translation
        if (udafRetType == null) {
            HiveParserTypeCheckCtx tcCtx = new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster);
            // We allow stateful functions in the SELECT list (but nowhere else)
            tcCtx.setAllowStatefulFunctions(true);
            tcCtx.setAllowDistinctFunctions(false);
            // Type-check the function call itself (child 0) to recover its result type.
            ExprNodeDesc exp = semanticAnalyzer.genExprNodeDesc((HiveParserASTNode) aggAst.getChild(0), inputRR, tcCtx);
            udafRetType = exp.getTypeInfo();
        }
    }
    // 4. Construct AggInfo
    aInfo = new AggInfo(aggParameters, udafRetType, aggName, isDistinct, isAllColumns, null);
    return aInfo;
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) OrderExpression(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression) GenericUDAFEvaluator(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator) ArrayList(java.util.ArrayList) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) InvalidTableException(org.apache.hadoop.hive.ql.metadata.InvalidTableException) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc)

Example 3 with OrderExpression

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression in the Apache Flink project.

From the class HiveParserBaseSemanticAnalyzer, method getOrderKeys.

/**
 * Converts an {@link OrderSpec} into a list of Calcite {@link RexFieldCollation}s.
 *
 * <p>Each order expression is type-checked, converted to a {@link RexNode}, and tagged with
 * {@link SqlKind#DESCENDING} (for DESC) plus exactly one of {@link SqlKind#NULLS_FIRST} /
 * {@link SqlKind#NULLS_LAST}.
 *
 * @param orderSpec the order-by spec; {@code null} yields an empty list
 * @param converter converts {@link ExprNodeDesc} to {@link RexNode}
 * @param inputRR row resolver for column resolution
 * @param typeCheckCtx type-check context; stateful functions are enabled on it
 * @param semanticAnalyzer used to generate expression descriptors
 * @return one collation per order expression, in spec order
 * @throws SemanticException if an expression has an unexpected null-ordering option
 */
public static List<RexFieldCollation> getOrderKeys(OrderSpec orderSpec, HiveParserRexNodeConverter converter, HiveParserRowResolver inputRR, HiveParserTypeCheckCtx typeCheckCtx, HiveParserSemanticAnalyzer semanticAnalyzer) throws SemanticException {
    List<RexFieldCollation> collations = new ArrayList<>();
    if (orderSpec == null) {
        return collations;
    }
    for (OrderExpression orderExpr : orderSpec.getExpressions()) {
        // Stateful functions are permitted inside order-by expressions; set the flag
        // before each type-check in case the context was reset elsewhere.
        typeCheckCtx.setAllowStatefulFunctions(true);
        ExprNodeDesc exprDesc = semanticAnalyzer.genExprNodeDesc(orderExpr.getExpression(), inputRR, typeCheckCtx);
        RexNode rexNode = converter.convert(exprDesc);

        Set<SqlKind> collationFlags = new HashSet<>();
        if (orderExpr.getOrder() == Order.DESC) {
            collationFlags.add(SqlKind.DESCENDING);
        }
        NullOrder nullOrder = orderExpr.getNullOrder();
        if (nullOrder == NullOrder.NULLS_FIRST) {
            collationFlags.add(SqlKind.NULLS_FIRST);
        } else if (nullOrder == NullOrder.NULLS_LAST) {
            collationFlags.add(SqlKind.NULLS_LAST);
        } else {
            throw new SemanticException("Unexpected null ordering option: " + nullOrder);
        }
        collations.add(new RexFieldCollation(rexNode, collationFlags));
    }
    return collations;
}
Also used : OrderExpression(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression) ArrayList(java.util.ArrayList) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) SqlKind(org.apache.calcite.sql.SqlKind) RexFieldCollation(org.apache.calcite.rex.RexFieldCollation) RexNode(org.apache.calcite.rex.RexNode) HashSet(java.util.HashSet) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Aggregations

OrderExpression (org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression)3 ArrayList (java.util.ArrayList)2 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)2 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)2 UnsupportedEncodingException (java.io.UnsupportedEncodingException)1 HashSet (java.util.HashSet)1 RexFieldCollation (org.apache.calcite.rex.RexFieldCollation)1 RexNode (org.apache.calcite.rex.RexNode)1 SqlKind (org.apache.calcite.sql.SqlKind)1 OrderSpec (org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderSpec)1 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)1 InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException)1 GenericUDAFEvaluator (org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator)1 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)1 ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo)1 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)1 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)1