Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression in project flink by apache.
In the class HiveParserBaseSemanticAnalyzer, the method processOrderSpec:
static OrderSpec processOrderSpec(HiveParserASTNode sortNode) {
    OrderSpec oSpec = new OrderSpec();
    int exprCnt = sortNode.getChildCount();
    for (int i = 0; i < exprCnt; i++) {
        OrderExpression exprSpec = new OrderExpression();
        // Each child of the sort node is a TOK_TABSORTCOLNAME{ASC,DESC} node; its single child
        // is the null-order node, whose child in turn is the actual sort expression.
        HiveParserASTNode orderSpec = (HiveParserASTNode) sortNode.getChild(i);
        HiveParserASTNode nullOrderSpec = (HiveParserASTNode) orderSpec.getChild(0);
        exprSpec.setExpression((HiveParserASTNode) nullOrderSpec.getChild(0));
        if (orderSpec.getType() == HiveASTParser.TOK_TABSORTCOLNAMEASC) {
            exprSpec.setOrder(Order.ASC);
        } else {
            exprSpec.setOrder(Order.DESC);
        }
        if (nullOrderSpec.getType() == HiveASTParser.TOK_NULLS_FIRST) {
            exprSpec.setNullOrder(NullOrder.NULLS_FIRST);
        } else {
            exprSpec.setNullOrder(NullOrder.NULLS_LAST);
        }
        oSpec.addExpression(exprSpec);
    }
    return oSpec;
}
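The traversal above depends on a fixed nesting in the sort AST: each child of the sort node is an ASC/DESC column node, its single child is the null-order node, and that node's child is the actual expression. Below is a minimal, self-contained sketch of the same two-level walk; the Node class and the token constants are hypothetical stand-ins for HiveParserASTNode and the HiveASTParser tokens, not the Flink API.

import java.util.ArrayList;
import java.util.List;

public class OrderSpecWalkSketch {
    // Hypothetical token ids standing in for the HiveASTParser constants.
    static final int TOK_TABSORTCOLNAMEASC = 1;
    static final int TOK_TABSORTCOLNAMEDESC = 2;
    static final int TOK_NULLS_FIRST = 3;
    static final int TOK_NULLS_LAST = 4;

    // Hypothetical stand-in for HiveParserASTNode; only the tree shape matters here.
    static final class Node {
        final int type;
        final String text;
        final List<Node> children = new ArrayList<>();
        Node(int type, String text) { this.type = type; this.text = text; }
        Node child(int i) { return children.get(i); }
    }

    public static void main(String[] args) {
        // Model of "ORDER BY c1 DESC NULLS FIRST":
        // sortNode -> TOK_TABSORTCOLNAMEDESC -> TOK_NULLS_FIRST -> c1
        Node expr = new Node(0, "c1");
        Node nullOrder = new Node(TOK_NULLS_FIRST, null);
        nullOrder.children.add(expr);
        Node sortCol = new Node(TOK_TABSORTCOLNAMEDESC, null);
        sortCol.children.add(nullOrder);
        Node sortNode = new Node(0, null);
        sortNode.children.add(sortCol);

        // Same walk as processOrderSpec: column node -> null-order node -> expression.
        for (Node orderSpec : sortNode.children) {
            Node nullOrderSpec = orderSpec.child(0);
            Node expression = nullOrderSpec.child(0);
            String order = orderSpec.type == TOK_TABSORTCOLNAMEASC ? "ASC" : "DESC";
            String nulls = nullOrderSpec.type == TOK_NULLS_FIRST ? "NULLS FIRST" : "NULLS LAST";
            System.out.println(expression.text + " " + order + " " + nulls);
        }
    }
}

Running the sketch prints "c1 DESC NULLS FIRST", mirroring the OrderExpression that processOrderSpec would build for that sort child.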
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression in project flink by apache.
In the class HiveParserBaseSemanticAnalyzer, the method getHiveAggInfo:
public static AggInfo getHiveAggInfo(
        HiveParserASTNode aggAst,
        int aggFnLstArgIndx,
        HiveParserRowResolver inputRR,
        HiveParserWindowingSpec.WindowFunctionSpec winFuncSpec,
        HiveParserSemanticAnalyzer semanticAnalyzer,
        FrameworkConfig frameworkConfig,
        RelOptCluster cluster)
        throws SemanticException {
    AggInfo aInfo;
    // 1. Convert UDAF params to ExprNodeDesc
    ArrayList<ExprNodeDesc> aggParameters = new ArrayList<>();
    for (int i = 1; i <= aggFnLstArgIndx; i++) {
        HiveParserASTNode paraExpr = (HiveParserASTNode) aggAst.getChild(i);
        ExprNodeDesc paraExprNode = semanticAnalyzer.genExprNodeDesc(paraExpr, inputRR);
        aggParameters.add(paraExprNode);
    }
    // 2. Is this a distinct UDAF?
    boolean isDistinct = aggAst.getType() == HiveASTParser.TOK_FUNCTIONDI;
    // 3. Determine the type of the UDAF
    TypeInfo udafRetType = null;
    // 3.1 Obtain the UDAF name
    String aggName = unescapeIdentifier(aggAst.getChild(0).getText());
    boolean isAllColumns = false;
    // 3.2 Ranking functions' type is 'int'/'double'
    if (FunctionRegistry.isRankingFunction(aggName)) {
        if (aggName.equalsIgnoreCase("percent_rank")) {
            udafRetType = TypeInfoFactory.doubleTypeInfo;
        } else {
            udafRetType = TypeInfoFactory.intTypeInfo;
        }
        // set arguments for ranking functions
        for (OrderExpression orderExpr : winFuncSpec.windowSpec.getOrder().getExpressions()) {
            aggParameters.add(
                    semanticAnalyzer.genExprNodeDesc(orderExpr.getExpression(), inputRR));
        }
    } else {
        // 3.3 Try obtaining UDAF evaluators to determine the return type
        try {
            isAllColumns = aggAst.getType() == HiveASTParser.TOK_FUNCTIONSTAR;
            // 3.3.1 Get UDAF evaluator
            GenericUDAFEvaluator.Mode amode =
                    HiveParserUtils.groupByDescModeToUDAFMode(
                            GroupByDesc.Mode.COMPLETE, isDistinct);
            GenericUDAFEvaluator genericUDAFEvaluator;
            if (aggName.toLowerCase().equals(FunctionRegistry.LEAD_FUNC_NAME)
                    || aggName.toLowerCase().equals(FunctionRegistry.LAG_FUNC_NAME)) {
                ArrayList<ObjectInspector> originalParameterTypeInfos =
                        HiveParserUtils.getWritableObjectInspector(aggParameters);
                genericUDAFEvaluator =
                        FunctionRegistry.getGenericWindowingEvaluator(
                                aggName, originalParameterTypeInfos, isDistinct, isAllColumns);
                HiveParserBaseSemanticAnalyzer.GenericUDAFInfo udaf =
                        HiveParserUtils.getGenericUDAFInfo(
                                genericUDAFEvaluator, amode, aggParameters);
                udafRetType = ((ListTypeInfo) udaf.returnType).getListElementTypeInfo();
            } else {
                genericUDAFEvaluator =
                        HiveParserUtils.getGenericUDAFEvaluator(
                                aggName,
                                aggParameters,
                                aggAst,
                                isDistinct,
                                isAllColumns,
                                frameworkConfig.getOperatorTable());
                // 3.3.2 Get UDAF info using the UDAF evaluator
                HiveParserBaseSemanticAnalyzer.GenericUDAFInfo udaf =
                        HiveParserUtils.getGenericUDAFInfo(
                                genericUDAFEvaluator, amode, aggParameters);
                if (HiveParserUtils.pivotResult(aggName)) {
                    udafRetType = ((ListTypeInfo) udaf.returnType).getListElementTypeInfo();
                } else {
                    udafRetType = udaf.returnType;
                }
            }
        } catch (Exception e) {
            LOG.debug(
                    "CBO: Couldn't Obtain UDAF evaluators for "
                            + aggName
                            + ", trying to translate to GenericUDF");
        }
        // 3.4 Try GenericUDF translation
        if (udafRetType == null) {
            HiveParserTypeCheckCtx tcCtx =
                    new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster);
            // We allow stateful functions in the SELECT list (but nowhere else)
            tcCtx.setAllowStatefulFunctions(true);
            tcCtx.setAllowDistinctFunctions(false);
            ExprNodeDesc exp =
                    semanticAnalyzer.genExprNodeDesc(
                            (HiveParserASTNode) aggAst.getChild(0), inputRR, tcCtx);
            udafRetType = exp.getTypeInfo();
        }
    }
    // 4. Construct AggInfo
    aInfo = new AggInfo(aggParameters, udafRetType, aggName, isDistinct, isAllColumns, null);
    return aInfo;
}
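Only step 3.2 short-circuits the type lookup: percent_rank resolves to double and the other ranking functions to int, while everything else goes through the UDAF evaluator and, failing that, GenericUDF translation. The following is a minimal sketch of just that rule, assuming Hive's TypeInfoFactory is on the classpath and using an illustrative set of names in place of FunctionRegistry.isRankingFunction:

import java.util.Set;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class RankingReturnTypeSketch {

    // Illustrative names only; the real check is FunctionRegistry.isRankingFunction(aggName).
    private static final Set<String> RANKING_FUNCS =
            Set.of("rank", "dense_rank", "percent_rank");

    // Returns the fixed return type for a ranking function, or null to signal that the
    // caller should fall back to UDAF-evaluator / GenericUDF resolution (steps 3.3 and 3.4).
    static TypeInfo rankingReturnType(String aggName) {
        if (!RANKING_FUNCS.contains(aggName.toLowerCase())) {
            return null;
        }
        return aggName.equalsIgnoreCase("percent_rank")
                ? TypeInfoFactory.doubleTypeInfo
                : TypeInfoFactory.intTypeInfo;
    }
}

Under these assumptions, rankingReturnType("rank") yields intTypeInfo, rankingReturnType("percent_rank") yields doubleTypeInfo, and rankingReturnType("sum") yields null, which corresponds to the else branch in getHiveAggInfo above.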
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression in project flink by apache.
In the class HiveParserBaseSemanticAnalyzer, the method getOrderKeys:
public static List<RexFieldCollation> getOrderKeys(
        OrderSpec orderSpec,
        HiveParserRexNodeConverter converter,
        HiveParserRowResolver inputRR,
        HiveParserTypeCheckCtx typeCheckCtx,
        HiveParserSemanticAnalyzer semanticAnalyzer)
        throws SemanticException {
    List<RexFieldCollation> orderKeys = new ArrayList<>();
    if (orderSpec != null) {
        List<OrderExpression> oExprs = orderSpec.getExpressions();
        for (OrderExpression oExpr : oExprs) {
            typeCheckCtx.setAllowStatefulFunctions(true);
            // Convert the Hive order expression to an ExprNodeDesc and then to a Calcite RexNode.
            ExprNodeDesc exp =
                    semanticAnalyzer.genExprNodeDesc(oExpr.getExpression(), inputRR, typeCheckCtx);
            RexNode ordExp = converter.convert(exp);
            // Sort direction and null ordering are encoded as SqlKind flags on the collation.
            Set<SqlKind> flags = new HashSet<>();
            if (oExpr.getOrder() == Order.DESC) {
                flags.add(SqlKind.DESCENDING);
            }
            if (oExpr.getNullOrder() == NullOrder.NULLS_FIRST) {
                flags.add(SqlKind.NULLS_FIRST);
            } else if (oExpr.getNullOrder() == NullOrder.NULLS_LAST) {
                flags.add(SqlKind.NULLS_LAST);
            } else {
                throw new SemanticException(
                        "Unexpected null ordering option: " + oExpr.getNullOrder());
            }
            orderKeys.add(new RexFieldCollation(ordExp, flags));
        }
    }
    return orderKeys;
}
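Besides the converted RexNode, the only thing getOrderKeys derives per expression is the SqlKind flag set passed to RexFieldCollation. That mapping can be isolated as in the sketch below; the Order and NullOrder enums are local stand-ins for the enums used in the snippet, while SqlKind is Calcite's real enum.

import java.util.EnumSet;
import java.util.Set;
import org.apache.calcite.sql.SqlKind;

public class CollationFlagsSketch {

    // Local stand-ins for the Order / NullOrder enums referenced by getOrderKeys.
    enum Order { ASC, DESC }
    enum NullOrder { NULLS_FIRST, NULLS_LAST }

    // Mirrors the flag set attached to each RexFieldCollation: DESC adds SqlKind.DESCENDING
    // (ASC is the default and adds nothing), and the null ordering always contributes one flag.
    static Set<SqlKind> collationFlags(Order order, NullOrder nullOrder) {
        Set<SqlKind> flags = EnumSet.noneOf(SqlKind.class);
        if (order == Order.DESC) {
            flags.add(SqlKind.DESCENDING);
        }
        flags.add(nullOrder == NullOrder.NULLS_FIRST ? SqlKind.NULLS_FIRST : SqlKind.NULLS_LAST);
        return flags;
    }

    public static void main(String[] args) {
        // Prints the two flags produced for ORDER BY x DESC NULLS FIRST.
        System.out.println(collationFlags(Order.DESC, NullOrder.NULLS_FIRST));
    }
}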