Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserCalcitePlanner, method subqueryRestrictionCheck:
private void subqueryRestrictionCheck(
        HiveParserQB qb,
        HiveParserASTNode searchCond,
        RelNode srcRel,
        boolean forHavingClause,
        Set<HiveParserASTNode> corrScalarQueries)
        throws SemanticException {
    List<HiveParserASTNode> subQueriesInOriginalTree =
            HiveParserSubQueryUtils.findSubQueries(searchCond);
    HiveParserASTNode clonedSearchCond =
            (HiveParserASTNode) HiveParserSubQueryUtils.ADAPTOR.dupTree(searchCond);
    List<HiveParserASTNode> subQueries =
            HiveParserSubQueryUtils.findSubQueries(clonedSearchCond);
    for (int i = 0; i < subQueriesInOriginalTree.size(); i++) {
        int sqIdx = qb.incrNumSubQueryPredicates();
        HiveParserASTNode originalSubQueryAST = subQueriesInOriginalTree.get(i);
        HiveParserASTNode subQueryAST = subQueries.get(i);
        // HiveParserSubQueryUtils.rewriteParentQueryWhere(clonedSearchCond, subQueryAST);
        ObjectPair<Boolean, Integer> subqInfo = new ObjectPair<>(false, 0);
        if (!topLevelConjunctCheck(clonedSearchCond, subqInfo)) {
            // Restriction.7.h :: SubQuery predicates can appear only as top level conjuncts.
            throw new SemanticException(
                    HiveParserErrorMsg.getMsg(
                            ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION,
                            subQueryAST,
                            "Only SubQuery expressions that are top level conjuncts are allowed"));
        }
        HiveParserASTNode outerQueryExpr = (HiveParserASTNode) subQueryAST.getChild(2);
        if (outerQueryExpr != null
                && outerQueryExpr.getType() == HiveASTParser.TOK_SUBQUERY_EXPR) {
            throw new SemanticException(
                    HiveParserErrorMsg.getMsg(
                            ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION,
                            outerQueryExpr,
                            "IN/NOT IN subqueries are not allowed in LHS"));
        }
        HiveParserQBSubQuery subQuery =
                HiveParserSubQueryUtils.buildSubQuery(
                        sqIdx, subQueryAST, originalSubQueryAST, semanticAnalyzer.ctx, frameworkConfig, cluster);
        HiveParserRowResolver inputRR = relToRowResolver.get(srcRel);
        String havingInputAlias = null;
        boolean isCorrScalarWithAgg =
                subQuery.subqueryRestrictionsCheck(inputRR, forHavingClause, havingInputAlias);
        if (isCorrScalarWithAgg) {
            corrScalarQueries.add(originalSubQueryAST);
        }
    }
}
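The key restriction here is Restriction 7.h: a subquery predicate may only appear as a top-level conjunct of the search condition, which is what topLevelConjunctCheck (not shown above) verifies. Below is a minimal, self-contained sketch of that rule over a hypothetical Node type; the class, token strings, and helper names are invented for illustration and are not the Flink/Hive implementation.

import java.util.List;

final class TopLevelConjunctSketch {

    // Hypothetical AST node: a token type plus children.
    static final class Node {
        final String type;
        final List<Node> children;

        Node(String type, List<Node> children) {
            this.type = type;
            this.children = children;
        }
    }

    static boolean isAnd(Node n) {
        return "AND".equals(n.type);
    }

    static boolean isSubQuery(Node n) {
        return "TOK_SUBQUERY_EXPR".equals(n.type);
    }

    // A subquery predicate is acceptable only if every node on the path from the search
    // condition down to it is an AND node, i.e. the predicate is a top-level conjunct.
    static boolean onlyTopLevelConjunctSubQueries(Node cond) {
        if (isSubQuery(cond)) {
            return true;
        }
        if (isAnd(cond)) {
            return cond.children.stream()
                    .allMatch(TopLevelConjunctSketch::onlyTopLevelConjunctSubQueries);
        }
        // Any other operator (OR, NOT, ...) must not contain a subquery underneath it.
        return !containsSubQuery(cond);
    }

    static boolean containsSubQuery(Node n) {
        return isSubQuery(n)
                || n.children.stream().anyMatch(TopLevelConjunctSketch::containsSubQuery);
    }
}

Under this rule a condition like "x IN (SELECT ...) AND y > 0" passes, while "x IN (SELECT ...) OR y > 0" is rejected, which matches the UNSUPPORTED_SUBQUERY_EXPRESSION error raised above.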
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserCalcitePlanner, method genJoinLogicalPlan:
// Generate the join logical plan RelNode by walking the join AST.
private RelNode genJoinLogicalPlan(HiveParserASTNode joinParseTree, Map<String, RelNode> aliasToRel)
        throws SemanticException {
    RelNode leftRel = null;
    RelNode rightRel = null;
    JoinType hiveJoinType;
    if (joinParseTree.getToken().getType() == HiveASTParser.TOK_UNIQUEJOIN) {
        String msg = "UNIQUE JOIN is currently not supported in CBO, turn off cbo to use UNIQUE JOIN.";
        throw new SemanticException(msg);
    }
    // 1. Determine the join type
    switch (joinParseTree.getToken().getType()) {
        case HiveASTParser.TOK_LEFTOUTERJOIN:
            hiveJoinType = JoinType.LEFTOUTER;
            break;
        case HiveASTParser.TOK_RIGHTOUTERJOIN:
            hiveJoinType = JoinType.RIGHTOUTER;
            break;
        case HiveASTParser.TOK_FULLOUTERJOIN:
            hiveJoinType = JoinType.FULLOUTER;
            break;
        case HiveASTParser.TOK_LEFTSEMIJOIN:
            hiveJoinType = JoinType.LEFTSEMI;
            break;
        default:
            hiveJoinType = JoinType.INNER;
            break;
    }
    // 2. Get the left table alias
    HiveParserASTNode left = (HiveParserASTNode) joinParseTree.getChild(0);
    String leftTableAlias = null;
    if (left.getToken().getType() == HiveASTParser.TOK_TABREF
            || left.getToken().getType() == HiveASTParser.TOK_SUBQUERY
            || left.getToken().getType() == HiveASTParser.TOK_PTBLFUNCTION) {
        String tableName =
                HiveParserBaseSemanticAnalyzer.getUnescapedUnqualifiedTableName(
                                (HiveParserASTNode) left.getChild(0))
                        .toLowerCase();
        leftTableAlias =
                left.getChildCount() == 1
                        ? tableName
                        : unescapeIdentifier(left.getChild(left.getChildCount() - 1).getText().toLowerCase());
        leftTableAlias =
                left.getToken().getType() == HiveASTParser.TOK_PTBLFUNCTION
                        ? unescapeIdentifier(left.getChild(1).getText().toLowerCase())
                        : leftTableAlias;
        leftRel = aliasToRel.get(leftTableAlias);
    } else if (HiveParserUtils.isJoinToken(left)) {
        // The left branch is itself a join: recurse to build its plan first.
        leftRel = genJoinLogicalPlan(left, aliasToRel);
    } else {
        assert false;
    }
    // 3. Get the right table alias
    HiveParserASTNode right = (HiveParserASTNode) joinParseTree.getChild(1);
    String rightTableAlias = null;
    if (right.getToken().getType() == HiveASTParser.TOK_TABREF
            || right.getToken().getType() == HiveASTParser.TOK_SUBQUERY
            || right.getToken().getType() == HiveASTParser.TOK_PTBLFUNCTION) {
        String tableName =
                HiveParserBaseSemanticAnalyzer.getUnescapedUnqualifiedTableName(
                                (HiveParserASTNode) right.getChild(0))
                        .toLowerCase();
        rightTableAlias =
                right.getChildCount() == 1
                        ? tableName
                        : unescapeIdentifier(right.getChild(right.getChildCount() - 1).getText().toLowerCase());
        rightTableAlias =
                right.getToken().getType() == HiveASTParser.TOK_PTBLFUNCTION
                        ? unescapeIdentifier(right.getChild(1).getText().toLowerCase())
                        : rightTableAlias;
        rightRel = aliasToRel.get(rightTableAlias);
    } else {
        assert false;
    }
    // 4. Get the join condition
    HiveParserASTNode joinCond = (HiveParserASTNode) joinParseTree.getChild(2);
    // 5. Create the join RelNode
    return genJoinRelNode(leftRel, leftTableAlias, rightRel, rightTableAlias, hiveJoinType, joinCond);
}
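Steps 2 and 3 apply the same alias-derivation rule to both join branches. The sketch below restates that rule on a hypothetical minimal Node type (identifier unescaping and error handling are elided); it is illustrative only, not the HiveParserASTNode API.

import java.util.List;

final class JoinAliasSketch {

    // Hypothetical AST node with a token type, token text, and children.
    static final class Node {
        final String type;
        final String text;
        final List<Node> children;

        Node(String type, String text, List<Node> children) {
            this.type = type;
            this.text = text;
            this.children = children;
        }
    }

    // TOK_PTBLFUNCTION keeps its alias as the second child; a branch with a single child has no
    // explicit alias (fall back to the unqualified table name); otherwise the alias is the last child.
    static String deriveAlias(Node branch) {
        if ("TOK_PTBLFUNCTION".equals(branch.type)) {
            return branch.children.get(1).text.toLowerCase();
        }
        if (branch.children.size() == 1) {
            return branch.children.get(0).text.toLowerCase();
        }
        return branch.children.get(branch.children.size() - 1).text.toLowerCase();
    }
}

Note that only the left branch is allowed to be another join token: the method recurses on the left and expects a table, subquery, or PTF on the right, consistent with a left-deep join tree such as the one produced for "t1 JOIN t2 JOIN t3".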
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserUtils, method rewriteGroupingFunctionAST:
public static HiveParserASTNode rewriteGroupingFunctionAST(
        final List<HiveParserASTNode> grpByAstExprs, HiveParserASTNode targetNode, final boolean noneSet)
        throws SemanticException {
    final MutableBoolean visited = new MutableBoolean(false);
    final MutableBoolean found = new MutableBoolean(false);
    final boolean legacyGrouping = legacyGrouping();

    TreeVisitorAction action =
            new TreeVisitorAction() {

                @Override
                public Object pre(Object t) {
                    return t;
                }

                @Override
                public Object post(Object t) {
                    HiveParserASTNode current = (HiveParserASTNode) t;
                    // rewrite grouping function
                    if (current.getType() == HiveASTParser.TOK_FUNCTION
                            && current.getChildCount() >= 2) {
                        HiveParserASTNode func = (HiveParserASTNode) current.getChild(0);
                        if (func.getText().equals("grouping")) {
                            visited.setValue(true);
                            convertGrouping(current, grpByAstExprs, noneSet, legacyGrouping, found);
                        }
                    } else if (legacyGrouping
                            && current.getType() == HiveASTParser.TOK_TABLE_OR_COL
                            && current.getChildCount() == 1) {
                        // rewrite grouping__id
                        HiveParserASTNode child = (HiveParserASTNode) current.getChild(0);
                        if (child.getText().equalsIgnoreCase(VirtualColumn.GROUPINGID.getName())) {
                            return convertToLegacyGroupingId(current, grpByAstExprs.size());
                        }
                    }
                    return t;
                }
            };
    HiveParserASTNode newTargetNode =
            (HiveParserASTNode) new TreeVisitor(HiveASTParseDriver.ADAPTOR).visit(targetNode, action);
    if (visited.booleanValue() && !found.booleanValue()) {
        throw new SemanticException("Expression in GROUPING function not present in GROUP BY");
    }
    return newTargetNode;
}
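The rewrite is driven by ANTLR 3's TreeVisitor/TreeVisitorAction: pre is called on the way down, post on the way back up, and whatever post returns replaces the visited node in its parent. The self-contained sketch below demonstrates that replacement mechanic on plain CommonTree nodes; the token types and the oldName/newName rewrite are invented for the demo, whereas the real method substitutes grouping()/grouping__id expressions.

import org.antlr.runtime.CommonToken;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.CommonTreeAdaptor;
import org.antlr.runtime.tree.TreeVisitor;
import org.antlr.runtime.tree.TreeVisitorAction;

public class TreeVisitorSketch {

    public static void main(String[] args) {
        // Build a tiny tree: (root oldName child)
        CommonTree root = new CommonTree(new CommonToken(1, "root"));
        root.addChild(new CommonTree(new CommonToken(2, "oldName")));
        root.addChild(new CommonTree(new CommonToken(2, "child")));

        TreeVisitorAction action = new TreeVisitorAction() {

            @Override
            public Object pre(Object t) {
                return t; // nothing to do on the way down
            }

            @Override
            public Object post(Object t) {
                CommonTree node = (CommonTree) t;
                // Returning a different object here makes TreeVisitor splice it into the parent,
                // which is how the grouping()/grouping__id nodes get replaced above.
                if ("oldName".equals(node.getText())) {
                    return new CommonTree(new CommonToken(2, "newName"));
                }
                return t;
            }
        };

        CommonTree rewritten =
                (CommonTree) new TreeVisitor(new CommonTreeAdaptor()).visit(root, action);
        System.out.println(rewritten.toStringTree()); // prints: (root newName child)
    }
}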
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserUtils, method genSelectDIAST:
public static HiveParserASTNode genSelectDIAST(HiveParserRowResolver rr) {
    LinkedHashMap<String, LinkedHashMap<String, ColumnInfo>> map = rr.getRslvMap();
    HiveParserASTNode selectDI =
            new HiveParserASTNode(new CommonToken(HiveASTParser.TOK_SELECTDI, "TOK_SELECTDI"));
    // The column order follows the row resolver, which stores columns per table in the order
    // they were added: deterministic, but otherwise undefined.
    for (String tabAlias : map.keySet()) {
        for (Map.Entry<String, ColumnInfo> entry : map.get(tabAlias).entrySet()) {
            selectDI.addChild(buildSelExprSubTree(tabAlias, entry.getKey()));
        }
    }
    return selectDI;
}
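buildSelExprSubTree is not shown above; assuming it follows the usual Hive shape (a TOK_SELEXPR wrapping a "." of table alias and column name), the resulting AST can be pictured with the plain-ANTLR sketch below. The token type values are arbitrary placeholders, not the real HiveASTParser constants.

import org.antlr.runtime.CommonToken;
import org.antlr.runtime.tree.CommonTree;

final class SelectDistinctAstSketch {

    // Placeholder token types for the demo.
    private static final int TOK_SELECTDI = 10, TOK_SELEXPR = 11, TOK_TABLE_OR_COL = 12, DOT = 13, ID = 14;

    // Assumed shape of one select expression: TOK_SELEXPR -> "." -> (TOK_TABLE_OR_COL alias, col).
    static CommonTree selExpr(String tabAlias, String col) {
        CommonTree selexpr = new CommonTree(new CommonToken(TOK_SELEXPR, "TOK_SELEXPR"));
        CommonTree dot = new CommonTree(new CommonToken(DOT, "."));
        CommonTree tableOrCol = new CommonTree(new CommonToken(TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL"));
        tableOrCol.addChild(new CommonTree(new CommonToken(ID, tabAlias)));
        dot.addChild(tableOrCol);
        dot.addChild(new CommonTree(new CommonToken(ID, col)));
        selexpr.addChild(dot);
        return selexpr;
    }

    public static void main(String[] args) {
        CommonTree selectDI = new CommonTree(new CommonToken(TOK_SELECTDI, "TOK_SELECTDI"));
        selectDI.addChild(selExpr("t", "a"));
        selectDI.addChild(selExpr("t", "b"));
        // prints: (TOK_SELECTDI (TOK_SELEXPR (. (TOK_TABLE_OR_COL t) a)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL t) b)))
        System.out.println(selectDI.toStringTree());
    }
}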
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserUtils, method extractLateralViewInfo:
// extracts useful information for a given lateral view node
public static LateralViewInfo extractLateralViewInfo(
        HiveParserASTNode lateralView,
        HiveParserRowResolver inputRR,
        HiveParserSemanticAnalyzer hiveAnalyzer,
        FrameworkConfig frameworkConfig,
        RelOptCluster cluster)
        throws SemanticException {
    // checks the left sub-tree
    HiveParserASTNode sel = (HiveParserASTNode) lateralView.getChild(0);
    Preconditions.checkArgument(sel.getToken().getType() == HiveASTParser.TOK_SELECT);
    Preconditions.checkArgument(sel.getChildCount() == 1);
    HiveParserASTNode selExpr = (HiveParserASTNode) sel.getChild(0);
    Preconditions.checkArgument(selExpr.getToken().getType() == HiveASTParser.TOK_SELEXPR);
    // decide function name and function
    HiveParserASTNode func = (HiveParserASTNode) selExpr.getChild(0);
    Preconditions.checkArgument(func.getToken().getType() == HiveASTParser.TOK_FUNCTION);
    String funcName = getFunctionText(func, true);
    SqlOperator sqlOperator =
            getSqlOperator(
                    funcName,
                    frameworkConfig.getOperatorTable(),
                    SqlFunctionCategory.USER_DEFINED_TABLE_FUNCTION);
    Preconditions.checkArgument(isUDTF(sqlOperator), funcName + " is not a valid UDTF");
    // decide operands
    List<ExprNodeDesc> operands = new ArrayList<>(func.getChildCount() - 1);
    List<ColumnInfo> operandColInfos = new ArrayList<>(func.getChildCount() - 1);
    HiveParserTypeCheckCtx typeCheckCtx = new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster);
    for (int i = 1; i < func.getChildCount(); i++) {
        ExprNodeDesc exprDesc =
                hiveAnalyzer.genExprNodeDesc((HiveParserASTNode) func.getChild(i), inputRR, typeCheckCtx);
        operands.add(exprDesc);
        operandColInfos.add(
                new ColumnInfo(
                        getColumnInternalName(i - 1), exprDesc.getWritableObjectInspector(), null, false));
    }
    // decide table alias -- there must be a table alias
    HiveParserASTNode tabAliasNode = (HiveParserASTNode) selExpr.getChild(selExpr.getChildCount() - 1);
    Preconditions.checkArgument(tabAliasNode.getToken().getType() == HiveASTParser.TOK_TABALIAS);
    String tabAlias = unescapeIdentifier(tabAliasNode.getChild(0).getText().toLowerCase());
    // decide column aliases -- column aliases are optional
    List<String> colAliases = new ArrayList<>();
    for (int i = 1; i < selExpr.getChildCount() - 1; i++) {
        HiveParserASTNode child = (HiveParserASTNode) selExpr.getChild(i);
        Preconditions.checkArgument(child.getToken().getType() == HiveASTParser.Identifier);
        colAliases.add(unescapeIdentifier(child.getText().toLowerCase()));
    }
    return new LateralViewInfo(funcName, sqlOperator, operands, operandColInfos, colAliases, tabAlias);
}
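The method relies on a fixed child-index convention: for a clause like "LATERAL VIEW explode(arr) t AS c1, c2", the TOK_SELEXPR children are [TOK_FUNCTION, c1, c2, TOK_TABALIAS(t)]. The sketch below restates the extraction over a hypothetical minimal Node type; the names and structure are illustrative only, and identifier unescaping and type checking are elided.

import java.util.ArrayList;
import java.util.List;

final class LateralViewShapeSketch {

    // Hypothetical AST node with a token type, token text, and children.
    static final class Node {
        final String type;
        final String text;
        final List<Node> children;

        Node(String type, String text, List<Node> children) {
            this.type = type;
            this.text = text;
            this.children = children;
        }
    }

    // Summarizes a TOK_SELEXPR node whose children are [TOK_FUNCTION, alias..., TOK_TABALIAS].
    static String describe(Node selExpr) {
        List<Node> kids = selExpr.children;
        String funcName = kids.get(0).children.get(0).text;               // child 0: the UDTF call
        String tabAlias = kids.get(kids.size() - 1).children.get(0).text; // last child: TOK_TABALIAS
        List<String> colAliases = new ArrayList<>();
        for (int i = 1; i < kids.size() - 1; i++) {                       // middle children: column aliases
            colAliases.add(kids.get(i).text.toLowerCase());
        }
        return funcName + " AS " + colAliases + " (table alias " + tabAlias.toLowerCase() + ")";
    }
}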