Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserCalcitePlanner, method genLogicalPlan.
// Given an AST, generate and return the RelNode plan. Returns null if nothing needs to be done.
public RelNode genLogicalPlan(HiveParserASTNode ast) throws SemanticException {
    LOG.info("Starting generating logical plan");
    HiveParserPreCboCtx cboCtx = new HiveParserPreCboCtx();
    // process position aliases before generating the resolved parse tree
    processPositionAlias(ast, semanticAnalyzer.getConf());
    if (!semanticAnalyzer.genResolvedParseTree(ast, cboCtx)) {
        return null;
    }
    // remove ORDER BY from sub-queries before planning
    for (String alias : semanticAnalyzer.getQB().getSubqAliases()) {
        removeOBInSubQuery(semanticAnalyzer.getQB().getSubqForAlias(alias));
    }
    HiveParserASTNode queryForCbo = ast;
    if (cboCtx.type == HiveParserPreCboCtx.Type.CTAS
            || cboCtx.type == HiveParserPreCboCtx.Type.VIEW) {
        // for CTAS and views, nodeOfInterest is the underlying query
        queryForCbo = cboCtx.nodeOfInterest;
    }
    verifyCanHandleAst(queryForCbo, getQB(), semanticAnalyzer.getQueryProperties());
    semanticAnalyzer.disableJoinMerge = true;
    return logicalPlan();
}
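For context, a hedged sketch of how a caller might drive this entry point. Both calcitePlanner and ast are assumptions standing in for objects the surrounding parser/session code would set up; they are not names taken from the excerpt above:

    // 'calcitePlanner' is a hypothetical, fully configured HiveParserCalcitePlanner;
    // 'ast' is the root HiveParserASTNode produced by the Hive AST parser.
    RelNode plan = calcitePlanner.genLogicalPlan(ast);
    if (plan == null) {
        // null means nothing needs to be planned (no resolved parse tree was
        // generated); callers fall back to non-Calcite handling in that case
        return;
    }
    // otherwise 'plan' is the Calcite logical plan for the query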
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserCalcitePlanner, method genFilterRelNode.
private RelNode genFilterRelNode(
        HiveParserQB qb, HiveParserASTNode searchCond, RelNode srcRel,
        Map<String, Integer> outerNameToPosMap, HiveParserRowResolver outerRR,
        boolean forHavingClause) throws SemanticException {
    Map<HiveParserASTNode, RelNode> subQueryToRelNode = new HashMap<>();
    boolean isSubQuery = genSubQueryRelNode(qb, searchCond, srcRel, forHavingClause, subQueryToRelNode);
    if (isSubQuery) {
        ExprNodeDesc subQueryExpr = semanticAnalyzer.genExprNodeDesc(
                searchCond, relToRowResolver.get(srcRel), outerRR, subQueryToRelNode, forHavingClause);
        Map<String, Integer> hiveColNameToCalcitePos = relToHiveColNameCalcitePosMap.get(srcRel);
        RexNode convertedFilterLHS = new HiveParserRexNodeConverter(
                        cluster, srcRel.getRowType(), outerNameToPosMap, hiveColNameToCalcitePos,
                        relToRowResolver.get(srcRel), outerRR, 0, true, subqueryId, funcConverter)
                .convert(subQueryExpr)
                .accept(funcConverter);
        RelNode filterRel = LogicalFilter.create(srcRel, convertedFilterLHS);
        // a filter does not change the schema, so reuse the source rel's mappings
        relToHiveColNameCalcitePosMap.put(filterRel, relToHiveColNameCalcitePosMap.get(srcRel));
        relToRowResolver.put(filterRel, relToRowResolver.get(srcRel));
        subqueryId++;
        return filterRel;
    } else {
        return genFilterRelNode(searchCond, srcRel, outerNameToPosMap, outerRR, forHavingClause);
    }
}
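The method ultimately reduces to a standard Calcite pattern: build a boolean RexNode over the input row type, then wrap the input in a LogicalFilter. A minimal sketch of that pattern, assuming an existing srcRel whose column 0 is numeric (RexBuilder, SqlStdOperatorTable, and LogicalFilter are Calcite classes; srcRel is an assumption):

    // build the predicate srcRel.$0 > 10 and wrap srcRel in a filter
    RexBuilder rexBuilder = srcRel.getCluster().getRexBuilder();
    RexNode condition = rexBuilder.makeCall(
            SqlStdOperatorTable.GREATER_THAN,
            rexBuilder.makeInputRef(srcRel, 0),
            rexBuilder.makeExactLiteral(java.math.BigDecimal.TEN));
    RelNode filtered = LogicalFilter.create(srcRel, condition);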
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserCalcitePlanner, method genSubQueryRelNode.
private boolean genSubQueryRelNode(
        HiveParserQB qb, HiveParserASTNode node, RelNode srcRel, boolean forHavingClause,
        Map<HiveParserASTNode, RelNode> subQueryToRelNode) throws SemanticException {
    Set<HiveParserASTNode> corrScalarQueriesWithAgg = new HashSet<>();
    // disallow sub-queries which Hive doesn't currently support
    subqueryRestrictionCheck(qb, node, srcRel, forHavingClause, corrScalarQueriesWithAgg);
    Deque<HiveParserASTNode> stack = new ArrayDeque<>();
    stack.push(node);
    boolean isSubQuery = false;
    while (!stack.isEmpty()) {
        HiveParserASTNode next = stack.pop();
        switch (next.getType()) {
            case HiveASTParser.TOK_SUBQUERY_EXPR:
                // Restriction 2.h: a sub-query is not allowed in the LHS
                if (next.getChildren().size() == 3
                        && next.getChild(2).getType() == HiveASTParser.TOK_SUBQUERY_EXPR) {
                    throw new SemanticException(
                            HiveParserErrorMsg.getMsg(
                                    ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION,
                                    next.getChild(2),
                                    "SubQuery in LHS expressions are not supported."));
                }
                String sbQueryAlias = "sq_" + qb.incrNumSubQueryPredicates();
                HiveParserQB subQB = new HiveParserQB(qb.getId(), sbQueryAlias, true);
                HiveParserBaseSemanticAnalyzer.Phase1Ctx ctx1 = initPhase1Ctx();
                semanticAnalyzer.doPhase1((HiveParserASTNode) next.getChild(1), subQB, ctx1, null);
                semanticAnalyzer.getMetaData(subQB);
                RelNode subQueryRelNode = genLogicalPlan(
                        subQB, false, relToHiveColNameCalcitePosMap.get(srcRel), relToRowResolver.get(srcRel));
                subQueryToRelNode.put(next, subQueryRelNode);
                isSubQuery = true;
                break;
            default:
                // push children right-to-left so they pop in left-to-right order
                int childCount = next.getChildCount();
                for (int i = childCount - 1; i >= 0; i--) {
                    stack.push((HiveParserASTNode) next.getChild(i));
                }
        }
    }
    return isSubQuery;
}
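The loop above is an iterative pre-order tree walk: an explicit stack replaces recursion, and matching nodes are handled without descending into them. A self-contained sketch of the same pattern on a hypothetical Node type (not a class from the excerpt):

    // minimal stand-in for an AST node: a type tag plus ordered children
    record Node(int type, java.util.List<Node> children) {}

    static boolean containsType(Node root, int wanted) {
        java.util.Deque<Node> stack = new java.util.ArrayDeque<>();
        stack.push(root);
        while (!stack.isEmpty()) {
            Node next = stack.pop();
            if (next.type() == wanted) {
                // like TOK_SUBQUERY_EXPR above: handle the node, don't descend into it
                return true;
            }
            // push children right-to-left so they pop in left-to-right order
            for (int i = next.children().size() - 1; i >= 0; i--) {
                stack.push(next.children().get(i));
            }
        }
        return false;
    }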
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserDDLSemanticAnalyzer, method convertAlterTableDropParts.
private Operation convertAlterTableDropParts(String[] qualified, HiveParserASTNode ast) {
    boolean ifExists = ast.getFirstChildWithType(HiveASTParser.TOK_IFEXISTS) != null;
    // If the drop has to fail on non-existent partitions, we cannot batch expressions,
    // because we actually have to check each separate expression for existence.
    // We could do a small optimization for the case where an expression covers all
    // partition columns and all operators are equality, if we assume each such
    // expression always matches one partition (which may not be true with legacy,
    // non-normalized column values). That is probably a common case, but it's also
    // hacky, so we don't do it for now.
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    // Hive represents drop-partition specs with a generic function desc, but all we
    // need here are the plain spec maps
    List<Map<String, String>> partSpecs = new ArrayList<>();
    for (int i = 0; i < ast.getChildCount(); i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        if (child.getType() == HiveASTParser.TOK_PARTSPEC) {
            partSpecs.add(getPartSpec(child));
        }
    }
    validateAlterTableType(tab);
    ObjectIdentifier tableIdentifier =
            catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(qualified[0], qualified[1]));
    List<CatalogPartitionSpec> specs =
            partSpecs.stream().map(CatalogPartitionSpec::new).collect(Collectors.toList());
    return new DropPartitionsOperation(tableIdentifier, ifExists, specs);
}
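A hedged example of the data this method extracts. The statement and values are assumptions for illustration, not taken from the excerpt; CatalogPartitionSpec is Flink's thin wrapper around a partition-spec map:

    // Assumed input: ALTER TABLE db1.tbl DROP IF EXISTS
    //                PARTITION (dt='2021-01-01', region='eu')
    // Each TOK_PARTSPEC child yields one spec map, wrapped like this:
    Map<String, String> spec = new HashMap<>();
    spec.put("dt", "2021-01-01");
    spec.put("region", "eu");
    CatalogPartitionSpec catalogSpec = new CatalogPartitionSpec(spec);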
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserDDLSemanticAnalyzer, method convertAlterTable.
private Operation convertAlterTable(HiveParserASTNode input) throws SemanticException {
    Operation operation = null;
    HiveParserASTNode ast = (HiveParserASTNode) input.getChild(1);
    String[] qualified = HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
            (HiveParserASTNode) input.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    HashMap<String, String> partSpec = null;
    HiveParserASTNode partSpecNode = (HiveParserASTNode) input.getChild(2);
    if (partSpecNode != null) {
        partSpec = getPartSpec(partSpecNode);
    }
    CatalogBaseTable alteredTable = getAlteredTable(tableName, false);
    switch (ast.getType()) {
        case HiveASTParser.TOK_ALTERTABLE_RENAME:
            operation = convertAlterTableRename(tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDCOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_REPLACECOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_RENAMECOL:
            operation = convertAlterTableChangeCol(alteredTable, qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDPARTS:
            operation = convertAlterTableAddParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPARTS:
            operation = convertAlterTableDropParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_PROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_UPDATESTATS:
            operation = convertAlterTableProps(alteredTable, tableName, partSpec, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_FILEFORMAT:
            operation = convertAlterTableFileFormat(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_LOCATION:
            operation = convertAlterTableLocation(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERIALIZER:
            operation = convertAlterTableSerde(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERDEPROPERTIES:
            operation = convertAlterTableSerdeProps(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_TOUCH:
        case HiveASTParser.TOK_ALTERTABLE_ARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_UNARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_PARTCOLTYPE:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED:
        case HiveASTParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
        case HiveASTParser.TOK_ALTERTABLE_MERGEFILES:
        case HiveASTParser.TOK_ALTERTABLE_RENAMEPART:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED_LOCATION:
        case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
        case HiveASTParser.TOK_ALTERTABLE_CLUSTER_SORT:
        case HiveASTParser.TOK_ALTERTABLE_COMPACT:
        case HiveASTParser.TOK_ALTERTABLE_UPDATECOLSTATS:
        case HiveASTParser.TOK_ALTERTABLE_DROPCONSTRAINT:
        case HiveASTParser.TOK_ALTERTABLE_ADDCONSTRAINT:
            handleUnsupportedOperation(ast);
            break;
        default:
            throw new ValidationException("Unknown AST node for ALTER TABLE: " + ast);
    }
    return operation;
}
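For orientation, a few assumed examples of HiveQL statements and the token each one dispatches on in the switch above. The mapping is inferred from the case labels and standard Hive DDL syntax; the authoritative mapping is HiveASTParser's grammar:

    String rename   = "ALTER TABLE t RENAME TO t2";                 // TOK_ALTERTABLE_RENAME
    String addCols  = "ALTER TABLE t ADD COLUMNS (c INT)";          // TOK_ALTERTABLE_ADDCOLS
    String dropPart = "ALTER TABLE t DROP PARTITION (dt='2021')";   // TOK_ALTERTABLE_DROPPARTS
    String setProps = "ALTER TABLE t SET TBLPROPERTIES ('k'='v')";  // TOK_ALTERTABLE_PROPERTIES
    String setLoc   = "ALTER TABLE t SET LOCATION '/new/path'";     // TOK_ALTERTABLE_LOCATION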