Usage example of org.apache.flink.table.planner.delegation.hive.parse.HiveParserDDLSemanticAnalyzer from the Apache Flink project: the processCmd method of the HiveParser class.
/**
 * Parses a single Hive SQL statement into Flink {@link Operation}s.
 *
 * <p>DDL statements (node types in {@code DDL_NODES}) are converted by
 * {@link HiveParserDDLSemanticAnalyzer}; everything else goes through {@code analyzeSql},
 * with EXPLAIN statements unwrapped first and re-wrapped in an {@link ExplainOperation}
 * afterwards. If Hive's parser rejects the statement, the command is retried with the
 * default Flink parser, since Flink-specific statements (e.g. catalog DDLs) are not
 * valid Hive syntax.
 *
 * @param cmd the SQL statement text
 * @param hiveConf the Hive configuration used for parsing and analysis
 * @param hiveShim shim providing version-specific Hive behavior
 * @param hiveCatalog the catalog the DDL analyzer operates against
 * @return a singleton list containing the resulting operation
 * @throws SqlParserException if both the Hive parser and the fallback parser fail
 * @throws ValidationException if semantic analysis of the parsed statement fails
 */
private List<Operation> processCmd(
        String cmd, HiveConf hiveConf, HiveShim hiveShim, HiveCatalog hiveCatalog) {
    try {
        final HiveParserContext context = new HiveParserContext(hiveConf);
        // parse statement to get AST
        final HiveParserASTNode node = HiveASTParseUtils.parse(cmd, context);
        Operation operation;
        if (DDL_NODES.contains(node.getType())) {
            // DDL goes through the dedicated DDL analyzer rather than the query path
            HiveParserQueryState queryState = new HiveParserQueryState(hiveConf);
            HiveParserDDLSemanticAnalyzer ddlAnalyzer =
                    new HiveParserDDLSemanticAnalyzer(
                            queryState,
                            hiveCatalog,
                            getCatalogManager(),
                            this,
                            hiveShim,
                            context,
                            dmlHelper);
            operation = ddlAnalyzer.convertToOperation(node);
        } else {
            final boolean explain = node.getType() == HiveASTParser.TOK_EXPLAIN;
            // for EXPLAIN, the first child is the underlying statement being explained
            HiveParserASTNode input = explain ? (HiveParserASTNode) node.getChild(0) : node;
            operation = analyzeSql(context, hiveConf, hiveShim, input);
            // explaining a no-op is itself treated as a no-op, so skip the wrapping then
            if (explain && !(operation instanceof NopOperation)) {
                operation = new ExplainOperation(operation);
            }
        }
        // single shared exit for both the DDL and the query/EXPLAIN branches
        return Collections.singletonList(operation);
    } catch (HiveASTParseException e) {
        // ParseException can happen for flink-specific statements, e.g. catalog DDLs
        try {
            return super.parse(cmd);
        } catch (SqlParserException parserException) {
            // keep the fallback parser's failure as suppressed context instead of
            // silently dropping it; the Hive parse failure remains the primary cause
            e.addSuppressed(parserException);
            throw new SqlParserException("SQL parse failed", e);
        }
    } catch (SemanticException e) {
        throw new ValidationException("HiveParser failed to parse " + cmd, e);
    }
}
Aggregations