Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserUtils, method getColAlias:
public static String[] getColAlias(
        HiveParserASTNode selExpr,
        String defaultName,
        HiveParserRowResolver inputRR,
        boolean includeFuncName,
        int colNum) {
    String colAlias = null;
    String tabAlias = null;
    String[] colRef = new String[2];
    // for queries with windowing expressions, the selexpr may have a third child
    if (selExpr.getChildCount() == 2
            || (selExpr.getChildCount() == 3
                    && selExpr.getChild(2).getType() == HiveASTParser.TOK_WINDOWSPEC)) {
        // return zz for "xx + yy AS zz"
        colAlias = unescapeIdentifier(selExpr.getChild(1).getText().toLowerCase());
        colRef[0] = tabAlias;
        colRef[1] = colAlias;
        return colRef;
    }
    HiveParserASTNode root = (HiveParserASTNode) selExpr.getChild(0);
    if (root.getType() == HiveASTParser.TOK_TABLE_OR_COL) {
        colAlias = unescapeIdentifier(root.getChild(0).getText().toLowerCase());
        colRef[0] = tabAlias;
        colRef[1] = colAlias;
        return colRef;
    }
    if (root.getType() == HiveASTParser.DOT) {
        HiveParserASTNode tab = (HiveParserASTNode) root.getChild(0);
        if (tab.getType() == HiveASTParser.TOK_TABLE_OR_COL) {
            String t = unescapeIdentifier(tab.getChild(0).getText());
            if (inputRR.hasTableAlias(t)) {
                tabAlias = t;
            }
        }
        // return zz for "xx.zz" and "xx.yy.zz"
        HiveParserASTNode col = (HiveParserASTNode) root.getChild(1);
        if (col.getType() == HiveASTParser.Identifier) {
            colAlias = unescapeIdentifier(col.getText().toLowerCase());
        }
    }
    // if requested, generate an alias from the function name
    if (includeFuncName && (root.getType() == HiveASTParser.TOK_FUNCTION)) {
        String exprFlattened = root.toStringTree();
        // remove all TOK tokens
        String exprNoTok = exprFlattened.replaceAll("tok_\\S+", "");
        // remove all non-alphanumeric characters, replace whitespace runs with underscores
        String exprFormatted = exprNoTok.replaceAll("\\W", " ").trim().replaceAll("\\s+", "_");
        // limit the length to 20 chars
        if (exprFormatted.length() > HiveParserSemanticAnalyzer.AUTOGEN_COLALIAS_PRFX_MAXLENGTH) {
            exprFormatted =
                    exprFormatted.substring(
                            0, HiveParserSemanticAnalyzer.AUTOGEN_COLALIAS_PRFX_MAXLENGTH);
        }
        // append colNum to make the generated alias unique
        colAlias = exprFormatted.concat("_" + colNum);
    }
    if (colAlias == null) {
        // return defaultName if selExpr is not a simple xx.yy.zz
        colAlias = defaultName + colNum;
    }
    colRef[0] = tabAlias;
    colRef[1] = colAlias;
    return colRef;
}
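To see what the TOK_FUNCTION branch above produces, here is a minimal, self-contained sketch of the same regex pipeline. The class name, the input string, and the hard-coded 20-character cap are illustrative stand-ins (the real cap comes from HiveParserSemanticAnalyzer.AUTOGEN_COLALIAS_PRFX_MAXLENGTH):

public class AutoAliasSketch {
    // stand-in for HiveParserSemanticAnalyzer.AUTOGEN_COLALIAS_PRFX_MAXLENGTH
    private static final int MAX_PREFIX_LENGTH = 20;

    static String autoAlias(String flattenedExpr, int colNum) {
        // drop token names such as tok_function / tok_table_or_col
        String noTok = flattenedExpr.replaceAll("tok_\\S+", "");
        // blank out non-word characters, then collapse whitespace runs into underscores
        String formatted = noTok.replaceAll("\\W", " ").trim().replaceAll("\\s+", "_");
        if (formatted.length() > MAX_PREFIX_LENGTH) {
            formatted = formatted.substring(0, MAX_PREFIX_LENGTH);
        }
        // append the column number to keep generated aliases unique
        return formatted + "_" + colNum;
    }

    public static void main(String[] args) {
        // a flattened tree such as the one for "sum(price * quantity)"
        String tree = "(tok_function sum (* (tok_table_or_col price) (tok_table_or_col quantity)))";
        System.out.println(autoAlias(tree, 3)); // prints: sum_price_quantity_3
    }
}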
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserUtils, method buildSelExprSubTree:
// builds the subtree TOK_SELEXPR -> DOT(TOK_TABLE_OR_COL(tableAlias), col),
// i.e. the select expression "tableAlias.col"
private static HiveParserASTNode buildSelExprSubTree(String tableAlias, String col) {
    HiveParserASTNode selexpr =
            new HiveParserASTNode(new CommonToken(HiveASTParser.TOK_SELEXPR, "TOK_SELEXPR"));
    HiveParserASTNode tableOrCol =
            new HiveParserASTNode(
                    new CommonToken(HiveASTParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL"));
    HiveParserASTNode dot = new HiveParserASTNode(new CommonToken(HiveASTParser.DOT, "."));
    tableOrCol.addChild(
            new HiveParserASTNode(new CommonToken(HiveASTParser.Identifier, tableAlias)));
    dot.addChild(tableOrCol);
    dot.addChild(new HiveParserASTNode(new CommonToken(HiveASTParser.Identifier, col)));
    selexpr.addChild(dot);
    return selexpr;
}
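Since HiveParserASTNode wraps ANTLR 3's tree classes, the shape this method builds can be reproduced with plain antlr-runtime types. A minimal sketch, assuming antlr-runtime 3.x on the classpath; the token-type constants here are illustrative stand-ins for HiveASTParser's:

import org.antlr.runtime.CommonToken;
import org.antlr.runtime.tree.CommonTree;

public class SelExprTreeSketch {
    // illustrative token types; the real values come from HiveASTParser
    static final int TOK_SELEXPR = 1, TOK_TABLE_OR_COL = 2, DOT = 3, IDENTIFIER = 4;

    static CommonTree selExprFor(String tableAlias, String col) {
        CommonTree selExpr = new CommonTree(new CommonToken(TOK_SELEXPR, "TOK_SELEXPR"));
        CommonTree tableOrCol =
                new CommonTree(new CommonToken(TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL"));
        CommonTree dot = new CommonTree(new CommonToken(DOT, "."));
        tableOrCol.addChild(new CommonTree(new CommonToken(IDENTIFIER, tableAlias)));
        dot.addChild(tableOrCol);
        dot.addChild(new CommonTree(new CommonToken(IDENTIFIER, col)));
        selExpr.addChild(dot);
        return selExpr;
    }

    public static void main(String[] args) {
        // prints: (TOK_SELEXPR (. (TOK_TABLE_OR_COL t) x))
        System.out.println(selExprFor("t", "x").toStringTree());
    }
}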
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParser, method processCmd:
private List<Operation> processCmd(
        String cmd, HiveConf hiveConf, HiveShim hiveShim, HiveCatalog hiveCatalog) {
    try {
        final HiveParserContext context = new HiveParserContext(hiveConf);
        // parse the statement to get the AST
        final HiveParserASTNode node = HiveASTParseUtils.parse(cmd, context);
        Operation operation;
        if (DDL_NODES.contains(node.getType())) {
            HiveParserQueryState queryState = new HiveParserQueryState(hiveConf);
            HiveParserDDLSemanticAnalyzer ddlAnalyzer =
                    new HiveParserDDLSemanticAnalyzer(
                            queryState,
                            hiveCatalog,
                            getCatalogManager(),
                            this,
                            hiveShim,
                            context,
                            dmlHelper);
            operation = ddlAnalyzer.convertToOperation(node);
            return Collections.singletonList(operation);
        } else {
            final boolean explain = node.getType() == HiveASTParser.TOK_EXPLAIN;
            // the first child is the statement being explained
            HiveParserASTNode input = explain ? (HiveParserASTNode) node.getChild(0) : node;
            operation = analyzeSql(context, hiveConf, hiveShim, input);
            // explaining a nop is also considered a nop
            if (explain && !(operation instanceof NopOperation)) {
                operation = new ExplainOperation(operation);
            }
        }
        return Collections.singletonList(operation);
    } catch (HiveASTParseException e) {
        // a parse exception can happen for Flink-specific statements, e.g. catalog DDLs;
        // fall back to the default parser
        try {
            return super.parse(cmd);
        } catch (SqlParserException parserException) {
            throw new SqlParserException("SQL parse failed", e);
        }
    } catch (SemanticException e) {
        throw new ValidationException("HiveParser failed to parse " + cmd, e);
    }
}
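The catch blocks implement a dialect-fallback pattern: if the Hive parser rejects the statement, the default Flink parser gets a chance, and if both fail, the original Hive parse error is the one surfaced. A minimal, self-contained sketch of that pattern; all names here (Parser, DialectParseException, FallbackParserSketch) are hypothetical stand-ins for the Flink classes:

import java.util.Collections;
import java.util.List;

public class FallbackParserSketch {

    static class DialectParseException extends Exception {
        DialectParseException(String msg) {
            super(msg);
        }
    }

    interface Parser {
        List<String> parse(String sql) throws DialectParseException;
    }

    static List<String> parse(Parser hiveParser, Parser defaultParser, String sql) {
        try {
            return hiveParser.parse(sql);
        } catch (DialectParseException hiveError) {
            try {
                // Flink-specific statements (e.g. catalog DDLs) are not Hive syntax
                return defaultParser.parse(sql);
            } catch (DialectParseException ignored) {
                // surface the original Hive failure, as processCmd does
                throw new IllegalStateException("SQL parse failed", hiveError);
            }
        }
    }

    public static void main(String[] args) {
        Parser hive = sql -> {
            throw new DialectParseException("not Hive syntax");
        };
        Parser fallback = sql -> Collections.singletonList("CreateCatalogOperation");
        // prints: [CreateCatalogOperation]
        System.out.println(parse(hive, fallback, "CREATE CATALOG c WITH ('type'='hive')"));
    }
}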
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserDDLSemanticAnalyzer, method convertCreateTable:
private Operation convertCreateTable(HiveParserASTNode ast) throws SemanticException {
    String[] qualifiedTabName =
            HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
                    (HiveParserASTNode) ast.getChild(0));
    String dbDotTab = HiveParserBaseSemanticAnalyzer.getDotName(qualifiedTabName);
    String likeTableName;
    List<FieldSchema> cols = new ArrayList<>();
    List<FieldSchema> partCols = new ArrayList<>();
    List<PrimaryKey> primaryKeys = new ArrayList<>();
    List<NotNullConstraint> notNulls = new ArrayList<>();
    String comment = null;
    String location = null;
    Map<String, String> tblProps = null;
    boolean ifNotExists = false;
    boolean isExt = false;
    boolean isTemporary = false;
    HiveParserASTNode selectStmt = null;
    // regular CREATE TABLE
    final int createTable = 0;
    // CREATE TABLE LIKE ... (CTLT)
    final int ctlt = 1;
    // CREATE TABLE AS SELECT ... (CTAS)
    final int ctas = 2;
    int commandType = createTable;
    HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams rowFormatParams =
            new HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams();
    HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
    LOG.info("Creating table " + dbDotTab + " position=" + ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    // note: CTAS does not support partitioning (for now)
    for (int num = 1; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            continue;
        }
        switch (child.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.KW_EXTERNAL:
                isExt = true;
                break;
            case HiveASTParser.KW_TEMPORARY:
                isTemporary = true;
                break;
            case HiveASTParser.TOK_LIKETABLE:
                if (child.getChildCount() > 0) {
                    likeTableName =
                            HiveParserBaseSemanticAnalyzer.getUnescapedName(
                                    (HiveParserASTNode) child.getChild(0));
                    if (likeTableName != null) {
                        if (commandType == ctas) {
                            throw new ValidationException(
                                    ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
                        }
                        if (cols.size() != 0) {
                            throw new ValidationException(
                                    ErrorMsg.CTLT_COLLST_COEXISTENCE.getMsg());
                        }
                    }
                    commandType = ctlt;
                    handleUnsupportedOperation("CREATE TABLE LIKE is not supported");
                }
                break;
            case HiveASTParser.TOK_QUERY: // CTAS
                if (commandType == ctlt) {
                    throw new ValidationException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
                }
                if (cols.size() != 0) {
                    throw new ValidationException(ErrorMsg.CTAS_COLLST_COEXISTENCE.getMsg());
                }
                if (partCols.size() != 0) {
                    throw new ValidationException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
                }
                if (isExt) {
                    throw new ValidationException(ErrorMsg.CTAS_EXTTBL_COEXISTENCE.getMsg());
                }
                commandType = ctas;
                selectStmt = child;
                break;
            case HiveASTParser.TOK_TABCOLLIST:
                cols =
                        HiveParserBaseSemanticAnalyzer.getColumns(
                                child, true, primaryKeys, notNulls);
                break;
            case HiveASTParser.TOK_TABLECOMMENT:
                comment =
                        HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                                child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPARTCOLS:
                partCols =
                        HiveParserBaseSemanticAnalyzer.getColumns(
                                (HiveParserASTNode) child.getChild(0), false);
                break;
            case HiveASTParser.TOK_TABLEROWFORMAT:
                rowFormatParams.analyzeRowFormat(child);
                break;
            case HiveASTParser.TOK_TABLELOCATION:
                location =
                        HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                                child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((HiveParserASTNode) child.getChild(0));
                break;
            case HiveASTParser.TOK_TABLESERIALIZER:
                child = (HiveParserASTNode) child.getChild(0);
                storageFormat.setSerde(
                        HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                                child.getChild(0).getText()));
                if (child.getChildCount() == 2) {
                    HiveParserBaseSemanticAnalyzer.readProps(
                            (HiveParserASTNode) (child.getChild(1).getChild(0)),
                            storageFormat.getSerdeProps());
                }
                break;
            case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
                handleUnsupportedOperation("Bucketed table is not supported");
                break;
            case HiveASTParser.TOK_TABLESKEWED:
                handleUnsupportedOperation("Skewed table is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE TABLE: " + child);
        }
    }
    if (storageFormat.getStorageHandler() != null) {
        handleUnsupportedOperation("Storage handler table is not supported");
    }
    if (commandType == createTable || commandType == ctlt) {
        queryState.setCommandType(HiveOperation.CREATETABLE);
    } else {
        queryState.setCommandType(HiveOperation.CREATETABLE_AS_SELECT);
    }
    storageFormat.fillDefaultStorageFormat(isExt, false);
    if (isTemporary) {
        if (partCols.size() > 0) {
            handleUnsupportedOperation(
                    "Partition columns are not supported on temporary tables");
        }
        handleUnsupportedOperation("Temporary hive table is not supported");
    }
    // handle the different types of CREATE TABLE commands
    switch (commandType) {
        case createTable: // regular CREATE TABLE DDL
            tblProps = addDefaultProperties(tblProps);
            return convertCreateTable(
                    dbDotTab,
                    isExt,
                    ifNotExists,
                    isTemporary,
                    cols,
                    partCols,
                    comment,
                    location,
                    tblProps,
                    rowFormatParams,
                    storageFormat,
                    primaryKeys,
                    notNulls);
        case ctlt: // CREATE TABLE LIKE <tbl_name>
            tblProps = addDefaultProperties(tblProps);
            throw new SemanticException("CREATE TABLE LIKE is not supported yet");
        case ctas: // CREATE TABLE AS SELECT
            tblProps = addDefaultProperties(tblProps);
            // analyze the query
            HiveParserCalcitePlanner calcitePlanner =
                    hiveParser.createCalcitePlanner(context, queryState, hiveShim);
            calcitePlanner.setCtasCols(cols);
            RelNode queryRelNode = calcitePlanner.genLogicalPlan(selectStmt);
            // create a table to represent the dest table
            String[] dbTblName = dbDotTab.split("\\.");
            Table destTable = new Table(Table.getEmptyTable(dbTblName[0], dbTblName[1]));
            destTable.getSd().setCols(cols);
            Tuple4<ObjectIdentifier, QueryOperation, Map<String, String>, Boolean>
                    insertOperationInfo =
                            dmlHelper.createInsertOperationInfo(
                                    queryRelNode,
                                    destTable,
                                    Collections.emptyMap(),
                                    Collections.emptyList(),
                                    false);
            CreateTableOperation createTableOperation =
                    convertCreateTable(
                            dbDotTab,
                            isExt,
                            ifNotExists,
                            isTemporary,
                            cols,
                            partCols,
                            comment,
                            location,
                            tblProps,
                            rowFormatParams,
                            storageFormat,
                            primaryKeys,
                            notNulls);
            return new CreateTableASOperation(
                    createTableOperation,
                    insertOperationInfo.f2,
                    insertOperationInfo.f1,
                    insertOperationInfo.f3);
        default:
            throw new ValidationException("Unrecognized command.");
    }
}
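Reading the switch on commandType together with the coexistence checks in the loop, the mapping from statement shape to outcome looks roughly like this (the statements are illustrative examples, not from the source):

// CREATE TABLE t (i INT)                 -> createTable: regular DDL, CreateTableOperation
// CREATE TABLE t LIKE s                  -> ctlt: rejected ("CREATE TABLE LIKE is not supported")
// CREATE TABLE t AS SELECT * FROM s      -> ctas: query planned, wrapped in CreateTableASOperation
// CREATE TABLE t (i INT) AS SELECT ...   -> ValidationException (CTAS_COLLST_COEXISTENCE)
// CREATE EXTERNAL TABLE t AS SELECT ...  -> ValidationException (CTAS_EXTTBL_COEXISTENCE)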
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
The class HiveParserDDLSemanticAnalyzer, method getPartitionSpecs:
// Get the partition specs from the tree
private List<Map<String, String>> getPartitionSpecs(CommonTree ast) {
    List<Map<String, String>> partSpecs = new ArrayList<>();
    // get partition metadata if a partition is specified
    for (int childIndex = 0; childIndex < ast.getChildCount(); childIndex++) {
        HiveParserASTNode partSpecNode = (HiveParserASTNode) ast.getChild(childIndex);
        // sanity check
        if (partSpecNode.getType() == HiveASTParser.TOK_PARTSPEC) {
            Map<String, String> partSpec = getPartSpec(partSpecNode);
            partSpecs.add(partSpec);
        }
    }
    return partSpecs;
}
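As an illustration of the return shape (the statement below is an example, not from the source): each TOK_PARTSPEC child of the AST yields one map from partition column to value, so a statement with two PARTITION clauses produces a two-element list.

// ALTER TABLE t DROP PARTITION (ds='2024-01-01'), PARTITION (ds='2024-01-02')
// getPartitionSpecs(ast) -> [{ds=2024-01-01}, {ds=2024-01-02}]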