Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
Class HiveParserDDLSemanticAnalyzer, method convertAlterTableChangeCol.
private Operation convertAlterTableChangeCol(
        CatalogBaseTable alteredTable, String[] qualified, HiveParserASTNode ast)
        throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name]
    // [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType =
            HiveParserBaseSemanticAnalyzer.getTypeStringFromAST(
                    (HiveParserASTNode) ast.getChild(2));
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        switch (child.getToken().getType()) {
            case HiveASTParser.StringLiteral:
                newComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getText());
                break;
            case HiveASTParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol =
                        HiveParserBaseSemanticAnalyzer.unescapeIdentifier(
                                child.getChild(0).getText());
                break;
            case HiveASTParser.KW_FIRST:
                first = true;
                break;
            case HiveASTParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveASTParser.TOK_RESTRICT:
                break;
            default:
                throw new ValidationException(
                        "Unsupported token: " + child.getToken() + " for alter table");
        }
    }
    // Validate the operation of renaming a column name.
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo)
            && (null != skewInfo.getSkewedColNames())
            && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new ValidationException(
                oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    String oldName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(oldColName);
    String newName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(newColName);
    if (oldTable.getPartitionKeys().contains(oldName)) {
        // disallow changing partition columns
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = oldTable.getSchema();
    TableColumn newTableColumn =
            TableColumn.physical(
                    newName,
                    HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(newType)));
    TableSchema newSchema =
            OperationConverterUtils.changeColumn(oldSchema, oldName, newTableColumn, first, flagCol);
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    return new AlterTableSchemaOperation(
            tableIdentifier,
            new CatalogTableImpl(
                    newSchema, oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
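The actual reordering is delegated to OperationConverterUtils.changeColumn, driven by the first flag and flagCol (the AFTER column) parsed above. Below is a minimal, hypothetical sketch of that reordering on plain column names, not the Flink utility itself:

import java.util.ArrayList;
import java.util.List;

public class ChangeColumnSketch {

    // Remove the old column, then re-insert the renamed one at the requested position.
    static List<String> changeColumn(
            List<String> cols, String oldName, String newName, boolean first, String after) {
        List<String> result = new ArrayList<>(cols);
        int idx = result.indexOf(oldName);
        if (idx < 0) {
            throw new IllegalArgumentException("Unknown column: " + oldName);
        }
        result.remove(idx);
        if (first) {
            result.add(0, newName); // FIRST
        } else if (after != null) {
            result.add(result.indexOf(after) + 1, newName); // AFTER <column>
        } else {
            result.add(idx, newName); // neither given: keep the original position
        }
        return result;
    }

    public static void main(String[] args) {
        // ALTER TABLE t CHANGE COLUMN b b2 ... AFTER c
        System.out.println(changeColumn(List.of("a", "b", "c"), "b", "b2", false, "c"));
        // [a, c, b2]
    }
}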
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
Class HiveParserDDLSemanticAnalyzer, method convertDescribeTable.
/**
 * A query like "describe formatted default.maptable partition (b=100) id;" generates a tree
 * as follows:
 *
 *   TOK_TABTYPE
 *     TOK_TABNAME     --> root for the table name; 2 child nodes mean a DB was specified
 *       default
 *       maptable
 *     TOK_PARTSPEC    --> root node for the partition spec (else a column name)
 *       TOK_PARTVAL
 *         b
 *         100
 *     id              --> root node for the column name
 *   formatted
 */
private Operation convertDescribeTable(HiveParserASTNode ast) {
    HiveParserASTNode tableTypeExpr = (HiveParserASTNode) ast.getChild(0);
    String dbName = null;
    String tableName;
    String colPath;
    Map<String, String> partSpec;
    HiveParserASTNode tableNode;
    // tablename is either TABLENAME or DBNAME.TABLENAME if db is given
    if (tableTypeExpr.getChild(0).getType() == HiveASTParser.TOK_TABNAME) {
        tableNode = (HiveParserASTNode) tableTypeExpr.getChild(0);
        if (tableNode.getChildCount() == 1) {
            tableName = tableNode.getChild(0).getText();
        } else {
            dbName = tableNode.getChild(0).getText();
            tableName = dbName + "." + tableNode.getChild(1).getText();
        }
    } else {
        throw new ValidationException(
                tableTypeExpr.getChild(0).getText() + " is not an expected token type");
    }
    // process the second child node, if it exists, to get the partition spec(s)
    partSpec = QualifiedNameUtil.getPartitionSpec(tableTypeExpr);
    // process the third child node, if it exists, to get the column path
    colPath = QualifiedNameUtil.getColPath(tableTypeExpr, dbName, tableName, partSpec);
    if (partSpec != null) {
        handleUnsupportedOperation("DESCRIBE PARTITION is not supported");
    }
    if (!colPath.equals(tableName)) {
        handleUnsupportedOperation("DESCRIBE COLUMNS is not supported");
    }
    boolean isExt = false;
    boolean isFormatted = false;
    if (ast.getChildCount() == 2) {
        int descOptions = ast.getChild(1).getType();
        isExt = descOptions == HiveASTParser.KW_EXTENDED;
        isFormatted = descOptions == HiveASTParser.KW_FORMATTED;
        if (descOptions == HiveASTParser.KW_PRETTY) {
            handleUnsupportedOperation("DESCRIBE PRETTY is not supported.");
        }
    }
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tableName);
    return new DescribeTableOperation(tableIdentifier, isExt || isFormatted);
}
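Note that EXTENDED and FORMATTED both collapse into the single boolean passed to DescribeTableOperation, while PRETTY is rejected. A small standalone sketch of that mapping (the enum and method names are illustrative, not Flink API):

public class DescribeFlagSketch {

    enum DescOption { NONE, EXTENDED, FORMATTED, PRETTY }

    static boolean toExtendedFlag(DescOption opt) {
        switch (opt) {
            case EXTENDED:
            case FORMATTED:
                // both become DescribeTableOperation(..., true)
                return true;
            case PRETTY:
                throw new UnsupportedOperationException("DESCRIBE PRETTY is not supported.");
            default:
                return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(toExtendedFlag(DescOption.FORMATTED)); // true
        System.out.println(toExtendedFlag(DescOption.NONE)); // false
    }
}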
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
Class HiveParserDDLSemanticAnalyzer, method convertCreateDatabase.
private Operation convertCreateDatabase(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String dbComment = null;
    String dbLocation = null;
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        switch (childNode.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_DATABASECOMMENT:
                dbComment =
                        HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                                childNode.getChild(0).getText());
                break;
            case HiveASTParser.TOK_DATABASEPROPERTIES:
                dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
                break;
            case HiveASTParser.TOK_DATABASELOCATION:
                dbLocation =
                        HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                                childNode.getChild(0).getText());
                break;
            default:
                throw new ValidationException(
                        "Unknown AST node for CREATE DATABASE: " + childNode);
        }
    }
    Map<String, String> props = new HashMap<>();
    if (dbProps != null) {
        props.putAll(dbProps);
    }
    if (dbLocation != null) {
        props.put(DATABASE_LOCATION_URI, dbLocation);
    }
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(props, dbComment);
    return new CreateDatabaseOperation(
            catalogManager.getCurrentCatalog(), dbName, catalogDatabase, ifNotExists);
}
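The LOCATION clause and user-supplied DBPROPERTIES end up in one flat property map on the CatalogDatabase, while the comment is kept separately. A self-contained sketch of that assembly; the value of the DATABASE_LOCATION_URI key is an assumption here, the real constant lives in the Flink Hive connector:

import java.util.HashMap;
import java.util.Map;

public class CreateDatabasePropsSketch {

    // Assumed key name for illustration only.
    static final String DATABASE_LOCATION_URI = "database.location-uri";

    static Map<String, String> buildProps(Map<String, String> dbProps, String dbLocation) {
        Map<String, String> props = new HashMap<>();
        if (dbProps != null) {
            props.putAll(dbProps); // DBPROPERTIES, if given
        }
        if (dbLocation != null) {
            props.put(DATABASE_LOCATION_URI, dbLocation); // LOCATION, if given
        }
        return props;
    }

    public static void main(String[] args) {
        // CREATE DATABASE db COMMENT 'c' LOCATION '/warehouse/db' WITH DBPROPERTIES ('k'='v')
        System.out.println(buildProps(Map.of("k", "v"), "/warehouse/db"));
        // contains k=v and database.location-uri=/warehouse/db
    }
}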
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
Class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseProperties.
private Operation convertAlterDatabaseProperties(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        if (childNode.getToken().getType() == HiveASTParser.TOK_DATABASEPROPERTIES) {
            dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
        } else {
            throw new ValidationException(
                    "Unknown AST node for ALTER DATABASE PROPERTIES: " + childNode);
        }
    }
    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_PROPS.name());
    props.putAll(dbProps);
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
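Because props is seeded from the original database properties and putAll runs last, the new properties overwrite matching keys while unrelated ones survive. A sketch of that merge semantics (the marker key string is illustrative, not the real ALTER_DATABASE_OP constant):

import java.util.HashMap;
import java.util.Map;

public class AlterDbPropsSketch {

    public static void main(String[] args) {
        Map<String, String> origin = Map.of("k1", "old", "k2", "keep");
        Map<String, String> changed = Map.of("k1", "new");

        Map<String, String> props = new HashMap<>(origin);
        props.put("alter.database.op", "CHANGE_PROPS"); // illustrative marker key
        props.putAll(changed); // runs last, so changed keys win

        System.out.println(props.get("k1")); // new
        System.out.println(props.get("k2")); // keep
    }
}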
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
Class HiveParserDDLSemanticAnalyzer, method convertAlterTableAddParts.
/**
* Add one or more partitions to a table. Useful when the data has been copied to the right
* location by some other process.
*/
private Operation convertAlterTableAddParts(String[] qualified, CommonTree ast) {
    // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists?
    // alterStatementSuffixAddPartitionsElement+)
    boolean ifNotExists = ast.getChild(0).getType() == HiveASTParser.TOK_IFNOTEXISTS;
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    boolean isView = tab.isView();
    validateAlterTableType(tab);
    int numCh = ast.getChildCount();
    int start = ifNotExists ? 1 : 0;
    String currentLocation = null;
    Map<String, String> currentPartSpec = null;
    // Parser has done some verification, so the order of tokens doesn't need to be verified
    // here.
    List<CatalogPartitionSpec> specs = new ArrayList<>();
    List<CatalogPartition> partitions = new ArrayList<>();
    for (int num = start; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        switch (child.getToken().getType()) {
            case HiveASTParser.TOK_PARTSPEC:
                if (currentPartSpec != null) {
                    specs.add(new CatalogPartitionSpec(currentPartSpec));
                    Map<String, String> props = new HashMap<>();
                    if (currentLocation != null) {
                        props.put(TABLE_LOCATION_URI, currentLocation);
                    }
                    partitions.add(new CatalogPartitionImpl(props, null));
                    currentLocation = null;
                }
                currentPartSpec = getPartSpec(child);
                // validate reserved values
                validatePartitionValues(currentPartSpec);
                break;
            case HiveASTParser.TOK_PARTITIONLOCATION:
                // if location specified, set in partition
                if (isView) {
                    throw new ValidationException("LOCATION clause illegal for view partition");
                }
                currentLocation =
                        HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                                child.getChild(0).getText());
                break;
            default:
                throw new ValidationException("Unknown child: " + child);
        }
    }
    // add the last one
    if (currentPartSpec != null) {
        specs.add(new CatalogPartitionSpec(currentPartSpec));
        Map<String, String> props = new HashMap<>();
        if (currentLocation != null) {
            props.put(TABLE_LOCATION_URI, currentLocation);
        }
        partitions.add(new CatalogPartitionImpl(props, null));
    }
    ObjectIdentifier tableIdentifier =
            tab.getDbName() == null
                    ? parseObjectIdentifier(tab.getTableName())
                    : catalogManager.qualifyIdentifier(
                            UnresolvedIdentifier.of(tab.getDbName(), tab.getTableName()));
    return new AddPartitionsOperation(tableIdentifier, ifNotExists, specs, partitions);
}
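The loop pairs each TOK_PARTSPEC with an optional following TOK_PARTITIONLOCATION by flushing the previous partition when a new spec starts, with a final flush after the loop for the last one. A self-contained sketch of the same accumulation pattern on plain strings (the token stream and names are simplified stand-ins for the AST children):

import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class AddPartitionsSketch {

    // A stand-in for the TOK_PARTSPEC / TOK_PARTITIONLOCATION children.
    record Token(String type, String value) {}

    public static void main(String[] args) {
        // ALTER TABLE t ADD PARTITION (p='1') LOCATION '/data/p1' PARTITION (p='2')
        List<Token> tokens =
                List.of(
                        new Token("PARTSPEC", "p=1"),
                        new Token("LOCATION", "/data/p1"),
                        new Token("PARTSPEC", "p=2"));

        List<Map.Entry<String, String>> partitions = new ArrayList<>(); // (spec, location) pairs
        String spec = null;
        String location = null;
        for (Token t : tokens) {
            if (t.type().equals("PARTSPEC")) {
                if (spec != null) {
                    // flush the previous partition before starting a new one
                    partitions.add(new SimpleEntry<>(spec, location));
                    location = null;
                }
                spec = t.value();
            } else {
                location = t.value();
            }
        }
        if (spec != null) {
            // add the last one, mirroring the post-loop block above
            partitions.add(new SimpleEntry<>(spec, location));
        }
        System.out.println(partitions.size()); // 2: (p=1, /data/p1) and (p=2, null)
    }
}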