Use of org.apache.flink.table.operations.ddl.AlterTableSchemaOperation in project flink by apache.
From the class HiveParserDDLSemanticAnalyzer, the method convertAlterTableModifyCols, which translates Hive's ALTER TABLE ADD/REPLACE COLUMNS into an AlterTableSchemaOperation:
private Operation convertAlterTableModifyCols(
        CatalogBaseTable alteredTable, String tblName, HiveParserASTNode ast, boolean replace)
        throws SemanticException {
    List<FieldSchema> newCols =
            HiveParserBaseSemanticAnalyzer.getColumns((HiveParserASTNode) ast.getChild(0));
    boolean isCascade = false;
    if (null != ast.getFirstChildWithType(HiveASTParser.TOK_CASCADE)) {
        isCascade = true;
    }
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    // prepare properties
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    TableSchema oldSchema = oldTable.getSchema();
    final int numPartCol = oldTable.getPartitionKeys().size();
    TableSchema.Builder builder = TableSchema.builder();
    // add existing non-part col if we're not replacing
    if (!replace) {
        List<TableColumn> nonPartCols =
                oldSchema.getTableColumns().subList(0, oldSchema.getFieldCount() - numPartCol);
        for (TableColumn column : nonPartCols) {
            builder.add(column);
        }
        setWatermarkAndPK(builder, oldSchema);
    }
    // add new cols
    for (FieldSchema col : newCols) {
        builder.add(
                TableColumn.physical(
                        col.getName(),
                        HiveTypeUtil.toFlinkType(
                                TypeInfoUtils.getTypeInfoFromTypeString(col.getType()))));
    }
    // add part cols
    List<TableColumn> partCols =
            oldSchema.getTableColumns()
                    .subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount());
    for (TableColumn column : partCols) {
        builder.add(column);
    }
    return new AlterTableSchemaOperation(
            tableIdentifier,
            new CatalogTableImpl(
                    builder.build(), oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
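The converter rebuilds the schema in a fixed order: surviving non-partition columns, then the newly declared columns, then the partition columns re-appended at the end. A minimal sketch of that ordering with the legacy TableSchema builder used above (column names are hypothetical):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableSchema;

TableSchema.Builder builder = TableSchema.builder();
// surviving non-partition column (kept because replace == false)
builder.add(TableColumn.physical("id", DataTypes.BIGINT()));
// column introduced by ALTER TABLE ... ADD COLUMNS
builder.add(TableColumn.physical("price", DataTypes.DOUBLE()));
// partition column, always re-appended last
builder.add(TableColumn.physical("dt", DataTypes.STRING()));
TableSchema newSchema = builder.build();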
Use of org.apache.flink.table.operations.ddl.AlterTableSchemaOperation in project flink by apache.
From the class TableEnvironmentImpl, the method executeInternal, which dispatches parsed operations (including AlterTableSchemaOperation) to the catalog manager:
@Override
public TableResultInternal executeInternal(Operation operation) {
    if (operation instanceof ModifyOperation) {
        return executeInternal(Collections.singletonList((ModifyOperation) operation));
    } else if (operation instanceof StatementSetOperation) {
        return executeInternal(((StatementSetOperation) operation).getOperations());
    } else if (operation instanceof CreateTableOperation) {
        CreateTableOperation createTableOperation = (CreateTableOperation) operation;
        if (createTableOperation.isTemporary()) {
            catalogManager.createTemporaryTable(
                    createTableOperation.getCatalogTable(),
                    createTableOperation.getTableIdentifier(),
                    createTableOperation.isIgnoreIfExists());
        } else {
            catalogManager.createTable(
                    createTableOperation.getCatalogTable(),
                    createTableOperation.getTableIdentifier(),
                    createTableOperation.isIgnoreIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof DropTableOperation) {
        DropTableOperation dropTableOperation = (DropTableOperation) operation;
        if (dropTableOperation.isTemporary()) {
            catalogManager.dropTemporaryTable(
                    dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists());
        } else {
            catalogManager.dropTable(
                    dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof AlterTableOperation) {
        AlterTableOperation alterTableOperation = (AlterTableOperation) operation;
        Catalog catalog =
                getCatalogOrThrowException(
                        alterTableOperation.getTableIdentifier().getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterTableOperation.asSummaryString());
        try {
            if (alterTableOperation instanceof AlterTableRenameOperation) {
                AlterTableRenameOperation alterTableRenameOp =
                        (AlterTableRenameOperation) operation;
                catalog.renameTable(
                        alterTableRenameOp.getTableIdentifier().toObjectPath(),
                        alterTableRenameOp.getNewTableIdentifier().getObjectName(),
                        false);
            } else if (alterTableOperation instanceof AlterTableOptionsOperation) {
                AlterTableOptionsOperation alterTablePropertiesOp =
                        (AlterTableOptionsOperation) operation;
                catalogManager.alterTable(
                        alterTablePropertiesOp.getCatalogTable(),
                        alterTablePropertiesOp.getTableIdentifier(),
                        false);
            } else if (alterTableOperation instanceof AlterTableAddConstraintOperation) {
                AlterTableAddConstraintOperation addConstraintOP =
                        (AlterTableAddConstraintOperation) operation;
                CatalogTable oriTable =
                        catalogManager
                                .getTable(addConstraintOP.getTableIdentifier())
                                .get()
                                .getTable();
                TableSchema.Builder builder =
                        TableSchemaUtils.builderWithGivenSchema(oriTable.getSchema());
                if (addConstraintOP.getConstraintName().isPresent()) {
                    builder.primaryKey(
                            addConstraintOP.getConstraintName().get(),
                            addConstraintOP.getColumnNames());
                } else {
                    builder.primaryKey(addConstraintOP.getColumnNames());
                }
                CatalogTable newTable =
                        new CatalogTableImpl(
                                builder.build(),
                                oriTable.getPartitionKeys(),
                                oriTable.getOptions(),
                                oriTable.getComment());
                catalogManager.alterTable(newTable, addConstraintOP.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AlterTableDropConstraintOperation) {
                AlterTableDropConstraintOperation dropConstraintOperation =
                        (AlterTableDropConstraintOperation) operation;
                CatalogTable oriTable =
                        catalogManager
                                .getTable(dropConstraintOperation.getTableIdentifier())
                                .get()
                                .getTable();
                CatalogTable newTable =
                        new CatalogTableImpl(
                                TableSchemaUtils.dropConstraint(
                                        oriTable.getSchema(),
                                        dropConstraintOperation.getConstraintName()),
                                oriTable.getPartitionKeys(),
                                oriTable.getOptions(),
                                oriTable.getComment());
                catalogManager.alterTable(
                        newTable, dropConstraintOperation.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AlterPartitionPropertiesOperation) {
                AlterPartitionPropertiesOperation alterPartPropsOp =
                        (AlterPartitionPropertiesOperation) operation;
                catalog.alterPartition(
                        alterPartPropsOp.getTableIdentifier().toObjectPath(),
                        alterPartPropsOp.getPartitionSpec(),
                        alterPartPropsOp.getCatalogPartition(),
                        false);
            } else if (alterTableOperation instanceof AlterTableSchemaOperation) {
                AlterTableSchemaOperation alterTableSchemaOperation =
                        (AlterTableSchemaOperation) alterTableOperation;
                catalogManager.alterTable(
                        alterTableSchemaOperation.getCatalogTable(),
                        alterTableSchemaOperation.getTableIdentifier(),
                        false);
            } else if (alterTableOperation instanceof AddPartitionsOperation) {
                AddPartitionsOperation addPartitionsOperation =
                        (AddPartitionsOperation) alterTableOperation;
                List<CatalogPartitionSpec> specs = addPartitionsOperation.getPartitionSpecs();
                List<CatalogPartition> partitions =
                        addPartitionsOperation.getCatalogPartitions();
                boolean ifNotExists = addPartitionsOperation.ifNotExists();
                ObjectPath tablePath =
                        addPartitionsOperation.getTableIdentifier().toObjectPath();
                for (int i = 0; i < specs.size(); i++) {
                    catalog.createPartition(
                            tablePath, specs.get(i), partitions.get(i), ifNotExists);
                }
            } else if (alterTableOperation instanceof DropPartitionsOperation) {
                DropPartitionsOperation dropPartitionsOperation =
                        (DropPartitionsOperation) alterTableOperation;
                ObjectPath tablePath =
                        dropPartitionsOperation.getTableIdentifier().toObjectPath();
                boolean ifExists = dropPartitionsOperation.ifExists();
                for (CatalogPartitionSpec spec : dropPartitionsOperation.getPartitionSpecs()) {
                    catalog.dropPartition(tablePath, spec, ifExists);
                }
            }
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (TableAlreadyExistException | TableNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof CreateViewOperation) {
        CreateViewOperation createViewOperation = (CreateViewOperation) operation;
        if (createViewOperation.isTemporary()) {
            catalogManager.createTemporaryTable(
                    createViewOperation.getCatalogView(),
                    createViewOperation.getViewIdentifier(),
                    createViewOperation.isIgnoreIfExists());
        } else {
            catalogManager.createTable(
                    createViewOperation.getCatalogView(),
                    createViewOperation.getViewIdentifier(),
                    createViewOperation.isIgnoreIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof DropViewOperation) {
        DropViewOperation dropViewOperation = (DropViewOperation) operation;
        if (dropViewOperation.isTemporary()) {
            catalogManager.dropTemporaryView(
                    dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists());
        } else {
            catalogManager.dropView(
                    dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof AlterViewOperation) {
        AlterViewOperation alterViewOperation = (AlterViewOperation) operation;
        Catalog catalog =
                getCatalogOrThrowException(
                        alterViewOperation.getViewIdentifier().getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterViewOperation.asSummaryString());
        try {
            if (alterViewOperation instanceof AlterViewRenameOperation) {
                AlterViewRenameOperation alterTableRenameOp =
                        (AlterViewRenameOperation) operation;
                catalog.renameTable(
                        alterTableRenameOp.getViewIdentifier().toObjectPath(),
                        alterTableRenameOp.getNewViewIdentifier().getObjectName(),
                        false);
            } else if (alterViewOperation instanceof AlterViewPropertiesOperation) {
                AlterViewPropertiesOperation alterTablePropertiesOp =
                        (AlterViewPropertiesOperation) operation;
                catalogManager.alterTable(
                        alterTablePropertiesOp.getCatalogView(),
                        alterTablePropertiesOp.getViewIdentifier(),
                        false);
            } else if (alterViewOperation instanceof AlterViewAsOperation) {
                AlterViewAsOperation alterViewAsOperation =
                        (AlterViewAsOperation) alterViewOperation;
                catalogManager.alterTable(
                        alterViewAsOperation.getNewView(),
                        alterViewAsOperation.getViewIdentifier(),
                        false);
            }
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (TableAlreadyExistException | TableNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof CreateDatabaseOperation) {
        CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(createDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(createDatabaseOperation.asSummaryString());
        try {
            catalog.createDatabase(
                    createDatabaseOperation.getDatabaseName(),
                    createDatabaseOperation.getCatalogDatabase(),
                    createDatabaseOperation.isIgnoreIfExists());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseAlreadyExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof DropDatabaseOperation) {
        DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(dropDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(dropDatabaseOperation.asSummaryString());
        try {
            catalog.dropDatabase(
                    dropDatabaseOperation.getDatabaseName(),
                    dropDatabaseOperation.isIfExists(),
                    dropDatabaseOperation.isCascade());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseNotExistException | DatabaseNotEmptyException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof AlterDatabaseOperation) {
        AlterDatabaseOperation alterDatabaseOperation = (AlterDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(alterDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterDatabaseOperation.asSummaryString());
        try {
            catalog.alterDatabase(
                    alterDatabaseOperation.getDatabaseName(),
                    alterDatabaseOperation.getCatalogDatabase(),
                    false);
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof CreateCatalogFunctionOperation) {
        return createCatalogFunction((CreateCatalogFunctionOperation) operation);
    } else if (operation instanceof CreateTempSystemFunctionOperation) {
        return createSystemFunction((CreateTempSystemFunctionOperation) operation);
    } else if (operation instanceof DropCatalogFunctionOperation) {
        return dropCatalogFunction((DropCatalogFunctionOperation) operation);
    } else if (operation instanceof DropTempSystemFunctionOperation) {
        return dropSystemFunction((DropTempSystemFunctionOperation) operation);
    } else if (operation instanceof AlterCatalogFunctionOperation) {
        return alterCatalogFunction((AlterCatalogFunctionOperation) operation);
    } else if (operation instanceof CreateCatalogOperation) {
        return createCatalog((CreateCatalogOperation) operation);
    } else if (operation instanceof DropCatalogOperation) {
        DropCatalogOperation dropCatalogOperation = (DropCatalogOperation) operation;
        String exMsg = getDDLOpExecuteErrorMsg(dropCatalogOperation.asSummaryString());
        try {
            catalogManager.unregisterCatalog(
                    dropCatalogOperation.getCatalogName(), dropCatalogOperation.isIfExists());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (CatalogException e) {
            throw new ValidationException(exMsg, e);
        }
    } else if (operation instanceof LoadModuleOperation) {
        return loadModule((LoadModuleOperation) operation);
    } else if (operation instanceof UnloadModuleOperation) {
        return unloadModule((UnloadModuleOperation) operation);
    } else if (operation instanceof UseModulesOperation) {
        return useModules((UseModulesOperation) operation);
    } else if (operation instanceof UseCatalogOperation) {
        UseCatalogOperation useCatalogOperation = (UseCatalogOperation) operation;
        catalogManager.setCurrentCatalog(useCatalogOperation.getCatalogName());
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof UseDatabaseOperation) {
        UseDatabaseOperation useDatabaseOperation = (UseDatabaseOperation) operation;
        catalogManager.setCurrentCatalog(useDatabaseOperation.getCatalogName());
        catalogManager.setCurrentDatabase(useDatabaseOperation.getDatabaseName());
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof ShowCatalogsOperation) {
        return buildShowResult("catalog name", listCatalogs());
    } else if (operation instanceof ShowCreateTableOperation) {
        ShowCreateTableOperation showCreateTableOperation =
                (ShowCreateTableOperation) operation;
        ContextResolvedTable table =
                catalogManager
                        .getTable(showCreateTableOperation.getTableIdentifier())
                        .orElseThrow(
                                () ->
                                        new ValidationException(
                                                String.format(
                                                        "Could not execute SHOW CREATE TABLE. Table with identifier %s does not exist.",
                                                        showCreateTableOperation
                                                                .getTableIdentifier()
                                                                .asSerializableString())));
        return TableResultImpl.builder()
                .resultKind(ResultKind.SUCCESS_WITH_CONTENT)
                .schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING())))
                .data(
                        Collections.singletonList(
                                Row.of(
                                        ShowCreateUtil.buildShowCreateTableRow(
                                                table.getResolvedTable(),
                                                showCreateTableOperation.getTableIdentifier(),
                                                table.isTemporary()))))
                .build();
    } else if (operation instanceof ShowCreateViewOperation) {
        ShowCreateViewOperation showCreateViewOperation = (ShowCreateViewOperation) operation;
        final ContextResolvedTable table =
                catalogManager
                        .getTable(showCreateViewOperation.getViewIdentifier())
                        .orElseThrow(
                                () ->
                                        new ValidationException(
                                                String.format(
                                                        "Could not execute SHOW CREATE VIEW. View with identifier %s does not exist.",
                                                        showCreateViewOperation
                                                                .getViewIdentifier()
                                                                .asSerializableString())));
        return TableResultImpl.builder()
                .resultKind(ResultKind.SUCCESS_WITH_CONTENT)
                .schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING())))
                .data(
                        Collections.singletonList(
                                Row.of(
                                        ShowCreateUtil.buildShowCreateViewRow(
                                                table.getResolvedTable(),
                                                showCreateViewOperation.getViewIdentifier(),
                                                table.isTemporary()))))
                .build();
    } else if (operation instanceof ShowCurrentCatalogOperation) {
        return buildShowResult(
                "current catalog name", new String[] {catalogManager.getCurrentCatalog()});
    } else if (operation instanceof ShowDatabasesOperation) {
        return buildShowResult("database name", listDatabases());
    } else if (operation instanceof ShowCurrentDatabaseOperation) {
        return buildShowResult(
                "current database name", new String[] {catalogManager.getCurrentDatabase()});
    } else if (operation instanceof ShowModulesOperation) {
        ShowModulesOperation showModulesOperation = (ShowModulesOperation) operation;
        if (showModulesOperation.requireFull()) {
            return buildShowFullModulesResult(listFullModules());
        } else {
            return buildShowResult("module name", listModules());
        }
    } else if (operation instanceof ShowTablesOperation) {
        return buildShowResult("table name", listTables());
    } else if (operation instanceof ShowFunctionsOperation) {
        ShowFunctionsOperation showFunctionsOperation = (ShowFunctionsOperation) operation;
        String[] functionNames = null;
        ShowFunctionsOperation.FunctionScope functionScope =
                showFunctionsOperation.getFunctionScope();
        switch (functionScope) {
            case USER:
                functionNames = listUserDefinedFunctions();
                break;
            case ALL:
                functionNames = listFunctions();
                break;
            default:
                throw new UnsupportedOperationException(
                        String.format(
                                "SHOW FUNCTIONS with %s scope is not supported.",
                                functionScope));
        }
        return buildShowResult("function name", functionNames);
    } else if (operation instanceof ShowViewsOperation) {
        return buildShowResult("view name", listViews());
    } else if (operation instanceof ShowColumnsOperation) {
        ShowColumnsOperation showColumnsOperation = (ShowColumnsOperation) operation;
        Optional<ContextResolvedTable> result =
                catalogManager.getTable(showColumnsOperation.getTableIdentifier());
        if (result.isPresent()) {
            return buildShowColumnsResult(
                    result.get().getResolvedSchema(), showColumnsOperation);
        } else {
            throw new ValidationException(
                    String.format(
                            "Tables or views with the identifier '%s' doesn't exist.",
                            showColumnsOperation.getTableIdentifier().asSummaryString()));
        }
    } else if (operation instanceof ShowPartitionsOperation) {
        String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
        try {
            ShowPartitionsOperation showPartitionsOperation =
                    (ShowPartitionsOperation) operation;
            Catalog catalog =
                    getCatalogOrThrowException(
                            showPartitionsOperation.getTableIdentifier().getCatalogName());
            ObjectPath tablePath = showPartitionsOperation.getTableIdentifier().toObjectPath();
            CatalogPartitionSpec partitionSpec = showPartitionsOperation.getPartitionSpec();
            List<CatalogPartitionSpec> partitionSpecs =
                    partitionSpec == null
                            ? catalog.listPartitions(tablePath)
                            : catalog.listPartitions(tablePath, partitionSpec);
            List<String> partitionNames = new ArrayList<>(partitionSpecs.size());
            for (CatalogPartitionSpec spec : partitionSpecs) {
                List<String> partitionKVs = new ArrayList<>(spec.getPartitionSpec().size());
                for (Map.Entry<String, String> partitionKV :
                        spec.getPartitionSpec().entrySet()) {
                    partitionKVs.add(partitionKV.getKey() + "=" + partitionKV.getValue());
                }
                partitionNames.add(String.join("/", partitionKVs));
            }
            return buildShowResult("partition name", partitionNames.toArray(new String[0]));
        } catch (TableNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof ExplainOperation) {
        ExplainOperation explainOperation = (ExplainOperation) operation;
        ExplainDetail[] explainDetails =
                explainOperation.getExplainDetails().stream()
                        .map(ExplainDetail::valueOf)
                        .toArray(ExplainDetail[]::new);
        Operation child = ((ExplainOperation) operation).getChild();
        List<Operation> operations;
        if (child instanceof StatementSetOperation) {
            operations = new ArrayList<>(((StatementSetOperation) child).getOperations());
        } else {
            operations = Collections.singletonList(child);
        }
        String explanation = explainInternal(operations, explainDetails);
        return TableResultImpl.builder()
                .resultKind(ResultKind.SUCCESS_WITH_CONTENT)
                .schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING())))
                .data(Collections.singletonList(Row.of(explanation)))
                .build();
    } else if (operation instanceof DescribeTableOperation) {
        DescribeTableOperation describeTableOperation = (DescribeTableOperation) operation;
        Optional<ContextResolvedTable> result =
                catalogManager.getTable(describeTableOperation.getSqlIdentifier());
        if (result.isPresent()) {
            return buildDescribeResult(result.get().getResolvedSchema());
        } else {
            throw new ValidationException(
                    String.format(
                            "Tables or views with the identifier '%s' doesn't exist",
                            describeTableOperation.getSqlIdentifier().asSummaryString()));
        }
    } else if (operation instanceof QueryOperation) {
        return executeQueryOperation((QueryOperation) operation);
    } else if (operation instanceof CreateTableASOperation) {
        CreateTableASOperation createTableASOperation = (CreateTableASOperation) operation;
        executeInternal(createTableASOperation.getCreateTableOperation());
        return executeInternal(createTableASOperation.toSinkModifyOperation(catalogManager));
    } else if (operation instanceof ExecutePlanOperation) {
        ExecutePlanOperation executePlanOperation = (ExecutePlanOperation) operation;
        return (TableResultInternal)
                executePlan(PlanReference.fromFile(executePlanOperation.getFilePath()));
    } else if (operation instanceof CompilePlanOperation) {
        CompilePlanOperation compilePlanOperation = (CompilePlanOperation) operation;
        compilePlanAndWrite(
                compilePlanOperation.getFilePath(),
                compilePlanOperation.isIfNotExists(),
                compilePlanOperation.getOperation());
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof CompileAndExecutePlanOperation) {
        CompileAndExecutePlanOperation compileAndExecutePlanOperation =
                (CompileAndExecutePlanOperation) operation;
        CompiledPlan compiledPlan =
                compilePlanAndWrite(
                        compileAndExecutePlanOperation.getFilePath(),
                        true,
                        compileAndExecutePlanOperation.getOperation());
        return (TableResultInternal) executePlan(compiledPlan);
    } else if (operation instanceof NopOperation) {
        return TableResultImpl.TABLE_RESULT_OK;
    } else {
        throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
    }
}
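For context, this dispatcher is what ultimately runs when DDL is submitted through executeSql(): the AlterTableSchemaOperation instances produced by the converters in this listing land in the AlterTableOperation branch above. A hedged usage sketch (table name and connector choice are hypothetical):

TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
tEnv.executeSql(
        "CREATE TABLE orders (id BIGINT, amount DOUBLE) WITH ('connector' = 'datagen')");
// parsed into an AlterTableOptionsOperation and routed through executeInternal(...)
tEnv.executeSql("ALTER TABLE orders SET ('connector' = 'blackhole')");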
Use of org.apache.flink.table.operations.ddl.AlterTableSchemaOperation in project flink by apache.
From the class OperationConverterUtils, the method convertChangeColumn, which translates a CHANGE COLUMN statement into an AlterTableSchemaOperation:
public static Operation convertChangeColumn(
        ObjectIdentifier tableIdentifier,
        SqlChangeColumn changeColumn,
        CatalogTable catalogTable,
        SqlValidator sqlValidator) {
    String oldName = changeColumn.getOldName().getSimple();
    if (catalogTable.getPartitionKeys().indexOf(oldName) >= 0) {
        // disallow changing partition columns
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = catalogTable.getSchema();
    boolean first = changeColumn.isFirst();
    String after = changeColumn.getAfter() == null ? null : changeColumn.getAfter().getSimple();
    TableColumn newTableColumn = toTableColumn(changeColumn.getNewColumn(), sqlValidator);
    TableSchema newSchema = changeColumn(oldSchema, oldName, newTableColumn, first, after);
    Map<String, String> newProperties = new HashMap<>(catalogTable.getOptions());
    newProperties.putAll(extractProperties(changeColumn.getProperties()));
    // TODO: handle watermark and constraints
    return new AlterTableSchemaOperation(
            tableIdentifier,
            new CatalogTableImpl(
                    newSchema,
                    catalogTable.getPartitionKeys(),
                    newProperties,
                    catalogTable.getComment()));
}
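The heavy lifting is in the changeColumn(...) helper, which rebuilds the column list around the FIRST/AFTER position clause. A minimal sketch of that reordering on a plain list of column names; this is an illustrative stand-in under stated assumptions, not the actual Flink implementation:

import java.util.ArrayList;
import java.util.List;

static List<String> reorder(
        List<String> cols, String oldName, String newName, boolean first, String after) {
    List<String> result = new ArrayList<>(cols);
    int oldIndex = result.indexOf(oldName);
    result.remove(oldIndex);
    if (first) {
        // FIRST: the renamed column moves to the front
        result.add(0, newName);
    } else if (after != null) {
        // AFTER col: the renamed column is inserted right behind the anchor column
        result.add(result.indexOf(after) + 1, newName);
    } else {
        // no position clause: rename in place
        result.add(oldIndex, newName);
    }
    return result;
}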
Use of org.apache.flink.table.operations.ddl.AlterTableSchemaOperation in project flink by apache.
From the class HiveParserDDLSemanticAnalyzer, the method convertAlterTableChangeCol, which walks the Hive AST for CHANGE COLUMN and emits an AlterTableSchemaOperation:
private Operation convertAlterTableChangeCol(
        CatalogBaseTable alteredTable, String[] qualified, HiveParserASTNode ast)
        throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment]
    // [FIRST|AFTER column_name] [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType =
            HiveParserBaseSemanticAnalyzer.getTypeStringFromAST(
                    (HiveParserASTNode) ast.getChild(2));
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        switch (child.getToken().getType()) {
            case HiveASTParser.StringLiteral:
                newComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getText());
                break;
            case HiveASTParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol =
                        HiveParserBaseSemanticAnalyzer.unescapeIdentifier(
                                child.getChild(0).getText());
                break;
            case HiveASTParser.KW_FIRST:
                first = true;
                break;
            case HiveASTParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveASTParser.TOK_RESTRICT:
                break;
            default:
                throw new ValidationException(
                        "Unsupported token: " + child.getToken() + " for alter table");
        }
    }
    // Validate the operation of renaming a column name.
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo)
            && (null != skewInfo.getSkewedColNames())
            && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new ValidationException(
                oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    String oldName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(oldColName);
    String newName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(newColName);
    if (oldTable.getPartitionKeys().contains(oldName)) {
        // disallow changing partition columns
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = oldTable.getSchema();
    TableColumn newTableColumn =
            TableColumn.physical(
                    newName,
                    HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(newType)));
    TableSchema newSchema =
            OperationConverterUtils.changeColumn(
                    oldSchema, oldName, newTableColumn, first, flagCol);
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    return new AlterTableSchemaOperation(
            tableIdentifier,
            new CatalogTableImpl(
                    newSchema, oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
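A hedged sketch of the Hive-dialect DDL this converter handles; once the dialect is switched, a statement like the following (table and column names are hypothetical) is parsed into the AST walked above and emitted as an AlterTableSchemaOperation:

tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
tableEnv.executeSql(
        "ALTER TABLE db1.tbl1 CHANGE COLUMN old_col new_col STRING"
                + " COMMENT 'renamed column' AFTER other_col CASCADE");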
Use of org.apache.flink.table.operations.ddl.AlterTableSchemaOperation in project flink by apache.
From the class OperationConverterUtils, the method convertAddReplaceColumns, which translates ADD/REPLACE COLUMNS into an AlterTableSchemaOperation:
public static Operation convertAddReplaceColumns(
        ObjectIdentifier tableIdentifier,
        SqlAddReplaceColumns addReplaceColumns,
        CatalogTable catalogTable,
        SqlValidator sqlValidator) {
    // This is only used by the Hive dialect at the moment. In Hive, only non-partition
    // columns can be added/replaced, and users will only define non-partition columns in
    // the new column list. Therefore, we require that partition columns appear last in
    // the schema (which is in line with Hive). Otherwise, we won't be able to determine
    // the column positions after the non-partition columns are replaced.
    TableSchema oldSchema = catalogTable.getSchema();
    int numPartCol = catalogTable.getPartitionKeys().size();
    Set<String> lastCols =
            oldSchema.getTableColumns()
                    .subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount())
                    .stream()
                    .map(TableColumn::getName)
                    .collect(Collectors.toSet());
    if (!lastCols.equals(new HashSet<>(catalogTable.getPartitionKeys()))) {
        throw new ValidationException(
                "ADD/REPLACE COLUMNS on partitioned tables requires partition columns to appear last");
    }
    // set non-partition columns
    TableSchema.Builder builder = TableSchema.builder();
    if (!addReplaceColumns.isReplace()) {
        List<TableColumn> nonPartCols =
                oldSchema.getTableColumns().subList(0, oldSchema.getFieldCount() - numPartCol);
        for (TableColumn column : nonPartCols) {
            builder.add(column);
        }
        setWatermarkAndPK(builder, catalogTable.getSchema());
    }
    for (SqlNode sqlNode : addReplaceColumns.getNewColumns()) {
        builder.add(toTableColumn((SqlTableColumn) sqlNode, sqlValidator));
    }
    // set partition columns
    List<TableColumn> partCols =
            oldSchema.getTableColumns()
                    .subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount());
    for (TableColumn column : partCols) {
        builder.add(column);
    }
    // set properties
    Map<String, String> newProperties = new HashMap<>(catalogTable.getOptions());
    newProperties.putAll(extractProperties(addReplaceColumns.getProperties()));
    return new AlterTableSchemaOperation(
            tableIdentifier,
            new CatalogTableImpl(
                    builder.build(),
                    catalogTable.getPartitionKeys(),
                    newProperties,
                    catalogTable.getComment()));
}
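A hedged usage sketch of the statements this converter backs (Hive dialect assumed; table and column names are hypothetical):

tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
// appends new_col after the existing non-partition columns, before any partition columns
tableEnv.executeSql("ALTER TABLE db1.tbl1 ADD COLUMNS (new_col INT)");
// replaces all non-partition columns; partition columns are carried over unchanged
tableEnv.executeSql("ALTER TABLE db1.tbl1 REPLACE COLUMNS (id BIGINT, name STRING)");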