Example usage of org.apache.flink.table.catalog.CatalogView in the Apache Flink project.
From the class HiveParserDDLSemanticAnalyzer, method convertCreateView.
/**
 * Converts a CREATE VIEW / ALTER VIEW AS statement AST into a Flink {@link Operation}.
 *
 * <p>Walks the children of the AST node to collect the optional clauses (IF NOT EXISTS,
 * column list, comment, table properties) and rejects every Hive view feature that is not
 * supported here (materialized views, file format, row format, serde, location, partition
 * columns, OR REPLACE, rewrite). The SELECT statement is then analyzed via
 * {@code hiveParser.analyzeCreateView} to obtain the view's original/expanded text and schema.
 *
 * @param ast root AST node of the CREATE VIEW or ALTER VIEW statement
 * @return a {@link CreateViewOperation}, or an {@link AlterViewAsOperation} when the
 *     statement is ALTER VIEW ... AS SELECT
 * @throws SemanticException if Hive semantic analysis of the view query fails
 * @throws ValidationException if the AST contains an unrecognized child node
 */
private Operation convertCreateView(HiveParserASTNode ast) throws SemanticException {
    String[] qualTabName =
            HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
                    (HiveParserASTNode) ast.getChild(0));
    String dbDotTable = HiveParserBaseSemanticAnalyzer.getDotName(qualTabName);
    List<FieldSchema> cols = null;
    boolean ifNotExists = false;
    boolean isAlterViewAs = false;
    String comment = null;
    HiveParserASTNode selectStmt = null;
    Map<String, String> tblProps = null;
    boolean isMaterialized =
            ast.getToken().getType() == HiveASTParser.TOK_CREATE_MATERIALIZED_VIEW;
    if (isMaterialized) {
        handleUnsupportedOperation("MATERIALIZED VIEW is not supported");
    }
    HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
    // Parameterized logging: the message is only assembled when INFO is enabled,
    // instead of always paying for string concatenation.
    LOG.info("Creating view {} position={}", dbDotTable, ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    // Child 0 is the view name; the remaining children are the optional clauses.
    for (int num = 1; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            handleUnsupportedOperation("FILE FORMAT for view is not supported");
        }
        switch (child.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_REWRITE_ENABLED:
                handleUnsupportedOperation("MATERIALIZED VIEW REWRITE is not supported");
                break;
            case HiveASTParser.TOK_ORREPLACE:
                handleUnsupportedOperation("CREATE OR REPLACE VIEW is not supported");
                break;
            case HiveASTParser.TOK_QUERY:
                selectStmt = child;
                break;
            case HiveASTParser.TOK_TABCOLNAME:
                cols = HiveParserBaseSemanticAnalyzer.getColumns(child);
                break;
            case HiveASTParser.TOK_TABLECOMMENT:
                comment =
                        HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                                child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((HiveParserASTNode) child.getChild(0));
                break;
            case HiveASTParser.TOK_TABLEROWFORMAT:
                handleUnsupportedOperation("ROW FORMAT for view is not supported");
                break;
            case HiveASTParser.TOK_TABLESERIALIZER:
                handleUnsupportedOperation("SERDE for view is not supported");
                break;
            case HiveASTParser.TOK_TABLELOCATION:
                handleUnsupportedOperation("LOCATION for view is not supported");
                break;
            case HiveASTParser.TOK_VIEWPARTCOLS:
                handleUnsupportedOperation("PARTITION COLUMN for view is not supported");
                break;
            default:
                throw new ValidationException(
                        "Unknown AST node for CREATE/ALTER VIEW: " + child);
        }
    }
    // ALTER VIEW ... AS SELECT shares this code path; detect it via the second child.
    if (ast.getToken().getType() == HiveASTParser.TOK_ALTERVIEW
            && ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        isAlterViewAs = true;
    }
    queryState.setCommandType(HiveOperation.CREATEVIEW);
    HiveParserCreateViewInfo createViewInfo =
            new HiveParserCreateViewInfo(dbDotTable, cols, selectStmt);
    hiveParser.analyzeCreateView(createViewInfo, context, queryState, hiveShim);
    ObjectIdentifier viewIdentifier = parseObjectIdentifier(createViewInfo.getCompoundName());
    TableSchema schema =
            HiveTableUtil.createTableSchema(
                    createViewInfo.getSchema(),
                    Collections.emptyList(),
                    Collections.emptySet(),
                    null);
    Map<String, String> props = new HashMap<>();
    if (isAlterViewAs) {
        // ALTER VIEW AS keeps the existing view's options and comment.
        CatalogBaseTable baseTable = getCatalogBaseTable(viewIdentifier);
        props.putAll(baseTable.getOptions());
        comment = baseTable.getComment();
    } else if (tblProps != null) {
        props.putAll(tblProps);
    }
    CatalogView catalogView =
            new CatalogViewImpl(
                    createViewInfo.getOriginalText(),
                    createViewInfo.getExpandedText(),
                    schema,
                    props,
                    comment);
    if (isAlterViewAs) {
        return new AlterViewAsOperation(viewIdentifier, catalogView);
    } else {
        return new CreateViewOperation(viewIdentifier, catalogView, ifNotExists, false);
    }
}
Example usage of org.apache.flink.table.catalog.CatalogView in the Apache Flink project.
From the class SqlToOperationConverter, method convertAlterTable.
/**
 * Converts an ALTER TABLE statement into the corresponding {@link Operation}.
 *
 * <p>The target must be an existing, permanent catalog table: missing tables, temporary
 * tables, and views are rejected before dispatch. Each concrete {@link SqlAlterTable}
 * subclass is then routed to its dedicated conversion; an unrecognized subclass fails
 * with a {@link ValidationException}.
 *
 * @param sqlAlterTable the parsed ALTER TABLE node
 * @return the converted table operation
 * @throws ValidationException if the table does not exist or is temporary, if it is a
 *     view, if a constraint is invalid or unknown, or if the ALTER TABLE variant has no
 *     conversion implemented
 */
private Operation convertAlterTable(SqlAlterTable sqlAlterTable) {
    UnresolvedIdentifier unresolvedIdentifier =
            UnresolvedIdentifier.of(sqlAlterTable.fullTableName());
    ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable =
            catalogManager.getTable(tableIdentifier);
    boolean missingOrTemporary =
            !optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary();
    if (missingOrTemporary) {
        throw new ValidationException(
                String.format(
                        "Table %s doesn't exist or is a temporary table.", tableIdentifier));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    // Views must go through ALTER VIEW, never ALTER TABLE.
    if (baseTable instanceof CatalogView) {
        throw new ValidationException("ALTER TABLE for a view is not allowed");
    }
    if (sqlAlterTable instanceof SqlAlterTableRename) {
        SqlAlterTableRename rename = (SqlAlterTableRename) sqlAlterTable;
        UnresolvedIdentifier newUnresolvedIdentifier =
                UnresolvedIdentifier.of(rename.fullNewTableName());
        ObjectIdentifier newTableIdentifier =
                catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterTableRenameOperation(tableIdentifier, newTableIdentifier);
    } else if (sqlAlterTable instanceof SqlAlterTableOptions) {
        return convertAlterTableOptions(
                tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableOptions) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableReset) {
        return convertAlterTableReset(
                tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableReset) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableAddConstraint) {
        SqlTableConstraint constraint =
                ((SqlAlterTableAddConstraint) sqlAlterTable).getConstraint();
        validateTableConstraint(constraint);
        TableSchema oriSchema =
                TableSchema.fromResolvedSchema(
                        baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        // Sanity check: building the schema with the new primary key fails fast on
        // an invalid constraint; the builder result itself is discarded.
        TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriSchema);
        Optional<String> constraintName = constraint.getConstraintName();
        if (constraintName.isPresent()) {
            builder.primaryKey(constraintName.get(), constraint.getColumnNames());
        } else {
            builder.primaryKey(constraint.getColumnNames());
        }
        builder.build();
        return new AlterTableAddConstraintOperation(
                tableIdentifier, constraintName.orElse(null), constraint.getColumnNames());
    } else if (sqlAlterTable instanceof SqlAlterTableDropConstraint) {
        SqlAlterTableDropConstraint dropConstraint =
                (SqlAlterTableDropConstraint) sqlAlterTable;
        String constraintName = dropConstraint.getConstraintName().getSimple();
        TableSchema oriSchema =
                TableSchema.fromResolvedSchema(
                        baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        // The named constraint must match the table's current primary key.
        boolean constraintExists =
                oriSchema
                        .getPrimaryKey()
                        .filter(pk -> pk.getName().equals(constraintName))
                        .isPresent();
        if (!constraintExists) {
            throw new ValidationException(
                    String.format("CONSTRAINT [%s] does not exist", constraintName));
        }
        return new AlterTableDropConstraintOperation(tableIdentifier, constraintName);
    } else if (sqlAlterTable instanceof SqlAddReplaceColumns) {
        return OperationConverterUtils.convertAddReplaceColumns(
                tableIdentifier,
                (SqlAddReplaceColumns) sqlAlterTable,
                (CatalogTable) baseTable,
                flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlChangeColumn) {
        return OperationConverterUtils.convertChangeColumn(
                tableIdentifier,
                (SqlChangeColumn) sqlAlterTable,
                (CatalogTable) baseTable,
                flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlAddPartitions) {
        SqlAddPartitions addPartitions = (SqlAddPartitions) sqlAlterTable;
        List<CatalogPartitionSpec> partSpecs = new ArrayList<>();
        List<CatalogPartition> catalogPartitions = new ArrayList<>();
        // Partition specs and their property maps are parallel lists indexed together.
        int partCount = addPartitions.getPartSpecs().size();
        for (int i = 0; i < partCount; i++) {
            partSpecs.add(new CatalogPartitionSpec(addPartitions.getPartitionKVs(i)));
            Map<String, String> partProps =
                    OperationConverterUtils.extractProperties(
                            addPartitions.getPartProps().get(i));
            catalogPartitions.add(new CatalogPartitionImpl(partProps, null));
        }
        return new AddPartitionsOperation(
                tableIdentifier, addPartitions.ifNotExists(), partSpecs, catalogPartitions);
    } else if (sqlAlterTable instanceof SqlDropPartitions) {
        SqlDropPartitions dropPartitions = (SqlDropPartitions) sqlAlterTable;
        List<CatalogPartitionSpec> partSpecs = new ArrayList<>();
        int partCount = dropPartitions.getPartSpecs().size();
        for (int i = 0; i < partCount; i++) {
            partSpecs.add(new CatalogPartitionSpec(dropPartitions.getPartitionKVs(i)));
        }
        return new DropPartitionsOperation(
                tableIdentifier, dropPartitions.ifExists(), partSpecs);
    } else if (sqlAlterTable instanceof SqlAlterTableCompact) {
        return convertAlterTableCompact(
                tableIdentifier,
                optionalCatalogTable.get(),
                (SqlAlterTableCompact) sqlAlterTable);
    } else {
        throw new ValidationException(
                String.format(
                        "[%s] needs to implement",
                        sqlAlterTable.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
Aggregations