Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.
The class SqlCreateTableConverter, method lookupLikeSourceTable.
private CatalogTable lookupLikeSourceTable(SqlTableLike sqlTableLike) {
    UnresolvedIdentifier unresolvedIdentifier =
            UnresolvedIdentifier.of(sqlTableLike.getSourceTable().names);
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    ContextResolvedTable lookupResult =
            catalogManager.getTable(identifier)
                    .orElseThrow(() -> new ValidationException(String.format(
                            "Source table '%s' of the LIKE clause not found in the catalog, at %s",
                            identifier, sqlTableLike.getSourceTable().getParserPosition())));
    if (!(lookupResult.getTable() instanceof CatalogTable)) {
        throw new ValidationException(String.format(
                "Source table '%s' of the LIKE clause can not be a VIEW, at %s",
                identifier, sqlTableLike.getSourceTable().getParserPosition()));
    }
    return lookupResult.getTable();
}
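The key step above is catalogManager.qualifyIdentifier, which expands a possibly partial name from the LIKE clause into a fully qualified ObjectIdentifier. Below is a minimal, self-contained sketch of that expansion rule; the qualify helper, the class name, and the hard-coded current catalog/database are illustrative assumptions, not the actual CatalogManager implementation.

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.UnresolvedIdentifier;

public class IdentifierQualificationSketch {

    // Illustrative stand-ins for the session's current catalog and database.
    private static final String CURRENT_CATALOG = "cat1";
    private static final String CURRENT_DATABASE = "db1";

    // Hypothetical helper mirroring the qualification rule: missing catalog
    // and/or database parts are filled in from the session defaults.
    static ObjectIdentifier qualify(UnresolvedIdentifier unresolved) {
        return ObjectIdentifier.of(
                unresolved.getCatalogName().orElse(CURRENT_CATALOG),
                unresolved.getDatabaseName().orElse(CURRENT_DATABASE),
                unresolved.getObjectName());
    }

    public static void main(String[] args) {
        // A single-part name in a LIKE clause resolves against the current catalog and database.
        System.out.println(qualify(UnresolvedIdentifier.of("tb1")).asSummaryString());        // cat1.db1.tb1
        // A two-part name only needs the catalog filled in.
        System.out.println(qualify(UnresolvedIdentifier.of("db2", "tb1")).asSummaryString()); // cat1.db2.tb1
    }
}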
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.
The class SqlToOperationConverterTest, method testAlterTable.
@Test
public void testAlterTable() throws Exception {
    prepareNonManagedTable(false);
    final String[] renameTableSqls =
            new String[] {
                "alter table cat1.db1.tb1 rename to tb2",
                "alter table db1.tb1 rename to tb2",
                "alter table tb1 rename to cat1.db1.tb2"
            };
    final ObjectIdentifier expectedIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1");
    final ObjectIdentifier expectedNewIdentifier = ObjectIdentifier.of("cat1", "db1", "tb2");
    // test rename table converter
    for (int i = 0; i < renameTableSqls.length; i++) {
        Operation operation = parse(renameTableSqls[i], SqlDialect.DEFAULT);
        assertThat(operation).isInstanceOf(AlterTableRenameOperation.class);
        final AlterTableRenameOperation alterTableRenameOperation =
                (AlterTableRenameOperation) operation;
        assertThat(alterTableRenameOperation.getTableIdentifier()).isEqualTo(expectedIdentifier);
        assertThat(alterTableRenameOperation.getNewTableIdentifier())
                .isEqualTo(expectedNewIdentifier);
    }
    // test alter table options
    Operation operation =
            parse("alter table cat1.db1.tb1 set ('k1' = 'v1', 'K2' = 'V2')", SqlDialect.DEFAULT);
    Map<String, String> expectedOptions = new HashMap<>();
    expectedOptions.put("connector", "dummy");
    expectedOptions.put("k", "v");
    expectedOptions.put("k1", "v1");
    expectedOptions.put("K2", "V2");
    assertAlterTableOptions(operation, expectedIdentifier, expectedOptions);
    // test alter table reset
    operation = parse("alter table cat1.db1.tb1 reset ('k')", SqlDialect.DEFAULT);
    assertAlterTableOptions(
            operation, expectedIdentifier, Collections.singletonMap("connector", "dummy"));
    assertThatThrownBy(() -> parse("alter table cat1.db1.tb1 reset ('connector')", SqlDialect.DEFAULT))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining("ALTER TABLE RESET does not support changing 'connector'");
    assertThatThrownBy(() -> parse("alter table cat1.db1.tb1 reset ()", SqlDialect.DEFAULT))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining("ALTER TABLE RESET does not support empty key");
}
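The assertions above rely on ObjectIdentifier being a value object: two identifiers built from the same catalog, database, and object name compare equal, which is why three differently written ALTER TABLE statements can all be checked against one expected identifier. A small standalone sketch of that equality behavior (class name and output comments are illustrative):

import org.apache.flink.table.catalog.ObjectIdentifier;

public class ObjectIdentifierEqualitySketch {
    public static void main(String[] args) {
        ObjectIdentifier a = ObjectIdentifier.of("cat1", "db1", "tb1");
        ObjectIdentifier b = ObjectIdentifier.of("cat1", "db1", "tb1");

        // Equality and hashCode are based on the three name parts, so fully
        // qualified identifiers can be compared directly in tests or used as map keys.
        System.out.println(a.equals(b));                   // true
        System.out.println(a.hashCode() == b.hashCode());  // true
        System.out.println(a.getObjectName());             // tb1
    }
}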
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.
The class FlinkCalciteCatalogReaderTest, method testGetFlinkPreparingTableBase.
@Test
public void testGetFlinkPreparingTableBase() {
    // Mock CatalogSchemaTable.
    final ObjectIdentifier objectIdentifier = ObjectIdentifier.of("a", "b", "c");
    final ResolvedSchema schema =
            new ResolvedSchema(Collections.emptyList(), Collections.emptyList(), null);
    final CatalogTable catalogTable =
            ConnectorCatalogTable.source(
                    new TestTableSource(true, TableSchema.fromResolvedSchema(schema)), true);
    final ResolvedCatalogTable resolvedCatalogTable = new ResolvedCatalogTable(catalogTable, schema);
    CatalogSchemaTable mockTable =
            new CatalogSchemaTable(
                    ContextResolvedTable.permanent(
                            objectIdentifier,
                            CatalogManagerMocks.createEmptyCatalog(),
                            resolvedCatalogTable),
                    FlinkStatistic.UNKNOWN(),
                    true);
    rootSchemaPlus.add(tableMockName, mockTable);
    Prepare.PreparingTable preparingTable =
            catalogReader.getTable(Collections.singletonList(tableMockName));
    assertTrue(preparingTable instanceof FlinkPreparingTableBase);
}
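The three-part ObjectIdentifier created here addresses a table across catalogs, while individual Catalog implementations work with a two-part ObjectPath (database plus object name). A short sketch of that mapping, reusing the identifier from the test above; the class name is illustrative:

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ObjectPath;

public class ObjectIdentifierToPathSketch {
    public static void main(String[] args) {
        ObjectIdentifier id = ObjectIdentifier.of("a", "b", "c");

        // Catalog implementations address objects by database and object name only,
        // so the catalog part is dropped when converting to an ObjectPath.
        ObjectPath path = id.toObjectPath();
        System.out.println(path.getDatabaseName());  // b
        System.out.println(path.getObjectName());    // c
        System.out.println(id.asSummaryString());    // a.b.c
    }
}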
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.
The class SqlToOperationConverter, method convertDropView.
/** Convert DROP VIEW statement. */
private Operation convertDropView(SqlDropView sqlDropView) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlDropView.fullViewName());
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    return new DropViewOperation(identifier, sqlDropView.getIfExists(), sqlDropView.isTemporary());
}
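For context, the resulting DropViewOperation simply carries the qualified identifier together with the IF EXISTS and TEMPORARY flags from the statement. A hedged construction sketch, with literal values standing in for the parsed SqlDropView and an illustrative wrapper class:

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.DropViewOperation;

public class DropViewOperationSketch {
    public static void main(String[] args) {
        ObjectIdentifier identifier = ObjectIdentifier.of("cat1", "db1", "my_view");

        // Same constructor arguments as in convertDropView: identifier, ifExists, isTemporary.
        DropViewOperation operation = new DropViewOperation(identifier, true, false);

        // Every Operation can describe itself, which is handy when logging converted statements.
        System.out.println(operation.asSummaryString());
    }
}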
Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.
The class SqlToOperationConverter, method convertAlterView.
/** Convert ALTER VIEW statement. */
private Operation convertAlterView(SqlAlterView alterView) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(alterView.fullViewName());
    ObjectIdentifier viewIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(viewIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(
                String.format("View %s doesn't exist or is a temporary view.", viewIdentifier.toString()));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogTable) {
        throw new ValidationException("ALTER VIEW for a table is not allowed");
    }
    if (alterView instanceof SqlAlterViewRename) {
        UnresolvedIdentifier newUnresolvedIdentifier =
                UnresolvedIdentifier.of(((SqlAlterViewRename) alterView).fullNewViewName());
        ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterViewRenameOperation(viewIdentifier, newTableIdentifier);
    } else if (alterView instanceof SqlAlterViewProperties) {
        SqlAlterViewProperties alterViewProperties = (SqlAlterViewProperties) alterView;
        CatalogView oldView = (CatalogView) baseTable;
        Map<String, String> newProperties = new HashMap<>(oldView.getOptions());
        newProperties.putAll(OperationConverterUtils.extractProperties(alterViewProperties.getPropertyList()));
        CatalogView newView =
                new CatalogViewImpl(
                        oldView.getOriginalQuery(),
                        oldView.getExpandedQuery(),
                        oldView.getSchema(),
                        newProperties,
                        oldView.getComment());
        return new AlterViewPropertiesOperation(viewIdentifier, newView);
    } else if (alterView instanceof SqlAlterViewAs) {
        SqlAlterViewAs alterViewAs = (SqlAlterViewAs) alterView;
        final SqlNode newQuery = alterViewAs.getNewQuery();
        CatalogView oldView = (CatalogView) baseTable;
        CatalogView newView =
                convertViewQuery(newQuery, Collections.emptyList(), oldView.getOptions(), oldView.getComment());
        return new AlterViewAsOperation(viewIdentifier, newView);
    } else {
        throw new ValidationException(
                String.format("[%s] needs to implement", alterView.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}