
Example 56 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.

In class SqlCreateTableConverter, method lookupLikeSourceTable.

private CatalogTable lookupLikeSourceTable(SqlTableLike sqlTableLike) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlTableLike.getSourceTable().names);
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    ContextResolvedTable lookupResult = catalogManager.getTable(identifier)
            .orElseThrow(() -> new ValidationException(String.format(
                    "Source table '%s' of the LIKE clause not found in the catalog, at %s",
                    identifier, sqlTableLike.getSourceTable().getParserPosition())));
    if (!(lookupResult.getTable() instanceof CatalogTable)) {
        throw new ValidationException(String.format(
                "Source table '%s' of the LIKE clause can not be a VIEW, at %s",
                identifier, sqlTableLike.getSourceTable().getParserPosition()));
    }
    return lookupResult.getTable();
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
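
For readers unfamiliar with the identifier types involved, here is a minimal stand-alone sketch (not part of the Flink sources) that builds the same kind of identifiers by hand; in the converter above, the qualification step is performed by catalogManager.qualifyIdentifier instead:

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.UnresolvedIdentifier;

public class IdentifierSketch {
    public static void main(String[] args) {
        // A partially qualified name as it could appear in a LIKE clause: db1.tb1
        UnresolvedIdentifier unresolved = UnresolvedIdentifier.of("db1", "tb1");
        System.out.println(unresolved);

        // qualifyIdentifier(...) would fill in the current catalog; here the
        // fully qualified identifier is constructed directly for illustration.
        ObjectIdentifier identifier = ObjectIdentifier.of("cat1", "db1", "tb1");
        System.out.println(identifier.getCatalogName());  // cat1
        System.out.println(identifier.getDatabaseName()); // db1
        System.out.println(identifier.getObjectName());   // tb1
        System.out.println(identifier.asSummaryString()); // cat1.db1.tb1
    }
}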

Example 57 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.

In class SqlToOperationConverterTest, method testAlterTable.

@Test
public void testAlterTable() throws Exception {
    prepareNonManagedTable(false);
    final String[] renameTableSqls = new String[] {
        "alter table cat1.db1.tb1 rename to tb2",
        "alter table db1.tb1 rename to tb2",
        "alter table tb1 rename to cat1.db1.tb2"
    };
    final ObjectIdentifier expectedIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1");
    final ObjectIdentifier expectedNewIdentifier = ObjectIdentifier.of("cat1", "db1", "tb2");
    // test rename table converter
    for (int i = 0; i < renameTableSqls.length; i++) {
        Operation operation = parse(renameTableSqls[i], SqlDialect.DEFAULT);
        assertThat(operation).isInstanceOf(AlterTableRenameOperation.class);
        final AlterTableRenameOperation alterTableRenameOperation = (AlterTableRenameOperation) operation;
        assertThat(alterTableRenameOperation.getTableIdentifier()).isEqualTo(expectedIdentifier);
        assertThat(alterTableRenameOperation.getNewTableIdentifier()).isEqualTo(expectedNewIdentifier);
    }
    // test alter table options
    Operation operation = parse("alter table cat1.db1.tb1 set ('k1' = 'v1', 'K2' = 'V2')", SqlDialect.DEFAULT);
    Map<String, String> expectedOptions = new HashMap<>();
    expectedOptions.put("connector", "dummy");
    expectedOptions.put("k", "v");
    expectedOptions.put("k1", "v1");
    expectedOptions.put("K2", "V2");
    assertAlterTableOptions(operation, expectedIdentifier, expectedOptions);
    // test alter table reset
    operation = parse("alter table cat1.db1.tb1 reset ('k')", SqlDialect.DEFAULT);
    assertAlterTableOptions(operation, expectedIdentifier, Collections.singletonMap("connector", "dummy"));
    assertThatThrownBy(() -> parse("alter table cat1.db1.tb1 reset ('connector')", SqlDialect.DEFAULT))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining("ALTER TABLE RESET does not support changing 'connector'");
    assertThatThrownBy(() -> parse("alter table cat1.db1.tb1 reset ()", SqlDialect.DEFAULT))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining("ALTER TABLE RESET does not support empty key");
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) HashMap(java.util.HashMap) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) OperationMatchers.isCreateTableOperation(org.apache.flink.table.planner.utils.OperationMatchers.isCreateTableOperation) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) UseCatalogOperation(org.apache.flink.table.operations.UseCatalogOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) ShowJarsOperation(org.apache.flink.table.operations.command.ShowJarsOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) EndStatementSetOperation(org.apache.flink.table.operations.EndStatementSetOperation) UseModulesOperation(org.apache.flink.table.operations.UseModulesOperation) ShowTablesOperation(org.apache.flink.table.operations.ShowTablesOperation) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) SetOperation(org.apache.flink.table.operations.command.SetOperation) LoadModuleOperation(org.apache.flink.table.operations.LoadModuleOperation) Operation(org.apache.flink.table.operations.Operation) ShowModulesOperation(org.apache.flink.table.operations.ShowModulesOperation) SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation) UnloadModuleOperation(org.apache.flink.table.operations.UnloadModuleOperation) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) RemoveJarOperation(org.apache.flink.table.operations.command.RemoveJarOperation) BeginStatementSetOperation(org.apache.flink.table.operations.BeginStatementSetOperation) AddJarOperation(org.apache.flink.table.operations.command.AddJarOperation) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) ResetOperation(org.apache.flink.table.operations.command.ResetOperation) StatementSetOperation(org.apache.flink.table.operations.StatementSetOperation) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) Test(org.junit.Test)
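
As a side note, the expected option maps in this test follow plain map semantics: SET merges new key/value pairs over the table's existing options, and RESET drops the named keys, with 'connector' protected and an empty key list rejected. A minimal stand-alone sketch of that bookkeeping, using only java.util maps and the option values visible in the test:

import java.util.HashMap;
import java.util.Map;

public class AlterTableOptionsSketch {
    public static void main(String[] args) {
        // Options the table starts with, as implied by the expected maps in the test.
        Map<String, String> base = new HashMap<>();
        base.put("connector", "dummy");
        base.put("k", "v");

        // ALTER TABLE ... SET ('k1' = 'v1', 'K2' = 'V2'): merge the new pairs over the base.
        Map<String, String> afterSet = new HashMap<>(base);
        afterSet.put("k1", "v1");
        afterSet.put("K2", "V2");
        // afterSet now matches the expectedOptions map built in the test.

        // ALTER TABLE ... RESET ('k'): drop the key from the base options;
        // resetting 'connector' or passing an empty key list is rejected by the converter.
        Map<String, String> afterReset = new HashMap<>(base);
        afterReset.remove("k");

        System.out.println(afterSet);   // {connector=dummy, k=v, k1=v1, K2=V2} (order may vary)
        System.out.println(afterReset); // {connector=dummy}
    }
}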

Example 58 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.

In class FlinkCalciteCatalogReaderTest, method testGetFlinkPreparingTableBase.

@Test
public void testGetFlinkPreparingTableBase() {
    // Mock CatalogSchemaTable.
    final ObjectIdentifier objectIdentifier = ObjectIdentifier.of("a", "b", "c");
    final ResolvedSchema schema = new ResolvedSchema(Collections.emptyList(), Collections.emptyList(), null);
    final CatalogTable catalogTable = ConnectorCatalogTable.source(new TestTableSource(true, TableSchema.fromResolvedSchema(schema)), true);
    final ResolvedCatalogTable resolvedCatalogTable = new ResolvedCatalogTable(catalogTable, schema);
    CatalogSchemaTable mockTable = new CatalogSchemaTable(
            ContextResolvedTable.permanent(
                    objectIdentifier, CatalogManagerMocks.createEmptyCatalog(), resolvedCatalogTable),
            FlinkStatistic.UNKNOWN(),
            true);
    rootSchemaPlus.add(tableMockName, mockTable);
    Prepare.PreparingTable preparingTable = catalogReader.getTable(Collections.singletonList(tableMockName));
    assertTrue(preparingTable instanceof FlinkPreparingTableBase);
}
Also used : TestTableSource(org.apache.flink.table.planner.utils.TestTableSource) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) CatalogSchemaTable(org.apache.flink.table.planner.catalog.CatalogSchemaTable) Prepare(org.apache.calcite.prepare.Prepare) FlinkPreparingTableBase(org.apache.flink.table.planner.plan.schema.FlinkPreparingTableBase) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) Test(org.junit.Test)
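
The mocked table above is registered under a three-part ObjectIdentifier, while individual catalogs address objects by database and object name only. A small stand-alone sketch of that relationship (not taken from the test itself):

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ObjectPath;

public class ObjectPathSketch {
    public static void main(String[] args) {
        // Same three-part identifier as the mocked CatalogSchemaTable above.
        ObjectIdentifier objectIdentifier = ObjectIdentifier.of("a", "b", "c");

        // Catalog implementations address tables by database + object name only;
        // toObjectPath() drops the catalog part.
        ObjectPath path = objectIdentifier.toObjectPath();
        System.out.println(path.getDatabaseName()); // b
        System.out.println(path.getObjectName());   // c
        System.out.println(path.getFullName());     // b.c
    }
}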

Example 59 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.

In class SqlToOperationConverter, method convertDropView.

/**
 * Convert DROP VIEW statement.
 */
private Operation convertDropView(SqlDropView sqlDropView) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlDropView.fullViewName());
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    return new DropViewOperation(identifier, sqlDropView.getIfExists(), sqlDropView.isTemporary());
}
Also used : DropViewOperation(org.apache.flink.table.operations.ddl.DropViewOperation) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
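
A minimal stand-alone sketch of what the converter produces, using only the DropViewOperation constructor visible above; the identifier is built by hand here instead of going through qualifyIdentifier, and the names are illustrative:

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.DropViewOperation;

public class DropViewOperationSketch {
    public static void main(String[] args) {
        // A fully qualified view name, as qualifyIdentifier(...) would produce it.
        ObjectIdentifier identifier = ObjectIdentifier.of("cat1", "db1", "v1");

        // Same constructor the converter above uses: identifier, ifExists, isTemporary.
        DropViewOperation operation = new DropViewOperation(identifier, true, false);

        // Every Operation can render itself as a summary string, e.g. for EXPLAIN output.
        System.out.println(operation.asSummaryString());
    }
}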

Example 60 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.

In class SqlToOperationConverter, method convertAlterView.

/**
 * Convert ALTER VIEW statement.
 */
private Operation convertAlterView(SqlAlterView alterView) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(alterView.fullViewName());
    ObjectIdentifier viewIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(viewIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(String.format("View %s doesn't exist or is a temporary view.", viewIdentifier.toString()));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogTable) {
        throw new ValidationException("ALTER VIEW for a table is not allowed");
    }
    if (alterView instanceof SqlAlterViewRename) {
        UnresolvedIdentifier newUnresolvedIdentifier = UnresolvedIdentifier.of(((SqlAlterViewRename) alterView).fullNewViewName());
        ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterViewRenameOperation(viewIdentifier, newTableIdentifier);
    } else if (alterView instanceof SqlAlterViewProperties) {
        SqlAlterViewProperties alterViewProperties = (SqlAlterViewProperties) alterView;
        CatalogView oldView = (CatalogView) baseTable;
        Map<String, String> newProperties = new HashMap<>(oldView.getOptions());
        newProperties.putAll(OperationConverterUtils.extractProperties(alterViewProperties.getPropertyList()));
        CatalogView newView = new CatalogViewImpl(
                oldView.getOriginalQuery(),
                oldView.getExpandedQuery(),
                oldView.getSchema(),
                newProperties,
                oldView.getComment());
        return new AlterViewPropertiesOperation(viewIdentifier, newView);
    } else if (alterView instanceof SqlAlterViewAs) {
        SqlAlterViewAs alterViewAs = (SqlAlterViewAs) alterView;
        final SqlNode newQuery = alterViewAs.getNewQuery();
        CatalogView oldView = (CatalogView) baseTable;
        CatalogView newView = convertViewQuery(newQuery, Collections.emptyList(), oldView.getOptions(), oldView.getComment());
        return new AlterViewAsOperation(viewIdentifier, newView);
    } else {
        throw new ValidationException(String.format("[%s] needs to implement", alterView.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
Also used : AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ValidationException(org.apache.flink.table.api.ValidationException) CatalogViewImpl(org.apache.flink.table.catalog.CatalogViewImpl) SqlAlterViewRename(org.apache.flink.sql.parser.ddl.SqlAlterViewRename) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) SqlAlterViewAs(org.apache.flink.sql.parser.ddl.SqlAlterViewAs) SqlAlterViewProperties(org.apache.flink.sql.parser.ddl.SqlAlterViewProperties) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogView(org.apache.flink.table.catalog.CatalogView) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) SqlNode(org.apache.calcite.sql.SqlNode)
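
For the rename branch, a minimal stand-alone sketch of the resulting operation, again with hand-built identifiers in place of the qualifyIdentifier call; catalog and database names are illustrative:

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.AlterViewRenameOperation;

public class AlterViewRenameSketch {
    public static void main(String[] args) {
        // Identifiers as qualifyIdentifier(...) would return them for
        // "ALTER VIEW db1.v1 RENAME TO v2" under catalog cat1.
        ObjectIdentifier viewIdentifier = ObjectIdentifier.of("cat1", "db1", "v1");
        ObjectIdentifier newViewIdentifier = ObjectIdentifier.of("cat1", "db1", "v2");

        // Same constructor the SqlAlterViewRename branch above uses.
        AlterViewRenameOperation operation =
                new AlterViewRenameOperation(viewIdentifier, newViewIdentifier);
        System.out.println(operation.asSummaryString());
    }
}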

Aggregations

ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier) 183
CatalogTable (org.apache.flink.table.catalog.CatalogTable) 64
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier) 60
ValidationException (org.apache.flink.table.api.ValidationException) 59
HashMap (java.util.HashMap) 57
LinkedHashMap (java.util.LinkedHashMap) 48
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable) 42
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable) 41
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable) 32
ArrayList (java.util.ArrayList) 30
Map (java.util.Map) 27
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint) 27
CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec) 24
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) 24
TableException (org.apache.flink.table.api.TableException) 23
TableSchema (org.apache.flink.table.api.TableSchema) 21
CatalogView (org.apache.flink.table.catalog.CatalogView) 21
QueryOperation (org.apache.flink.table.operations.QueryOperation) 18
HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) 18
List (java.util.List) 16