Search in sources:

Example 31 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

In the class PushPartitionIntoTableSourceScanRule, method matches:

@Override
public boolean matches(RelOptRuleCall call) {
    Filter filter = call.rel(0);
    if (filter.getCondition() == null) {
        return false;
    }
    // The scan must be backed by a TableSourceTable whose source can accept partition push-down.
    TableSourceTable tableSourceTable = call.rel(1).getTable().unwrap(TableSourceTable.class);
    if (tableSourceTable == null) {
        return false;
    }
    DynamicTableSource dynamicTableSource = tableSourceTable.tableSource();
    if (!(dynamicTableSource instanceof SupportsPartitionPushDown)) {
        return false;
    }
    // The catalog table must be partitioned and declare at least one partition key.
    CatalogTable catalogTable = tableSourceTable.contextResolvedTable().getTable();
    if (!catalogTable.isPartitioned() || catalogTable.getPartitionKeys().isEmpty()) {
        return false;
    }
    // Do not match again once a PartitionPushDownSpec has already been applied.
    return Arrays.stream(tableSourceTable.abilitySpecs())
            .noneMatch(spec -> spec instanceof PartitionPushDownSpec);
}
Also used: PartitionPushDownSpec (org.apache.flink.table.planner.plan.abilities.source.PartitionPushDownSpec), Filter (org.apache.calcite.rel.core.Filter), TableSourceTable (org.apache.flink.table.planner.plan.schema.TableSourceTable), CatalogTable (org.apache.flink.table.catalog.CatalogTable), ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable), DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource), SupportsPartitionPushDown (org.apache.flink.table.connector.source.abilities.SupportsPartitionPushDown)
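
The partition guard above can be exercised directly against a CatalogTable built with the same factory the later examples use. A minimal sketch, assuming nothing beyond the API calls shown in these examples (the table name, columns, and comment are made up, not from Flink's sources):

import java.util.Collections;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.CatalogTable;

public class PartitionGuardSketch {
    public static void main(String[] args) {
        // A hypothetical table partitioned by "region".
        CatalogTable table = CatalogTable.of(
                Schema.newBuilder()
                        .column("region", DataTypes.STRING())
                        .column("amount", DataTypes.BIGINT())
                        .build(),
                "sketch table",
                Collections.singletonList("region"),
                Collections.emptyMap());
        // Mirrors the guard in matches(): both checks must pass for the rule to fire.
        System.out.println(table.isPartitioned());    // true
        System.out.println(table.getPartitionKeys()); // [region]
    }
}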

Example 32 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

In the class SqlToOperationConverter, method convertAlterView:

/**
 * Converts an ALTER VIEW statement into the corresponding {@code Operation}.
 */
private Operation convertAlterView(SqlAlterView alterView) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(alterView.fullViewName());
    ObjectIdentifier viewIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(viewIdentifier);
    // The identifier must resolve to an existing, non-temporary view.
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(
                String.format(
                        "View %s doesn't exist or is a temporary view.", viewIdentifier.toString()));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    // A CatalogTable (rather than a CatalogView) means the name refers to a table.
    if (baseTable instanceof CatalogTable) {
        throw new ValidationException("ALTER VIEW for a table is not allowed");
    }
    if (alterView instanceof SqlAlterViewRename) {
        UnresolvedIdentifier newUnresolvedIdentifier = UnresolvedIdentifier.of(((SqlAlterViewRename) alterView).fullNewViewName());
        ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterViewRenameOperation(viewIdentifier, newTableIdentifier);
    } else if (alterView instanceof SqlAlterViewProperties) {
        SqlAlterViewProperties alterViewProperties = (SqlAlterViewProperties) alterView;
        CatalogView oldView = (CatalogView) baseTable;
        Map<String, String> newProperties = new HashMap<>(oldView.getOptions());
        newProperties.putAll(
                OperationConverterUtils.extractProperties(alterViewProperties.getPropertyList()));
        CatalogView newView =
                new CatalogViewImpl(
                        oldView.getOriginalQuery(),
                        oldView.getExpandedQuery(),
                        oldView.getSchema(),
                        newProperties,
                        oldView.getComment());
        return new AlterViewPropertiesOperation(viewIdentifier, newView);
    } else if (alterView instanceof SqlAlterViewAs) {
        SqlAlterViewAs alterViewAs = (SqlAlterViewAs) alterView;
        final SqlNode newQuery = alterViewAs.getNewQuery();
        CatalogView oldView = (CatalogView) baseTable;
        CatalogView newView = convertViewQuery(newQuery, Collections.emptyList(), oldView.getOptions(), oldView.getComment());
        return new AlterViewAsOperation(viewIdentifier, newView);
    } else {
        throw new ValidationException(String.format("[%s] needs to implement", alterView.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
Also used: AlterViewPropertiesOperation (org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation), AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ValidationException (org.apache.flink.table.api.ValidationException), CatalogViewImpl (org.apache.flink.table.catalog.CatalogViewImpl), SqlAlterViewRename (org.apache.flink.sql.parser.ddl.SqlAlterViewRename), UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier), CatalogTable (org.apache.flink.table.catalog.CatalogTable), ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable), AlterViewRenameOperation (org.apache.flink.table.operations.ddl.AlterViewRenameOperation), SqlAlterViewAs (org.apache.flink.sql.parser.ddl.SqlAlterViewAs), SqlAlterViewProperties (org.apache.flink.sql.parser.ddl.SqlAlterViewProperties), ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable), CatalogView (org.apache.flink.table.catalog.CatalogView), Map (java.util.Map), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier), SqlNode (org.apache.calcite.sql.SqlNode)
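
For orientation, the three branches map one-to-one onto the three ALTER VIEW forms the parser classes above represent. A hedged end-to-end sketch (view names and the property key are invented; assumes a Flink version matching the snippet above, where EnvironmentSettings.inStreamingMode() is available):

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AlterViewSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        tEnv.executeSql("CREATE VIEW v AS SELECT 1 AS x");
        // SqlAlterViewRename -> AlterViewRenameOperation
        tEnv.executeSql("ALTER VIEW v RENAME TO v2");
        // SqlAlterViewProperties -> AlterViewPropertiesOperation
        tEnv.executeSql("ALTER VIEW v2 SET ('note' = 'demo')");
        // SqlAlterViewAs -> AlterViewAsOperation
        tEnv.executeSql("ALTER VIEW v2 AS SELECT 2 AS x");
    }
}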

Example 33 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

In the class SqlCreateTableConverter, method convertCreateTable:

/**
 * Converts the {@link SqlCreateTable} node into a {@code CreateTableOperation}.
 */
Operation convertCreateTable(SqlCreateTable sqlCreateTable) {
    sqlCreateTable.getTableConstraints().forEach(validateTableConstraint);
    CatalogTable catalogTable = createCatalogTable(sqlCreateTable);
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlCreateTable.fullTableName());
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    return new CreateTableOperation(identifier, catalogTable, sqlCreateTable.isIfNotExists(), sqlCreateTable.isTemporary());
}
Also used: UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier), CatalogTable (org.apache.flink.table.catalog.CatalogTable), CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
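
The converter's output can be reproduced by hand, which makes the mapping explicit. A minimal sketch; the catalog, database, and table names and the 'datagen' connector choice are illustrative assumptions, while the constructor arguments follow the call shown above:

import java.util.Collections;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.CreateTableOperation;

public class CreateTableOperationSketch {
    public static void main(String[] args) {
        // Hypothetical table definition, mirroring what convertCreateTable assembles.
        CatalogTable table = CatalogTable.of(
                Schema.newBuilder().column("id", DataTypes.BIGINT()).build(),
                null, // no comment
                Collections.emptyList(), // not partitioned
                Collections.singletonMap("connector", "datagen"));
        ObjectIdentifier identifier =
                ObjectIdentifier.of("default_catalog", "default_database", "t1");
        CreateTableOperation operation = new CreateTableOperation(
                identifier, table, /* ignoreIfExists */ false, /* isTemporary */ false);
        System.out.println(operation.asSummaryString());
    }
}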

Example 34 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

In the class SqlToOperationConverterTest, method prepareTable:

private void prepareTable(boolean managedTable, boolean hasPartition, boolean hasConstraint) throws Exception {
    Catalog catalog = new GenericInMemoryCatalog("default", "default");
    catalogManager.registerCatalog("cat1", catalog);
    catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
    Schema.Builder builder =
            Schema.newBuilder()
                    .column("a", DataTypes.STRING().notNull())
                    .column("b", DataTypes.BIGINT().notNull())
                    .column("c", DataTypes.BIGINT());
    Map<String, String> options = new HashMap<>();
    options.put("k", "v");
    if (!managedTable) {
        // Managed tables omit the connector option; everything else declares one.
        options.put("connector", "dummy");
    }
    CatalogTable catalogTable =
            CatalogTable.of(
                    hasConstraint ? builder.primaryKeyNamed("ct1", "a", "b").build() : builder.build(),
                    "tb1",
                    hasPartition ? Arrays.asList("b", "c") : Collections.emptyList(),
                    Collections.unmodifiableMap(options));
    catalogManager.setCurrentCatalog("cat1");
    catalogManager.setCurrentDatabase("db1");
    ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1");
    catalogManager.createTable(catalogTable, tableIdentifier, true);
}
Also used: HashMap (java.util.HashMap), TableSchema (org.apache.flink.table.api.TableSchema), OperationMatchers.withSchema (org.apache.flink.table.planner.utils.OperationMatchers.withSchema), CatalogManagerCalciteSchema (org.apache.flink.table.planner.catalog.CatalogManagerCalciteSchema), Schema (org.apache.flink.table.api.Schema), CalciteSchemaBuilder.asRootSchema (org.apache.calcite.jdbc.CalciteSchemaBuilder.asRootSchema), CatalogTable (org.apache.flink.table.catalog.CatalogTable), Catalog (org.apache.flink.table.catalog.Catalog), GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog), FunctionCatalog (org.apache.flink.table.catalog.FunctionCatalog), CatalogDatabaseImpl (org.apache.flink.table.catalog.CatalogDatabaseImpl), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
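
The three flags select which table variant a test registers. Hypothetical calls, with the effects read directly off the method body above:

prepareTable(false, true, false); // cat1.db1.tb1 with connector 'dummy', partitioned by (b, c), no primary key
prepareTable(true, false, true);  // managed table (no connector option) with primary key ct1 on (a, b)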

Example 35 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

In the class SqlToOperationConverterTest, method testCreateTableLikeInvalidPartition:

@Test
public void testCreateTableLikeInvalidPartition() {
    CatalogTable catalogTable =
            CatalogTable.of(
                    Schema.newBuilder().column("f0", DataTypes.INT().notNull()).build(),
                    null,
                    Collections.emptyList(),
                    Collections.emptyMap());
    catalogManager.createTable(catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false);
    final String sql = "create table derivedTable(\n" + "  a int\n" + ")\n" + "PARTITIONED BY (f3)\n" + "like sourceTable";
    assertThatThrownBy(() -> parseAndConvert(sql))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining(
                    "Partition column 'f3' not defined in the table schema. Available columns: ['f0', 'a']");
}
Also used: ValidationException (org.apache.flink.table.api.ValidationException), CatalogTable (org.apache.flink.table.catalog.CatalogTable), Test (org.junit.Test)
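
For contrast, partitioning the derived table on a column that does exist in the merged schema, 'f0' inherited from sourceTable or 'a' from the new definition, should presumably pass this validation (hedged: inferred from the error message's list of available columns, not verified against the test suite):

create table derivedTable(
  a int
)
PARTITIONED BY (a)
like sourceTable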

Aggregations

CatalogTable (org.apache.flink.table.catalog.CatalogTable): 68
Test (org.junit.Test): 35
HashMap (java.util.HashMap): 30
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 24
TableSchema (org.apache.flink.table.api.TableSchema): 17
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 17
CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation): 14
ValidationException (org.apache.flink.table.api.ValidationException): 13
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 13
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 12
Operation (org.apache.flink.table.operations.Operation): 12
AlterTableAddConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation): 12
AlterTableDropConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation): 12
AlterTableOptionsOperation (org.apache.flink.table.operations.ddl.AlterTableOptionsOperation): 12
AlterTableRenameOperation (org.apache.flink.table.operations.ddl.AlterTableRenameOperation): 12
ExplainOperation (org.apache.flink.table.operations.ExplainOperation): 11
LoadModuleOperation (org.apache.flink.table.operations.LoadModuleOperation): 11
QueryOperation (org.apache.flink.table.operations.QueryOperation): 11
ShowFunctionsOperation (org.apache.flink.table.operations.ShowFunctionsOperation): 11
ShowModulesOperation (org.apache.flink.table.operations.ShowModulesOperation): 11