Example 31 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

From the class TableEnvironmentTest, the method testCreateTableFromDescriptor:

@Test
public void testCreateTableFromDescriptor() throws Exception {
    final TableEnvironmentMock tEnv = TableEnvironmentMock.getStreamingInstance();
    final String catalog = tEnv.getCurrentCatalog();
    final String database = tEnv.getCurrentDatabase();
    final Schema schema = Schema.newBuilder().column("f0", DataTypes.INT()).build();
    tEnv.createTable("T", TableDescriptor.forConnector("fake").schema(schema).option("a", "Test").build());
    final ObjectPath objectPath = new ObjectPath(database, "T");
    assertThat(tEnv.getCatalog(catalog).orElseThrow(AssertionError::new).tableExists(objectPath)).isTrue();
    final CatalogBaseTable catalogTable = tEnv.getCatalog(catalog).orElseThrow(AssertionError::new).getTable(objectPath);
    assertThat(catalogTable).isInstanceOf(CatalogTable.class);
    assertThat(catalogTable.getUnresolvedSchema()).isEqualTo(schema);
    assertThat(catalogTable.getOptions()).contains(entry("connector", "fake"), entry("a", "Test"));
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) TableEnvironmentMock(org.apache.flink.table.utils.TableEnvironmentMock) Test(org.junit.jupiter.api.Test)
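
For orientation, the same descriptor-based registration can be sketched against the public Table API instead of the test mock. A minimal sketch, assuming the 'datagen' connector is on the classpath; the table name, columns, and options are illustrative choices, not taken from the test above.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.TableDescriptor;
import org.apache.flink.table.api.TableEnvironment;

public class CreateTableFromDescriptorSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());

        // Same pattern as the test: build an unresolved Schema, wrap it in a
        // TableDescriptor, and register it under a name in the current catalog/database.
        Schema schema = Schema.newBuilder()
                .column("id", DataTypes.BIGINT())
                .column("name", DataTypes.STRING())
                .build();

        tEnv.createTable("Orders", TableDescriptor.forConnector("datagen")
                .schema(schema)
                .option("rows-per-second", "5") // illustrative connector option
                .build());

        // As in the test, the descriptor's schema and options now live on the catalog entry.
        tEnv.from("Orders").printSchema();
    }
}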

Example 32 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

From the class TableEnvironmentImpl, the method registerTableSinkInternal:

@Override
public void registerTableSinkInternal(String name, TableSink<?> tableSink) {
    ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
    Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);
    if (table.isPresent()) {
        if (table.get() instanceof ConnectorCatalogTable<?, ?>) {
            ConnectorCatalogTable<?, ?> sourceSinkTable = (ConnectorCatalogTable<?, ?>) table.get();
            if (sourceSinkTable.getTableSink().isPresent()) {
                throw new ValidationException(String.format("Table '%s' already exists. Please choose a different name.", name));
            } else {
                // wrapper contains only a source (not a sink); merge in the new sink
                ConnectorCatalogTable sourceAndSink = ConnectorCatalogTable.sourceAndSink(sourceSinkTable.getTableSource().get(), tableSink, !IS_STREAM_TABLE);
                catalogManager.dropTemporaryTable(objectIdentifier, false);
                catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false);
            }
        } else {
            throw new ValidationException(String.format("Table '%s' already exists. Please choose a different name.", name));
        }
    } else {
        ConnectorCatalogTable sink = ConnectorCatalogTable.sink(tableSink, !IS_STREAM_TABLE);
        catalogManager.createTemporaryTable(sink, objectIdentifier, false);
    }
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ValidationException(org.apache.flink.table.api.ValidationException) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
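
The branching above is easy to misread, so here is a simplified, self-contained restatement of the merge rules using plain java.util.Optional. Wrapper and register are hypothetical stand-ins for ConnectorCatalogTable and the method above; they are not Flink API.

import java.util.Optional;

final class SinkRegistrationSketch {

    // Stand-in for ConnectorCatalogTable: may hold a source, a sink, or both.
    static final class Wrapper {
        final Optional<String> source;
        final Optional<String> sink;

        Wrapper(Optional<String> source, Optional<String> sink) {
            this.source = source;
            this.sink = sink;
        }
    }

    static Wrapper register(Optional<Wrapper> existing, String newSink) {
        if (!existing.isPresent()) {
            // No temporary table under this name yet: register a sink-only wrapper.
            return new Wrapper(Optional.empty(), Optional.of(newSink));
        }
        Wrapper current = existing.get();
        if (current.sink.isPresent()) {
            // A sink was already registered under this name: reject the duplicate.
            throw new IllegalStateException("Table already exists. Please choose a different name.");
        }
        // Source-only wrapper: replace it with a combined source-and-sink wrapper,
        // mirroring the dropTemporaryTable/createTemporaryTable pair above.
        return new Wrapper(current.source, Optional.of(newSink));
    }
}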

Example 33 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

From the class SqlToOperationConverter, the method convertAlterTable:

/**
 * Converts an ALTER TABLE statement into the corresponding {@link Operation}.
 */
private Operation convertAlterTable(SqlAlterTable sqlAlterTable) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlAlterTable.fullTableName());
    ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(tableIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(String.format("Table %s doesn't exist or is a temporary table.", tableIdentifier));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogView) {
        throw new ValidationException("ALTER TABLE for a view is not allowed");
    }
    if (sqlAlterTable instanceof SqlAlterTableRename) {
        UnresolvedIdentifier newUnresolvedIdentifier = UnresolvedIdentifier.of(((SqlAlterTableRename) sqlAlterTable).fullNewTableName());
        ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterTableRenameOperation(tableIdentifier, newTableIdentifier);
    } else if (sqlAlterTable instanceof SqlAlterTableOptions) {
        return convertAlterTableOptions(tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableOptions) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableReset) {
        return convertAlterTableReset(tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableReset) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableAddConstraint) {
        SqlTableConstraint constraint = ((SqlAlterTableAddConstraint) sqlAlterTable).getConstraint();
        validateTableConstraint(constraint);
        TableSchema oriSchema = TableSchema.fromResolvedSchema(baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        // Sanity check for constraint.
        TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriSchema);
        if (constraint.getConstraintName().isPresent()) {
            builder.primaryKey(constraint.getConstraintName().get(), constraint.getColumnNames());
        } else {
            builder.primaryKey(constraint.getColumnNames());
        }
        builder.build();
        return new AlterTableAddConstraintOperation(tableIdentifier, constraint.getConstraintName().orElse(null), constraint.getColumnNames());
    } else if (sqlAlterTable instanceof SqlAlterTableDropConstraint) {
        SqlAlterTableDropConstraint dropConstraint = ((SqlAlterTableDropConstraint) sqlAlterTable);
        String constraintName = dropConstraint.getConstraintName().getSimple();
        TableSchema oriSchema = TableSchema.fromResolvedSchema(baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        if (!oriSchema.getPrimaryKey().filter(pk -> pk.getName().equals(constraintName)).isPresent()) {
            throw new ValidationException(String.format("CONSTRAINT [%s] does not exist", constraintName));
        }
        return new AlterTableDropConstraintOperation(tableIdentifier, constraintName);
    } else if (sqlAlterTable instanceof SqlAddReplaceColumns) {
        return OperationConverterUtils.convertAddReplaceColumns(tableIdentifier, (SqlAddReplaceColumns) sqlAlterTable, (CatalogTable) baseTable, flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlChangeColumn) {
        return OperationConverterUtils.convertChangeColumn(tableIdentifier, (SqlChangeColumn) sqlAlterTable, (CatalogTable) baseTable, flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlAddPartitions) {
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        List<CatalogPartition> partitions = new ArrayList<>();
        SqlAddPartitions addPartitions = (SqlAddPartitions) sqlAlterTable;
        for (int i = 0; i < addPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(addPartitions.getPartitionKVs(i)));
            Map<String, String> props = OperationConverterUtils.extractProperties(addPartitions.getPartProps().get(i));
            partitions.add(new CatalogPartitionImpl(props, null));
        }
        return new AddPartitionsOperation(tableIdentifier, addPartitions.ifNotExists(), specs, partitions);
    } else if (sqlAlterTable instanceof SqlDropPartitions) {
        SqlDropPartitions dropPartitions = (SqlDropPartitions) sqlAlterTable;
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        for (int i = 0; i < dropPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(dropPartitions.getPartitionKVs(i)));
        }
        return new DropPartitionsOperation(tableIdentifier, dropPartitions.ifExists(), specs);
    } else if (sqlAlterTable instanceof SqlAlterTableCompact) {
        return convertAlterTableCompact(tableIdentifier, optionalCatalogTable.get(), (SqlAlterTableCompact) sqlAlterTable);
    } else {
        throw new ValidationException(String.format("[%s] needs to implement", sqlAlterTable.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) SqlAlterTableReset(org.apache.flink.sql.parser.ddl.SqlAlterTableReset) ArrayList(java.util.ArrayList) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) SqlAlterTableRename(org.apache.flink.sql.parser.ddl.SqlAlterTableRename) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) SqlAddReplaceColumns(org.apache.flink.sql.parser.ddl.SqlAddReplaceColumns) SqlAddPartitions(org.apache.flink.sql.parser.ddl.SqlAddPartitions) SqlAlterTableAddConstraint(org.apache.flink.sql.parser.ddl.SqlAlterTableAddConstraint) SqlAlterTableCompact(org.apache.flink.sql.parser.ddl.SqlAlterTableCompact) List(java.util.List) SqlNodeList(org.apache.calcite.sql.SqlNodeList) SqlTableConstraint(org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint) CatalogView(org.apache.flink.table.catalog.CatalogView) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) RelHint(org.apache.calcite.rel.hint.RelHint) SqlAlterTableDropConstraint(org.apache.flink.sql.parser.ddl.SqlAlterTableDropConstraint) SqlChangeColumn(org.apache.flink.sql.parser.ddl.SqlChangeColumn) SqlAlterTableOptions(org.apache.flink.sql.parser.ddl.SqlAlterTableOptions) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CatalogPartitionImpl(org.apache.flink.table.catalog.CatalogPartitionImpl) SqlDropPartitions(org.apache.flink.sql.parser.ddl.SqlDropPartitions)
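
These branches are normally reached through SQL rather than by calling the converter directly. A minimal sketch, assuming the 'datagen' connector is available, of two statements that exercise the SqlAlterTableOptions and SqlAlterTableRename paths above:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AlterTableSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());

        tEnv.executeSql(
                "CREATE TABLE t (id BIGINT, name STRING) WITH ('connector' = 'datagen')");

        // Parsed as SqlAlterTableOptions and handled by convertAlterTableOptions.
        tEnv.executeSql("ALTER TABLE t SET ('rows-per-second' = '10')");

        // Parsed as SqlAlterTableRename and converted to an AlterTableRenameOperation.
        tEnv.executeSql("ALTER TABLE t RENAME TO t2");
    }
}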

Example 34 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

From the class TestValuesCatalog, the method listPartitionsByFilter:

@Override
public List<CatalogPartitionSpec> listPartitionsByFilter(ObjectPath tablePath, List<Expression> filters) throws TableNotExistException, TableNotPartitionedException, CatalogException {
    if (!supportListPartitionByFilter) {
        throw new UnsupportedOperationException("TestValuesCatalog doesn't support list partition by filters");
    }
    List<CatalogPartitionSpec> partitions = listPartitions(tablePath);
    if (partitions.isEmpty()) {
        return partitions;
    }
    CatalogBaseTable table = this.getTable(tablePath);
    TableSchema schema = table.getSchema();
    List<ResolvedExpression> resolvedExpressions = filters.stream().map(filter -> {
        if (filter instanceof ResolvedExpression) {
            return (ResolvedExpression) filter;
        }
        throw new UnsupportedOperationException(String.format("TestValuesCatalog only works with resolved expressions. Get unresolved expression: %s", filter));
    }).collect(Collectors.toList());
    return partitions.stream().filter(partition -> {
        Function<String, Comparable<?>> getter = getValueGetter(partition.getPartitionSpec(), schema);
        return FilterUtils.isRetainedAfterApplyingFilterPredicates(resolvedExpressions, getter);
    }).collect(Collectors.toList());
}
Also used : DataType(org.apache.flink.table.types.DataType) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) FilterUtils(org.apache.flink.table.planner.utils.FilterUtils) IntType(org.apache.flink.table.types.logical.IntType) TableException(org.apache.flink.table.api.TableException) TableSchema(org.apache.flink.table.api.TableSchema) VarCharType(org.apache.flink.table.types.logical.VarCharType) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) Expression(org.apache.flink.table.expressions.Expression) ObjectPath(org.apache.flink.table.catalog.ObjectPath) Function(java.util.function.Function) Collectors(java.util.stream.Collectors) CharType(org.apache.flink.table.types.logical.CharType) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) List(java.util.List) TableNotPartitionedException(org.apache.flink.table.catalog.exceptions.TableNotPartitionedException) DoubleType(org.apache.flink.table.types.logical.DoubleType) LogicalType(org.apache.flink.table.types.logical.LogicalType) BooleanType(org.apache.flink.table.types.logical.BooleanType) ResolvedExpression(org.apache.flink.table.expressions.ResolvedExpression) Map(java.util.Map) Optional(java.util.Optional) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException)
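
The pruning loop at the end is the core of the method: each partition's key/value spec is exposed through a value getter, and the partition is kept only if the already-resolved predicates hold. A simplified, self-contained sketch of that loop; prune and the hard-coded predicate are illustrative stand-ins for listPartitionsByFilter and FilterUtils.isRetainedAfterApplyingFilterPredicates, not Flink API.

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;

final class PartitionFilterSketch {

    // Keep only the partition specs for which the predicate holds, given a
    // per-partition value getter (mirrors the stream pipeline above).
    static List<Map<String, String>> prune(
            List<Map<String, String>> partitionSpecs,
            Predicate<Function<String, Comparable<?>>> retained) {
        return partitionSpecs.stream()
                .filter(spec -> retained.test(spec::get))
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<Map<String, String>> specs = List.of(
                Map.of("dt", "2024-01-01"),
                Map.of("dt", "2024-01-02"));

        // Stand-in for a resolved filter 'dt = 2024-01-02'.
        List<Map<String, String>> kept =
                prune(specs, getter -> "2024-01-02".equals(getter.apply("dt")));

        System.out.println(kept); // prints [{dt=2024-01-02}]
    }
}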

Aggregations

CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 34 usages
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 17 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 15 usages
Test (org.junit.Test): 14 usages
ValidationException (org.apache.flink.table.api.ValidationException): 11 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 10 usages
CatalogView (org.apache.flink.table.catalog.CatalogView): 9 usages
TableSchema (org.apache.flink.table.api.TableSchema): 8 usages
Table (org.apache.hadoop.hive.metastore.api.Table): 7 usages
HashMap (java.util.HashMap): 6 usages
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 6 usages
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 5 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 5 usages
LinkedHashMap (java.util.LinkedHashMap): 4 usages
Map (java.util.Map): 4 usages
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 4 usages
AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation): 4 usages
ArrayList (java.util.ArrayList): 3 usages
TableEnvironment (org.apache.flink.table.api.TableEnvironment): 3 usages
CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 3 usages