Example 61 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

The class SqlToOperationConverterTest, method testCreateTableWithFullDataTypes.

// TODO: tweak the tests when FLINK-13604 is fixed.
@Test
public void testCreateTableWithFullDataTypes() {
    final List<TestItem> testItems =
            Arrays.asList(
                    createTestItem("CHAR", DataTypes.CHAR(1)),
                    createTestItem("CHAR NOT NULL", DataTypes.CHAR(1).notNull()),
                    createTestItem("CHAR NULL", DataTypes.CHAR(1)),
                    createTestItem("CHAR(33)", DataTypes.CHAR(33)),
                    createTestItem("VARCHAR", DataTypes.STRING()),
                    createTestItem("VARCHAR(33)", DataTypes.VARCHAR(33)),
                    createTestItem("STRING", DataTypes.STRING()),
                    createTestItem("BOOLEAN", DataTypes.BOOLEAN()),
                    createTestItem("BINARY", DataTypes.BINARY(1)),
                    createTestItem("BINARY(33)", DataTypes.BINARY(33)),
                    createTestItem("VARBINARY", DataTypes.BYTES()),
                    createTestItem("VARBINARY(33)", DataTypes.VARBINARY(33)),
                    createTestItem("BYTES", DataTypes.BYTES()),
                    createTestItem("DECIMAL", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DEC", DataTypes.DECIMAL(10, 0)),
                    createTestItem("NUMERIC", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DECIMAL(10)", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DEC(10)", DataTypes.DECIMAL(10, 0)),
                    createTestItem("NUMERIC(10)", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DECIMAL(10, 3)", DataTypes.DECIMAL(10, 3)),
                    createTestItem("DEC(10, 3)", DataTypes.DECIMAL(10, 3)),
                    createTestItem("NUMERIC(10, 3)", DataTypes.DECIMAL(10, 3)),
                    createTestItem("TINYINT", DataTypes.TINYINT()),
                    createTestItem("SMALLINT", DataTypes.SMALLINT()),
                    createTestItem("INTEGER", DataTypes.INT()),
                    createTestItem("INT", DataTypes.INT()),
                    createTestItem("BIGINT", DataTypes.BIGINT()),
                    createTestItem("FLOAT", DataTypes.FLOAT()),
                    createTestItem("DOUBLE", DataTypes.DOUBLE()),
                    createTestItem("DOUBLE PRECISION", DataTypes.DOUBLE()),
                    createTestItem("DATE", DataTypes.DATE()),
                    createTestItem("TIME", DataTypes.TIME()),
                    createTestItem("TIME WITHOUT TIME ZONE", DataTypes.TIME()),
                    // Expect to be TIME(3).
                    createTestItem("TIME(3)", DataTypes.TIME()),
                    // Expect to be TIME(3).
                    createTestItem("TIME(3) WITHOUT TIME ZONE", DataTypes.TIME()),
                    createTestItem("TIMESTAMP", DataTypes.TIMESTAMP(6)),
                    createTestItem("TIMESTAMP WITHOUT TIME ZONE", DataTypes.TIMESTAMP(6)),
                    createTestItem("TIMESTAMP(3)", DataTypes.TIMESTAMP(3)),
                    createTestItem("TIMESTAMP(3) WITHOUT TIME ZONE", DataTypes.TIMESTAMP(3)),
                    createTestItem("TIMESTAMP WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6)),
                    createTestItem("TIMESTAMP(3) WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)),
                    createTestItem("ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>", DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))),
                    createTestItem("ARRAY<INT NOT NULL>", DataTypes.ARRAY(DataTypes.INT().notNull())),
                    createTestItem("INT ARRAY", DataTypes.ARRAY(DataTypes.INT())),
                    createTestItem("INT NOT NULL ARRAY", DataTypes.ARRAY(DataTypes.INT().notNull())),
                    createTestItem("INT ARRAY NOT NULL", DataTypes.ARRAY(DataTypes.INT()).notNull()),
                    createTestItem("MULTISET<INT NOT NULL>", DataTypes.MULTISET(DataTypes.INT().notNull())),
                    createTestItem("INT MULTISET", DataTypes.MULTISET(DataTypes.INT())),
                    createTestItem("INT NOT NULL MULTISET", DataTypes.MULTISET(DataTypes.INT().notNull())),
                    createTestItem("INT MULTISET NOT NULL", DataTypes.MULTISET(DataTypes.INT()).notNull()),
                    createTestItem("MAP<BIGINT, BOOLEAN>", DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BOOLEAN())),
                    // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
                    createTestItem("ROW<f0 INT NOT NULL, f1 BOOLEAN>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
                    // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
                    createTestItem("ROW(f0 INT NOT NULL, f1 BOOLEAN)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
                    createTestItem("ROW<`f0` INT>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
                    createTestItem("ROW(`f0` INT)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
                    createTestItem("ROW<>", DataTypes.ROW()),
                    createTestItem("ROW()", DataTypes.ROW()),
                    // Expect to be ROW<`f0` INT NOT NULL '...', `f1` BOOLEAN '...'>.
                    createTestItem("ROW<f0 INT NOT NULL 'This is a comment.'," + " f1 BOOLEAN 'This as well.'>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
                    createTestItem("ARRAY<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.ARRAY(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
                    createTestItem("ROW<f0 INT, f1 BOOLEAN> MULTISET", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
                    createTestItem("MULTISET<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
                    createTestItem(
                            "ROW<f0 Row<f00 INT, f01 BOOLEAN>, " + "f1 INT ARRAY, " + "f2 BOOLEAN MULTISET>",
                            DataTypes.ROW(
                                    DataTypes.FIELD("f0", DataTypes.ROW(DataTypes.FIELD("f00", DataTypes.INT()), DataTypes.FIELD("f01", DataTypes.BOOLEAN()))),
                                    DataTypes.FIELD("f1", DataTypes.ARRAY(DataTypes.INT())),
                                    DataTypes.FIELD("f2", DataTypes.MULTISET(DataTypes.BOOLEAN())))));
    StringBuilder buffer = new StringBuilder("create table t1(\n");
    for (int i = 0; i < testItems.size(); i++) {
        buffer.append("f").append(i).append(" ").append(testItems.get(i).testExpr);
        if (i == testItems.size() - 1) {
            buffer.append(")");
        } else {
            buffer.append(",\n");
        }
    }
    final String sql = buffer.toString();
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
    TableSchema schema = ((CreateTableOperation) operation).getCatalogTable().getSchema();
    Object[] expectedDataTypes = testItems.stream().map(item -> item.expectedType).toArray();
    assertThat(schema.getFieldDataTypes()).isEqualTo(expectedDataTypes);
}
Also used : FunctionAlreadyExistException(org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException) DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) FlinkPlannerImpl(org.apache.flink.table.planner.calcite.FlinkPlannerImpl) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ExplainDetail(org.apache.flink.table.api.ExplainDetail) OperationMatchers.isCreateTableOperation(org.apache.flink.table.planner.utils.OperationMatchers.isCreateTableOperation) Map(java.util.Map) SqlCreateTable(org.apache.flink.sql.parser.ddl.SqlCreateTable) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) ModuleManager(org.apache.flink.table.module.ModuleManager) TableConfig(org.apache.flink.table.api.TableConfig) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) Set(java.util.Set) TableSchema(org.apache.flink.table.api.TableSchema) JavaUserDefinedScalarFunctions(org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions) UseCatalogOperation(org.apache.flink.table.operations.UseCatalogOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) ExpressionResolverMocks(org.apache.flink.table.utils.ExpressionResolverMocks) CatalogFunction(org.apache.flink.table.catalog.CatalogFunction) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl) ParserImpl(org.apache.flink.table.planner.delegation.ParserImpl) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) ShowJarsOperation(org.apache.flink.table.operations.command.ShowJarsOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) Func8$(org.apache.flink.table.planner.expressions.utils.Func8$) CatalogFunctionImpl(org.apache.flink.table.catalog.CatalogFunctionImpl) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) EndStatementSetOperation(org.apache.flink.table.operations.EndStatementSetOperation) Func0$(org.apache.flink.table.planner.expressions.utils.Func0$) Supplier(java.util.function.Supplier) UseModulesOperation(org.apache.flink.table.operations.UseModulesOperation) TableColumn(org.apache.flink.table.api.TableColumn) Catalog(org.apache.flink.table.catalog.Catalog) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) TestManagedTableFactory(org.apache.flink.table.factories.TestManagedTableFactory) Nullable(javax.annotation.Nullable) Before(org.junit.Before) OperationMatchers.withSchema(org.apache.flink.table.planner.utils.OperationMatchers.withSchema) FunctionScope(org.apache.flink.table.operations.ShowFunctionsOperation.FunctionScope) Func1$(org.apache.flink.table.planner.expressions.utils.Func1$) Test(org.junit.Test) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) TreeMap(java.util.TreeMap) CatalogManagerMocks(org.apache.flink.table.utils.CatalogManagerMocks) CatalogManagerCalciteSchema(org.apache.flink.table.planner.catalog.CatalogManagerCalciteSchema) Schema(org.apache.flink.table.api.Schema) 
Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) OperationMatchers.partitionedBy(org.apache.flink.table.planner.utils.OperationMatchers.partitionedBy) CalciteParser(org.apache.flink.table.planner.parse.CalciteParser) SqlNode(org.apache.calcite.sql.SqlNode) SetOperation(org.apache.flink.table.operations.command.SetOperation) After(org.junit.After) ExecutionOptions(org.apache.flink.configuration.ExecutionOptions) ComputedColumn(org.apache.flink.table.api.TableColumn.ComputedColumn) LoadModuleOperation(org.apache.flink.table.operations.LoadModuleOperation) Operation(org.apache.flink.table.operations.Operation) InstanceOfAssertFactories.type(org.assertj.core.api.InstanceOfAssertFactories.type) Collectors(java.util.stream.Collectors) CalciteSchemaBuilder.asRootSchema(org.apache.calcite.jdbc.CalciteSchemaBuilder.asRootSchema) List(java.util.List) ShowModulesOperation(org.apache.flink.table.operations.ShowModulesOperation) SqlRichExplain(org.apache.flink.sql.parser.dql.SqlRichExplain) SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation) UnloadModuleOperation(org.apache.flink.table.operations.UnloadModuleOperation) ValidationException(org.apache.flink.table.api.ValidationException) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) RemoveJarOperation(org.apache.flink.table.operations.command.RemoveJarOperation) CatalogManager(org.apache.flink.table.catalog.CatalogManager) HamcrestCondition(org.assertj.core.api.HamcrestCondition) OperationMatchers.entry(org.apache.flink.table.planner.utils.OperationMatchers.entry) BeginStatementSetOperation(org.apache.flink.table.operations.BeginStatementSetOperation) HashMap(java.util.HashMap) FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) AddJarOperation(org.apache.flink.table.operations.command.AddJarOperation) ObjectPath(org.apache.flink.table.catalog.ObjectPath) AtomicReference(java.util.concurrent.atomic.AtomicReference) OperationMatchers.withOptions(org.apache.flink.table.planner.utils.OperationMatchers.withOptions) PlannerContext(org.apache.flink.table.planner.delegation.PlannerContext) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) HashSet(java.util.HashSet) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) ResetOperation(org.apache.flink.table.operations.command.ResetOperation) StatementSetOperation(org.apache.flink.table.operations.StatementSetOperation) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) Configuration(org.apache.flink.configuration.Configuration) Parser(org.apache.flink.table.delegation.Parser) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) DataTypes(org.apache.flink.table.api.DataTypes) SqlDialect(org.apache.flink.table.api.SqlDialect) RuntimeExecutionMode(org.apache.flink.api.common.RuntimeExecutionMode) Collections(java.util.Collections)
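
Note that the test relies on a small TestItem helper that the snippet does not include. A minimal sketch of what it plausibly looks like, inferred purely from the call sites above (the names testExpr, expectedType, and createTestItem come from the usages; the rest is an assumption, not the verbatim Flink source):

// Hypothetical reconstruction of the helper used by the test above.
private static class TestItem {
    // SQL type expression under test, e.g. "TIMESTAMP(3)".
    final String testExpr;
    // DataType the converted TableSchema is expected to report for the column.
    final DataType expectedType;

    private TestItem(String testExpr, DataType expectedType) {
        this.testExpr = testExpr;
        this.expectedType = expectedType;
    }
}

private static TestItem createTestItem(String testExpr, DataType expectedType) {
    return new TestItem(testExpr, expectedType);
}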

Example 62 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

The class BatchOperatorNameTest, method testLegacySourceSink.

@Test
public void testLegacySourceSink() {
    TableSchema schema = TestLegacyFilterableTableSource.defaultSchema();
    TestLegacyFilterableTableSource.createTemporaryTable(
            tEnv,
            schema,
            "MySource",
            true,
            TestLegacyFilterableTableSource.defaultRows().toList(),
            TestLegacyFilterableTableSource.defaultFilterableFields());
    TableSink<Row> sink =
            ((BatchTableTestUtil) util)
                    .createCollectTableSink(
                            schema.getFieldNames(),
                            schema.getTableColumns().stream()
                                    .map(col -> col.getType().getLogicalType())
                                    .toArray(LogicalType[]::new));
    util.testingTableEnv().registerTableSinkInternal("MySink", sink);
    verifyInsert("insert into MySink select * from MySource");
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) BatchTableTestUtil(org.apache.flink.table.planner.utils.BatchTableTestUtil) LogicalType(org.apache.flink.table.types.logical.LogicalType) Row(org.apache.flink.types.Row) Test(org.junit.Test)

Example 63 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

The class StreamOperatorNameTest, method testLegacySourceSink.

/**
 * Verify LegacySource and LegacySink.
 */
@Test
public void testLegacySourceSink() {
    TableSchema schema = TestLegacyFilterableTableSource.defaultSchema();
    TestLegacyFilterableTableSource.createTemporaryTable(
            tEnv,
            schema,
            "MySource",
            true,
            TestLegacyFilterableTableSource.defaultRows().toList(),
            TestLegacyFilterableTableSource.defaultFilterableFields());
    TableSink<Row> sink =
            util.createAppendTableSink(
                    schema.getFieldNames(),
                    schema.getTableColumns().stream()
                            .map(col -> col.getType().getLogicalType())
                            .toArray(LogicalType[]::new));
    util.testingTableEnv().registerTableSinkInternal("MySink", sink);
    verifyInsert("insert into MySink select * from MySource");
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) LogicalType(org.apache.flink.table.types.logical.LogicalType) Row(org.apache.flink.types.Row) Test(org.junit.Test)
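
Examples 62 and 63 derive the sink field types from the legacy TableSchema in the same way: each TableColumn carries a DataType, and getLogicalType() unwraps it to the planner-facing LogicalType. A self-contained sketch of just that conversion (the two-column schema here is assumed purely to make the snippet runnable):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.types.logical.LogicalType;

public class SchemaToLogicalTypes {
    public static void main(String[] args) {
        // An assumed schema, for illustration only.
        TableSchema schema = TableSchema.builder()
                .field("id", DataTypes.BIGINT())
                .field("name", DataTypes.STRING())
                .build();

        // The same per-column DataType -> LogicalType mapping used in the tests.
        LogicalType[] types = schema.getTableColumns().stream()
                .map(col -> col.getType().getLogicalType())
                .toArray(LogicalType[]::new);

        String[] names = schema.getFieldNames();
        for (int i = 0; i < names.length; i++) {
            System.out.println(names[i] + ": " + types[i].asSummaryString());
        }
    }
}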

Example 64 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

The class MergeTableLikeUtilTest, method mergeOverwritingComputedColumnWithMetadataColumn.

@Test
public void mergeOverwritingComputedColumnWithMetadataColumn() {
    TableSchema sourceSchema = TableSchema.builder()
            .add(TableColumn.physical("one", DataTypes.INT()))
            .add(TableColumn.computed("two", DataTypes.INT(), "one + 3"))
            .build();
    List<SqlNode> derivedColumns = Collections.singletonList(metadataColumn("two", DataTypes.BOOLEAN(), false));
    Map<FeatureOption, MergingStrategy> mergingStrategies = getDefaultMergingStrategies();
    mergingStrategies.put(FeatureOption.METADATA, MergingStrategy.OVERWRITING);
    thrown.expect(ValidationException.class);
    thrown.expectMessage("A column named 'two' already exists in the base table." + " Metadata columns can only overwrite other metadata columns.");
    util.mergeTables(mergingStrategies, sourceSchema, derivedColumns, Collections.emptyList(), null);
}
Also used : FeatureOption(org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption) TableSchema(org.apache.flink.table.api.TableSchema) MergingStrategy(org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy) SqlNode(org.apache.calcite.sql.SqlNode) Test(org.junit.Test)
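
The thrown field used here (and in the next example) is a JUnit 4 ExpectedException rule that the snippet omits; it is presumably declared on the test class roughly as follows:

import org.junit.Rule;
import org.junit.rules.ExpectedException;

// Assumed declaration backing the thrown.expect(...) calls above.
@Rule
public ExpectedException thrown = ExpectedException.none();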

Example 65 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

The class MergeTableLikeUtilTest, method mergeWithIncludeFailsOnDuplicateRegularColumnAndMetadataColumn.

@Test
public void mergeWithIncludeFailsOnDuplicateRegularColumnAndMetadataColumn() {
    TableSchema sourceSchema = TableSchema.builder().add(TableColumn.physical("one", DataTypes.INT())).build();
    List<SqlNode> derivedColumns = Arrays.asList(
            metadataColumn("two", DataTypes.INT(), true),
            computedColumn("three", plus("two", "3")),
            regularColumn("two", DataTypes.INT()),
            regularColumn("four", DataTypes.STRING()));
    thrown.expect(ValidationException.class);
    thrown.expectMessage("A column named 'two' already exists in the table. " + "Duplicate columns exist in the metadata column and regular column. ");
    util.mergeTables(getDefaultMergingStrategies(), sourceSchema, derivedColumns, Collections.emptyList(), null);
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) SqlNode(org.apache.calcite.sql.SqlNode) Test(org.junit.Test)
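
Examples 64 and 65 both exercise the duplicate-column validation behind CREATE TABLE ... LIKE. A simplified, self-contained sketch of that kind of check, independent of the Flink internals (the helper name and error message are illustrative, not the actual MergeTableLikeUtil implementation):

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class DuplicateColumnCheck {

    /** Rejects a derived column whose name collides with an already-known column. */
    static void validateNoDuplicates(List<String> baseColumns, List<String> derivedColumns) {
        Set<String> seen = new LinkedHashSet<>(baseColumns);
        for (String name : derivedColumns) {
            if (!seen.add(name)) {
                throw new IllegalArgumentException(
                        "A column named '" + name + "' already exists in the base table.");
            }
        }
    }

    public static void main(String[] args) {
        // Mirrors Example 65: the base table has "one"; the derived columns
        // declare "two" twice (once as metadata, once as a regular column).
        validateNoDuplicates(
                Arrays.asList("one"),
                Arrays.asList("two", "three", "two", "four"));
    }
}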

Aggregations

TableSchema (org.apache.flink.table.api.TableSchema): 86
Test (org.junit.Test): 54
HashMap (java.util.HashMap): 26
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 21
SqlNode (org.apache.calcite.sql.SqlNode): 19
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 18
DataType (org.apache.flink.table.types.DataType): 16
ValidationException (org.apache.flink.table.api.ValidationException): 14
TableColumn (org.apache.flink.table.api.TableColumn): 10
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 10
ArrayList (java.util.ArrayList): 9
List (java.util.List): 9
Map (java.util.Map): 9
FeatureOption (org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption): 9
MergingStrategy (org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy): 9
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 8
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 8
Arrays (java.util.Arrays): 7
Configuration (org.apache.flink.configuration.Configuration): 7