Search in sources :

Example 16 with CatalogManager

Usage of org.apache.flink.table.catalog.CatalogManager in the Apache Flink project.

From class AbstractStreamTableEnvironmentImpl, method fromStreamInternal.

/**
 * Converts a {@link DataStream} into a {@link Table}, optionally registering the result as a
 * temporary view under {@code viewPath}.
 *
 * @param dataStream the stream to convert; must originate from this environment's
 *     {@code StreamExecutionEnvironment}
 * @param schema optional schema overriding the one derived from the stream's type information
 * @param viewPath optional path under which the table is registered as a temporary view;
 *     when {@code null} an anonymous table is created
 * @param changelogMode the changelog semantics of the stream
 * @return a table backed by the given stream, with projections applied if schema translation
 *     requires them
 */
protected <T> Table fromStreamInternal(DataStream<T> dataStream, @Nullable Schema schema, @Nullable String viewPath, ChangelogMode changelogMode) {
    Preconditions.checkNotNull(dataStream, "Data stream must not be null.");
    Preconditions.checkNotNull(changelogMode, "Changelog mode must not be null.");
    if (dataStream.getExecutionEnvironment() != executionEnvironment) {
        throw new ValidationException("The DataStream's StreamExecutionEnvironment must be identical to the one that " + "has been passed to the StreamTableEnvironment during instantiation.");
    }
    final CatalogManager catalogManager = getCatalogManager();
    // Derive schema (plus any projections needed) from the stream's type information.
    final SchemaTranslator.ConsumingResult translation =
            SchemaTranslator.createConsumingResult(
                    catalogManager.getDataTypeFactory(), dataStream.getType(), schema);
    final ResolvedCatalogTable resolvedTable =
            catalogManager.resolveCatalogTable(new ExternalCatalogTable(translation.getSchema()));
    final ContextResolvedTable tableReference;
    if (viewPath == null) {
        tableReference = ContextResolvedTable.anonymous("datastream_source", resolvedTable);
    } else {
        final UnresolvedIdentifier unresolved = getParser().parseIdentifier(viewPath);
        tableReference =
                ContextResolvedTable.temporary(
                        catalogManager.qualifyIdentifier(unresolved), resolvedTable);
    }
    final QueryOperation scan =
            new ExternalQueryOperation<>(
                    tableReference,
                    dataStream,
                    translation.getPhysicalDataType(),
                    translation.isTopLevelRecord(),
                    changelogMode);
    final List<String> projections = translation.getProjections();
    if (projections != null) {
        // Schema translation demands a projection on top of the raw scan.
        final QueryOperation projected =
                getOperationTreeBuilder()
                        .project(
                                projections.stream()
                                        .map(ApiExpressionUtils::unresolvedRef)
                                        .collect(Collectors.toList()),
                                scan);
        return createTable(projected);
    }
    return createTable(scan);
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) ExternalCatalogTable(org.apache.flink.table.catalog.ExternalCatalogTable) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) ApiExpressionUtils(org.apache.flink.table.expressions.ApiExpressionUtils) CatalogManager(org.apache.flink.table.catalog.CatalogManager) SchemaTranslator(org.apache.flink.table.catalog.SchemaTranslator) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) OperationTreeBuilder(org.apache.flink.table.operations.utils.OperationTreeBuilder) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) ExternalQueryOperation(org.apache.flink.table.operations.ExternalQueryOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) QueryOperation(org.apache.flink.table.operations.QueryOperation) DataStreamQueryOperation(org.apache.flink.table.operations.DataStreamQueryOperation)

Example 17 with CatalogManager

Usage of org.apache.flink.table.catalog.CatalogManager in the Apache Flink project.

From class TableEnvironmentImpl, method create.

/**
 * Builds a {@link TableEnvironmentImpl} from the given settings and configuration, wiring
 * together the module manager, catalog manager, function catalog, executor, and planner.
 *
 * @param settings environment settings providing built-in catalog/database names and the
 *     executor/planner identifiers
 * @param configuration configuration merged into the new {@link TableConfig}
 * @return a fully wired table environment
 */
private static TableEnvironmentImpl create(EnvironmentSettings settings, Configuration configuration) {
    // temporary solution until FLINK-15635 is fixed
    final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
    // Seed the table config from the supplied configuration.
    final TableConfig tableConfig = new TableConfig();
    tableConfig.addConfiguration(configuration);
    final ModuleManager moduleManager = new ModuleManager();
    final GenericInMemoryCatalog defaultCatalog =
            new GenericInMemoryCatalog(
                    settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName());
    final CatalogManager catalogManager =
            CatalogManager.newBuilder()
                    .classLoader(classLoader)
                    .config(tableConfig.getConfiguration())
                    .defaultCatalog(settings.getBuiltInCatalogName(), defaultCatalog)
                    .build();
    final FunctionCatalog functionCatalog =
            new FunctionCatalog(tableConfig, catalogManager, moduleManager);
    // Discover the executor implementation named in the settings.
    final ExecutorFactory executorFactory =
            FactoryUtil.discoverFactory(classLoader, ExecutorFactory.class, settings.getExecutor());
    final Executor executor = executorFactory.create(configuration);
    final Planner planner =
            PlannerFactoryUtil.createPlanner(
                    settings.getPlanner(),
                    executor,
                    tableConfig,
                    moduleManager,
                    catalogManager,
                    functionCatalog);
    return new TableEnvironmentImpl(
            catalogManager,
            moduleManager,
            tableConfig,
            executor,
            functionCatalog,
            planner,
            settings.isStreamingMode(),
            classLoader);
}
Also used : FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) Executor(org.apache.flink.table.delegation.Executor) ExecutorFactory(org.apache.flink.table.delegation.ExecutorFactory) TableConfig(org.apache.flink.table.api.TableConfig) Planner(org.apache.flink.table.delegation.Planner) ModuleManager(org.apache.flink.table.module.ModuleManager) CatalogManager(org.apache.flink.table.catalog.CatalogManager) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog)

Example 18 with CatalogManager

Usage of org.apache.flink.table.catalog.CatalogManager in the Apache Flink project.

From class StreamTableEnvironmentImpl, method create.

/**
 * Builds a {@link StreamTableEnvironment} bound to the given
 * {@link StreamExecutionEnvironment}, wiring together the module manager, catalog manager,
 * function catalog, executor, and planner.
 *
 * @param executionEnvironment the streaming environment the table environment is bound to
 * @param settings environment settings providing built-in catalog/database names and the
 *     executor/planner identifiers
 * @param tableConfig the table configuration shared by all created components
 * @return a fully wired streaming table environment
 */
public static StreamTableEnvironment create(StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings, TableConfig tableConfig) {
    // temporary solution until FLINK-15635 is fixed
    final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
    final ModuleManager moduleManager = new ModuleManager();
    final GenericInMemoryCatalog defaultCatalog =
            new GenericInMemoryCatalog(
                    settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName());
    final CatalogManager catalogManager =
            CatalogManager.newBuilder()
                    .classLoader(classLoader)
                    .config(tableConfig.getConfiguration())
                    .defaultCatalog(settings.getBuiltInCatalogName(), defaultCatalog)
                    .executionConfig(executionEnvironment.getConfig())
                    .build();
    final FunctionCatalog functionCatalog =
            new FunctionCatalog(tableConfig, catalogManager, moduleManager);
    final Executor executor =
            lookupExecutor(classLoader, settings.getExecutor(), executionEnvironment);
    final Planner planner =
            PlannerFactoryUtil.createPlanner(
                    settings.getPlanner(),
                    executor,
                    tableConfig,
                    moduleManager,
                    catalogManager,
                    functionCatalog);
    return new StreamTableEnvironmentImpl(
            catalogManager,
            moduleManager,
            functionCatalog,
            tableConfig,
            executionEnvironment,
            planner,
            executor,
            settings.isStreamingMode(),
            classLoader);
}
Also used : FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) Executor(org.apache.flink.table.delegation.Executor) Planner(org.apache.flink.table.delegation.Planner) ModuleManager(org.apache.flink.table.module.ModuleManager) AbstractStreamTableEnvironmentImpl(org.apache.flink.table.api.bridge.internal.AbstractStreamTableEnvironmentImpl) CatalogManager(org.apache.flink.table.catalog.CatalogManager) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog)

Example 19 with CatalogManager

Usage of org.apache.flink.table.catalog.CatalogManager in the Apache Flink project.

From class SqlToOperationConverterTest, method testCreateTableWithFullDataTypes.

// TODO: tweak the tests when FLINK-13604 is fixed.
/**
 * Converts a CREATE TABLE statement covering a wide range of SQL data types and checks that
 * the resulting CreateTableOperation carries the expected DataType for every column. Items
 * whose expected type currently deviates from the literal SQL type are marked inline with
 * "Expect to be ..." comments (pending fixes such as FLINK-13604).
 */
@Test
public void testCreateTableWithFullDataTypes() {
    // Each item pairs a SQL type expression with the DataType the converter should produce.
    final List<TestItem> testItems = Arrays.asList(createTestItem("CHAR", DataTypes.CHAR(1)), createTestItem("CHAR NOT NULL", DataTypes.CHAR(1).notNull()), createTestItem("CHAR NULL", DataTypes.CHAR(1)), createTestItem("CHAR(33)", DataTypes.CHAR(33)), createTestItem("VARCHAR", DataTypes.STRING()), createTestItem("VARCHAR(33)", DataTypes.VARCHAR(33)), createTestItem("STRING", DataTypes.STRING()), createTestItem("BOOLEAN", DataTypes.BOOLEAN()), createTestItem("BINARY", DataTypes.BINARY(1)), createTestItem("BINARY(33)", DataTypes.BINARY(33)), createTestItem("VARBINARY", DataTypes.BYTES()), createTestItem("VARBINARY(33)", DataTypes.VARBINARY(33)), createTestItem("BYTES", DataTypes.BYTES()), createTestItem("DECIMAL", DataTypes.DECIMAL(10, 0)), createTestItem("DEC", DataTypes.DECIMAL(10, 0)), createTestItem("NUMERIC", DataTypes.DECIMAL(10, 0)), createTestItem("DECIMAL(10)", DataTypes.DECIMAL(10, 0)), createTestItem("DEC(10)", DataTypes.DECIMAL(10, 0)), createTestItem("NUMERIC(10)", DataTypes.DECIMAL(10, 0)), createTestItem("DECIMAL(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("DEC(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("NUMERIC(10, 3)", DataTypes.DECIMAL(10, 3)), createTestItem("TINYINT", DataTypes.TINYINT()), createTestItem("SMALLINT", DataTypes.SMALLINT()), createTestItem("INTEGER", DataTypes.INT()), createTestItem("INT", DataTypes.INT()), createTestItem("BIGINT", DataTypes.BIGINT()), createTestItem("FLOAT", DataTypes.FLOAT()), createTestItem("DOUBLE", DataTypes.DOUBLE()), createTestItem("DOUBLE PRECISION", DataTypes.DOUBLE()), createTestItem("DATE", DataTypes.DATE()), createTestItem("TIME", DataTypes.TIME()), createTestItem("TIME WITHOUT TIME ZONE", DataTypes.TIME()), // Expect to be TIME(3).
    createTestItem("TIME(3)", DataTypes.TIME()), // Expect to be TIME(3).
    createTestItem("TIME(3) WITHOUT TIME ZONE", DataTypes.TIME()), createTestItem("TIMESTAMP", DataTypes.TIMESTAMP(6)), createTestItem("TIMESTAMP WITHOUT TIME ZONE", DataTypes.TIMESTAMP(6)), createTestItem("TIMESTAMP(3)", DataTypes.TIMESTAMP(3)), createTestItem("TIMESTAMP(3) WITHOUT TIME ZONE", DataTypes.TIMESTAMP(3)), createTestItem("TIMESTAMP WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6)), createTestItem("TIMESTAMP(3) WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)), createTestItem("ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>", DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))), createTestItem("ARRAY<INT NOT NULL>", DataTypes.ARRAY(DataTypes.INT().notNull())), createTestItem("INT ARRAY", DataTypes.ARRAY(DataTypes.INT())), createTestItem("INT NOT NULL ARRAY", DataTypes.ARRAY(DataTypes.INT().notNull())), createTestItem("INT ARRAY NOT NULL", DataTypes.ARRAY(DataTypes.INT()).notNull()), createTestItem("MULTISET<INT NOT NULL>", DataTypes.MULTISET(DataTypes.INT().notNull())), createTestItem("INT MULTISET", DataTypes.MULTISET(DataTypes.INT())), createTestItem("INT NOT NULL MULTISET", DataTypes.MULTISET(DataTypes.INT().notNull())), createTestItem("INT MULTISET NOT NULL", DataTypes.MULTISET(DataTypes.INT()).notNull()), createTestItem("MAP<BIGINT, BOOLEAN>", DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BOOLEAN())), // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
    createTestItem("ROW<f0 INT NOT NULL, f1 BOOLEAN>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
    createTestItem("ROW(f0 INT NOT NULL, f1 BOOLEAN)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem("ROW<`f0` INT>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))), createTestItem("ROW(`f0` INT)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))), createTestItem("ROW<>", DataTypes.ROW()), createTestItem("ROW()", DataTypes.ROW()), // Expect to be ROW<`f0` INT NOT NULL '...', `f1` BOOLEAN '...'>.
    createTestItem("ROW<f0 INT NOT NULL 'This is a comment.'," + " f1 BOOLEAN 'This as well.'>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))), createTestItem("ARRAY<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.ARRAY(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem("ROW<f0 INT, f1 BOOLEAN> MULTISET", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem("MULTISET<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))), createTestItem("ROW<f0 Row<f00 INT, f01 BOOLEAN>, " + "f1 INT ARRAY, " + "f2 BOOLEAN MULTISET>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.ROW(DataTypes.FIELD("f00", DataTypes.INT()), DataTypes.FIELD("f01", DataTypes.BOOLEAN()))), DataTypes.FIELD("f1", DataTypes.ARRAY(DataTypes.INT())), DataTypes.FIELD("f2", DataTypes.MULTISET(DataTypes.BOOLEAN())))));
    // Assemble "create table t1(f0 <type0>,\n f1 <type1>, ... )" — one column per test item.
    StringBuilder buffer = new StringBuilder("create table t1(\n");
    for (int i = 0; i < testItems.size(); i++) {
        buffer.append("f").append(i).append(" ").append(testItems.get(i).testExpr);
        if (i == testItems.size() - 1) {
            buffer.append(")");
        } else {
            buffer.append(",\n");
        }
    }
    final String sql = buffer.toString();
    // Parse and convert using the default (Flink) SQL dialect.
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
    TableSchema schema = ((CreateTableOperation) operation).getCatalogTable().getSchema();
    // The derived field types must match the expected types item-by-item, in column order.
    Object[] expectedDataTypes = testItems.stream().map(item -> item.expectedType).toArray();
    assertThat(schema.getFieldDataTypes()).isEqualTo(expectedDataTypes);
}
Also used : FunctionAlreadyExistException(org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException) DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) FlinkPlannerImpl(org.apache.flink.table.planner.calcite.FlinkPlannerImpl) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ExplainDetail(org.apache.flink.table.api.ExplainDetail) OperationMatchers.isCreateTableOperation(org.apache.flink.table.planner.utils.OperationMatchers.isCreateTableOperation) Map(java.util.Map) SqlCreateTable(org.apache.flink.sql.parser.ddl.SqlCreateTable) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) ModuleManager(org.apache.flink.table.module.ModuleManager) TableConfig(org.apache.flink.table.api.TableConfig) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) Set(java.util.Set) TableSchema(org.apache.flink.table.api.TableSchema) JavaUserDefinedScalarFunctions(org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions) UseCatalogOperation(org.apache.flink.table.operations.UseCatalogOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) ExpressionResolverMocks(org.apache.flink.table.utils.ExpressionResolverMocks) CatalogFunction(org.apache.flink.table.catalog.CatalogFunction) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl) ParserImpl(org.apache.flink.table.planner.delegation.ParserImpl) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) ShowJarsOperation(org.apache.flink.table.operations.command.ShowJarsOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) 
ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) Func8$(org.apache.flink.table.planner.expressions.utils.Func8$) CatalogFunctionImpl(org.apache.flink.table.catalog.CatalogFunctionImpl) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) EndStatementSetOperation(org.apache.flink.table.operations.EndStatementSetOperation) Func0$(org.apache.flink.table.planner.expressions.utils.Func0$) Supplier(java.util.function.Supplier) UseModulesOperation(org.apache.flink.table.operations.UseModulesOperation) TableColumn(org.apache.flink.table.api.TableColumn) Catalog(org.apache.flink.table.catalog.Catalog) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) TestManagedTableFactory(org.apache.flink.table.factories.TestManagedTableFactory) Nullable(javax.annotation.Nullable) Before(org.junit.Before) OperationMatchers.withSchema(org.apache.flink.table.planner.utils.OperationMatchers.withSchema) FunctionScope(org.apache.flink.table.operations.ShowFunctionsOperation.FunctionScope) Func1$(org.apache.flink.table.planner.expressions.utils.Func1$) Test(org.junit.Test) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) TreeMap(java.util.TreeMap) CatalogManagerMocks(org.apache.flink.table.utils.CatalogManagerMocks) CatalogManagerCalciteSchema(org.apache.flink.table.planner.catalog.CatalogManagerCalciteSchema) Schema(org.apache.flink.table.api.Schema) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) OperationMatchers.partitionedBy(org.apache.flink.table.planner.utils.OperationMatchers.partitionedBy) CalciteParser(org.apache.flink.table.planner.parse.CalciteParser) SqlNode(org.apache.calcite.sql.SqlNode) SetOperation(org.apache.flink.table.operations.command.SetOperation) After(org.junit.After) 
ExecutionOptions(org.apache.flink.configuration.ExecutionOptions) ComputedColumn(org.apache.flink.table.api.TableColumn.ComputedColumn) LoadModuleOperation(org.apache.flink.table.operations.LoadModuleOperation) Operation(org.apache.flink.table.operations.Operation) InstanceOfAssertFactories.type(org.assertj.core.api.InstanceOfAssertFactories.type) Collectors(java.util.stream.Collectors) CalciteSchemaBuilder.asRootSchema(org.apache.calcite.jdbc.CalciteSchemaBuilder.asRootSchema) List(java.util.List) ShowModulesOperation(org.apache.flink.table.operations.ShowModulesOperation) SqlRichExplain(org.apache.flink.sql.parser.dql.SqlRichExplain) SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation) UnloadModuleOperation(org.apache.flink.table.operations.UnloadModuleOperation) ValidationException(org.apache.flink.table.api.ValidationException) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) RemoveJarOperation(org.apache.flink.table.operations.command.RemoveJarOperation) CatalogManager(org.apache.flink.table.catalog.CatalogManager) HamcrestCondition(org.assertj.core.api.HamcrestCondition) OperationMatchers.entry(org.apache.flink.table.planner.utils.OperationMatchers.entry) BeginStatementSetOperation(org.apache.flink.table.operations.BeginStatementSetOperation) HashMap(java.util.HashMap) FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) AddJarOperation(org.apache.flink.table.operations.command.AddJarOperation) ObjectPath(org.apache.flink.table.catalog.ObjectPath) AtomicReference(java.util.concurrent.atomic.AtomicReference) OperationMatchers.withOptions(org.apache.flink.table.planner.utils.OperationMatchers.withOptions) PlannerContext(org.apache.flink.table.planner.delegation.PlannerContext) 
AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) HashSet(java.util.HashSet) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) ResetOperation(org.apache.flink.table.operations.command.ResetOperation) StatementSetOperation(org.apache.flink.table.operations.StatementSetOperation) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) Configuration(org.apache.flink.configuration.Configuration) Parser(org.apache.flink.table.delegation.Parser) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) DataTypes(org.apache.flink.table.api.DataTypes) SqlDialect(org.apache.flink.table.api.SqlDialect) RuntimeExecutionMode(org.apache.flink.api.common.RuntimeExecutionMode) Collections(java.util.Collections) TableSchema(org.apache.flink.table.api.TableSchema) OperationMatchers.isCreateTableOperation(org.apache.flink.table.planner.utils.OperationMatchers.isCreateTableOperation) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) UseCatalogOperation(org.apache.flink.table.operations.UseCatalogOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) ShowJarsOperation(org.apache.flink.table.operations.command.ShowJarsOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) 
QueryOperation(org.apache.flink.table.operations.QueryOperation) EndStatementSetOperation(org.apache.flink.table.operations.EndStatementSetOperation) UseModulesOperation(org.apache.flink.table.operations.UseModulesOperation) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) SetOperation(org.apache.flink.table.operations.command.SetOperation) LoadModuleOperation(org.apache.flink.table.operations.LoadModuleOperation) Operation(org.apache.flink.table.operations.Operation) ShowModulesOperation(org.apache.flink.table.operations.ShowModulesOperation) SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation) UnloadModuleOperation(org.apache.flink.table.operations.UnloadModuleOperation) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) RemoveJarOperation(org.apache.flink.table.operations.command.RemoveJarOperation) BeginStatementSetOperation(org.apache.flink.table.operations.BeginStatementSetOperation) AddJarOperation(org.apache.flink.table.operations.command.AddJarOperation) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) ResetOperation(org.apache.flink.table.operations.command.ResetOperation) StatementSetOperation(org.apache.flink.table.operations.StatementSetOperation) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) FlinkPlannerImpl(org.apache.flink.table.planner.calcite.FlinkPlannerImpl) CalciteParser(org.apache.flink.table.planner.parse.CalciteParser) SqlNode(org.apache.calcite.sql.SqlNode) Test(org.junit.Test)

Example 20 with CatalogManager

Usage of org.apache.flink.table.catalog.CatalogManager in the Apache Flink project.

From class DynamicTableSourceSpecSerdeTest, method testDynamicTableSourceSpecSerdeWithEnrichmentOptions.

/**
 * Verifies that catalog options enrich those stored in the compiled plan: with
 * PLAN_RESTORE_CATALOG_OBJECTS=ALL and PLAN_COMPILE_CATALOG_OBJECTS=ALL, the catalog's values
 * ("xyz" password, "," delimiter) must win over the plan's values ("abc", "|") after a JSON
 * round trip of the {@link DynamicTableSourceSpec}.
 */
@Test
void testDynamicTableSourceSpecSerdeWithEnrichmentOptions() throws Exception {
    // Identifier under which the table is registered in the mocked catalog.
    final ObjectIdentifier tableIdentifier = ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "my_table");
    final String formatPrefix = FactoryUtil.getFormatPrefix(FORMAT, TestFormatFactory.IDENTIFIER);
    // Options as they were serialized into the compiled plan.
    final Map<String, String> optionsInPlan = new HashMap<>();
    optionsInPlan.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    optionsInPlan.put(TARGET.key(), "abc");
    optionsInPlan.put(PASSWORD.key(), "abc");
    optionsInPlan.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    optionsInPlan.put(formatPrefix + DELIMITER.key(), "|");
    // Options as the catalog currently exposes them; password and delimiter differ.
    final Map<String, String> optionsInCatalog = new HashMap<>();
    optionsInCatalog.put(CONNECTOR.key(), TestDynamicTableFactory.IDENTIFIER);
    optionsInCatalog.put(TARGET.key(), "abc");
    optionsInCatalog.put(PASSWORD.key(), "xyz");
    optionsInCatalog.put(FORMAT.key(), TestFormatFactory.IDENTIFIER);
    optionsInCatalog.put(formatPrefix + DELIMITER.key(), ",");
    final ResolvedCatalogTable tableFromPlan = tableWithOnlyPhysicalColumns(optionsInPlan);
    final ResolvedCatalogTable tableFromCatalog = tableWithOnlyPhysicalColumns(optionsInCatalog);
    // Planner mocks configured to both compile and restore all catalog objects.
    final PlannerMocks plannerMocks = PlannerMocks.create(new Configuration().set(PLAN_RESTORE_CATALOG_OBJECTS, CatalogPlanRestore.ALL).set(PLAN_COMPILE_CATALOG_OBJECTS, CatalogPlanCompilation.ALL));
    final CatalogManager catalogManager = plannerMocks.getCatalogManager();
    catalogManager.createTable(tableFromCatalog, tableIdentifier, false);
    // Serde context backed by the mocked catalog manager.
    final SerdeContext serdeContext = configuredSerdeContext(catalogManager, plannerMocks.getTableConfig());
    final DynamicTableSourceSpec specInPlan = new DynamicTableSourceSpec(ContextResolvedTable.permanent(tableIdentifier, catalogManager.getCatalog(catalogManager.getCurrentCatalog()).get(), tableFromPlan), Collections.emptyList());
    // Round-trip the spec through JSON.
    final String json = toJson(serdeContext, specInPlan);
    final DynamicTableSourceSpec restored = toObject(serdeContext, json, DynamicTableSourceSpec.class);
    assertThat(restored.getContextResolvedTable()).isEqualTo(specInPlan.getContextResolvedTable());
    assertThat(restored.getSourceAbilities()).isNull();
    // The restored source must reflect the catalog's (enriched) option values.
    final TestDynamicTableFactory.DynamicTableSourceMock sourceMock = (TestDynamicTableFactory.DynamicTableSourceMock) restored.getScanTableSource(plannerMocks.getPlannerContext().getFlinkContext());
    assertThat(sourceMock.password).isEqualTo("xyz");
    assertThat(((TestFormatFactory.DecodingFormatMock) sourceMock.valueFormat).delimiter).isEqualTo(",");
}
Also used : Configuration(org.apache.flink.configuration.Configuration) HashMap(java.util.HashMap) JsonSerdeTestUtil.configuredSerdeContext(org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeTestUtil.configuredSerdeContext) DynamicTableSourceSpec(org.apache.flink.table.planner.plan.nodes.exec.spec.DynamicTableSourceSpec) TestDynamicTableFactory(org.apache.flink.table.factories.TestDynamicTableFactory) CatalogManager(org.apache.flink.table.catalog.CatalogManager) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) PlannerMocks(org.apache.flink.table.planner.utils.PlannerMocks) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) Test(org.junit.jupiter.api.Test) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest)

Aggregations

CatalogManager (org.apache.flink.table.catalog.CatalogManager)26 TableConfig (org.apache.flink.table.api.TableConfig)13 ModuleManager (org.apache.flink.table.module.ModuleManager)13 FunctionCatalog (org.apache.flink.table.catalog.FunctionCatalog)12 Executor (org.apache.flink.table.delegation.Executor)8 Planner (org.apache.flink.table.delegation.Planner)8 StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment)6 TableException (org.apache.flink.table.api.TableException)6 GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog)6 ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)6 ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)6 Constructor (java.lang.reflect.Constructor)5 Configuration (org.apache.flink.configuration.Configuration)5 TableEnvironment (org.apache.flink.table.api.TableEnvironment)5 ValidationException (org.apache.flink.table.api.ValidationException)5 JsonSerdeTestUtil.configuredSerdeContext (org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeTestUtil.configuredSerdeContext)5 HashMap (java.util.HashMap)4 CatalogTable (org.apache.flink.table.catalog.CatalogTable)4 ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable)4 ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable)4