Search in sources :

Example 6 with TableConfig

Use of org.apache.flink.table.api.TableConfig in the Apache Flink project.

In class PushLocalAggIntoTableSourceScanRuleTest, method setup:

@Before
public void setup() {
    // Turn on local-aggregate push-down so the rule under test is eligible to fire.
    util.tableEnv()
            .getConfig()
            .set(OptimizerConfigOptions.TABLE_OPTIMIZER_SOURCE_AGGREGATE_PUSHDOWN_ENABLED, true);
    // Register the four test tables, in order:
    //   1. a plain bounded table,
    //   2. a table with readable metadata columns and a primary key,
    //   3. a partitioned table,
    //   4. a table with projection push-down disabled.
    final String[] createStatements = {
        "CREATE TABLE inventory (\n" + "  id BIGINT,\n" + "  name STRING,\n" + "  amount BIGINT,\n" + "  price BIGINT,\n" + "  type STRING\n" + ") WITH (\n" + " 'connector' = 'values',\n" + " 'filterable-fields' = 'id;type',\n" + " 'bounded' = 'true'\n" + ")",
        "CREATE TABLE inventory_meta (\n" + "  id BIGINT,\n" + "  name STRING,\n" + "  amount BIGINT,\n" + "  price BIGINT,\n" + "  type STRING,\n" + "  metadata_1 BIGINT METADATA,\n" + "  metadata_2 STRING METADATA,\n" + "  PRIMARY KEY (`id`) NOT ENFORCED\n" + ") WITH (\n" + " 'connector' = 'values',\n" + " 'filterable-fields' = 'id;type',\n" + " 'readable-metadata' = 'metadata_1:BIGINT, metadata_2:STRING',\n" + " 'bounded' = 'true'\n" + ")",
        "CREATE TABLE inventory_part (\n" + "  id BIGINT,\n" + "  name STRING,\n" + "  amount BIGINT,\n" + "  price BIGINT,\n" + "  type STRING\n" + ") PARTITIONED BY (type)\n" + "WITH (\n" + " 'connector' = 'values',\n" + " 'filterable-fields' = 'id;type',\n" + " 'partition-list' = 'type:a;type:b',\n" + " 'bounded' = 'true'\n" + ")",
        "CREATE TABLE inventory_no_proj (\n" + "  id BIGINT,\n" + "  name STRING,\n" + "  amount BIGINT,\n" + "  price BIGINT,\n" + "  type STRING\n" + ")\n" + "WITH (\n" + " 'connector' = 'values',\n" + " 'filterable-fields' = 'id;type',\n" + " 'enable-projection-push-down' = 'false',\n" + " 'bounded' = 'true'\n" + ")"
    };
    for (String ddl : createStatements) {
        util.tableEnv().executeSql(ddl);
    }
}
Also used : TableConfig(org.apache.flink.table.api.TableConfig) Before(org.junit.Before)

Example 7 with TableConfig

Use of org.apache.flink.table.api.TableConfig in the Apache Flink project.

In class JsonSerdeTestUtil, method configuredSerdeContext:

/**
 * Creates a {@link SerdeContext} backed by an empty catalog manager and the given
 * configuration.
 *
 * @param configuration options exposed through the resulting context
 * @return a serde context for JSON plan serde tests
 */
static SerdeContext configuredSerdeContext(Configuration configuration) {
    // Delegate to the two-arg overload, which builds its own TableConfig from
    // `configuration`. The previous local TableConfig created here was never
    // passed anywhere (dead code) and has been removed.
    return configuredSerdeContext(CatalogManagerMocks.createEmptyCatalogManager(), configuration);
}
Also used : TableConfig(org.apache.flink.table.api.TableConfig)

Example 8 with TableConfig

Use of org.apache.flink.table.api.TableConfig in the Apache Flink project.

In class JsonSerdeTestUtil, method configuredSerdeContext (catalog-manager overload):

/**
 * Creates a {@link SerdeContext} for the given catalog manager, wrapping the supplied
 * configuration options in a fresh default {@link TableConfig}.
 *
 * @param catalogManager catalog manager backing the context
 * @param configuration options to merge into the context's table config
 * @return a serde context for JSON plan serde tests
 */
static SerdeContext configuredSerdeContext(CatalogManager catalogManager, Configuration configuration) {
    final TableConfig config = TableConfig.getDefault();
    config.addConfiguration(configuration);
    return configuredSerdeContext(catalogManager, config);
}
Also used : TableConfig(org.apache.flink.table.api.TableConfig)

Example 9 with TableConfig

Use of org.apache.flink.table.api.TableConfig in the Apache Flink project.

In class ExecutionContext, method createTableEnvironment:

// ------------------------------------------------------------------------------------------------------------------
// Helper to create Table Environment
// ------------------------------------------------------------------------------------------------------------------
private StreamTableEnvironment createTableEnvironment() {
    // EnvironmentSettings validates the RUNTIME_MODE value carried in the session config.
    final EnvironmentSettings settings = EnvironmentSettings.fromConfiguration(flinkConfig);
    // Only the Blink planner is supported; fail fast for legacy-planner settings.
    if (!settings.isBlinkPlanner()) {
        throw new TableException("The old planner is not supported anymore. Please update to new default planner.");
    }
    // Seed the table config with the full session configuration.
    final TableConfig config = new TableConfig();
    config.addConfiguration(flinkConfig);
    final StreamExecutionEnvironment execEnv = createStreamExecutionEnvironment();
    final Executor planExecutor = lookupExecutor(settings.getExecutor(), execEnv);
    // Wire planner, executor, and the session's catalog/module/function state together.
    return createStreamTableEnvironment(
            execEnv,
            settings,
            config,
            planExecutor,
            sessionState.catalogManager,
            sessionState.moduleManager,
            sessionState.functionCatalog,
            classLoader);
}
Also used : EnvironmentSettings(org.apache.flink.table.api.EnvironmentSettings) TableException(org.apache.flink.table.api.TableException) Executor(org.apache.flink.table.delegation.Executor) TableConfig(org.apache.flink.table.api.TableConfig) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment)

Example 10 with TableConfig

Use of org.apache.flink.table.api.TableConfig in the Apache Flink project.

In class LogicalTypeJsonSerdeTest, method testIdentifierSerde:

/**
 * Round-trips a structured type through JSON under the different catalog-object
 * compile/restore policies: identifier-only plans force a catalog lookup on restore,
 * while "maximum" (ALL) plans embed the full type and can be restored without lookup.
 */
@Test
public void testIdentifierSerde() throws IOException {
    final DataTypeFactoryMock dataTypeFactoryMock = new DataTypeFactoryMock();
    final TableConfig tableConfig = TableConfig.getDefault();
    // Mutating this Configuration between steps re-parameterizes the shared serdeContext.
    final Configuration config = tableConfig.getConfiguration();
    final CatalogManager catalogManager = preparedCatalogManager().dataTypeFactory(dataTypeFactoryMock).build();
    final SerdeContext serdeContext = configuredSerdeContext(catalogManager, tableConfig);
    // minimal plan content: serialize only the fully-qualified identifier
    config.set(TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS, IDENTIFIER);
    final String minimalJson = toJson(serdeContext, STRUCTURED_TYPE);
    assertThat(minimalJson).isEqualTo("\"`default_catalog`.`default_database`.`MyType`\"");
    // catalog lookup with miss: restoring an identifier-only plan must fail when the
    // type cannot be resolved through the catalog
    config.set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, TableConfigOptions.CatalogPlanRestore.IDENTIFIER);
    dataTypeFactoryMock.logicalType = Optional.empty();
    assertThatThrownBy(() -> toObject(serdeContext, minimalJson, LogicalType.class)).satisfies(anyCauseMatches(ValidationException.class, "No type found."));
    // catalog lookup: successful resolution returns the catalog's type
    config.set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, TableConfigOptions.CatalogPlanRestore.IDENTIFIER);
    dataTypeFactoryMock.logicalType = Optional.of(STRUCTURED_TYPE);
    assertThat(toObject(serdeContext, minimalJson, LogicalType.class)).isEqualTo(STRUCTURED_TYPE);
    // maximum plan content: ALL embeds the complete type definition in the JSON
    config.set(TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS, ALL);
    final String maximumJson = toJson(serdeContext, STRUCTURED_TYPE);
    final ObjectMapper mapper = new ObjectMapper();
    final JsonNode maximumJsonNode = mapper.readTree(maximumJson);
    assertThat(maximumJsonNode.get(LogicalTypeJsonSerializer.FIELD_NAME_ATTRIBUTES)).isNotNull();
    assertThat(maximumJsonNode.get(LogicalTypeJsonSerializer.FIELD_NAME_DESCRIPTION).asText()).isEqualTo("My original type.");
    // catalog lookup with miss: even a maximum plan fails under IDENTIFIER restore
    // when the catalog cannot resolve the type
    config.set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, TableConfigOptions.CatalogPlanRestore.IDENTIFIER);
    dataTypeFactoryMock.logicalType = Optional.empty();
    assertThatThrownBy(() -> toObject(serdeContext, maximumJson, LogicalType.class)).satisfies(anyCauseMatches(ValidationException.class, "No type found."));
    // catalog lookup: under IDENTIFIER restore the catalog's (updated) type wins over
    // the embedded definition
    config.set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, TableConfigOptions.CatalogPlanRestore.IDENTIFIER);
    dataTypeFactoryMock.logicalType = Optional.of(UPDATED_STRUCTURED_TYPE);
    assertThat(toObject(serdeContext, maximumJson, LogicalType.class)).isEqualTo(UPDATED_STRUCTURED_TYPE);
    // no lookup: under ALL restore the embedded definition is used as-is, so the
    // catalog's updated type is ignored
    config.set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, TableConfigOptions.CatalogPlanRestore.ALL);
    dataTypeFactoryMock.logicalType = Optional.of(UPDATED_STRUCTURED_TYPE);
    assertThat(toObject(serdeContext, maximumJson, LogicalType.class)).isEqualTo(STRUCTURED_TYPE);
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) Configuration(org.apache.flink.configuration.Configuration) JsonSerdeTestUtil.configuredSerdeContext(org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeTestUtil.configuredSerdeContext) DataTypeFactoryMock(org.apache.flink.table.types.utils.DataTypeFactoryMock) TableConfig(org.apache.flink.table.api.TableConfig) JsonNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode) CatalogManager(org.apache.flink.table.catalog.CatalogManager) CatalogManagerMocks.preparedCatalogManager(org.apache.flink.table.utils.CatalogManagerMocks.preparedCatalogManager) ObjectMapper(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper) Test(org.junit.jupiter.api.Test) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest)

Aggregations

TableConfig (org.apache.flink.table.api.TableConfig)41 RowType (org.apache.flink.table.types.logical.RowType)19 Test (org.junit.Test)10 ArrayList (java.util.ArrayList)6 RexNode (org.apache.calcite.rex.RexNode)6 CatalogManager (org.apache.flink.table.catalog.CatalogManager)6 IOException (java.io.IOException)5 OutputStream (java.io.OutputStream)5 PrintStream (java.io.PrintStream)5 Arrays (java.util.Arrays)5 Collections (java.util.Collections)5 List (java.util.List)5 RexBuilder (org.apache.calcite.rex.RexBuilder)5 RexInputRef (org.apache.calcite.rex.RexInputRef)5 Table (org.apache.flink.table.api.Table)5 FlinkTypeFactory (org.apache.flink.table.planner.calcite.FlinkTypeFactory)5 IntType (org.apache.flink.table.types.logical.IntType)5 Row (org.apache.flink.types.Row)5 Random (java.util.Random)4 Consumer (java.util.function.Consumer)4