
Example 26 with ReadableConfig

Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.

The class CommonExecSink, method applyUpsertMaterialize:

private Transformation<RowData> applyUpsertMaterialize(
        Transformation<RowData> inputTransform,
        int[] primaryKeys,
        int sinkParallelism,
        ReadableConfig config,
        RowType physicalRowType) {
    GeneratedRecordEqualiser equaliser =
            new EqualiserCodeGenerator(physicalRowType)
                    .generateRecordEqualiser("SinkMaterializeEqualiser");
    SinkUpsertMaterializer operator =
            new SinkUpsertMaterializer(
                    StateConfigUtil.createTtlConfig(
                            config.get(ExecutionConfigOptions.IDLE_STATE_RETENTION).toMillis()),
                    InternalSerializers.create(physicalRowType),
                    equaliser);
    final String[] fieldNames = physicalRowType.getFieldNames().toArray(new String[0]);
    final List<String> pkFieldNames =
            Arrays.stream(primaryKeys)
                    .mapToObj(idx -> fieldNames[idx])
                    .collect(Collectors.toList());
    OneInputTransformation<RowData, RowData> materializeTransform =
            ExecNodeUtil.createOneInputTransformation(
                    inputTransform,
                    createTransformationMeta(
                            UPSERT_MATERIALIZE_TRANSFORMATION,
                            String.format("SinkMaterializer(pk=[%s])", String.join(", ", pkFieldNames)),
                            "SinkMaterializer",
                            config),
                    operator,
                    inputTransform.getOutputType(),
                    sinkParallelism);
    RowDataKeySelector keySelector =
            KeySelectorUtil.getRowDataSelector(primaryKeys, InternalTypeInfo.of(physicalRowType));
    materializeTransform.setStateKeySelector(keySelector);
    materializeTransform.setStateKeyType(keySelector.getProducedType());
    return materializeTransform;
}
Also used : TransformationMetadata(org.apache.flink.table.planner.plan.nodes.exec.utils.TransformationMetadata) Arrays(java.util.Arrays) InputProperty(org.apache.flink.table.planner.plan.nodes.exec.InputProperty) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) ExecNode(org.apache.flink.table.planner.plan.nodes.exec.ExecNode) CharType(org.apache.flink.table.types.logical.CharType) KeySelectorUtil(org.apache.flink.table.planner.plan.utils.KeySelectorUtil) InternalSerializers(org.apache.flink.table.runtime.typeutils.InternalSerializers) ConstraintEnforcer(org.apache.flink.table.runtime.operators.sink.ConstraintEnforcer) OutputFormat(org.apache.flink.api.common.io.OutputFormat) SinkUpsertMaterializer(org.apache.flink.table.runtime.operators.sink.SinkUpsertMaterializer) PartitionTransformation(org.apache.flink.streaming.api.transformations.PartitionTransformation) SinkFunction(org.apache.flink.streaming.api.functions.sink.SinkFunction) DynamicTableSink(org.apache.flink.table.connector.sink.DynamicTableSink) StateConfigUtil(org.apache.flink.table.runtime.util.StateConfigUtil) RowDataKeySelector(org.apache.flink.table.runtime.keyselector.RowDataKeySelector) Collectors(java.util.stream.Collectors) JsonProperty(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty) SimpleOperatorFactory(org.apache.flink.streaming.api.operators.SimpleOperatorFactory) List(java.util.List) InternalTypeInfo(org.apache.flink.table.runtime.typeutils.InternalTypeInfo) LogicalType(org.apache.flink.table.types.logical.LogicalType) DataStreamSinkProvider(org.apache.flink.table.connector.sink.DataStreamSinkProvider) LegacySinkTransformation(org.apache.flink.streaming.api.transformations.LegacySinkTransformation) Optional(java.util.Optional) ExecNodeBase(org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase) KeyGroupRangeAssignment(org.apache.flink.runtime.state.KeyGroupRangeAssignment) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) SinkRuntimeProvider(org.apache.flink.table.connector.sink.DynamicTableSink.SinkRuntimeProvider) IntStream(java.util.stream.IntStream) BinaryType(org.apache.flink.table.types.logical.BinaryType) ParallelismProvider(org.apache.flink.table.connector.ParallelismProvider) ChangelogMode(org.apache.flink.table.connector.ChangelogMode) MultipleTransformationTranslator(org.apache.flink.table.planner.plan.nodes.exec.MultipleTransformationTranslator) StreamRecordTimestampInserter(org.apache.flink.table.runtime.operators.sink.StreamRecordTimestampInserter) TransformationSinkProvider(org.apache.flink.table.planner.connectors.TransformationSinkProvider) RowType(org.apache.flink.table.types.logical.RowType) ArrayList(java.util.ArrayList) SinkV2Provider(org.apache.flink.table.connector.sink.SinkV2Provider) OutputFormatSinkFunction(org.apache.flink.streaming.api.functions.sink.OutputFormatSinkFunction) DynamicTableSinkSpec(org.apache.flink.table.planner.plan.nodes.exec.spec.DynamicTableSinkSpec) ExecNodeUtil(org.apache.flink.table.planner.plan.nodes.exec.utils.ExecNodeUtil) ReadableConfig(org.apache.flink.configuration.ReadableConfig) SinkFunctionProvider(org.apache.flink.table.connector.sink.SinkFunctionProvider) DataStreamSink(org.apache.flink.streaming.api.datastream.DataStreamSink) ExecNodeContext(org.apache.flink.table.planner.plan.nodes.exec.ExecNodeContext) RowData(org.apache.flink.table.data.RowData) ProviderContext(org.apache.flink.table.connector.ProviderContext) SinkOperator(org.apache.flink.table.runtime.operators.sink.SinkOperator) TableException(org.apache.flink.table.api.TableException) SinkProvider(org.apache.flink.table.connector.sink.SinkProvider) OneInputTransformation(org.apache.flink.streaming.api.transformations.OneInputTransformation) DataStream(org.apache.flink.streaming.api.datastream.DataStream) OutputFormatProvider(org.apache.flink.table.connector.sink.OutputFormatProvider) KeyGroupStreamPartitioner(org.apache.flink.streaming.runtime.partitioner.KeyGroupStreamPartitioner) EqualiserCodeGenerator(org.apache.flink.table.planner.codegen.EqualiserCodeGenerator) SinkRuntimeProviderContext(org.apache.flink.table.runtime.connector.sink.SinkRuntimeProviderContext) RowKind(org.apache.flink.types.RowKind) StreamExecNode(org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecNode) GeneratedRecordEqualiser(org.apache.flink.table.runtime.generated.GeneratedRecordEqualiser) Transformation(org.apache.flink.api.dag.Transformation) InputTypeConfigurable(org.apache.flink.api.java.typeutils.InputTypeConfigurable) ExecutionConfigOptions(org.apache.flink.table.api.config.ExecutionConfigOptions) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot) LogicalTypeChecks(org.apache.flink.table.types.logical.utils.LogicalTypeChecks)
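The ReadableConfig argument above is consulted for exactly one option: the idle state retention that drives the materializer's state TTL. The sketch below isolates that derivation; it is a minimal illustration, and the class name TtlConfigSketch and the one-hour retention value are invented for the example, not taken from the Flink source.

import java.time.Duration;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.table.api.config.ExecutionConfigOptions;
import org.apache.flink.table.runtime.util.StateConfigUtil;

public class TtlConfigSketch {
    public static void main(String[] args) {
        // Configuration is the standard mutable implementation of ReadableConfig.
        Configuration writable = new Configuration();
        writable.set(ExecutionConfigOptions.IDLE_STATE_RETENTION, Duration.ofHours(1));

        // Downstream code such as applyUpsertMaterialize only needs the read-only view.
        ReadableConfig config = writable;

        // Same pattern as above: Duration option -> millis -> StateTtlConfig.
        StateTtlConfig ttl = StateConfigUtil.createTtlConfig(
                config.get(ExecutionConfigOptions.IDLE_STATE_RETENTION).toMillis());
        System.out.println("state TTL enabled: " + ttl.isEnabled());
    }
}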

Example 27 with ReadableConfig

Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.

The class CatalogSourceTable, method createDynamicTableSource:

private DynamicTableSource createDynamicTableSource(
        FlinkContext context, ResolvedCatalogTable catalogTable) {
    final ReadableConfig config = context.getTableConfig().getConfiguration();
    final Optional<DynamicTableSourceFactory> factoryFromCatalog =
            schemaTable
                    .getContextResolvedTable()
                    .getCatalog()
                    .flatMap(Catalog::getFactory)
                    .map(f -> f instanceof DynamicTableSourceFactory
                            ? (DynamicTableSourceFactory) f
                            : null);
    final Optional<DynamicTableSourceFactory> factoryFromModule =
            context.getModuleManager().getFactory(Module::getTableSourceFactory);
    // Since the catalog is more specific, we give it precedence over a factory
    // provided by any modules.
    final DynamicTableSourceFactory factory =
            firstPresent(factoryFromCatalog, factoryFromModule).orElse(null);
    return FactoryUtil.createDynamicTableSource(
            factory,
            schemaTable.getContextResolvedTable().getIdentifier(),
            catalogTable,
            config,
            Thread.currentThread().getContextClassLoader(),
            schemaTable.isTemporary());
}
Also used : ReadableConfig(org.apache.flink.configuration.ReadableConfig) DynamicTableSourceFactory(org.apache.flink.table.factories.DynamicTableSourceFactory) Module(org.apache.flink.table.module.Module)
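The catalog-over-module precedence hinges on the firstPresent helper, which is private to CatalogSourceTable and not shown here. Below is a hypothetical re-implementation, assuming it simply returns the first non-empty Optional among its arguments:

import java.util.Optional;
import java.util.stream.Stream;

public class FirstPresentSketch {
    // Hypothetical stand-in for the private firstPresent helper referenced
    // above: earlier arguments win, so passing the catalog factory first
    // encodes the precedence described in the comment.
    @SafeVarargs
    private static <T> Optional<T> firstPresent(Optional<T>... optionals) {
        return Stream.of(optionals)
                .filter(Optional::isPresent)
                .findFirst()
                .orElse(Optional.empty());
    }

    public static void main(String[] args) {
        Optional<String> fromCatalog = Optional.empty();
        Optional<String> fromModule = Optional.of("module-factory");
        // The catalog slot is empty here, so the module factory is chosen.
        System.out.println(firstPresent(fromCatalog, fromModule).orElse("none"));
    }
}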

Example 28 with ReadableConfig

Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.

The class CatalogSchemaTable, method findAndCreateTableSource:

private Optional<TableSource<?>> findAndCreateTableSource() {
    Optional<TableSource<?>> tableSource = Optional.empty();
    try {
        if (contextResolvedTable.getTable() instanceof CatalogTable) {
            // Use an empty config for TableSourceFactoryContextImpl since we can't fetch the
            // actual TableConfig here. Currently, the empty config does not affect the logic.
            ReadableConfig config = new Configuration();
            TableSourceFactory.Context context = new TableSourceFactoryContextImpl(
                    contextResolvedTable.getIdentifier(),
                    contextResolvedTable.getTable(),
                    config,
                    contextResolvedTable.isTemporary());
            TableSource<?> source = TableFactoryUtil.findAndCreateTableSource(context);
            if (source instanceof StreamTableSource) {
                if (!isStreamingMode && !((StreamTableSource<?>) source).isBounded()) {
                    throw new ValidationException("Cannot query on an unbounded source in batch mode, but " + contextResolvedTable.getIdentifier().asSummaryString() + " is unbounded.");
                }
                tableSource = Optional.of(source);
            } else {
                throw new ValidationException("Catalog tables only support " + "StreamTableSource and InputFormatTableSource.");
            }
        }
    } catch (Exception e) {
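        // Any lookup failure is swallowed; the method falls back to an empty result.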
        tableSource = Optional.empty();
    }
    return tableSource;
}
Also used : TableSourceFactory(org.apache.flink.table.factories.TableSourceFactory) ReadableConfig(org.apache.flink.configuration.ReadableConfig) TableSource(org.apache.flink.table.sources.TableSource) StreamTableSource(org.apache.flink.table.sources.StreamTableSource) ValidationException(org.apache.flink.table.api.ValidationException) Configuration(org.apache.flink.configuration.Configuration) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) CatalogTable(org.apache.flink.table.catalog.CatalogTable) TableSourceFactoryContextImpl(org.apache.flink.table.factories.TableSourceFactoryContextImpl)
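The empty-config trick in this example works because ReadableConfig lookups fall back to each option's declared default. A small sketch of that behavior; the example.retries option and the class name are invented purely for illustration:

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;

public class EmptyConfigSketch {
    // Hypothetical option, defined only for this example.
    private static final ConfigOption<Integer> RETRIES =
            ConfigOptions.key("example.retries").intType().defaultValue(3);

    public static void main(String[] args) {
        // An empty Configuration is still a valid ReadableConfig:
        // get() falls back to the declared default for unset options.
        ReadableConfig config = new Configuration();
        System.out.println(config.get(RETRIES)); // prints 3 (the default)
        // getOptional() does not apply defaults, so it reports "not set".
        System.out.println(config.getOptional(RETRIES).isPresent()); // prints false
    }
}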

Aggregations

ReadableConfig (org.apache.flink.configuration.ReadableConfig) 28
FactoryUtil (org.apache.flink.table.factories.FactoryUtil) 9
RowData (org.apache.flink.table.data.RowData) 8
DataType (org.apache.flink.table.types.DataType) 7
Properties (java.util.Properties) 6
TableException (org.apache.flink.table.api.TableException) 6
TableFactoryHelper (org.apache.flink.table.factories.FactoryUtil.TableFactoryHelper) 5
Arrays (java.util.Arrays) 4
List (java.util.List) 4
Optional (java.util.Optional) 4
IntStream (java.util.stream.IntStream) 4
Internal (org.apache.flink.annotation.Internal) 4
DeserializationSchema (org.apache.flink.api.common.serialization.DeserializationSchema) 4
ConfigOption (org.apache.flink.configuration.ConfigOption) 4
HBaseConnectorOptionsUtil.getHBaseConfiguration (org.apache.flink.connector.hbase.table.HBaseConnectorOptionsUtil.getHBaseConfiguration) 4
HBaseTableSchema (org.apache.flink.connector.hbase.util.HBaseTableSchema) 4
DeliveryGuarantee (org.apache.flink.connector.base.DeliveryGuarantee) 3
ChangelogMode (org.apache.flink.table.connector.ChangelogMode) 3
EncodingFormat (org.apache.flink.table.connector.format.EncodingFormat) 3
DynamicTableSink (org.apache.flink.table.connector.sink.DynamicTableSink) 3