Use of org.apache.flink.table.connector.format.DecodingFormat in project flink by apache: class KafkaDynamicTableFactory, method createDynamicTableSource.
@Override
public DynamicTableSource createDynamicTableSource(Context context) {
    final TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
    final Optional<DecodingFormat<DeserializationSchema<RowData>>> keyDecodingFormat =
            getKeyDecodingFormat(helper);
    final DecodingFormat<DeserializationSchema<RowData>> valueDecodingFormat =
            getValueDecodingFormat(helper);
    helper.validateExcept(PROPERTIES_PREFIX);
    final ReadableConfig tableOptions = helper.getOptions();
    validateTableSourceOptions(tableOptions);
    validatePKConstraints(
            context.getObjectIdentifier(), context.getPrimaryKeyIndexes(),
            context.getCatalogTable().getOptions(), valueDecodingFormat);
    final StartupOptions startupOptions = getStartupOptions(tableOptions);
    final Properties properties = getKafkaProperties(context.getCatalogTable().getOptions());
    // add topic-partition discovery
    final Optional<Long> partitionDiscoveryInterval =
            tableOptions.getOptional(SCAN_TOPIC_PARTITION_DISCOVERY).map(Duration::toMillis);
    properties.setProperty(
            KafkaSourceOptions.PARTITION_DISCOVERY_INTERVAL_MS.key(),
            partitionDiscoveryInterval.orElse(-1L).toString());
    final DataType physicalDataType = context.getPhysicalRowDataType();
    final int[] keyProjection = createKeyFormatProjection(tableOptions, physicalDataType);
    final int[] valueProjection = createValueFormatProjection(tableOptions, physicalDataType);
    final String keyPrefix = tableOptions.getOptional(KEY_FIELDS_PREFIX).orElse(null);
    return createKafkaTableSource(
            physicalDataType, keyDecodingFormat.orElse(null), valueDecodingFormat,
            keyProjection, valueProjection, keyPrefix,
            getSourceTopics(tableOptions), getSourceTopicPattern(tableOptions), properties,
            startupOptions.startupMode, startupOptions.specificOffsets,
            startupOptions.startupTimestampMillis,
            context.getObjectIdentifier().asSummaryString());
}
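For context, the options this factory validates come from the WITH clause of a CREATE TABLE statement. The following is a minimal sketch, not taken from the Flink sources: the topic, bootstrap servers, and schema are made up, and it assumes the Kafka SQL connector plus the csv and raw formats are on the classpath. The key.* and value.* options drive getKeyDecodingFormat/getValueDecodingFormat and the key/value projections, and 'scan.topic-partition-discovery.interval' feeds SCAN_TOPIC_PARTITION_DISCOVERY.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class KafkaSourceDdlSketch {
    public static void main(String[] args) {
        final TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Registering the table only stores metadata; createDynamicTableSource above runs
        // when the table is referenced in a query.
        tEnv.executeSql(
                "CREATE TABLE orders (\n"
                        + "  order_key STRING,\n"
                        + "  amount DOUBLE\n"
                        + ") WITH (\n"
                        + "  'connector' = 'kafka',\n"
                        + "  'topic' = 'orders',\n"
                        + "  'properties.bootstrap.servers' = 'localhost:9092',\n"
                        + "  'key.format' = 'raw',\n"
                        + "  'key.fields' = 'order_key',\n"
                        + "  'value.format' = 'csv',\n"
                        + "  'value.fields-include' = 'EXCEPT_KEY',\n"
                        + "  'scan.startup.mode' = 'earliest-offset',\n"
                        + "  'scan.topic-partition-discovery.interval' = '30s'\n"
                        + ")");
    }
}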
Use of org.apache.flink.table.connector.format.DecodingFormat in project flink by apache: class CsvFormatFactory, method createDecodingFormat.
@Override
public DecodingFormat<DeserializationSchema<RowData>> createDecodingFormat(
        DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    CsvCommons.validateFormatOptions(formatOptions);
    return new DecodingFormat<DeserializationSchema<RowData>>() {

        @Override
        public DeserializationSchema<RowData> createRuntimeDecoder(
                DynamicTableSource.Context context, DataType producedDataType) {
            final RowType rowType = (RowType) producedDataType.getLogicalType();
            final TypeInformation<RowData> rowDataTypeInfo =
                    context.createTypeInformation(producedDataType);
            final CsvRowDataDeserializationSchema.Builder schemaBuilder =
                    new CsvRowDataDeserializationSchema.Builder(rowType, rowDataTypeInfo);
            configureDeserializationSchema(formatOptions, schemaBuilder);
            return schemaBuilder.build();
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
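To show where a DecodingFormat like the one above actually gets used, here is a sketch of the consumer side: a hypothetical ScanTableSource (MyScanTableSource is not part of Flink) that receives the DecodingFormat from its factory and turns it into a runtime DeserializationSchema in getScanRuntimeProvider. The inline no-op SourceFunction only keeps the sketch self-contained; a production connector would factor the source into its own serializable class and feed deserializer.deserialize(bytes) into the collector.

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.format.DecodingFormat;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.ScanTableSource;
import org.apache.flink.table.connector.source.SourceFunctionProvider;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.DataType;

public class MyScanTableSource implements ScanTableSource {

    private final DecodingFormat<DeserializationSchema<RowData>> decodingFormat;
    private final DataType producedDataType;

    public MyScanTableSource(
            DecodingFormat<DeserializationSchema<RowData>> decodingFormat,
            DataType producedDataType) {
        this.decodingFormat = decodingFormat;
        this.producedDataType = producedDataType;
    }

    @Override
    public ChangelogMode getChangelogMode() {
        // Reuse whatever the format declares (insert-only for the CSV format above).
        return decodingFormat.getChangelogMode();
    }

    @Override
    public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) {
        // This is the call that triggers the anonymous createRuntimeDecoder shown above.
        final DeserializationSchema<RowData> deserializer =
                decodingFormat.createRuntimeDecoder(runtimeProviderContext, producedDataType);
        return SourceFunctionProvider.of(
                new SourceFunction<RowData>() {
                    @Override
                    public void run(SourceContext<RowData> ctx) {
                        // A real source reads bytes from the external system and emits
                        // ctx.collect(deserializer.deserialize(bytes)); omitted in this sketch.
                    }

                    @Override
                    public void cancel() {}
                },
                true); // bounded, since this sketch emits nothing
    }

    @Override
    public DynamicTableSource copy() {
        return new MyScanTableSource(decodingFormat, producedDataType);
    }

    @Override
    public String asSummaryString() {
        return "MyScanTableSource (sketch)";
    }
}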
Use of org.apache.flink.table.connector.format.DecodingFormat in project flink by apache: class TestDynamicTableFactory, method createDynamicTableSource.
@Override
public DynamicTableSource createDynamicTableSource(Context context) {
    final TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
    final Optional<DecodingFormat<DeserializationSchema<RowData>>> keyFormat =
            helper.discoverOptionalDecodingFormat(DeserializationFormatFactory.class, KEY_FORMAT);
    final DecodingFormat<DeserializationSchema<RowData>> valueFormat =
            helper.discoverOptionalDecodingFormat(DeserializationFormatFactory.class, FORMAT)
                    .orElseGet(() -> helper.discoverDecodingFormat(DeserializationFormatFactory.class, VALUE_FORMAT));
    helper.validate();
    return new DynamicTableSourceMock(
            helper.getOptions().get(TARGET),
            helper.getOptions().getOptional(PASSWORD).orElse(null),
            keyFormat.orElse(null),
            valueFormat);
}
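The discoverOptionalDecodingFormat/discoverDecodingFormat calls above look up a DeserializationFormatFactory by the identifier stored under 'format', 'key.format', or 'value.format'. As a rough sketch of what such a factory looks like (the class name and the identifier "my-format" are hypothetical, and the format body is deliberately left unimplemented), a minimal DeserializationFormatFactory could be declared as follows; it must also be listed in META-INF/services/org.apache.flink.table.factories.Factory so the helper can find it via SPI.

import java.util.Collections;
import java.util.Set;

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.table.connector.format.DecodingFormat;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.factories.DeserializationFormatFactory;
import org.apache.flink.table.factories.DynamicTableFactory;

public class MyFormatFactory implements DeserializationFormatFactory {

    @Override
    public String factoryIdentifier() {
        // Matched against the value of 'format', 'key.format' or 'value.format'.
        return "my-format";
    }

    @Override
    public Set<ConfigOption<?>> requiredOptions() {
        return Collections.emptySet();
    }

    @Override
    public Set<ConfigOption<?>> optionalOptions() {
        return Collections.emptySet();
    }

    @Override
    public DecodingFormat<DeserializationSchema<RowData>> createDecodingFormat(
            DynamicTableFactory.Context context, ReadableConfig formatOptions) {
        // A real implementation returns a DecodingFormat like the CSV and raw examples on this page.
        throw new UnsupportedOperationException("sketch only");
    }
}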
Use of org.apache.flink.table.connector.format.DecodingFormat in project flink by apache: class RawFormatFactory, method createDecodingFormat.
@Override
public DecodingFormat<DeserializationSchema<RowData>> createDecodingFormat(
        Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    final String charsetName = validateAndGetCharsetName(formatOptions);
    final boolean isBigEndian = isBigEndian(formatOptions);
    return new DecodingFormat<DeserializationSchema<RowData>>() {

        @Override
        public DeserializationSchema<RowData> createRuntimeDecoder(
                DynamicTableSource.Context context, DataType producedDataType) {
            final RowType physicalRowType = (RowType) producedDataType.getLogicalType();
            final LogicalType fieldType = validateAndExtractSingleField(physicalRowType);
            final TypeInformation<RowData> producedTypeInfo =
                    context.createTypeInformation(producedDataType);
            return new RawFormatDeserializationSchema(
                    fieldType, producedTypeInfo, charsetName, isBigEndian);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
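As with the Kafka example above, the two values read here come straight from table options. A minimal, illustrative DDL sketch (topic and servers are made up): 'raw.charset' is what validateAndGetCharsetName reads, 'raw.endianness' is what isBigEndian reads, and the schema declares exactly one physical column as required by validateAndExtractSingleField.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class RawFormatDdlSketch {
    public static void main(String[] args) {
        final TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        tEnv.executeSql(
                "CREATE TABLE raw_events (\n"
                        + "  payload STRING\n"                     // single physical column only
                        + ") WITH (\n"
                        + "  'connector' = 'kafka',\n"
                        + "  'topic' = 'events',\n"
                        + "  'properties.bootstrap.servers' = 'localhost:9092',\n"
                        + "  'scan.startup.mode' = 'latest-offset',\n"
                        + "  'format' = 'raw',\n"
                        + "  'raw.charset' = 'UTF-8',\n"           // -> validateAndGetCharsetName
                        + "  'raw.endianness' = 'big-endian'\n"    // -> isBigEndian (only affects numeric columns)
                        + ")");
    }
}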