
Example 6 with EncodingFormat

Use of org.apache.flink.table.connector.format.EncodingFormat in project flink by apache.

From the class KafkaDynamicTableFactory, method createDynamicTableSink:

@Override
public DynamicTableSink createDynamicTableSink(Context context) {
    final TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, autoCompleteSchemaRegistrySubject(context));
    final Optional<EncodingFormat<SerializationSchema<RowData>>> keyEncodingFormat = getKeyEncodingFormat(helper);
    final EncodingFormat<SerializationSchema<RowData>> valueEncodingFormat = getValueEncodingFormat(helper);
    helper.validateExcept(PROPERTIES_PREFIX);
    final ReadableConfig tableOptions = helper.getOptions();
    final DeliveryGuarantee deliveryGuarantee = validateDeprecatedSemantic(tableOptions);
    validateTableSinkOptions(tableOptions);
    KafkaConnectorOptionsUtil.validateDeliveryGuarantee(tableOptions);
    validatePKConstraints(context.getObjectIdentifier(), context.getPrimaryKeyIndexes(), context.getCatalogTable().getOptions(), valueEncodingFormat);
    final DataType physicalDataType = context.getPhysicalRowDataType();
    final int[] keyProjection = createKeyFormatProjection(tableOptions, physicalDataType);
    final int[] valueProjection = createValueFormatProjection(tableOptions, physicalDataType);
    final String keyPrefix = tableOptions.getOptional(KEY_FIELDS_PREFIX).orElse(null);
    final Integer parallelism = tableOptions.getOptional(SINK_PARALLELISM).orElse(null);
    return createKafkaTableSink(
            physicalDataType,
            keyEncodingFormat.orElse(null),
            valueEncodingFormat,
            keyProjection,
            valueProjection,
            keyPrefix,
            tableOptions.get(TOPIC).get(0),
            getKafkaProperties(context.getCatalogTable().getOptions()),
            getFlinkKafkaPartitioner(tableOptions, context.getClassLoader()).orElse(null),
            deliveryGuarantee,
            parallelism,
            tableOptions.get(TRANSACTIONAL_ID_PREFIX));
}
Also used: EncodingFormat (org.apache.flink.table.connector.format.EncodingFormat), RowData (org.apache.flink.table.data.RowData), ReadableConfig (org.apache.flink.configuration.ReadableConfig), DeliveryGuarantee (org.apache.flink.connector.base.DeliveryGuarantee), SerializationSchema (org.apache.flink.api.common.serialization.SerializationSchema), TableFactoryHelper (org.apache.flink.table.factories.FactoryUtil.TableFactoryHelper), DataType (org.apache.flink.table.types.DataType)
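
For context, a hedged sketch of how this factory gets exercised from the Table API; it is not part of the quoted source, the class name, topic, bootstrap servers, and column names are placeholders, and it assumes Flink 1.14+ with the Kafka connector and JSON format jars on the classpath. Declaring a table with 'connector' = 'kafka' and a value format is what makes the planner call createDynamicTableSink and resolve the key/value EncodingFormat instances shown above.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.TableDescriptor;
import org.apache.flink.table.api.TableEnvironment;

public class KafkaSinkDeclarationSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
        // Placeholder schema and connection settings; adjust to your setup.
        tEnv.createTemporaryTable(
                "orders_sink",
                TableDescriptor.forConnector("kafka")
                        .schema(Schema.newBuilder()
                                .column("order_id", DataTypes.STRING())
                                .column("amount", DataTypes.DOUBLE())
                                .build())
                        .option("topic", "orders")
                        .option("properties.bootstrap.servers", "localhost:9092")
                        .format("json") // resolved to the value EncodingFormat
                        .build());
        // An INSERT INTO orders_sink ... statement would now trigger
        // KafkaDynamicTableFactory#createDynamicTableSink.
    }
}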

Example 7 with EncodingFormat

Use of org.apache.flink.table.connector.format.EncodingFormat in project flink by apache.

From the class MaxwellJsonFormatFactory, method createEncodingFormat:

@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    validateEncodingFormatOptions(formatOptions);
    TimestampFormat timestampFormat = JsonFormatOptionsUtil.getTimestampFormat(formatOptions);
    JsonFormatOptions.MapNullKeyMode mapNullKeyMode = JsonFormatOptionsUtil.getMapNullKeyMode(formatOptions);
    String mapNullKeyLiteral = formatOptions.get(JSON_MAP_NULL_KEY_LITERAL);
    final boolean encodeDecimalAsPlainNumber = formatOptions.get(ENCODE_DECIMAL_AS_PLAIN_NUMBER);
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.newBuilder()
                    .addContainedKind(RowKind.INSERT)
                    .addContainedKind(RowKind.UPDATE_BEFORE)
                    .addContainedKind(RowKind.UPDATE_AFTER)
                    .addContainedKind(RowKind.DELETE)
                    .build();
        }

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            return new MaxwellJsonSerializationSchema(rowType, timestampFormat, mapNullKeyMode, mapNullKeyLiteral, encodeDecimalAsPlainNumber);
        }
    };
}
Also used: EncodingFormat (org.apache.flink.table.connector.format.EncodingFormat), JsonFormatOptions (org.apache.flink.formats.json.JsonFormatOptions), RowData (org.apache.flink.table.data.RowData), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType), TimestampFormat (org.apache.flink.formats.common.TimestampFormat)
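
Since maxwell-json is a changelog (CDC) format, the anonymous EncodingFormat above advertises every row kind. A minimal standalone sketch, not from the source, of what that mode contains and how a consumer can query it:

import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.types.RowKind;

public class ChangelogModeSketch {
    public static void main(String[] args) {
        // Same set of row kinds as declared in getChangelogMode() above.
        ChangelogMode cdcMode = ChangelogMode.newBuilder()
                .addContainedKind(RowKind.INSERT)
                .addContainedKind(RowKind.UPDATE_BEFORE)
                .addContainedKind(RowKind.UPDATE_AFTER)
                .addContainedKind(RowKind.DELETE)
                .build();
        // The planner checks row kinds like this when wiring a changelog into a sink.
        System.out.println(cdcMode.contains(RowKind.DELETE)); // true
        // This set should be equivalent to the ChangelogMode.all() shortcut.
        System.out.println(cdcMode.equals(ChangelogMode.all()));
    }
}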

Example 8 with EncodingFormat

Use of org.apache.flink.table.connector.format.EncodingFormat in project flink by apache.

From the class JsonFormatFactory, method createEncodingFormat:

@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    JsonFormatOptionsUtil.validateEncodingFormatOptions(formatOptions);
    TimestampFormat timestampOption = JsonFormatOptionsUtil.getTimestampFormat(formatOptions);
    JsonFormatOptions.MapNullKeyMode mapNullKeyMode = JsonFormatOptionsUtil.getMapNullKeyMode(formatOptions);
    String mapNullKeyLiteral = formatOptions.get(MAP_NULL_KEY_LITERAL);
    final boolean encodeDecimalAsPlainNumber = formatOptions.get(ENCODE_DECIMAL_AS_PLAIN_NUMBER);
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            return new JsonRowDataSerializationSchema(rowType, timestampOption, mapNullKeyMode, mapNullKeyLiteral, encodeDecimalAsPlainNumber);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
Also used: EncodingFormat (org.apache.flink.table.connector.format.EncodingFormat), RowData (org.apache.flink.table.data.RowData), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType), TimestampFormat (org.apache.flink.formats.common.TimestampFormat)
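
A hedged usage sketch, not from the source: constructing the same serializer directly with example option values and encoding a single row. The class name, column names, and option values below are illustration only; the constructor argument list matches the Flink version quoted above, and passing null to open() is a shortcut for this sketch (in a real job the runtime supplies the InitializationContext).

import org.apache.flink.formats.common.TimestampFormat;
import org.apache.flink.formats.json.JsonFormatOptions;
import org.apache.flink.formats.json.JsonRowDataSerializationSchema;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.RowType;

public class JsonEncoderSketch {
    public static void main(String[] args) throws Exception {
        // Example consumed data type; column names are placeholders.
        DataType consumedDataType =
                DataTypes.ROW(
                        DataTypes.FIELD("name", DataTypes.STRING()),
                        DataTypes.FIELD("count", DataTypes.INT()));
        RowType rowType = (RowType) consumedDataType.getLogicalType();

        // Same constructor as used in createRuntimeEncoder above,
        // with concrete option values chosen for illustration.
        JsonRowDataSerializationSchema schema =
                new JsonRowDataSerializationSchema(
                        rowType,
                        TimestampFormat.SQL,
                        JsonFormatOptions.MapNullKeyMode.FAIL,
                        "null",
                        false);

        schema.open(null); // sketch shortcut; the context is provided by the runtime
        byte[] json = schema.serialize(GenericRowData.of(StringData.fromString("flink"), 1));
        System.out.println(new String(json)); // e.g. {"name":"flink","count":1}
    }
}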

Example 9 with EncodingFormat

Use of org.apache.flink.table.connector.format.EncodingFormat in project flink by apache.

From the class RawFormatFactory, method createEncodingFormat:

@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    final String charsetName = validateAndGetCharsetName(formatOptions);
    final boolean isBigEndian = isBigEndian(formatOptions);
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType physicalRowType = (RowType) consumedDataType.getLogicalType();
            final LogicalType fieldType = validateAndExtractSingleField(physicalRowType);
            return new RawFormatSerializationSchema(fieldType, charsetName, isBigEndian);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
Also used: EncodingFormat (org.apache.flink.table.connector.format.EncodingFormat), Context (org.apache.flink.table.factories.DynamicTableFactory.Context), RowData (org.apache.flink.table.data.RowData), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType), LogicalType (org.apache.flink.table.types.logical.LogicalType)
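
A hedged sketch, not from the source, of how such a format factory is looked up by its identifier; the planner does this internally when it sees 'format' = 'raw' in the table options. It assumes the raw format's identifier is "raw" and that the jar providing RawFormatFactory is on the classpath.

import org.apache.flink.table.factories.FactoryUtil;
import org.apache.flink.table.factories.SerializationFormatFactory;

public class FormatDiscoverySketch {
    public static void main(String[] args) {
        // Looks up the factory registered under the identifier "raw"
        // via Java's ServiceLoader mechanism (META-INF/services).
        SerializationFormatFactory factory =
                FactoryUtil.discoverFactory(
                        Thread.currentThread().getContextClassLoader(),
                        SerializationFormatFactory.class,
                        "raw");
        System.out.println(factory.getClass().getName());
        // The planner then calls factory.createEncodingFormat(context, formatOptions)
        // with the table's context and the format's options, as shown above.
    }
}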

Example 10 with EncodingFormat

Use of org.apache.flink.table.connector.format.EncodingFormat in project flink by apache.

From the class OggJsonFormatFactory, method createEncodingFormat:

@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    validateEncodingFormatOptions(formatOptions);
    TimestampFormat timestampFormat = JsonFormatOptionsUtil.getTimestampFormat(formatOptions);
    JsonFormatOptions.MapNullKeyMode mapNullKeyMode = JsonFormatOptionsUtil.getMapNullKeyMode(formatOptions);
    String mapNullKeyLiteral = formatOptions.get(JSON_MAP_NULL_KEY_LITERAL);
    final boolean encodeDecimalAsPlainNumber = formatOptions.get(ENCODE_DECIMAL_AS_PLAIN_NUMBER);
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.newBuilder()
                    .addContainedKind(RowKind.INSERT)
                    .addContainedKind(RowKind.UPDATE_BEFORE)
                    .addContainedKind(RowKind.UPDATE_AFTER)
                    .addContainedKind(RowKind.DELETE)
                    .build();
        }

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            return new OggJsonSerializationSchema(rowType, timestampFormat, mapNullKeyMode, mapNullKeyLiteral, encodeDecimalAsPlainNumber);
        }
    };
}
Also used: EncodingFormat (org.apache.flink.table.connector.format.EncodingFormat), JsonFormatOptions (org.apache.flink.formats.json.JsonFormatOptions), RowData (org.apache.flink.table.data.RowData), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType), TimestampFormat (org.apache.flink.formats.common.TimestampFormat)
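
To close the loop, a hedged sketch of the consuming side. The class below is hypothetical (its name, fields, and the discarding sink are illustration only); it shows the common pattern in which a DynamicTableSink stores the EncodingFormat handed over by its factory and turns it into a runtime SerializationSchema inside getSinkRuntimeProvider.

import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.streaming.api.functions.sink.DiscardingSink;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.format.EncodingFormat;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.connector.sink.SinkFunctionProvider;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.DataType;

/** Hypothetical sink used only to illustrate how an EncodingFormat is consumed. */
public class EncodingFormatConsumerSketch implements DynamicTableSink {

    private final EncodingFormat<SerializationSchema<RowData>> encodingFormat;
    private final DataType physicalDataType;

    public EncodingFormatConsumerSketch(
            EncodingFormat<SerializationSchema<RowData>> encodingFormat,
            DataType physicalDataType) {
        this.encodingFormat = encodingFormat;
        this.physicalDataType = physicalDataType;
    }

    @Override
    public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
        // Let the format decide which row kinds can be encoded.
        return encodingFormat.getChangelogMode();
    }

    @Override
    public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
        // This is where the EncodingFormat becomes a runtime serializer.
        SerializationSchema<RowData> encoder =
                encodingFormat.createRuntimeEncoder(context, physicalDataType);
        // A real connector would hand `encoder` to its Kafka/file/... writer;
        // a discarding sink keeps this sketch self-contained.
        return SinkFunctionProvider.of(new DiscardingSink<>());
    }

    @Override
    public DynamicTableSink copy() {
        return new EncodingFormatConsumerSketch(encodingFormat, physicalDataType);
    }

    @Override
    public String asSummaryString() {
        return "EncodingFormatConsumerSketch";
    }
}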

Aggregations

Classes used together with EncodingFormat in the collected examples, with usage counts:

EncodingFormat (org.apache.flink.table.connector.format.EncodingFormat): 13
RowData (org.apache.flink.table.data.RowData): 13
DataType (org.apache.flink.table.types.DataType): 12
RowType (org.apache.flink.table.types.logical.RowType): 11
TimestampFormat (org.apache.flink.formats.common.TimestampFormat): 5
JsonFormatOptions (org.apache.flink.formats.json.JsonFormatOptions): 4
BulkWriter (org.apache.flink.api.common.serialization.BulkWriter): 2
SerializationSchema (org.apache.flink.api.common.serialization.SerializationSchema): 2
ReadableConfig (org.apache.flink.configuration.ReadableConfig): 2
ValidationException (org.apache.flink.table.api.ValidationException): 2
Collections (java.util.Collections): 1
Set (java.util.Set): 1
StringEscapeUtils (org.apache.commons.lang3.StringEscapeUtils): 1
Internal (org.apache.flink.annotation.Internal): 1
Factory (org.apache.flink.api.common.serialization.BulkWriter.Factory): 1
ConfigOption (org.apache.flink.configuration.ConfigOption): 1
DeliveryGuarantee (org.apache.flink.connector.base.DeliveryGuarantee): 1
FileSourceSplit (org.apache.flink.connector.file.src.FileSourceSplit): 1
StreamFormatAdapter (org.apache.flink.connector.file.src.impl.StreamFormatAdapter): 1
BulkFormat (org.apache.flink.connector.file.src.reader.BulkFormat): 1