Use of org.apache.flink.formats.common.TimestampFormat in project flink by apache.
The class DebeziumJsonDecodingFormat, method createRuntimeDecoder.
@Override
public DeserializationSchema<RowData> createRuntimeDecoder(
        DynamicTableSource.Context context, DataType physicalDataType, int[][] projections) {
    physicalDataType = Projection.of(projections).project(physicalDataType);
    final List<ReadableMetadata> readableMetadata =
            metadataKeys.stream()
                    .map(k -> Stream.of(ReadableMetadata.values())
                            .filter(rm -> rm.key.equals(k))
                            .findFirst()
                            .orElseThrow(IllegalStateException::new))
                    .collect(Collectors.toList());
    final List<DataTypes.Field> metadataFields =
            readableMetadata.stream()
                    .map(m -> DataTypes.FIELD(m.key, m.dataType))
                    .collect(Collectors.toList());
    final DataType producedDataType = DataTypeUtils.appendRowFields(physicalDataType, metadataFields);
    final TypeInformation<RowData> producedTypeInfo = context.createTypeInformation(producedDataType);
    return new DebeziumJsonDeserializationSchema(
            physicalDataType, readableMetadata, producedTypeInfo,
            schemaInclude, ignoreParseErrors, timestampFormat);
}
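For orientation, here is a minimal, self-contained sketch of the produced-type construction used above. The physical columns and the 'ingestion-timestamp' metadata field are chosen for illustration and are not taken from the snippet itself.

import java.util.Collections;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.utils.DataTypeUtils;

public class ProducedTypeSketch {
    public static void main(String[] args) {
        // Assumed physical schema after projection push-down.
        DataType physical = DataTypes.ROW(
                DataTypes.FIELD("id", DataTypes.BIGINT()),
                DataTypes.FIELD("name", DataTypes.STRING()));
        // Append one metadata field, mirroring the appendRowFields call above.
        DataType produced = DataTypeUtils.appendRowFields(
                physical,
                Collections.singletonList(
                        DataTypes.FIELD("ingestion-timestamp", DataTypes.TIMESTAMP_LTZ(3))));
        // The produced row type now carries the metadata column after the physical columns.
        System.out.println(produced);
    }
}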
Use of org.apache.flink.formats.common.TimestampFormat in project flink by apache.
The class DebeziumJsonFormatFactory, method createEncodingFormat.
@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(
        DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    validateEncodingFormatOptions(formatOptions);
    TimestampFormat timestampFormat = JsonFormatOptionsUtil.getTimestampFormat(formatOptions);
    JsonFormatOptions.MapNullKeyMode mapNullKeyMode =
            JsonFormatOptionsUtil.getMapNullKeyMode(formatOptions);
    String mapNullKeyLiteral = formatOptions.get(JSON_MAP_NULL_KEY_LITERAL);
    final boolean encodeDecimalAsPlainNumber = formatOptions.get(ENCODE_DECIMAL_AS_PLAIN_NUMBER);
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.newBuilder()
                    .addContainedKind(RowKind.INSERT)
                    .addContainedKind(RowKind.UPDATE_BEFORE)
                    .addContainedKind(RowKind.UPDATE_AFTER)
                    .addContainedKind(RowKind.DELETE)
                    .build();
        }

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(
                DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            return new DebeziumJsonSerializationSchema(
                    rowType, timestampFormat, mapNullKeyMode, mapNullKeyLiteral, encodeDecimalAsPlainNumber);
        }
    };
}
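The factory resolves all serializer settings from formatOptions before building the anonymous EncodingFormat. Below is a small standalone sketch of that option resolution; the raw option keys and values are assumptions based on the JSON format documentation, so treat it as illustrative rather than canonical.

import org.apache.flink.configuration.Configuration;
import org.apache.flink.formats.common.TimestampFormat;
import org.apache.flink.formats.json.JsonFormatOptions;
import org.apache.flink.formats.json.JsonFormatOptionsUtil;

public class EncodingOptionsSketch {
    public static void main(String[] args) {
        Configuration formatOptions = new Configuration();
        // Assumed option keys and values; verify them against the Flink version in use.
        formatOptions.setString("timestamp-format.standard", "ISO-8601");
        formatOptions.setString("map-null-key.mode", "LITERAL");

        // Same helper calls as in the factory above.
        TimestampFormat timestampFormat = JsonFormatOptionsUtil.getTimestampFormat(formatOptions);
        JsonFormatOptions.MapNullKeyMode mapNullKeyMode =
                JsonFormatOptionsUtil.getMapNullKeyMode(formatOptions);

        // Expected output: ISO_8601 / LITERAL
        System.out.println(timestampFormat + " / " + mapNullKeyMode);
    }
}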
Use of org.apache.flink.formats.common.TimestampFormat in project flink by apache.
The class DebeziumJsonFormatFactory, method createDecodingFormat.
@Override
public DecodingFormat<DeserializationSchema<RowData>> createDecodingFormat(
        DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    validateDecodingFormatOptions(formatOptions);
    final boolean schemaInclude = formatOptions.get(SCHEMA_INCLUDE);
    final boolean ignoreParseErrors = formatOptions.get(IGNORE_PARSE_ERRORS);
    final TimestampFormat timestampFormat = JsonFormatOptionsUtil.getTimestampFormat(formatOptions);
    return new DebeziumJsonDecodingFormat(schemaInclude, ignoreParseErrors, timestampFormat);
}
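For context on where these options come from, here is a sketch of a table definition that would route through this factory. The Kafka topic and bootstrap servers are placeholders I made up; the 'debezium-json.*' keys correspond to SCHEMA_INCLUDE, IGNORE_PARSE_ERRORS, and the timestamp format read above.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class DebeziumSourceSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Requires the Kafka connector and debezium-json format on the classpath.
        tEnv.executeSql(
                "CREATE TABLE orders (\n"
                        + "  id BIGINT,\n"
                        + "  amount DECIMAL(10, 2)\n"
                        + ") WITH (\n"
                        + "  'connector' = 'kafka',\n"
                        + "  'topic' = 'orders.cdc',\n"
                        + "  'properties.bootstrap.servers' = 'localhost:9092',\n"
                        + "  'format' = 'debezium-json',\n"
                        + "  'debezium-json.schema-include' = 'true',\n"
                        + "  'debezium-json.ignore-parse-errors' = 'true',\n"
                        + "  'debezium-json.timestamp-format.standard' = 'ISO-8601'\n"
                        + ")");
    }
}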
Use of org.apache.flink.formats.common.TimestampFormat in project flink by apache.
The class OggJsonFormatFactory, method createDecodingFormat.
@Override
public DecodingFormat<DeserializationSchema<RowData>> createDecodingFormat(
        DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    validateDecodingFormatOptions(formatOptions);
    final boolean ignoreParseErrors = formatOptions.get(IGNORE_PARSE_ERRORS);
    final TimestampFormat timestampFormat = JsonFormatOptionsUtil.getTimestampFormat(formatOptions);
    return new OggJsonDecodingFormat(ignoreParseErrors, timestampFormat);
}
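Besides error handling, the only format-specific switch here is the timestamp layout. As a reminder of what the two TimestampFormat variants mean on the wire, here is a small illustration using plain java.time rather than Flink's internal converters; the patterns Flink accepts (e.g., optional fractional digits) may be more lenient than these fixed ones.

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class TimestampShapeSketch {
    public static void main(String[] args) {
        // SQL layout: space-separated, as selected by TimestampFormat.SQL.
        DateTimeFormatter sql = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS");
        // ISO-8601 layout: 'T'-separated, as selected by TimestampFormat.ISO_8601.
        DateTimeFormatter iso8601 = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS");
        System.out.println(LocalDateTime.parse("2020-12-30 12:13:14.123", sql));
        System.out.println(LocalDateTime.parse("2020-12-30T12:13:14.123", iso8601));
    }
}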
Use of org.apache.flink.formats.common.TimestampFormat in project flink by apache.
The class MaxwellJsonDecodingFormat, method createRuntimeDecoder.
@Override
public DeserializationSchema<RowData> createRuntimeDecoder(
        DynamicTableSource.Context context, DataType physicalDataType, int[][] projections) {
    physicalDataType = Projection.of(projections).project(physicalDataType);
    final List<ReadableMetadata> readableMetadata =
            metadataKeys.stream()
                    .map(k -> Stream.of(ReadableMetadata.values())
                            .filter(rm -> rm.key.equals(k))
                            .findFirst()
                            .orElseThrow(() -> new IllegalStateException(
                                    String.format("Could not find the requested metadata key: %s", k))))
                    .collect(Collectors.toList());
    final List<DataTypes.Field> metadataFields =
            readableMetadata.stream()
                    .map(m -> DataTypes.FIELD(m.key, m.dataType))
                    .collect(Collectors.toList());
    final DataType producedDataType = DataTypeUtils.appendRowFields(physicalDataType, metadataFields);
    final TypeInformation<RowData> producedTypeInfo = context.createTypeInformation(producedDataType);
    return new MaxwellJsonDeserializationSchema(
            physicalDataType, readableMetadata, producedTypeInfo, ignoreParseErrors, timestampFormat);
}
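The metadataKeys resolved here originate from METADATA columns declared on the table. A sketch of such a declaration follows; the connector settings and column names are placeholders, and the metadata keys ('database', 'ingestion-timestamp') are the ones documented for maxwell-json, worth verifying against the Flink version in use.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class MaxwellMetadataSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // The METADATA columns below populate metadataKeys, which the decoder
        // above resolves against ReadableMetadata by key.
        tEnv.executeSql(
                "CREATE TABLE products (\n"
                        + "  id BIGINT,\n"
                        + "  name STRING,\n"
                        + "  origin_database STRING METADATA FROM 'database' VIRTUAL,\n"
                        + "  origin_ts TIMESTAMP_LTZ(3) METADATA FROM 'ingestion-timestamp' VIRTUAL\n"
                        + ") WITH (\n"
                        + "  'connector' = 'kafka',\n"
                        + "  'topic' = 'products.maxwell',\n"
                        + "  'properties.bootstrap.servers' = 'localhost:9092',\n"
                        + "  'format' = 'maxwell-json',\n"
                        + "  'maxwell-json.ignore-parse-errors' = 'true'\n"
                        + ")");
    }
}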