Usage of org.apache.flink.formats.json.debezium.DebeziumJsonDecodingFormat.ReadableMetadata in the Apache Flink project.
From class DebeziumJsonSerDeSchemaTest, method testDeserializationWithMetadata.
/**
 * Deserializes the first line of the given resource with all metadata columns requested and
 * hands the single resulting row to {@code testConsumer} for verification.
 *
 * @param resourceFile test resource containing Debezium JSON records, one per line
 * @param schemaInclude whether the Debezium envelope carries the "schema" section
 * @param testConsumer assertions to run against the produced row
 */
private void testDeserializationWithMetadata(String resourceFile, boolean schemaInclude, Consumer<RowData> testConsumer) throws Exception {
    // Keep the test simple: only the first record of the resource is deserialized.
    final String firstLine = readLines(resourceFile).get(0);

    // Request every metadata column the format can expose.
    final List<ReadableMetadata> requestedMetadata = Arrays.asList(ReadableMetadata.values());

    // Produced type = physical columns followed by one appended field per metadata key.
    final List<DataTypes.Field> metadataFields =
            requestedMetadata.stream()
                    .map(m -> DataTypes.FIELD(m.key, m.dataType))
                    .collect(Collectors.toList());
    final DataType producedDataType = DataTypeUtils.appendRowFields(PHYSICAL_DATA_TYPE, metadataFields);

    final DebeziumJsonDeserializationSchema deserializationSchema =
            new DebeziumJsonDeserializationSchema(
                    PHYSICAL_DATA_TYPE,
                    requestedMetadata,
                    InternalTypeInfo.of(producedDataType.getLogicalType()),
                    schemaInclude,
                    false,
                    TimestampFormat.ISO_8601);

    final SimpleCollector collector = new SimpleCollector();
    deserializationSchema.deserialize(firstLine.getBytes(StandardCharsets.UTF_8), collector);

    // Exactly one row is expected from a single Debezium record.
    assertEquals(1, collector.list.size());
    testConsumer.accept(collector.list.get(0));
}
Usage of org.apache.flink.formats.json.debezium.DebeziumJsonDecodingFormat.ReadableMetadata in the Apache Flink project.
From class DebeziumJsonDeserializationSchema, method createJsonRowType.
// --------------------------------------------------------------------------------------------

/**
 * Builds the {@link RowType} describing the raw Debezium JSON structure to parse.
 *
 * <p>The payload row holds "before"/"after" (each with the physical schema) and the "op"
 * code, plus any JSON fields required by payload-level metadata. When {@code schemaInclude}
 * is set, the payload is nested under a "payload" field; root-level metadata fields are
 * appended last.
 *
 * @param physicalDataType physical row type of the table
 * @param readableMetadata metadata columns requested by the user
 * @param schemaInclude whether the envelope contains the "schema" section
 * @return row type matching the incoming JSON documents
 */
private static RowType createJsonRowType(DataType physicalDataType, List<ReadableMetadata> readableMetadata, boolean schemaInclude) {
    // Core Debezium change event: before/after images plus the operation code.
    DataType payload =
            DataTypes.ROW(
                    DataTypes.FIELD("before", physicalDataType),
                    DataTypes.FIELD("after", physicalDataType),
                    DataTypes.FIELD("op", DataTypes.STRING()));

    // Append the JSON fields needed for metadata that lives inside the payload.
    payload =
            DataTypeUtils.appendRowFields(
                    payload,
                    readableMetadata.stream()
                            .filter(m -> m.isJsonPayload)
                            .map(m -> m.requiredJsonField)
                            .distinct()
                            .collect(Collectors.toList()));

    DataType root = payload;
    if (schemaInclude) {
        // when Debezium Kafka connect enables "value.converter.schemas.enable",
        // the JSON will contain "schema" information and we need to extract data from
        // "payload".
        root = DataTypes.ROW(DataTypes.FIELD("payload", payload));
    }

    // Append the JSON fields needed for metadata that lives at the document root.
    root =
            DataTypeUtils.appendRowFields(
                    root,
                    readableMetadata.stream()
                            .filter(m -> !m.isJsonPayload)
                            .map(m -> m.requiredJsonField)
                            .distinct()
                            .collect(Collectors.toList()));

    return (RowType) root.getLogicalType();
}
Aggregations