Usage of org.apache.inlong.sort.protocol.serialization.CanalSerializationInfo in the apache/incubator-inlong project: class DebeziumToCanalITCase, method test.
/**
 * End-to-end test: consumes Debezium-formatted records from {@code TestSource},
 * deserializes them into rows, re-serializes them as Canal JSON and lets the
 * test sink verify the output. The Flink job runs on a dedicated thread while
 * this thread polls {@code verify()} until the expected records have arrived.
 */
@Test(timeout = 60 * 1000)
public void test() throws Exception {
    final ExecutorService flinkJobExecutor = Executors.newSingleThreadExecutor();
    flinkJobExecutor.execute(() -> {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        try {
            DataStream<SerializedRecord> source = env.addSource(new TestSource());

            // Debezium bytes -> Row
            DeserializationSchema<Row> deserializer = DeserializationSchemaFactory.build(
                    fieldInfos, new DebeziumDeserializationInfo(false, "ISO_8601"));
            FieldMappingTransformer transformer = new FieldMappingTransformer(new Configuration(), fieldInfos);
            DataStream<Row> rows = source.process(new DeserializationFunction(deserializer, transformer, false));

            // Row -> Canal JSON, collected by the test sink for verification
            SerializationSchema<Row> serializer =
                    SerializationSchemaFactory.build(fieldInfos, new CanalSerializationInfo());
            rows.addSink(new TestSink(serializer));

            env.execute();
        } catch (Exception e) {
            logger.error("Error occurred when executing flink test job: ", e);
        } finally {
            // Signal the main thread that the job terminated, normally or not.
            jobFinishedLatch.countDown();
        }
    });

    try {
        // Poll until the sink has observed the expected records.
        while (!verify()) {
            Thread.sleep(500);
        }
        verificationFinishedLatch.countDown();
        jobFinishedLatch.await();
    } finally {
        flinkJobExecutor.shutdown();
    }

    Thread.sleep(10000);
}
Usage of org.apache.inlong.sort.protocol.serialization.CanalSerializationInfo in the apache/incubator-inlong project: class WholeDBMigrationITCase, method test.
/**
 * Whole-database-migration end-to-end test: reads migration records from
 * {@code TestSource}, deserializes them (Debezium format with the
 * migrate-all flag enabled), serializes the rows back out as Canal JSON and
 * verifies the sink output. The job runs asynchronously; this thread polls
 * {@code verify()} until the expected data is observed.
 */
@Test(timeout = 60 * 1000)
public void test() throws Exception {
    final ExecutorService jobRunner = Executors.newSingleThreadExecutor();
    jobRunner.execute(() -> {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        try {
            DataStream<SerializedRecord> input = env.addSource(new TestSource());

            // Debezium bytes -> Row; the trailing 'true' enables whole-DB migration handling.
            DeserializationSchema<Row> rowDeserializer = DeserializationSchemaFactory.build(
                    fieldInfos, new DebeziumDeserializationInfo(false, "ISO_8601", true));
            FieldMappingTransformer fieldMapper = new FieldMappingTransformer(new Configuration(), fieldInfos);
            DeserializationFunction deserializeFn =
                    new DeserializationFunction(rowDeserializer, fieldMapper, false);
            DataStream<Row> rowStream = input.process(deserializeFn);

            // Row -> Canal JSON, captured by the test sink.
            SerializationSchema<Row> canalSerializer =
                    SerializationSchemaFactory.build(fieldInfos, new CanalSerializationInfo());
            rowStream.addSink(new TestSink(canalSerializer));

            env.execute();
        } catch (Exception e) {
            logger.error("Error occurred when executing flink test job: ", e);
        } finally {
            // Always release the main thread, even on failure.
            jobFinishedLatch.countDown();
        }
    });

    try {
        // Busy-wait (with backoff) until the expected records show up.
        while (!verify()) {
            Thread.sleep(500);
        }
        verificationFinishedLatch.countDown();
        jobFinishedLatch.await();
    } finally {
        jobRunner.shutdown();
    }

    Thread.sleep(10000);
}
Usage of org.apache.inlong.sort.protocol.serialization.CanalSerializationInfo in the apache/incubator-inlong project: class RowToCanalKafkaSinkTest, method prepareData.
/**
 * Prepares the Kafka topic name, the row schema and the Canal JSON
 * serialization schema used by this sink test, then generates the test data.
 */
@Override
protected void prepareData() throws IOException, ClassNotFoundException {
    topic = "test_kafka_row_to_canal";

    // Two-column schema: a string field and an int field.
    FieldInfo stringField = new FieldInfo("f1", new StringFormatInfo());
    FieldInfo intField = new FieldInfo("f2", new IntFormatInfo());
    fieldInfos = new FieldInfo[] {stringField, intField};

    serializationSchema = SerializationSchemaFactory.build(fieldInfos, new CanalSerializationInfo());
    prepareTestData();
}
Usage of org.apache.inlong.sort.protocol.serialization.CanalSerializationInfo in the apache/incubator-inlong project: class CanalSerializationTest, method test.
/**
 * Verifies that a {@link Row} is serialized to the expected Canal JSON payload.
 *
 * <p>Fix: decode the serialized bytes with an explicit UTF-8 charset instead of
 * {@code new String(byte[])}, which uses the platform default charset and makes
 * the assertion environment-dependent (pre-JDK 18).
 */
@Test
public void test() throws Exception {
    SerializationSchema<Row> canalJsonSerializationSchema =
            SerializationSchemaFactory.build(fieldInfos, new CanalSerializationInfo());

    // open() requires an initialization context; the metric group and
    // user-code classloader are not used by this schema in the test.
    canalJsonSerializationSchema.open(new InitializationContext() {

        @Override
        public MetricGroup getMetricGroup() {
            return null;
        }

        @Override
        public UserCodeClassLoader getUserCodeClassLoader() {
            return null;
        }
    });

    Row row = Row.of("name", 29, "database", "table", 123L, false, "INSERT");
    String result = new String(canalJsonSerializationSchema.serialize(row),
            java.nio.charset.StandardCharsets.UTF_8);
    String expectedResult = "{\"data\":[{\"name\":\"name\",\"age\":29}],"
            + "\"type\":\"INSERT\",\"database\":\"database\","
            + "\"table\":\"table\",\"es\":123,\"isDdl\":false}";
    assertEquals(expectedResult, result);
}
Usage of org.apache.inlong.sort.protocol.serialization.CanalSerializationInfo in the apache/incubator-inlong project: class SerializationUtils, method serializeForKafka.
/**
 * Builds the {@link SerializationInfo} for a Kafka sink from the sink's
 * declared serialization type.
 *
 * @param sourceResponse source metadata; must be a {@link BinlogSourceResponse}
 *                       when the sink type is DEBEZIUM_JSON
 * @param sinkResponse   Kafka sink metadata carrying the serialization type name
 * @return the serialization info matching the sink's data type
 * @throws IllegalArgumentException if the serialization type is not supported
 *                                  for a Kafka sink
 */
private static SerializationInfo serializeForKafka(SourceResponse sourceResponse, KafkaSinkResponse sinkResponse) {
    final String typeName = sinkResponse.getSerializationType();
    final DataTypeEnum dataType = DataTypeEnum.forName(typeName);

    if (dataType == DataTypeEnum.AVRO) {
        return new AvroSerializationInfo();
    }
    if (dataType == DataTypeEnum.JSON) {
        return new JsonSerializationInfo();
    }
    if (dataType == DataTypeEnum.CANAL) {
        return new CanalSerializationInfo();
    }
    if (dataType == DataTypeEnum.DEBEZIUM_JSON) {
        // Debezium serialization needs the timestamp standard from the binlog source.
        Assert.isInstanceOf(BinlogSourceResponse.class, sourceResponse, "Unsupported serializationType for Kafka");
        BinlogSourceResponse binlogSource = (BinlogSourceResponse) sourceResponse;
        return new DebeziumSerializationInfo(binlogSource.getTimestampFormatStandard(), "FAIL", "", false);
    }

    throw new IllegalArgumentException(
            String.format("Unsupported serializationType for Kafka sink: %s", typeName));
}
Aggregations