Usage example of com.hazelcast.jet.cdc.ChangeRecord in the hazelcast/hazelcast project:
class MySqlCdcListenBeforeExistIntegrationTest, method listenBeforeDatabaseExists.
@Test
public void listenBeforeDatabaseExists() throws Exception {
    // The single sink entry we expect once the INSERT below is captured.
    List<String> expectedRecords = Collections.singletonList(
            "1001/0:INSERT:TableRow {id=1001, value1=someValue1, value2=someValue2, value3=null}");

    StreamSource<ChangeRecord> source = sourceBuilder("cdcMysql")
            .setDatabaseWhitelist(DATABASE)
            .build();

    // when: submit the CDC job before the whitelisted database exists
    HazelcastInstance instance = createHazelcastInstances(2)[0];
    Job job = instance.getJet().newJob(pipeline(source));
    assertJobStatusEventually(job, RUNNING);
    try {
        // then: create the database afterwards and verify the change is still captured
        createDb(DATABASE);
        createTableWithData(DATABASE, "someTable");
        insertToTable(DATABASE, "someTable", 1001, "someValue1", "someValue2");
        assertEqualsEventually(() -> mapResultsToSortedList(instance.getMap(SINK_MAP_NAME)), expectedRecords);
    } finally {
        job.cancel();
        // a cancelled Jet job terminates with FAILED status (CancellationException)
        assertJobStatusEventually(job, JobStatus.FAILED);
    }
}
Usage example of com.hazelcast.jet.cdc.ChangeRecord in the hazelcast/hazelcast project:
class MySqlCdcWhiteBlackListIntegrationTest, method pipeline.
/**
 * Builds the test pipeline: reads CDC records from {@code source}, keeps only
 * records for databases whose name starts with {@code DB_PREFIX}, runs them
 * through the timestamp-filtering transform, and writes one map entry per
 * change keyed by {@code "<rowId>/<sequenceNo>"} with the value
 * {@code "<operation>:<row>"}.
 */
private Pipeline pipeline(StreamSource<ChangeRecord> source) {
    Pipeline pipeline = Pipeline.create();
    pipeline.readFrom(source)
            .withNativeTimestamps(0)
            // drop records from databases outside the tested prefix
            .filter(rec -> rec.database().startsWith(DB_PREFIX))
            .setLocalParallelism(1)
            .<ChangeRecord>customTransform("filter_timestamps", filterTimestampsProcessorSupplier())
            .setLocalParallelism(1)
            // group by the row's primary key so the per-key counter is stable
            .groupingKey(rec -> (Integer) rec.key().toMap().get("id"))
            .mapStateful(LongAccumulator::new, (counter, rowId, rec) -> {
                // per-key sequence number, starting at 0
                long sequenceNo = counter.get();
                counter.add(1);
                Operation operation = rec.operation();
                TableRow row = rec.value().toObject(TableRow.class);
                return entry(rowId + "/" + sequenceNo, operation + ":" + row);
            })
            .setLocalParallelism(1)
            .writeTo(Sinks.map(SINK_MAP_NAME));
    return pipeline;
}
Aggregations