Use of com.hazelcast.jet.cdc.ChangeRecord in project hazelcast by hazelcast.
Class MySqlCdcIntegrationTest, method restart().
@Test
@Category(NightlyTest.class)
public void restart() throws Exception {
    // given
    List<String> expectedRecords = Arrays.asList(
            "1004/1:UPDATE:Customer {id=1004, firstName=Anne Marie, lastName=Kretchmar, email=annek@noanswer.org}",
            "1005/0:INSERT:Customer {id=1005, firstName=Jason, lastName=Bourne, email=jason@bourne.org}",
            "1005/1:DELETE:Customer {id=1005, firstName=Jason, lastName=Bourne, email=jason@bourne.org}"
    );

    Pipeline pipeline = Pipeline.create();
    pipeline.readFrom(source("customers"))
            .withNativeTimestamps(0)
            .<ChangeRecord>customTransform("filter_timestamps", filterTimestampsProcessorSupplier())
            .groupingKey(record -> (Integer) record.key().toMap().get("id"))
            .mapStateful(
                    LongAccumulator::new,
                    (accumulator, customerId, record) -> {
                        long count = accumulator.get();
                        accumulator.add(1);
                        Operation operation = record.operation();
                        RecordPart value = record.value();
                        Customer customer = value.toObject(Customer.class);
                        return entry(customerId + "/" + count, operation + ":" + customer);
                    })
            .setLocalParallelism(1)
            .writeTo(Sinks.map("results"));

    // when the job is started with an at-least-once guarantee
    HazelcastInstance hz = createHazelcastInstances(2)[0];
    JobConfig jobConfig = new JobConfig().setProcessingGuarantee(ProcessingGuarantee.AT_LEAST_ONCE);
    Job job = hz.getJet().newJob(pipeline, jobConfig);
    JetTestSupport.assertJobStatusEventually(job, JobStatus.RUNNING);
    // then the initial snapshot of the 4 existing customers arrives
    assertEqualsEventually(() -> hz.getMap("results").size(), 4);

    // when the sink map is destroyed
    hz.getMap("results").destroy();
    // then it is empty again
    assertEqualsEventually(() -> hz.getMap("results").size(), 0);

    // when the job is restarted
    job.restart();
    // then it reaches RUNNING again
    JetTestSupport.assertJobStatusEventually(job, JobStatus.RUNNING);

    // when further changes are applied to the source table
    try (Connection connection = getConnection(mysql, "inventory")) {
        Statement statement = connection.createStatement();
        statement.addBatch("UPDATE customers SET first_name='Anne Marie' WHERE id=1004");
        statement.addBatch("INSERT INTO customers VALUES (1005, 'Jason', 'Bourne', 'jason@bourne.org')");
        statement.addBatch("DELETE FROM customers WHERE id=1005");
        statement.executeBatch();
    }

    // then the restarted job picks up the new change events
    try {
        assertEqualsEventually(() -> mapResultsToSortedList(hz.getMap("results")), expectedRecords);
    } finally {
        job.cancel();
        assertJobStatusEventually(job, JobStatus.FAILED);
    }
}
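Both MySQL tests read from a source("customers") helper that is not included in this excerpt. Below is a minimal sketch of what such a helper could look like, assuming the MySqlCdcSources builder from Hazelcast's CDC MySQL module and a Testcontainers mysql field; the credentials, cluster name, and MYSQL_PORT constant are illustrative assumptions, not the project's actual values.

// Hypothetical reconstruction of the source(...) helper referenced above; the builder
// calls mirror the PostgresCdcSources usage shown later on this page.
private StreamSource<ChangeRecord> source(String tableName) {
    return MySqlCdcSources.mysql(tableName)
            .setDatabaseAddress(mysql.getContainerIpAddress()) // 'mysql' is the test's MySQL container
            .setDatabasePort(mysql.getMappedPort(MYSQL_PORT))
            .setDatabaseUser("debezium")                       // assumed credentials
            .setDatabasePassword("dbz")
            .setClusterName("dbserver1")                       // assumed logical server name
            .setDatabaseWhitelist("inventory")
            .setTableWhitelist("inventory." + tableName)
            .build();
}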
Use of com.hazelcast.jet.cdc.ChangeRecord in project hazelcast by hazelcast.
Class MySqlCdcIntegrationTest, method customers().
@Test
@Category(QuickTest.class)
public void customers() throws Exception {
    // given
    List<String> expectedRecords = Arrays.asList(
            "1001/0:INSERT:Customer {id=1001, firstName=Sally, lastName=Thomas, email=sally.thomas@acme.com}",
            "1002/0:INSERT:Customer {id=1002, firstName=George, lastName=Bailey, email=gbailey@foobar.com}",
            "1003/0:INSERT:Customer {id=1003, firstName=Edward, lastName=Walker, email=ed@walker.com}",
            "1004/0:INSERT:Customer {id=1004, firstName=Anne, lastName=Kretchmar, email=annek@noanswer.org}",
            "1004/1:UPDATE:Customer {id=1004, firstName=Anne Marie, lastName=Kretchmar, email=annek@noanswer.org}",
            "1005/0:INSERT:Customer {id=1005, firstName=Jason, lastName=Bourne, email=jason@bourne.org}",
            "1005/1:DELETE:Customer {id=1005, firstName=Jason, lastName=Bourne, email=jason@bourne.org}"
    );

    Pipeline pipeline = Pipeline.create();
    pipeline.readFrom(source("customers"))
            .withNativeTimestamps(0)
            .<ChangeRecord>customTransform("filter_timestamps", filterTimestampsProcessorSupplier())
            .groupingKey(record -> (Integer) record.key().toMap().get("id"))
            .mapStateful(
                    LongAccumulator::new,
                    (accumulator, customerId, record) -> {
                        long count = accumulator.get();
                        accumulator.add(1);
                        Operation operation = record.operation();
                        RecordPart value = record.value();
                        Customer customer = value.toObject(Customer.class);
                        return entry(customerId + "/" + count, operation + ":" + customer);
                    })
            .setLocalParallelism(1)
            .writeTo(Sinks.map("results"));

    // when
    HazelcastInstance hz = createHazelcastInstances(2)[0];
    Job job = hz.getJet().newJob(pipeline);

    // then
    assertEqualsEventually(() -> hz.getMap("results").size(), 4);

    // when
    try (Connection connection = getConnection(mysql, "inventory")) {
        Statement statement = connection.createStatement();
        statement.addBatch("UPDATE customers SET first_name='Anne Marie' WHERE id=1004");
        statement.addBatch("INSERT INTO customers VALUES (1005, 'Jason', 'Bourne', 'jason@bourne.org')");
        statement.addBatch("DELETE FROM customers WHERE id=1005");
        statement.executeBatch();
    }

    // then
    try {
        assertEqualsEventually(() -> mapResultsToSortedList(hz.getMap("results")), expectedRecords);
    } finally {
        job.cancel();
        assertJobStatusEventually(job, JobStatus.FAILED);
    }
}
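Both MySQL tests turn each record's value into a Customer via value.toObject(Customer.class) and rely on its toString() for the expected strings, but the class itself is not part of this excerpt. A plausible sketch follows, assuming Jackson @JsonProperty annotations to map Debezium's snake_case column names onto the camelCase fields that appear in the expected output; the exact field types and toString format are inferred, not taken from the project.

// Hypothetical sketch of the Customer POJO deserialized from change-record values.
// Requires: import java.io.Serializable; import com.fasterxml.jackson.annotation.JsonProperty;
public static class Customer implements Serializable {

    @JsonProperty("id")
    public int id;

    @JsonProperty("first_name")
    public String firstName;

    @JsonProperty("last_name")
    public String lastName;

    @JsonProperty("email")
    public String email;

    Customer() {
    }

    @Override
    public String toString() {
        return "Customer {id=" + id + ", firstName=" + firstName
                + ", lastName=" + lastName + ", email=" + email + '}';
    }
}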
Use of com.hazelcast.jet.cdc.ChangeRecord in project hazelcast by hazelcast.
Class PostgresCdcAuthAndConnectionIntegrationTest, method incorrectDatabaseName().
@Test
public void incorrectDatabaseName() {
    StreamSource<ChangeRecord> source = PostgresCdcSources.postgres("name")
            .setDatabaseAddress(postgres.getContainerIpAddress())
            .setDatabasePort(postgres.getMappedPort(POSTGRESQL_PORT))
            .setDatabaseUser("postgres")
            .setDatabasePassword("postgres")
            .setDatabaseName("wrongDatabaseName")
            .build();
    Pipeline pipeline = pipeline(source);
    HazelcastInstance hz = createHazelcastInstances(2)[0];
    // when
    Job job = hz.getJet().newJob(pipeline);
    // then
    assertThatThrownBy(job::join)
            .hasRootCauseInstanceOf(JetException.class)
            .hasStackTraceContaining("database \"wrongDatabaseName\" does not exist");
}
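The Postgres tests pass the source to a pipeline(source) helper that is not shown here. Because this job is expected to fail while connecting, any trivial pipeline over the source suffices; below is a minimal sketch under that assumption, using Sinks.noop() as a throwaway sink.

// Hypothetical sketch of the pipeline(...) helper used by the connection tests.
private static Pipeline pipeline(StreamSource<ChangeRecord> source) {
    Pipeline pipeline = Pipeline.create();
    pipeline.readFrom(source)
            .withNativeTimestamps(0)
            .writeTo(Sinks.noop()); // output never matters; the job fails before emitting anything
    return pipeline;
}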
Use of com.hazelcast.jet.cdc.ChangeRecord in project hazelcast by hazelcast.
Class PostgresCdcListenBeforeExistsIntegrationTest, method listenBeforeTableExists().
@Test
public void listenBeforeTableExists() throws Exception {
    // given
    createSchema(SCHEMA);
    List<String> expectedRecords = Collections.singletonList(
            "1001/0:(SYNC|INSERT):TableRow \\{id=1001, value1=someValue1, value2=someValue2, value3=null\\}"
    );
    StreamSource<ChangeRecord> source = sourceBuilder("source")
            .setSchemaWhitelist(SCHEMA)
            .setTableWhitelist(SCHEMA + ".someTable")
            .build();
    Pipeline pipeline = pipeline(source);

    // when
    HazelcastInstance hz = createHazelcastInstances(2)[0];
    Job job = hz.getJet().newJob(pipeline);
    assertJobStatusEventually(job, RUNNING);
    assertReplicationSlotActive();

    try {
        // then
        createTableWithData(SCHEMA, "someTable");
        insertIntoTable(SCHEMA, "someTable", 1001, "someValue1", "someValue2");
        assertTrueEventually(() ->
                assertMatch(expectedRecords, mapResultsToSortedList(hz.getMap(SINK_MAP_NAME))));
    } finally {
        job.cancel();
        assertJobStatusEventually(job, JobStatus.FAILED);
    }
}
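Note that the expected record here is a regular expression: the operation may surface as either SYNC or INSERT depending on whether the row arrives via the initial snapshot or via streaming, and the braces are escaped. The assertMatch helper is not part of this excerpt; the following is a minimal sketch of the kind of check it implies, using JUnit 4 assertions, not the project's actual implementation.

// Hypothetical sketch of assertMatch(...): compares each sink entry against the
// corresponding regex pattern instead of requiring exact string equality.
private static void assertMatch(List<String> expectedPatterns, List<String> actual) {
    assertEquals(expectedPatterns.size(), actual.size());
    for (int i = 0; i < expectedPatterns.size(); i++) {
        String pattern = expectedPatterns.get(i);
        String line = actual.get(i);
        assertTrue(line + " does not match " + pattern, line.matches(pattern));
    }
}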
Use of com.hazelcast.jet.cdc.ChangeRecord in project hazelcast by hazelcast.
Class PostgresCdcIntegrationTest, method customersPipeline().
@Nonnull
private Pipeline customersPipeline(Long commitPeriod) {
    Pipeline pipeline = Pipeline.create();
    pipeline.readFrom(source("customers", commitPeriod))
            .withNativeTimestamps(0)
            .<ChangeRecord>customTransform("filter_timestamps", filterTimestampsProcessorSupplier())
            .groupingKey(record -> (Integer) record.key().toMap().get("id"))
            .mapStateful(
                    LongAccumulator::new,
                    (accumulator, customerId, record) -> {
                        String count = format("%05d", accumulator.get());
                        accumulator.add(1);
                        Operation operation = record.operation();
                        RecordPart value = record.value();
                        Customer customer = value.toObject(Customer.class);
                        return entry(customerId + "/" + count, operation + ":" + customer);
                    })
            .setLocalParallelism(1)
            .writeTo(Sinks.map("results"));
    return pipeline;
}
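Unlike the MySQL pipelines, customersPipeline() zero-pads the per-key sequence number with format("%05d", ...), so the lexicographic ordering used by mapResultsToSortedList matches the numeric event order. That helper is also absent from this excerpt; a minimal sketch of what it presumably does, flattening the sink IMap into sorted "key:value" strings, follows (the generic signature is an assumption).

// Hypothetical sketch of mapResultsToSortedList(...).
// Requires: import com.hazelcast.map.IMap; import java.util.List; import java.util.stream.Collectors;
private static <K, V> List<String> mapResultsToSortedList(IMap<K, V> map) {
    return map.entrySet().stream()
            .map(e -> e.getKey() + ":" + e.getValue())
            .sorted()
            .collect(Collectors.toList());
}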