Use of com.hazelcast.jet.pipeline.Pipeline in project hazelcast by hazelcast.
From class PostgresCdcNetworkIntegrationTest, method when_databaseShutdownOrLongDisconnectDuringSnapshotting:
@Test
public void when_databaseShutdownOrLongDisconnectDuringSnapshotting() throws Exception {
    postgres = initPostgres(null, null);
    int port = fixPortBinding(postgres, POSTGRESQL_PORT);
    Pipeline pipeline = initPipeline(postgres.getContainerIpAddress(), port);
    // when job starts
    HazelcastInstance hz = createHazelcastInstances(2)[0];
    Job job = hz.getJet().newJob(pipeline);
    assertJobStatusEventually(job, RUNNING);
    // and snapshotting is ongoing (we have no exact way of identifying
    // the moment, but random sleep will catch it at least some of the time)
    MILLISECONDS.sleep(ThreadLocalRandom.current().nextInt(100, 500));
    // and DB is stopped
    stopContainer(postgres);
    // then
    boolean neverReconnect = reconnectBehavior.getMaxAttempts() == 0;
    if (neverReconnect) {
        // then job fails
        assertThatThrownBy(job::join)
                .hasRootCauseInstanceOf(JetException.class)
                .hasStackTraceContaining("Failed to connect to database");
    } else {
        // and DB is started anew
        postgres = initPostgres(null, port);
        // then snapshotting finishes successfully
        try {
            assertEqualsEventually(() -> hz.getMap("results").size(), 4);
            assertEquals(RUNNING, job.getStatus());
        } finally {
            abortJob(job);
        }
    }
}
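The initPipeline helper isn't included in this excerpt. Below is a minimal sketch of what it plausibly does, built with the real PostgresCdcSources builder from the hazelcast-jet-cdc-postgres module and writing change records into the "results" map the assertions read from. The connector name, credentials, database name, and table whitelist are assumptions, not values taken from the actual test.

import com.hazelcast.jet.cdc.ChangeRecord;
import com.hazelcast.jet.cdc.postgres.PostgresCdcSources;
import com.hazelcast.jet.pipeline.Pipeline;
import com.hazelcast.jet.pipeline.Sinks;
import com.hazelcast.jet.pipeline.StreamSource;

// Hypothetical reconstruction of initPipeline; all literals are assumptions.
private static Pipeline initPipeline(String host, int port) {
    StreamSource<ChangeRecord> source = PostgresCdcSources.postgres("customers")
            .setDatabaseAddress(host)
            .setDatabasePort(port)
            .setDatabaseUser("postgres")               // assumed credentials
            .setDatabasePassword("postgres")
            .setDatabaseName("postgres")
            .setTableWhitelist("inventory.customers")  // assumed table
            .build();
    Pipeline pipeline = Pipeline.create();
    pipeline.readFrom(source)
            .withNativeTimestamps(0)
            // key each entry by the record's primary key, store the change as JSON
            .writeTo(Sinks.map("results",
                    record -> record.key().toMap().get("id"),
                    record -> record.value().toJson()));
    return pipeline;
}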
Use of com.hazelcast.jet.pipeline.Pipeline in project hazelcast by hazelcast.
From class PostgresCdcNetworkIntegrationTest, method when_shortConnectionLossDuringBinlogReading_then_connectorDoesNotNoticeAnything:
@Test
public void when_shortConnectionLossDuringBinlogReading_then_connectorDoesNotNoticeAnything() throws Exception {
    try (Network network = initNetwork();
         ToxiproxyContainer toxiproxy = initToxiproxy(network)) {
        postgres = initPostgres(network, null);
        ToxiproxyContainer.ContainerProxy proxy = initProxy(toxiproxy, postgres);
        Pipeline pipeline = initPipeline(proxy.getContainerIpAddress(), proxy.getProxyPort());
        // when connector is up and transitions to binlog reading
        HazelcastInstance hz = createHazelcastInstances(2)[0];
        Job job = hz.getJet().newJob(pipeline);
        assertEqualsEventually(() -> hz.getMap("results").size(), 4);
        SECONDS.sleep(3);
        insertRecords(postgres, 1005);
        assertEqualsEventually(() -> hz.getMap("results").size(), 5);
        // and the connection is cut
        proxy.setConnectionCut(true);
        // and some new events get generated in the DB
        insertRecords(postgres, 1006, 1007);
        // and some time passes
        MILLISECONDS.sleep(5 * RECONNECT_INTERVAL_MS);
        // and the connection is re-established
        proxy.setConnectionCut(false);
        // then
        try {
            // then job keeps running, connector starts freshly, including snapshotting
            assertEqualsEventually(() -> hz.getMap("results").size(), 7);
            assertEquals(RUNNING, job.getStatus());
        } finally {
            abortJob(job);
        }
    }
}
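The network and proxy helpers are likewise not shown. A plausible sketch using the Testcontainers Toxiproxy module follows; the image tag is an assumption, and the ContainerProxy returned by getProxy is what lets the test cut and restore the TCP route to the database, as done via setConnectionCut above.

import org.testcontainers.containers.Network;
import org.testcontainers.containers.PostgreSQLContainer;
import org.testcontainers.containers.ToxiproxyContainer;

// Hypothetical sketch of the proxy helpers; the image tag is an assumption.
private static Network initNetwork() {
    return Network.newNetwork();
}

private static ToxiproxyContainer initToxiproxy(Network network) {
    ToxiproxyContainer toxiproxy = new ToxiproxyContainer("shopify/toxiproxy:2.1.0")
            .withNetwork(network);
    toxiproxy.start();
    return toxiproxy;
}

private static ToxiproxyContainer.ContainerProxy initProxy(
        ToxiproxyContainer toxiproxy, PostgreSQLContainer<?> postgres) {
    // the proxy listens on its own port and forwards traffic to Postgres;
    // setConnectionCut(true) on it simulates a network outage
    return toxiproxy.getProxy(postgres, POSTGRESQL_PORT);
}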
Use of com.hazelcast.jet.pipeline.Pipeline in project hazelcast by hazelcast.
From class PostgresCdcIntegrationTest, method customers:
@Test
@Category(QuickTest.class)
public void customers() throws Exception {
    // given
    List<String> expectedRecords = Arrays.asList(
            "1001/00000:SYNC:" + new Customer(1001, "Sally", "Thomas", "sally.thomas@acme.com"),
            "1002/00000:SYNC:" + new Customer(1002, "George", "Bailey", "gbailey@foobar.com"),
            "1003/00000:SYNC:" + new Customer(1003, "Edward", "Walker", "ed@walker.com"),
            "1004/00000:SYNC:" + new Customer(1004, "Anne", "Kretchmar", "annek@noanswer.org"),
            "1004/00001:UPDATE:" + new Customer(1004, "Anne Marie", "Kretchmar", "annek@noanswer.org"),
            "1005/00000:INSERT:" + new Customer(1005, "Jason", "Bourne", "jason@bourne.org"),
            "1005/00001:DELETE:" + new Customer(1005, "Jason", "Bourne", "jason@bourne.org")
    );
    Pipeline pipeline = customersPipeline(null);
    // when
    HazelcastInstance hz = createHazelcastInstances(2)[0];
    Job job = hz.getJet().newJob(pipeline);
    // then
    assertEqualsEventually(() -> hz.getMap("results").size(), 4);
    // when
    executeBatch(
            "UPDATE customers SET first_name='Anne Marie' WHERE id=1004",
            "INSERT INTO customers VALUES (1005, 'Jason', 'Bourne', 'jason@bourne.org')",
            "DELETE FROM customers WHERE id=1005"
    );
    // then
    try {
        assertEqualsEventually(() -> mapResultsToSortedList(hz.getMap("results")), expectedRecords);
    } finally {
        job.cancel();
        assertJobStatusEventually(job, JobStatus.FAILED);
    }
}
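The executeBatch helper used above isn't shown either; functionally it only needs to submit the given statements to the test database as a single JDBC batch. A minimal sketch, assuming the standard Testcontainers JDBC accessors on the postgres container:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

// Hypothetical sketch of executeBatch: runs the SQL statements as one JDBC batch.
private void executeBatch(String... sqlCommands) throws SQLException {
    try (Connection connection = DriverManager.getConnection(
                postgres.getJdbcUrl(), postgres.getUsername(), postgres.getPassword());
         Statement statement = connection.createStatement()) {
        for (String sql : sqlCommands) {
            statement.addBatch(sql);
        }
        statement.executeBatch();
    }
}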
Use of com.hazelcast.jet.pipeline.Pipeline in project hazelcast by hazelcast.
From class PostgresCdcIntegrationTest, method orders:
@Test
@Category(NightlyTest.class)
public void orders() {
    // given
    List<String> expectedRecords = Arrays.asList(
            "10001/0:SYNC:" + new Order(10001, new Date(1452902400000L), 1001, 1, 102),
            "10002/0:SYNC:" + new Order(10002, new Date(1452988800000L), 1002, 2, 105),
            "10003/0:SYNC:" + new Order(10003, new Date(1455840000000L), 1002, 2, 106),
            "10004/0:SYNC:" + new Order(10004, new Date(1456012800000L), 1003, 1, 107)
    );
    Pipeline pipeline = ordersPipeline();
    // when
    HazelcastInstance hz = createHazelcastInstances(2)[0];
    Job job = hz.getJet().newJob(pipeline);
    // then
    try {
        assertEqualsEventually(() -> mapResultsToSortedList(hz.getMap("results")), expectedRecords);
    } finally {
        job.cancel();
        assertJobStatusEventually(job, JobStatus.FAILED);
    }
}
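Both this test and the previous one compare against mapResultsToSortedList, which is not part of the excerpt. Its contract is evident from the expected records: flatten each map entry into a "key:value" string and sort, so the comparison doesn't depend on map iteration order. A minimal sketch:

import java.util.List;
import java.util.stream.Collectors;
import com.hazelcast.map.IMap;

// Hypothetical sketch of mapResultsToSortedList.
private static List<String> mapResultsToSortedList(IMap<?, ?> map) {
    return map.entrySet().stream()
            .map(e -> e.getKey() + ":" + e.getValue())
            .sorted()
            .collect(Collectors.toList());
}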
Use of com.hazelcast.jet.pipeline.Pipeline in project hazelcast by hazelcast.
From class PostgresCdcIntegrationTest, method dataLoss:
@Test
@Category(NightlyTest.class)
public void dataLoss() throws Exception {
    int offset = 1005;
    int length = 9995;
    // given
    List<String> expectedRecords = new ArrayList<>(Arrays.asList(
            "1001/00000:(SYNC|INSERT):Customer \\{id=1001, firstName=Sally, lastName=Thomas, "
                    + "email=sally.thomas@acme.com\\}",
            "1002/00000:(SYNC|INSERT):Customer \\{id=1002, firstName=George, lastName=Bailey, "
                    + "email=gbailey@foobar.com\\}",
            "1003/00000:(SYNC|INSERT):Customer \\{id=1003, firstName=Edward, lastName=Walker, "
                    + "email=ed@walker.com\\}",
            "1004/00000:(SYNC|INSERT):Customer \\{id=1004, firstName=Anne, lastName=Kretchmar, "
                    + "email=annek@noanswer.org\\}"
    ));
    for (int i = offset; i < offset + length; i++) {
        expectedRecords.add(i + "/00000:(SYNC|INSERT):Customer \\{id=" + i + ", firstName=first" + i
                + ", lastName=last" + i + ", email=" + i + "@google.com\\}");
    }
    expectedRecords.sort(String::compareTo);
    Pipeline pipeline = customersPipeline(null);
    // when
    HazelcastInstance hz = createHazelcastInstances(1)[0];
    Job job = hz.getJet().newJob(pipeline);
    // then
    assertJobStatusEventually(job, JobStatus.RUNNING);
    // when
    String[] batch = new String[length];
    for (int i = offset; i < offset + length; i++) {
        batch[i - offset] = "INSERT INTO customers VALUES (" + i + ", 'first" + i + "', 'last" + i
                + "', '" + i + "@google.com')";
    }
    executeBatch(batch);
    // then
    try {
        assertTrueEventually(() -> {
            IMap<Object, Object> map = hz.getMap("results");
            int size = map.size();
            System.out.println("No. of records: " + size);
            assertEquals(expectedRecords.size(), size);
            assertMatch(expectedRecords, mapResultsToSortedList(map));
        });
    } finally {
        job.cancel();
        assertJobStatusEventually(job, JobStatus.FAILED);
    }
}
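The expected records in dataLoss are regular expressions: the braces are escaped, and the (SYNC|INSERT) alternation accounts for a record arriving either during the initial snapshot or as a later insert event. The assertMatch helper presumably compares them positionally against the sorted actual records; a minimal sketch:

import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

// Hypothetical sketch of assertMatch: each expected entry is a regex matched
// against the actual record at the same position in the sorted list.
private static void assertMatch(List<String> expectedPatterns, List<String> actual) {
    assertEquals(expectedPatterns.size(), actual.size());
    for (int i = 0; i < expectedPatterns.size(); i++) {
        String pattern = expectedPatterns.get(i);
        String record = actual.get(i);
        assertTrue(record + " does not match " + pattern, record.matches(pattern));
    }
}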