Example usage of io.eventuate.javaclient.commonimpl.EntityIdVersionAndEventIds in the eventuate-local project: class MySQLClientNameTest, method test().
/**
 * Verifies that the CDC processor resumes from the offset stored in Kafka: an event
 * that was processed and whose binlog offset was saved must NOT be re-delivered
 * after the processor is recreated and restarted.
 *
 * @throws Exception if saving the aggregate or polling is interrupted
 */
@Test
public void test() throws Exception {
  databaseOffsetKafkaStore = createDatabaseOffsetKafkaStore(createMySqlBinaryLogClient());
  BlockingQueue<PublishedEvent> publishedEvents = new LinkedBlockingDeque<>();
  CdcProcessor<PublishedEvent> cdcProcessor = createMySQLCdcProcessor();
  cdcProcessor.start(publishedEvent -> {
    publishedEvents.add(publishedEvent);
    databaseOffsetKafkaStore.save(publishedEvent.getBinlogFileOffset());
  });

  EventuateLocalAggregateCrud localAggregateCrud = new EventuateLocalAggregateCrud(eventuateJdbcAccess);
  List<EventTypeAndData> events = Collections.singletonList(new EventTypeAndData("TestEvent", "{}", Optional.empty()));
  EntityIdVersionAndEventIds entityIdVersionAndEventIds = localAggregateCrud.save("TestAggregate", events, Optional.empty());

  // Drain the queue until the event for the just-saved aggregate arrives
  // (poll returns null on a 10-second timeout, ending the loop).
  PublishedEvent publishedEvent;
  while ((publishedEvent = publishedEvents.poll(10, TimeUnit.SECONDS)) != null) {
    if (entityIdVersionAndEventIds.getEntityVersion().asString().equals(publishedEvent.getId())) {
      break;
    }
  }
  // Bug fix: if the poll timed out without a match, publishedEvent is null and the
  // assertEquals below would throw NullPointerException instead of a clear assertion
  // failure. Guard with assertNotNull, matching the style of the sibling test.
  Assert.assertNotNull(publishedEvent);
  Assert.assertEquals(entityIdVersionAndEventIds.getEntityVersion().asString(), publishedEvent.getId());
  cdcProcessor.stop();

  /* wait while the offset is being stored in Kafka */
  Thread.sleep(10000);

  // Restart with a fresh offset store and processor; processing must resume
  // AFTER the saved offset.
  databaseOffsetKafkaStore = createDatabaseOffsetKafkaStore(createMySqlBinaryLogClient());
  cdcProcessor = createMySQLCdcProcessor();
  cdcProcessor.start(event -> {
    publishedEvents.add(event);
    databaseOffsetKafkaStore.save(event.getBinlogFileOffset());
  });

  // No event delivered after the restart may be the one already processed above.
  while ((publishedEvent = publishedEvents.poll(10, TimeUnit.SECONDS)) != null) {
    Assert.assertNotEquals(entityIdVersionAndEventIds.getEntityVersion().asString(), publishedEvent.getId());
  }
}
Example usage of io.eventuate.javaclient.commonimpl.EntityIdVersionAndEventIds in the eventuate-local project: class MySQLMigrationTest, method test().
/**
 * Checks that an event saved for the migration aggregate ("TestAggregate_MIGRATION")
 * is picked up by the MySQL CDC processor and carries the expected event id.
 *
 * @throws Exception if saving the aggregate or polling is interrupted
 */
@Test
public void test() throws Exception {
  BlockingQueue<PublishedEvent> receivedEvents = new LinkedBlockingDeque<>();
  CdcProcessor<PublishedEvent> processor = createMySQLCdcProcessor();
  processor.start(event -> {
    receivedEvents.add(event);
    databaseOffsetKafkaStore.save(event.getBinlogFileOffset());
  });

  EventuateLocalAggregateCrud aggregateCrud = new EventuateLocalAggregateCrud(eventuateJdbcAccess);
  List<EventTypeAndData> eventsToSave = Collections.singletonList(new EventTypeAndData("TestEvent_MIGRATION", "{}", Optional.empty()));
  EntityIdVersionAndEventIds saveResult = aggregateCrud.save("TestAggregate_MIGRATION", eventsToSave, Optional.empty());

  // Poll until the migration event shows up or a 10-second poll comes back empty.
  PublishedEvent candidate;
  for (;;) {
    candidate = receivedEvents.poll(10, TimeUnit.SECONDS);
    if (candidate == null || "TestEvent_MIGRATION".equals(candidate.getEventType())) {
      break;
    }
  }

  Assert.assertNotNull(candidate);
  Assert.assertEquals(saveResult.getEntityVersion().asString(), candidate.getId());
}
Example usage of io.eventuate.javaclient.commonimpl.EntityIdVersionAndEventIds in the eventuate-local project: class AbstractPostgresWalCdcIntegrationTest, method shouldGetEvents().
/**
 * End-to-end check of the Postgres WAL client: saving and then updating an
 * aggregate must each produce a published event observable within the deadline.
 *
 * @throws InterruptedException if waiting for an event is interrupted
 */
@Test
public void shouldGetEvents() throws InterruptedException {
  PostgresWalClient<PublishedEvent> walClient = new PostgresWalClient<>(
      postgresWalMessageParser,
      dataSourceURL,
      dbUserName,
      dbPassword,
      eventuateConfigurationProperties.getBinlogConnectionTimeoutInMilliseconds(),
      eventuateConfigurationProperties.getMaxAttemptsForBinlogConnection(),
      eventuateConfigurationProperties.getPostgresWalIntervalInMilliseconds(),
      eventuateConfigurationProperties.getPostgresReplicationStatusIntervalInMilliseconds(),
      eventuateConfigurationProperties.getPostgresReplicationSlotName());

  EventuateLocalAggregateCrud aggregateCrud = new EventuateLocalAggregateCrud(eventuateJdbcAccess);
  BlockingQueue<PublishedEvent> capturedEvents = new LinkedBlockingDeque<>();
  walClient.start(Optional.empty(), capturedEvents::add);

  String createdEventData = generateAccountCreatedEvent();
  EntityIdVersionAndEventIds created = saveEvent(aggregateCrud, createdEventData);
  String debitedEventData = generateAccountDebitedEvent();
  EntityIdVersionAndEventIds updated = updateEvent(created.getEntityId(), created.getEntityVersion(), aggregateCrud, debitedEventData);

  // Both events must arrive within the same 10-second window.
  LocalDateTime deadline = LocalDateTime.now().plusSeconds(10);
  waitForEvent(capturedEvents, created.getEntityVersion(), deadline, createdEventData);
  waitForEvent(capturedEvents, updated.getEntityVersion(), deadline, debitedEventData);

  walClient.stop();
}
Example usage of io.eventuate.javaclient.commonimpl.EntityIdVersionAndEventIds in the eventuate-local project: class CdcKafkaPublisherTest, method shouldSendPublishedEventsToKafka().
/**
 * Verifies that events handled by the CDC processor are forwarded by the
 * CdcKafkaPublisher and become visible on the aggregate's Kafka event topic.
 *
 * @throws InterruptedException if waiting for the event in Kafka is interrupted
 */
@Test
public void shouldSendPublishedEventsToKafka() throws InterruptedException {
  CdcKafkaPublisher<PublishedEvent> cdcKafkaPublisher = createCdcKafkaPublisher();
  cdcKafkaPublisher.start();
  cdcProcessor.start(cdcKafkaPublisher::handleEvent);

  String accountCreatedEventData = generateAccountCreatedEvent();
  EntityIdVersionAndEventIds entityIdVersionAndEventIds = saveEvent(localAggregateCrud, accountCreatedEventData);

  // Bug fix: the original never closed the KafkaConsumer, leaking its network
  // connections and background threads. KafkaConsumer implements Closeable,
  // so try-with-resources guarantees cleanup even if the wait fails.
  try (KafkaConsumer<String, String> consumer = createConsumer(eventuateKafkaConfigurationProperties.getBootstrapServers())) {
    consumer.partitionsFor(getEventTopicName());
    consumer.subscribe(Collections.singletonList(getEventTopicName()));
    waitForEventInKafka(consumer, entityIdVersionAndEventIds.getEntityId(), LocalDateTime.now().plusSeconds(40));
  }

  cdcKafkaPublisher.stop();
}
Example usage of io.eventuate.javaclient.commonimpl.EntityIdVersionAndEventIds in the eventuate-local project: class CdcProcessorTest, method shouldReadUnprocessedEventsAfterStartup().
/**
 * An event persisted BEFORE the CDC processor starts must still be delivered
 * once the processor comes up, i.e. the unprocessed backlog is replayed.
 *
 * @throws InterruptedException if waiting for the event is interrupted
 */
@Test
public void shouldReadUnprocessedEventsAfterStartup() throws InterruptedException {
  BlockingQueue<PublishedEvent> capturedEvents = new LinkedBlockingDeque<>();

  // Persist the event first, then start the processor.
  String accountCreatedEventData = generateAccountCreatedEvent();
  EntityIdVersionAndEventIds saveResult = saveEvent(localAggregateCrud, accountCreatedEventData);

  CdcProcessor<PublishedEvent> processor = createCdcProcessor();
  processor.start(capturedEvents::add);
  waitForEvent(capturedEvents, saveResult.getEntityVersion(), LocalDateTime.now().plusSeconds(20), accountCreatedEventData);
  processor.stop();
}
Aggregations