Usage example of io.eventuate.javaclient.commonimpl.EventTypeAndData from the eventuate-local project:
class MySQLClientNameTest, method test.
@Test
public void test() throws Exception {
  databaseOffsetKafkaStore = createDatabaseOffsetKafkaStore(createMySqlBinaryLogClient());

  // Collect every event the CDC processor emits and persist its binlog offset.
  BlockingQueue<PublishedEvent> publishedEvents = new LinkedBlockingDeque<>();
  CdcProcessor<PublishedEvent> cdcProcessor = createMySQLCdcProcessor();
  cdcProcessor.start(publishedEvent -> {
    publishedEvents.add(publishedEvent);
    databaseOffsetKafkaStore.save(publishedEvent.getBinlogFileOffset());
  });

  // Save one event; its entity version is the id we expect to see captured.
  EventuateLocalAggregateCrud localAggregateCrud = new EventuateLocalAggregateCrud(eventuateJdbcAccess);
  List<EventTypeAndData> events = Collections.singletonList(new EventTypeAndData("TestEvent", "{}", Optional.empty()));
  EntityIdVersionAndEventIds entityIdVersionAndEventIds = localAggregateCrud.save("TestAggregate", events, Optional.empty());

  // Drain the queue until the saved event shows up or polling times out.
  PublishedEvent publishedEvent;
  while ((publishedEvent = publishedEvents.poll(10, TimeUnit.SECONDS)) != null) {
    if (entityIdVersionAndEventIds.getEntityVersion().asString().equals(publishedEvent.getId())) {
      break;
    }
  }
  // FIX: if the loop above exits on a poll timeout, publishedEvent is null and the
  // assertEquals below used to throw a NullPointerException instead of reporting a
  // meaningful assertion failure.
  Assert.assertNotNull("Expected event was not captured before the poll timed out", publishedEvent);
  Assert.assertEquals(entityIdVersionAndEventIds.getEntityVersion().asString(), publishedEvent.getId());

  cdcProcessor.stop();
  /*waiting while offset is storing in kafka*/
  Thread.sleep(10000);

  // Restart with a fresh binlog client and offset store: processing must resume from
  // the stored offset, so the already-processed event must not be re-delivered.
  databaseOffsetKafkaStore = createDatabaseOffsetKafkaStore(createMySqlBinaryLogClient());
  cdcProcessor = createMySQLCdcProcessor();
  cdcProcessor.start(event -> {
    publishedEvents.add(event);
    databaseOffsetKafkaStore.save(event.getBinlogFileOffset());
  });
  while ((publishedEvent = publishedEvents.poll(10, TimeUnit.SECONDS)) != null) {
    Assert.assertNotEquals(entityIdVersionAndEventIds.getEntityVersion().asString(), publishedEvent.getId());
  }
}
Usage example of io.eventuate.javaclient.commonimpl.EventTypeAndData from the eventuate-local project:
class MySQLMigrationTest, method test.
@Test
public void test() throws Exception {
  // Forward every event seen by the CDC pipeline into a queue, recording its offset.
  BlockingQueue<PublishedEvent> capturedEvents = new LinkedBlockingDeque<>();
  CdcProcessor<PublishedEvent> cdcProcessor = createMySQLCdcProcessor();
  cdcProcessor.start(event -> {
    capturedEvents.add(event);
    databaseOffsetKafkaStore.save(event.getBinlogFileOffset());
  });

  // Persist a single migration-marker event through the local CRUD facade.
  EventuateLocalAggregateCrud localAggregateCrud = new EventuateLocalAggregateCrud(eventuateJdbcAccess);
  List<EventTypeAndData> events =
      Collections.singletonList(new EventTypeAndData("TestEvent_MIGRATION", "{}", Optional.empty()));
  EntityIdVersionAndEventIds saveResult =
      localAggregateCrud.save("TestAggregate_MIGRATION", events, Optional.empty());

  // Poll until the migration event type appears, or give up once polling times out.
  PublishedEvent matched = null;
  for (PublishedEvent candidate = capturedEvents.poll(10, TimeUnit.SECONDS);
       candidate != null;
       candidate = capturedEvents.poll(10, TimeUnit.SECONDS)) {
    if ("TestEvent_MIGRATION".equals(candidate.getEventType())) {
      matched = candidate;
      break;
    }
  }

  Assert.assertNotNull(matched);
  Assert.assertEquals(saveResult.getEntityVersion().asString(), matched.getId());
}
Usage example of io.eventuate.javaclient.commonimpl.EventTypeAndData from the eventuate-local project:
class AbstractTopicRelayTest, method shouldCaptureAndPublishChange.
@Test
public void shouldCaptureAndPublishChange() throws ExecutionException, InterruptedException {
  String aggregateType = "TestAggregate";
  String eventType = "TestEvent";

  // Save one event through the JDBC event store; its entity version doubles as the event id.
  List<EventTypeAndData> eventsToSave =
      Collections.singletonList(new EventTypeAndData(eventType, "{}", Optional.empty()));
  long startMillis = System.currentTimeMillis();
  EntityIdVersionAndEventIds saved =
      AsyncUtil.await(eventuateJdbcEventStore.save(aggregateType, eventsToSave, Optional.empty()));
  Int128 expectedEventId = saved.getEntityVersion();

  BlockingQueue<Int128> matches = new LinkedBlockingDeque<>();
  logger.debug("Looking for eventId {}", expectedEventId);

  // Subscribe to the aggregate's topic, forwarding only the id we are waiting for.
  eventuateKafkaAggregateSubscriptions.subscribe(
      "testSubscriber-" + getClass().getName(),
      Collections.singletonMap(aggregateType, Collections.singleton(eventType)),
      SubscriberOptions.DEFAULTS,
      se -> {
        logger.debug("got se {}", se);
        if (se.getId().equals(expectedEventId)) {
          matches.add(se.getId());
        }
        return CompletableFuture.completedFuture(null);
      }).get();

  // The relayed event must arrive once — and only once.
  Assert.assertNotNull("Failed to find eventId: " + expectedEventId, matches.poll(30, TimeUnit.SECONDS));
  Assert.assertNull(matches.poll(30, TimeUnit.SECONDS));

  long endMillis = System.currentTimeMillis();
  logger.debug("got the event I just published in msecs {}", endMillis - startMillis);
}
Usage example of io.eventuate.javaclient.commonimpl.EventTypeAndData from the eventuate-local project:
class PrepareMigrationToNewCdcTest, method shouldCaptureAndPublishChange.
@Test
public void shouldCaptureAndPublishChange() throws ExecutionException, InterruptedException {
  String aggregateType = "TestAggregate_MIGRATION";
  String eventType = "TestEvent_MIGRATION";

  // Save one event; its entity version is the id we expect to see republished on Kafka.
  List<EventTypeAndData> myEvents = Collections.singletonList(new EventTypeAndData(eventType, "{}", Optional.empty()));
  EntityIdVersionAndEventIds ewidv = AsyncUtil.await(eventuateJdbcEventStore.save(aggregateType, myEvents, Optional.empty()));
  Int128 expectedEventId = ewidv.getEntityVersion();

  // Subscribe to the aggregate's topic and collect only the id we are waiting for.
  BlockingQueue<Int128> result = new LinkedBlockingDeque<>();
  eventuateKafkaAggregateSubscriptions.subscribe("testSubscriber",
      Collections.singletonMap(aggregateType, Collections.singleton(eventType)),
      SubscriberOptions.DEFAULTS,
      se -> {
        logger.debug("got se {}", se);
        if (se.getId().equals(expectedEventId)) {
          result.add(se.getId());
        }
        return CompletableFuture.completedFuture(null);
      }).get();

  // FIX: include the expected id in the failure message (matching the style used by
  // AbstractTopicRelayTest) so a 30-second timeout failure is actually diagnosable.
  Assert.assertNotNull("Failed to find eventId: " + expectedEventId, result.poll(30, TimeUnit.SECONDS));
}
Aggregations