Search in sources :

Example 6 with BinlogFileOffset

use of io.eventuate.local.common.BinlogFileOffset in project eventuate-local by eventuate-local.

From the class AbstractDatabaseOffsetKafkaStoreTest, the method shouldReadTheLastRecordMultipleTimes:

/**
 * Verifies that the most recently saved binlog offset can be read back
 * repeatedly: reading the last record must not consume or change it.
 */
@Test
public void shouldReadTheLastRecordMultipleTimes() throws InterruptedException {
    BinlogFileOffset savedOffset = generateAndSaveBinlogFileOffset();
    // Read twice — both reads must see the same saved offset.
    for (int attempt = 0; attempt < 2; attempt++) {
        assertLastRecordEquals(savedOffset);
    }
}
Also used : BinlogFileOffset(io.eventuate.local.common.BinlogFileOffset) AbstractCdcTest(io.eventuate.local.test.util.AbstractCdcTest) Test(org.junit.Test)

Example 7 with BinlogFileOffset

use of io.eventuate.local.common.BinlogFileOffset in project eventuate-local by eventuate-local.

From the class AbstractDuplicatePublishingDetectorTest, the method shouldBePublishedTest:

/**
 * After flooding a topic with published records for a binlog file, an offset
 * that falls inside the already-published range must be rejected as a
 * duplicate, while a higher offset must still be publishable.
 */
@Test
public void shouldBePublishedTest() {
    String binlogFile = "binlog.file." + System.currentTimeMillis();
    String topic = generateUniqueTopicName();
    String bootstrapServers = eventuateKafkaConfigurationProperties.getBootstrapServers();
    DuplicatePublishingDetector detector = new DuplicatePublishingDetector(bootstrapServers);

    // Populate the topic, then close the producer so all records are flushed
    // before the detector inspects the topic.
    Producer<String, String> producer = createProducer(bootstrapServers);
    floodTopic(producer, binlogFile, topic);
    producer.close();

    // Offset 1 is presumably within the flooded range — treated as a duplicate.
    assertFalse(detector.shouldBePublished(new BinlogFileOffset(binlogFile, 1L), topic));
    // Offset 10 lies beyond what was published — must be allowed through.
    assertTrue(detector.shouldBePublished(new BinlogFileOffset(binlogFile, 10L), topic));
}
Also used : BinlogFileOffset(io.eventuate.local.common.BinlogFileOffset) DuplicatePublishingDetector(io.eventuate.local.db.log.common.DuplicatePublishingDetector) Test(org.junit.Test) AbstractCdcTest(io.eventuate.local.test.util.AbstractCdcTest)

Example 8 with BinlogFileOffset

use of io.eventuate.local.common.BinlogFileOffset in project eventuate-local by eventuate-local.

From the class AbstractDuplicatePublishingDetectorTest, the method emptyTopicTest:

/**
 * With no records previously published to the topic, any offset must be
 * considered publishable (nothing exists to duplicate).
 */
@Test
public void emptyTopicTest() {
    String bootstrapServers = eventuateKafkaConfigurationProperties.getBootstrapServers();
    DuplicatePublishingDetector detector = new DuplicatePublishingDetector(bootstrapServers);
    BinlogFileOffset offset = generateBinlogFileOffset();
    assertTrue(detector.shouldBePublished(offset, generateUniqueTopicName()));
}
Also used : BinlogFileOffset(io.eventuate.local.common.BinlogFileOffset) DuplicatePublishingDetector(io.eventuate.local.db.log.common.DuplicatePublishingDetector) Test(org.junit.Test) AbstractCdcTest(io.eventuate.local.test.util.AbstractCdcTest)

Example 9 with BinlogFileOffset

use of io.eventuate.local.common.BinlogFileOffset in project eventuate-local by eventuate-local.

From the class AbstractDuplicatePublishingDetectorTest, the method shouldHandlePublishCheckForOldEntires:

/**
 * An old published event appended after the flood must not confuse the
 * detector: a fresh, higher offset for the current binlog file is still
 * publishable.
 */
@Test
public void shouldHandlePublishCheckForOldEntires() {
    String binlogFile = "binlog.file." + System.currentTimeMillis();
    String topic = generateUniqueTopicName();
    String bootstrapServers = eventuateKafkaConfigurationProperties.getBootstrapServers();
    DuplicatePublishingDetector detector = new DuplicatePublishingDetector(bootstrapServers);

    // Flood the topic, then append a stale "old" published event last, and
    // flush everything by closing the producer.
    Producer<String, String> producer = createProducer(bootstrapServers);
    floodTopic(producer, binlogFile, topic);
    sendOldPublishedEvent(producer, topic);
    producer.close();

    // Despite the trailing old entry, a higher offset must be accepted.
    assertTrue(detector.shouldBePublished(new BinlogFileOffset(binlogFile, 10L), topic));
}
Also used : BinlogFileOffset(io.eventuate.local.common.BinlogFileOffset) DuplicatePublishingDetector(io.eventuate.local.db.log.common.DuplicatePublishingDetector) Test(org.junit.Test) AbstractCdcTest(io.eventuate.local.test.util.AbstractCdcTest)

Example 10 with BinlogFileOffset

use of io.eventuate.local.common.BinlogFileOffset in project eventuate-local by eventuate-local.

From the class MySqlBinaryLogClient, the method start:

/**
 * Connects to MySQL as a replication client and streams binlog events,
 * handing each parsed row-write on the watched source table to
 * {@code eventConsumer}.
 *
 * @param binlogFileOffset position to resume from; when absent, starts at
 *                         offset 4 with an empty filename (see note below)
 * @param eventConsumer    receives one parsed event per matching inserted row
 */
public void start(Optional<BinlogFileOffset> binlogFileOffset, Consumer<M> eventConsumer) {
    client = new BinaryLogClient(host, port, dbUserName, dbPassword);
    client.setServerId(binlogClientUniqueId);
    // Keep-alive every 5 seconds so a silently dropped connection is detected.
    client.setKeepAliveInterval(5 * 1000);
    // NOTE(review): default of ("", 4L) presumably means "current server binlog
    // file, first event after the 4-byte magic header" — confirm against the
    // BinaryLogClient contract for an empty filename.
    BinlogFileOffset bfo = binlogFileOffset.orElse(new BinlogFileOffset("", 4L));
    logger.debug("Starting with {}", bfo);
    client.setBinlogFilename(bfo.getBinlogFilename());
    client.setBinlogPosition(bfo.getOffset());
    client.setEventDeserializer(getEventDeserializer());
    client.registerEventListener(event -> {
        switch(event.getHeader().getEventType()) {
            case TABLE_MAP:
                {
                    // Remember table-id -> metadata for the watched table only;
                    // needed to recognize its subsequent row events.
                    TableMapEventData tableMapEvent = event.getData();
                    if (tableMapEvent.getTable().equalsIgnoreCase(sourceTableName)) {
                        tableMapEventByTableId.put(tableMapEvent.getTableId(), tableMapEvent);
                    }
                    break;
                }
            case EXT_WRITE_ROWS:
                {
                    logger.debug("Got binlog event {}", event);
                    // Track the current position so callers can checkpoint it.
                    offset = ((EventHeaderV4) event.getHeader()).getPosition();
                    WriteRowsEventData eventData = event.getData();
                    // Only parse rows whose table was registered via TABLE_MAP above.
                    if (tableMapEventByTableId.containsKey(eventData.getTableId())) {
                        try {
                            eventConsumer.accept(writeRowsEventDataParser.parseEventData(eventData, getCurrentBinlogFilename(), offset));
                        } catch (IOException e) {
                            // Parsing failure is unrecoverable here; surface it
                            // with the original cause preserved.
                            throw new RuntimeException("Event row parsing exception", e);
                        }
                    }
                    break;
                }
            case ROTATE:
                {
                    // Server switched to a new binlog file; remember its name
                    // so subsequent offsets refer to the right file.
                    RotateEventData eventData = event.getData();
                    if (eventData != null) {
                        binlogFilename = eventData.getBinlogFilename();
                    }
                    break;
                }
        }
    });
    connectWithRetriesOnFail();
}
Also used : BinlogFileOffset(io.eventuate.local.common.BinlogFileOffset) IOException(java.io.IOException) BinaryLogClient(com.github.shyiko.mysql.binlog.BinaryLogClient)

Aggregations

BinlogFileOffset (io.eventuate.local.common.BinlogFileOffset)12 AbstractCdcTest (io.eventuate.local.test.util.AbstractCdcTest)4 Test (org.junit.Test)4 DuplicatePublishingDetector (io.eventuate.local.db.log.common.DuplicatePublishingDetector)3 JSonMapper (io.eventuate.javaclient.commonimpl.JSonMapper)2 PublishedEvent (io.eventuate.local.common.PublishedEvent)2 DatabaseOffsetKafkaStore (io.eventuate.local.db.log.common.DatabaseOffsetKafkaStore)2 IOException (java.io.IOException)2 SQLException (java.sql.SQLException)2 Logger (org.slf4j.Logger)2 LoggerFactory (org.slf4j.LoggerFactory)2 BinaryLogClient (com.github.shyiko.mysql.binlog.BinaryLogClient)1 BinLogEvent (io.eventuate.local.common.BinLogEvent)1 DbLogClient (io.eventuate.local.db.log.common.DbLogClient)1 ConsumerPropertiesFactory (io.eventuate.local.java.kafka.consumer.ConsumerPropertiesFactory)1 EventuateKafkaConsumer (io.eventuate.local.java.kafka.consumer.EventuateKafkaConsumer)1 MessageImpl (io.eventuate.tram.messaging.common.MessageImpl)1 ByteBuffer (java.nio.ByteBuffer)1 Connection (java.sql.Connection)1 DriverManager (java.sql.DriverManager)1