
Example 71 with SourceRecord

Use of org.apache.kafka.connect.source.SourceRecord in the debezium/debezium project.

From the class RecordMakersTest, the method shouldGenerateRecordsWithCorrectlySerializedId:

@Test
public void shouldGenerateRecordsWithCorrectlySerializedId() throws InterruptedException {
    CollectionId collectionId = new CollectionId("rs0", "dbA", "c1");
    BsonTimestamp ts = new BsonTimestamp(1000, 1);
    // long
    Document obj = new Document().append("_id", Long.valueOf(Integer.MAX_VALUE) + 10).append("name", "Sally");
    Document event = new Document().append("o", obj).append("ns", "dbA.c1").append("ts", ts).append("h", Long.valueOf(12345678)).append("op", "i");
    RecordsForCollection records = recordMakers.forCollection(collectionId);
    records.recordEvent(event, 1002);
    // String
    obj = new Document().append("_id", "123").append("name", "Sally");
    event = new Document().append("o", obj).append("ns", "dbA.c1").append("ts", ts).append("h", Long.valueOf(12345678)).append("op", "i");
    records = recordMakers.forCollection(collectionId);
    records.recordEvent(event, 1003);
    // Complex key type
    obj = new Document().append("_id", new Document().append("company", 32).append("dept", "home improvement")).append("name", "Sally");
    event = new Document().append("o", obj).append("ns", "dbA.c1").append("ts", ts).append("h", Long.valueOf(12345678)).append("op", "i");
    records = recordMakers.forCollection(collectionId);
    records.recordEvent(event, 1004);
    // date: Calendar months are zero-based, so this sets October 19, 2017
    Calendar cal = Calendar.getInstance();
    cal.set(2017, 9, 19);
    obj = new Document().append("_id", cal.getTime()).append("name", "Sally");
    event = new Document().append("o", obj).append("ns", "dbA.c1").append("ts", ts).append("h", Long.valueOf(12345678)).append("op", "i");
    records = recordMakers.forCollection(collectionId);
    records.recordEvent(event, 1005);
    // Decimal128
    obj = new Document().append("_id", new Decimal128(new BigDecimal("123.45678"))).append("name", "Sally");
    event = new Document().append("o", obj).append("ns", "dbA.c1").append("ts", ts).append("h", Long.valueOf(12345678)).append("op", "i");
    records = recordMakers.forCollection(collectionId);
    records.recordEvent(event, 1006);
    assertThat(produced.size()).isEqualTo(5);
    SourceRecord record = produced.get(0);
    Struct key = (Struct) record.key();
    assertThat(key.get("id")).isEqualTo("2147483657");
    record = produced.get(1);
    key = (Struct) record.key();
    assertThat(key.get("id")).isEqualTo("\"123\"");
    record = produced.get(2);
    key = (Struct) record.key();
    assertThat(key.get("id")).isEqualTo("{ \"company\" : 32 , \"dept\" : \"home improvement\"}");
    record = produced.get(3);
    key = (Struct) record.key();
    // This is not what https://docs.mongodb.com/manual/reference/mongodb-extended-json/#date suggests;
    // JsonSerializers does not appear to be fully compliant with that part of the spec
    assertThat(key.get("id")).isEqualTo("{ \"$date\" : " + cal.getTime().getTime() + "}");
    record = produced.get(4);
    key = (Struct) record.key();
    assertThat(key.get("id")).isEqualTo("{ \"$numberDecimal\" : \"123.45678\"}");
}
Also used : RecordsForCollection(io.debezium.connector.mongodb.RecordMakers.RecordsForCollection) Calendar(java.util.Calendar) Decimal128(org.bson.types.Decimal128) Document(org.bson.Document) SourceRecord(org.apache.kafka.connect.source.SourceRecord) BsonTimestamp(org.bson.BsonTimestamp) BigDecimal(java.math.BigDecimal) Struct(org.apache.kafka.connect.data.Struct) Test(org.junit.Test)
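
The assertions above show the pattern: the record key is a single string field, id, holding the extended-JSON form of the document's _id. As a rough illustration only (this is not the connector's code path, which goes through its own JsonSerializers, and the exact rendering of plain numeric and date types differs between serializers, as the test's own comment about dates notes), the BSON driver's strict output mode produces similar forms. The class and variable names below are invented for the sketch:

import java.math.BigDecimal;

import org.bson.Document;
import org.bson.json.JsonMode;
import org.bson.json.JsonWriterSettings;
import org.bson.types.Decimal128;
import org.bson.types.ObjectId;

public class IdSerializationSketch {
    public static void main(String[] args) {
        // Strict mode yields extended JSON such as { "$oid" : ... }
        JsonWriterSettings strict = JsonWriterSettings.builder().outputMode(JsonMode.STRICT).build();
        Object[] ids = {
            Long.valueOf(Integer.MAX_VALUE) + 10,            // numeric _id (rendering differs by serializer)
            "123",                                           // string _id, quoted in the output
            new Document("company", 32).append("dept", "home improvement"), // compound _id
            new ObjectId(),                                  // { "$oid" : "..." }
            new Decimal128(new BigDecimal("123.45678"))      // { "$numberDecimal" : "123.45678" }
        };
        for (Object id : ids) {
            // Wrap each candidate _id in a document to see its serialized form
            System.out.println(new Document("_id", id).toJson(strict));
        }
    }
}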

Example 72 with SourceRecord

Use of org.apache.kafka.connect.source.SourceRecord in the debezium/debezium project.

From the class RecordMakersTest, the method shouldGenerateRecordForInsertEvent:

@Test
public void shouldGenerateRecordForInsertEvent() throws InterruptedException {
    CollectionId collectionId = new CollectionId("rs0", "dbA", "c1");
    BsonTimestamp ts = new BsonTimestamp(1000, 1);
    ObjectId objId = new ObjectId();
    Document obj = new Document().append("_id", objId).append("name", "Sally");
    Document event = new Document().append("o", obj).append("ns", "dbA.c1").append("ts", ts).append("h", Long.valueOf(12345678)).append("op", "i");
    RecordsForCollection records = recordMakers.forCollection(collectionId);
    records.recordEvent(event, 1002);
    assertThat(produced.size()).isEqualTo(1);
    SourceRecord record = produced.get(0);
    Struct key = (Struct) record.key();
    Struct value = (Struct) record.value();
    assertThat(key.schema()).isSameAs(record.keySchema());
    assertThat(key.get("id")).isEqualTo("{ \"$oid\" : \"" + objId + "\"}");
    assertThat(value.schema()).isSameAs(record.valueSchema());
    // assertThat(value.getString(FieldName.BEFORE)).isNull();
    assertThat(value.getString(FieldName.AFTER)).isEqualTo(obj.toJson(WRITER_SETTINGS));
    assertThat(value.getString(FieldName.OPERATION)).isEqualTo(Operation.CREATE.code());
    assertThat(value.getInt64(FieldName.TIMESTAMP)).isEqualTo(1002L);
    Struct actualSource = value.getStruct(FieldName.SOURCE);
    Struct expectedSource = source.lastOffsetStruct("rs0", collectionId);
    assertThat(actualSource).isEqualTo(expectedSource);
}
Also used : RecordsForCollection(io.debezium.connector.mongodb.RecordMakers.RecordsForCollection) ObjectId(org.bson.types.ObjectId) Document(org.bson.Document) SourceRecord(org.apache.kafka.connect.source.SourceRecord) BsonTimestamp(org.bson.BsonTimestamp) Struct(org.apache.kafka.connect.data.Struct) Test(org.junit.Test)
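
On the consuming side, the same envelope fields can be read back. A hypothetical helper (InsertEvents is an invented name; the Envelope field names and operation codes are the ones the test itself uses) might look like:

import io.debezium.data.Envelope;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.bson.Document;

public final class InsertEvents {
    private InsertEvents() {
    }

    public static Document afterDocument(SourceRecord record) {
        Struct value = (Struct) record.value();
        String op = value.getString(Envelope.FieldName.OPERATION);
        if (!Envelope.Operation.CREATE.code().equals(op)) {
            throw new IllegalArgumentException("expected a CREATE event, got: " + op);
        }
        // The MongoDB connector serializes the inserted document as JSON in "after"
        return Document.parse(value.getString(Envelope.FieldName.AFTER));
    }
}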

Example 73 with SourceRecord

Use of org.apache.kafka.connect.source.SourceRecord in the debezium/debezium project.

From the class ReplicatorIT, the method shouldReplicateContent:

@Test
public void shouldReplicateContent() throws InterruptedException {
    Testing.Print.disable();
    // Update the configuration to add a collection filter ...
    useConfiguration(config.edit().with(MongoDbConnectorConfig.MAX_FAILED_CONNECTIONS, 1).with(MongoDbConnectorConfig.COLLECTION_WHITELIST, "dbA.contacts").build());
    TestHelper.cleanDatabase(primary, "dbA");
    // ------------------------------------------------------------------------------
    // ADD A DOCUMENT
    // ------------------------------------------------------------------------------
    // Add a document to the 'contacts' database ...
    primary.execute("shouldCreateContactsDatabase", mongo -> {
        Testing.debug("Populating the 'dbA.contacts' collection");
        // Create a database and a collection in that database ...
        MongoDatabase db = mongo.getDatabase("dbA");
        MongoCollection<Document> contacts = db.getCollection("contacts");
        InsertOneOptions insertOptions = new InsertOneOptions().bypassDocumentValidation(true);
        contacts.insertOne(Document.parse("{ \"name\":\"Jon Snow\"}"), insertOptions);
        assertThat(db.getCollection("contacts").count()).isEqualTo(1);
        // Read the collection to make sure we can find our document ...
        Bson filter = Filters.eq("name", "Jon Snow");
        FindIterable<Document> movieResults = db.getCollection("contacts").find(filter);
        try (MongoCursor<Document> cursor = movieResults.iterator()) {
            assertThat(cursor.tryNext().getString("name")).isEqualTo("Jon Snow");
            assertThat(cursor.tryNext()).isNull();
        }
        Testing.debug("Completed document to 'dbA.contacts' collection");
    });
    // Start the replicator ...
    List<SourceRecord> records = new LinkedList<>();
    Replicator replicator = new Replicator(context, replicaSet, records::add, (x) -> {
    });
    Thread thread = new Thread(replicator::run);
    thread.start();
    // Sleep for 2 seconds ...
    Thread.sleep(2000);
    // ------------------------------------------------------------------------------
    // ADD A SECOND DOCUMENT
    // ------------------------------------------------------------------------------
    // Add more documents to the 'contacts' database ...
    final Object[] expectedNames = { "Jon Snow", "Sally Hamm" };
    primary.execute("shouldCreateContactsDatabase", mongo -> {
        Testing.debug("Populating the 'dbA.contacts' collection");
        // Create a database and a collection in that database ...
        MongoDatabase db = mongo.getDatabase("dbA");
        MongoCollection<Document> contacts = db.getCollection("contacts");
        InsertOneOptions insertOptions = new InsertOneOptions().bypassDocumentValidation(true);
        contacts.insertOne(Document.parse("{ \"name\":\"Sally Hamm\"}"), insertOptions);
        assertThat(db.getCollection("contacts").count()).isEqualTo(2);
        // Read the collection to make sure we can find our documents ...
        FindIterable<Document> movieResults = db.getCollection("contacts").find();
        Set<String> foundNames = new HashSet<>();
        try (MongoCursor<Document> cursor = movieResults.iterator()) {
            while (cursor.hasNext()) {
                String name = cursor.next().getString("name");
                foundNames.add(name);
            }
        }
        assertThat(foundNames).containsOnly(expectedNames);
        Testing.debug("Completed document to 'dbA.contacts' collection");
    });
    // Wait for a minimum number of events or until the max time elapses ...
    // both documents
    int numEventsExpected = 2;
    long stop = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(3);
    while (records.size() < numEventsExpected && System.currentTimeMillis() < stop) {
        Thread.sleep(100);
    }
    // ------------------------------------------------------------------------------
    // STOP REPLICATOR AND VERIFY WE FOUND A TOTAL OF 2 EVENTS
    // ------------------------------------------------------------------------------
    replicator.stop();
    // Verify each record is valid and that we found the two records we expect ...
    final Set<String> foundNames = new HashSet<>();
    records.forEach(record -> {
        VerifyRecord.isValid(record);
        Struct value = (Struct) record.value();
        String after = value.getString("after");
        Document afterDoc = Document.parse(after);
        foundNames.add(afterDoc.getString("name"));
        Operation op = Operation.forCode(value.getString("op"));
        assertThat(op == Operation.READ || op == Operation.CREATE).isTrue();
    });
    assertThat(records.size()).isEqualTo(2);
    assertThat(foundNames).containsOnly(expectedNames);
    // ------------------------------------------------------------------------------
    // RESTART REPLICATOR FROM SAME POSITION
    // ------------------------------------------------------------------------------
    reuseConfiguration(config);
    // Start the replicator again ...
    records = new LinkedList<>();
    replicator = new Replicator(context, replicaSet, records::add, (x) -> {
    });
    thread = new Thread(replicator::run);
    thread.start();
    // Sleep for 2 seconds ...
    Thread.sleep(2000);
    // Stop the replicator ...
    replicator.stop();
    // We should not have found any new records ...
    records.forEach(record -> {
        VerifyRecord.isValid(record);
    });
    assertThat(records.isEmpty()).isTrue();
    // ------------------------------------------------------------------------------
    // START REPLICATOR AND ALSO REMOVE A DOCUMENT
    // ------------------------------------------------------------------------------
    // Update the configuration and don't use a collection filter ...
    reuseConfiguration(config.edit().with(MongoDbConnectorConfig.MAX_FAILED_CONNECTIONS, 1).build());
    // Start the replicator again ...
    records = new LinkedList<>();
    replicator = new Replicator(context, replicaSet, records::add, (x) -> {
    });
    thread = new Thread(replicator::run);
    thread.start();
    // Sleep for 2 seconds ...
    Thread.sleep(2000);
    // Remove Jon Snow ...
    AtomicReference<ObjectId> jonSnowId = new AtomicReference<>();
    primary.execute("removeJonSnow", mongo -> {
        MongoDatabase db = mongo.getDatabase("dbA");
        MongoCollection<Document> contacts = db.getCollection("contacts");
        // Read the collection to make sure we can find our document ...
        Bson filter = Filters.eq("name", "Jon Snow");
        FindIterable<Document> movieResults = db.getCollection("contacts").find(filter);
        try (MongoCursor<Document> cursor = movieResults.iterator()) {
            Document doc = cursor.tryNext();
            assertThat(doc.getString("name")).isEqualTo("Jon Snow");
            assertThat(cursor.tryNext()).isNull();
            jonSnowId.set(doc.getObjectId("_id"));
            assertThat(jonSnowId.get()).isNotNull();
        }
        // Remove the document by filter ...
        contacts.deleteOne(Filters.eq("name", "Jon Snow"));
        Testing.debug("Removed the Jon Snow document from 'dbA.contacts' collection");
    });
    // Wait for a minimum number of events or until the max time elapses ...
    // just one delete event
    numEventsExpected = 1;
    stop = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(3);
    while (records.size() < numEventsExpected && System.currentTimeMillis() < stop) {
        Thread.sleep(100);
    }
    // Stop the replicator ...
    replicator.stop();
    // Verify each record is valid and that we found the one new DELETE record we expect ...
    Set<ObjectId> foundIds = new HashSet<>();
    records.forEach(record -> {
        VerifyRecord.isValid(record);
        Struct key = (Struct) record.key();
        ObjectId id = (ObjectId) (JSON.parse(key.getString("id")));
        foundIds.add(id);
        if (record.value() != null) {
            Struct value = (Struct) record.value();
            Operation op = Operation.forCode(value.getString("op"));
            assertThat(op).isEqualTo(Operation.DELETE);
        }
    });
    // 1 delete and 1 tombstone
    assertThat(records.size()).isEqualTo(2);
    // ------------------------------------------------------------------------------
    // START REPLICATOR TO PERFORM SNAPSHOT
    // ------------------------------------------------------------------------------
    // Update the configuration and don't use a collection filter ...
    useConfiguration(config);
    // Start the replicator again ...
    records = new LinkedList<>();
    replicator = new Replicator(context, replicaSet, records::add, (x) -> {
    });
    thread = new Thread(replicator::run);
    thread.start();
    // Sleep for 2 seconds ...
    Thread.sleep(2000);
    // Stop the replicator ...
    replicator.stop();
    // Verify each record is valid and that we found the two records we expect ...
    foundNames.clear();
    records.forEach(record -> {
        VerifyRecord.isValid(record);
        Struct value = (Struct) record.value();
        String after = value.getString("after");
        Document afterDoc = Document.parse(after);
        foundNames.add(afterDoc.getString("name"));
        Operation op = Operation.forCode(value.getString("op"));
        assertThat(op).isEqualTo(Operation.READ);
    });
    // The snapshot should contain only the one remaining document ...
    assertThat(records.size()).isEqualTo(1);
    Object[] allExpectedNames = { "Sally Hamm" };
    assertThat(foundNames).containsOnly(allExpectedNames);
}
Also used : Document(org.bson.Document) MongoCollection(com.mongodb.client.MongoCollection) Set(java.util.Set) Test(org.junit.Test) MongoDatabase(com.mongodb.client.MongoDatabase) AtomicReference(java.util.concurrent.atomic.AtomicReference) SourceRecord(org.apache.kafka.connect.source.SourceRecord) HashSet(java.util.HashSet) TimeUnit(java.util.concurrent.TimeUnit) Bson(org.bson.conversions.Bson) Filters(com.mongodb.client.model.Filters) Operation(io.debezium.data.Envelope.Operation) List(java.util.List) Testing(io.debezium.util.Testing) Assertions.assertThat(org.fest.assertions.Assertions.assertThat) MongoCursor(com.mongodb.client.MongoCursor) FindIterable(com.mongodb.client.FindIterable) Struct(org.apache.kafka.connect.data.Struct) ObjectId(org.bson.types.ObjectId) JSON(com.mongodb.util.JSON) InsertOneOptions(com.mongodb.client.model.InsertOneOptions) VerifyRecord(io.debezium.data.VerifyRecord) LinkedList(java.util.LinkedList)
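
The test hand-rolls the same "wait for N records or a deadline" loop twice. A small helper capturing that pattern, hypothetical and not part of the test suite, could read:

import java.util.Collection;
import java.util.concurrent.TimeUnit;

public final class Waits {
    private Waits() {
    }

    // Block until the collection reaches the expected size or the timeout elapses;
    // returns whether the expected number of records arrived in time.
    public static boolean waitForSize(Collection<?> records, int expected, long timeout, TimeUnit unit)
            throws InterruptedException {
        long stop = System.currentTimeMillis() + unit.toMillis(timeout);
        while (records.size() < expected && System.currentTimeMillis() < stop) {
            Thread.sleep(100);
        }
        return records.size() >= expected;
    }
}

The two loops above would then collapse to waitForSize(records, 2, 3, TimeUnit.SECONDS) and waitForSize(records, 1, 3, TimeUnit.SECONDS).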

Example 74 with SourceRecord

Use of org.apache.kafka.connect.source.SourceRecord in the apache/kafka project.

From the class ErrorHandlingTaskTest, the method testErrorHandlingInSourceTasks:

@Test
public void testErrorHandlingInSourceTasks() throws Exception {
    Map<String, String> reportProps = new HashMap<>();
    reportProps.put(ConnectorConfig.ERRORS_LOG_ENABLE_CONFIG, "true");
    reportProps.put(ConnectorConfig.ERRORS_LOG_INCLUDE_MESSAGES_CONFIG, "true");
    LogReporter reporter = new LogReporter(taskId, connConfig(reportProps), errorHandlingMetrics);
    RetryWithToleranceOperator retryWithToleranceOperator = operator();
    retryWithToleranceOperator.metrics(errorHandlingMetrics);
    retryWithToleranceOperator.reporters(singletonList(reporter));
    createSourceTask(initialState, retryWithToleranceOperator);
    // two well-formed records built from a simple struct schema
    Schema valSchema = SchemaBuilder.struct().field("val", Schema.INT32_SCHEMA).build();
    Struct struct1 = new Struct(valSchema).put("val", 1234);
    SourceRecord record1 = new SourceRecord(emptyMap(), emptyMap(), TOPIC, PARTITION1, valSchema, struct1);
    Struct struct2 = new Struct(valSchema).put("val", 6789);
    SourceRecord record2 = new SourceRecord(emptyMap(), emptyMap(), TOPIC, PARTITION1, valSchema, struct2);
    EasyMock.expect(workerSourceTask.isStopping()).andReturn(false);
    EasyMock.expect(workerSourceTask.isStopping()).andReturn(false);
    EasyMock.expect(workerSourceTask.isStopping()).andReturn(true);
    EasyMock.expect(workerSourceTask.commitOffsets()).andReturn(true);
    sourceTask.initialize(EasyMock.anyObject());
    EasyMock.expectLastCall();
    sourceTask.start(EasyMock.anyObject());
    EasyMock.expectLastCall();
    EasyMock.expect(sourceTask.poll()).andReturn(singletonList(record1));
    EasyMock.expect(sourceTask.poll()).andReturn(singletonList(record2));
    expectTopicCreation(TOPIC);
    EasyMock.expect(producer.send(EasyMock.anyObject(), EasyMock.anyObject())).andReturn(null).times(2);
    PowerMock.replayAll();
    workerSourceTask.initialize(TASK_CONFIG);
    workerSourceTask.initializeAndStart();
    workerSourceTask.execute();
    // two records were polled from the source task
    assertSourceMetricValue("source-record-poll-total", 2.0);
    // no write was recorded as completed
    assertSourceMetricValue("source-record-write-total", 0.0);
    // no record failed terminally
    assertErrorHandlingMetricValue("total-record-errors", 0.0);
    // four retriable failures were recorded in the transformation stage
    assertErrorHandlingMetricValue("total-record-failures", 4.0);
    // no record was skipped
    assertErrorHandlingMetricValue("total-records-skipped", 0.0);
    PowerMock.verifyAll();
}
Also used : HashMap(java.util.HashMap) RetryWithToleranceOperator(org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperator) Schema(org.apache.kafka.connect.data.Schema) LogReporter(org.apache.kafka.connect.runtime.errors.LogReporter) SourceRecord(org.apache.kafka.connect.source.SourceRecord) Struct(org.apache.kafka.connect.data.Struct) ParameterizedTest(org.apache.kafka.connect.util.ParameterizedTest) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
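
Both records here are built with empty source partition and offset maps, which is enough for the mock pipeline; a real source task supplies both so the Connect framework can persist and restore progress. A sketch using the same SourceRecord constructor with invented partition and offset keys:

import java.util.Collections;
import java.util.Map;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

public class SourceRecordSketch {
    public static void main(String[] args) {
        // Hypothetical source position: which input we read and how far we got
        Map<String, ?> partition = Collections.singletonMap("file", "events.log");
        Map<String, ?> offset = Collections.singletonMap("position", 42L);
        Schema valSchema = SchemaBuilder.struct().field("val", Schema.INT32_SCHEMA).build();
        Struct value = new Struct(valSchema).put("val", 1234);
        // Same constructor as the test: partition, offset, topic, kafka partition, value schema, value
        SourceRecord record = new SourceRecord(partition, offset, "my-topic", 0, valSchema, value);
        System.out.println(record);
    }
}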

Example 75 with SourceRecord

Use of org.apache.kafka.connect.source.SourceRecord in the apache/kafka project.

From the class ErrorHandlingTaskTest, the method testErrorHandlingInSourceTasksWthBadConverter:

@Test
public void testErrorHandlingInSourceTasksWthBadConverter() throws Exception {
    Map<String, String> reportProps = new HashMap<>();
    reportProps.put(ConnectorConfig.ERRORS_LOG_ENABLE_CONFIG, "true");
    reportProps.put(ConnectorConfig.ERRORS_LOG_INCLUDE_MESSAGES_CONFIG, "true");
    LogReporter reporter = new LogReporter(taskId, connConfig(reportProps), errorHandlingMetrics);
    RetryWithToleranceOperator retryWithToleranceOperator = operator();
    retryWithToleranceOperator.metrics(errorHandlingMetrics);
    retryWithToleranceOperator.reporters(singletonList(reporter));
    createSourceTask(initialState, retryWithToleranceOperator, badConverter());
    // two well-formed records built from a simple struct schema
    Schema valSchema = SchemaBuilder.struct().field("val", Schema.INT32_SCHEMA).build();
    Struct struct1 = new Struct(valSchema).put("val", 1234);
    SourceRecord record1 = new SourceRecord(emptyMap(), emptyMap(), TOPIC, PARTITION1, valSchema, struct1);
    Struct struct2 = new Struct(valSchema).put("val", 6789);
    SourceRecord record2 = new SourceRecord(emptyMap(), emptyMap(), TOPIC, PARTITION1, valSchema, struct2);
    EasyMock.expect(workerSourceTask.isStopping()).andReturn(false);
    EasyMock.expect(workerSourceTask.isStopping()).andReturn(false);
    EasyMock.expect(workerSourceTask.isStopping()).andReturn(true);
    EasyMock.expect(workerSourceTask.commitOffsets()).andReturn(true);
    sourceTask.initialize(EasyMock.anyObject());
    EasyMock.expectLastCall();
    sourceTask.start(EasyMock.anyObject());
    EasyMock.expectLastCall();
    EasyMock.expect(sourceTask.poll()).andReturn(singletonList(record1));
    EasyMock.expect(sourceTask.poll()).andReturn(singletonList(record2));
    expectTopicCreation(TOPIC);
    EasyMock.expect(producer.send(EasyMock.anyObject(), EasyMock.anyObject())).andReturn(null).times(2);
    PowerMock.replayAll();
    workerSourceTask.initialize(TASK_CONFIG);
    workerSourceTask.initializeAndStart();
    workerSourceTask.execute();
    // two records were polled from the source task
    assertSourceMetricValue("source-record-poll-total", 2.0);
    // no write was recorded as completed
    assertSourceMetricValue("source-record-write-total", 0.0);
    // no record failed terminally
    assertErrorHandlingMetricValue("total-record-errors", 0.0);
    // eight failures in total: the failing converter adds four more on top of the transformation's four
    assertErrorHandlingMetricValue("total-record-failures", 8.0);
    // no record was skipped
    assertErrorHandlingMetricValue("total-records-skipped", 0.0);
    PowerMock.verifyAll();
}
Also used : HashMap(java.util.HashMap) RetryWithToleranceOperator(org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperator) Schema(org.apache.kafka.connect.data.Schema) LogReporter(org.apache.kafka.connect.runtime.errors.LogReporter) SourceRecord(org.apache.kafka.connect.source.SourceRecord) Struct(org.apache.kafka.connect.data.Struct) ParameterizedTest(org.apache.kafka.connect.util.ParameterizedTest) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
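
The badConverter() helper is not shown in this excerpt. One plausible shape, assuming a converter that simply throws DataException on every conversion (the real helper may instead fail only intermittently), is:

import java.util.Map;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.errors.DataException;
import org.apache.kafka.connect.storage.Converter;

// Every conversion fails, so the error-handling path (retries, metrics,
// the log reporter) is exercised on each record.
public class FaultyConverter implements Converter {
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // no configuration needed
    }

    @Override
    public byte[] fromConnectData(String topic, Schema schema, Object value) {
        throw new DataException("deliberate failure converting record for topic " + topic);
    }

    @Override
    public SchemaAndValue toConnectData(String topic, byte[] value) {
        throw new DataException("deliberate failure deserializing record for topic " + topic);
    }
}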

Aggregations

SourceRecord (org.apache.kafka.connect.source.SourceRecord): 308
Test (org.junit.Test): 148
Test (org.junit.jupiter.api.Test): 98
Struct (org.apache.kafka.connect.data.Struct): 68
HashMap (java.util.HashMap): 60
Schema (org.apache.kafka.connect.data.Schema): 45
ThreadedTest (org.apache.kafka.connect.util.ThreadedTest): 27
ParameterizedTest (org.apache.kafka.connect.util.ParameterizedTest): 23
ArrayList (java.util.ArrayList): 22
RetryWithToleranceOperatorTest (org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperatorTest): 21
Map (java.util.Map): 15
SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder): 13
ConnectException (org.apache.kafka.connect.errors.ConnectException): 13
Document (org.bson.Document): 13
FixFor (io.debezium.doc.FixFor): 12
List (java.util.List): 12
RecordsForCollection (io.debezium.connector.mongodb.RecordMakers.RecordsForCollection): 11
ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord): 11
ConnectHeaders (org.apache.kafka.connect.header.ConnectHeaders): 11
BsonTimestamp (org.bson.BsonTimestamp): 11