Use of com.mongodb.client.model.InsertOneOptions in the Debezium project (debezium/debezium): class MongoDbConnectorIT, method shouldConsumeAllEventsFromDatabase.
/**
 * Verifies the end-to-end MongoDB connector flow: initial sync of pre-existing
 * collections, streaming of documents inserted while the connector runs,
 * catch-up after a stop/restart, and key propagation for an insert that is
 * then updated.
 */
@Test
public void shouldConsumeAllEventsFromDatabase() throws InterruptedException, IOException {
    // Use the DB configuration to define the connector's configuration ...
    config = TestHelper.getConfiguration().edit()
            .with(MongoDbConnectorConfig.POLL_INTERVAL_MS, 10)
            .with(MongoDbConnectorConfig.COLLECTION_WHITELIST, "dbit.*")
            .with(MongoDbConnectorConfig.LOGICAL_NAME, "mongo")
            .build();
    // Set up the replication context for connections ...
    context = new MongoDbTaskContext(config);
    // Cleanup database
    TestHelper.cleanDatabase(primary(), "dbit");
    // Before starting the connector, add data to the databases ...
    storeDocuments("dbit", "simpletons", "simple_objects.json");
    storeDocuments("dbit", "restaurants", "restaurants1.json");
    // Start the connector ...
    start(MongoDbConnector.class, config);

    // ---------------------------------------------------------------------------------------------------------------
    // Consume all of the events due to startup and initialization of the database
    // ---------------------------------------------------------------------------------------------------------------
    SourceRecords records = consumeRecordsByTopic(12);
    records.topics().forEach(System.out::println);
    assertThat(records.recordsForTopic("mongo.dbit.simpletons").size()).isEqualTo(6);
    assertThat(records.recordsForTopic("mongo.dbit.restaurants").size()).isEqualTo(6);
    assertThat(records.topics().size()).isEqualTo(2);
    AtomicBoolean foundLast = new AtomicBoolean(false);
    records.forEach(record -> {
        // Check that all records are valid, and can be serialized and deserialized ...
        validate(record);
        verifyFromInitialSync(record, foundLast);
        verifyReadOperation(record);
    });
    assertThat(foundLast.get()).isTrue();

    // At this point, the connector has performed the initial sync and awaits changes ...

    // ---------------------------------------------------------------------------------------------------------------
    // Store more documents while the connector is still running
    // ---------------------------------------------------------------------------------------------------------------
    storeDocuments("dbit", "restaurants", "restaurants2.json");
    // Wait until we can consume the 4 documents we just added ...
    SourceRecords records2 = consumeRecordsByTopic(4);
    assertThat(records2.recordsForTopic("mongo.dbit.restaurants").size()).isEqualTo(4);
    assertThat(records2.topics().size()).isEqualTo(1);
    records2.forEach(record -> {
        // Check that all records are valid, and can be serialized and deserialized ...
        validate(record);
        verifyNotFromInitialSync(record);
        verifyCreateOperation(record);
    });

    // ---------------------------------------------------------------------------------------------------------------
    // Stop the connector
    // ---------------------------------------------------------------------------------------------------------------
    stopConnector();

    // ---------------------------------------------------------------------------------------------------------------
    // Store more documents while the connector is NOT running
    // ---------------------------------------------------------------------------------------------------------------
    storeDocuments("dbit", "restaurants", "restaurants3.json");

    // ---------------------------------------------------------------------------------------------------------------
    // Start the connector and we should only see the documents added since it was stopped
    // ---------------------------------------------------------------------------------------------------------------
    start(MongoDbConnector.class, config);
    // Wait until we can consume the 5 documents we just added ...
    SourceRecords records3 = consumeRecordsByTopic(5);
    assertThat(records3.recordsForTopic("mongo.dbit.restaurants").size()).isEqualTo(5);
    assertThat(records3.topics().size()).isEqualTo(1);
    records3.forEach(record -> {
        // Check that all records are valid, and can be serialized and deserialized ...
        validate(record);
        verifyNotFromInitialSync(record);
        verifyCreateOperation(record);
    });

    // ---------------------------------------------------------------------------------------------------------------
    // Store more documents while the connector is still running
    // ---------------------------------------------------------------------------------------------------------------
    storeDocuments("dbit", "restaurants", "restaurants4.json");
    // Wait until we can consume the 8 documents we just added ...
    SourceRecords records4 = consumeRecordsByTopic(8);
    assertThat(records4.recordsForTopic("mongo.dbit.restaurants").size()).isEqualTo(8);
    assertThat(records4.topics().size()).isEqualTo(1);
    records4.forEach(record -> {
        // Check that all records are valid, and can be serialized and deserialized ...
        validate(record);
        verifyNotFromInitialSync(record);
        verifyCreateOperation(record);
    });

    // ---------------------------------------------------------------------------------------------------------------
    // Create and then update a document
    // ---------------------------------------------------------------------------------------------------------------
    // Testing.Debug.enable();
    AtomicReference<String> id = new AtomicReference<>();
    primary().execute("create", mongo -> {
        MongoDatabase db1 = mongo.getDatabase("dbit");
        MongoCollection<Document> coll = db1.getCollection("arbitrary");
        coll.drop();
        // Insert the document with a generated ID ...
        Document doc = Document.parse("{\"a\": 1, \"b\": 2}");
        InsertOneOptions insertOptions = new InsertOneOptions().bypassDocumentValidation(true);
        coll.insertOne(doc, insertOptions);
        // Find the document to get the generated ID ...
        doc = coll.find().first();
        Testing.debug("Document: " + doc);
        id.set(doc.getObjectId("_id").toString());
        Testing.debug("Document ID: " + id.get());
    });
    primary().execute("update", mongo -> {
        MongoDatabase db1 = mongo.getDatabase("dbit");
        MongoCollection<Document> coll = db1.getCollection("arbitrary");
        // Find the document ...
        Document doc = coll.find().first();
        Testing.debug("Document: " + doc);
        Document filter = Document.parse("{\"a\": 1}");
        Document operation = Document.parse("{ \"$set\": { \"b\": 10 } }");
        coll.updateOne(filter, operation);
        doc = coll.find().first();
        Testing.debug("Document: " + doc);
    });
    // Wait until we can consume the 1 insert and 1 update ...
    SourceRecords insertAndUpdate = consumeRecordsByTopic(2);
    assertThat(insertAndUpdate.recordsForTopic("mongo.dbit.arbitrary").size()).isEqualTo(2);
    assertThat(insertAndUpdate.topics().size()).isEqualTo(1);
    // BUG FIX: the original iterated 'records4' here (already verified above), a
    // copy-paste slip that left the new insert+update events unchecked. Verify the
    // freshly consumed events instead. The second event is an update, so we only
    // assert a CREATE operation on the first one below.
    insertAndUpdate.forEach(record -> {
        // Check that all records are valid, and can be serialized and deserialized ...
        validate(record);
        verifyNotFromInitialSync(record);
    });
    SourceRecord insertRecord = insertAndUpdate.allRecordsInOrder().get(0);
    SourceRecord updateRecord = insertAndUpdate.allRecordsInOrder().get(1);
    Testing.debug("Insert event: " + insertRecord);
    Testing.debug("Update event: " + updateRecord);
    verifyCreateOperation(insertRecord);
    // Both events must carry the same key: the generated ObjectId of the document ...
    Struct insertKey = (Struct) insertRecord.key();
    Struct updateKey = (Struct) updateRecord.key();
    String insertId = JSON.parse(insertKey.getString("id")).toString();
    String updateId = JSON.parse(updateKey.getString("id")).toString();
    assertThat(insertId).isEqualTo(id.get());
    assertThat(updateId).isEqualTo(id.get());
}
Use of com.mongodb.client.model.InsertOneOptions in the Debezium project (debezium/debezium): class MongoDbConnectorIT, method storeDocuments.
/**
 * Inserts every test document read from the given classpath resource into the
 * collection, bypassing document validation. Fails the test if the resource
 * yields a null or empty document.
 */
protected void storeDocuments(MongoCollection<Document> collection, String pathOnClasspath) {
    final InsertOneOptions options = new InsertOneOptions().bypassDocumentValidation(true);
    for (Document document : loadTestDocuments(pathOnClasspath)) {
        // Sanity-check each fixture document before inserting it ...
        assertThat(document).isNotNull();
        assertThat(document.size()).isGreaterThan(0);
        collection.insertOne(document, options);
    }
}
Use of com.mongodb.client.model.InsertOneOptions in the Debezium project (debezium/debezium): class ReplicatorIT, method shouldReplicateContent.
/**
 * Exercises the low-level Replicator directly (no Kafka Connect runtime):
 * initial sync, incremental capture while running, restart from the recorded
 * position, delete handling (delete event plus tombstone), and a fresh snapshot.
 * NOTE(review): relies on fixed 2-second sleeps for the replicator to catch up,
 * so this test is inherently timing-sensitive.
 */
@Test
public void shouldReplicateContent() throws InterruptedException {
Testing.Print.disable();
// Update the configuration to add a collection filter ...
useConfiguration(config.edit().with(MongoDbConnectorConfig.MAX_FAILED_CONNECTIONS, 1).with(MongoDbConnectorConfig.COLLECTION_WHITELIST, "dbA.contacts").build());
TestHelper.cleanDatabase(primary, "dbA");
// ------------------------------------------------------------------------------
// ADD A DOCUMENT
// ------------------------------------------------------------------------------
// Add a document to the 'contacts' collection before the replicator starts ...
primary.execute("shouldCreateContactsDatabase", mongo -> {
Testing.debug("Populating the 'dbA.contacts' collection");
// Create a database and a collection in that database ...
MongoDatabase db = mongo.getDatabase("dbA");
MongoCollection<Document> contacts = db.getCollection("contacts");
InsertOneOptions insertOptions = new InsertOneOptions().bypassDocumentValidation(true);
contacts.insertOne(Document.parse("{ \"name\":\"Jon Snow\"}"), insertOptions);
assertThat(db.getCollection("contacts").count()).isEqualTo(1);
// Read the collection to make sure we can find our document ...
Bson filter = Filters.eq("name", "Jon Snow");
FindIterable<Document> movieResults = db.getCollection("contacts").find(filter);
try (MongoCursor<Document> cursor = movieResults.iterator()) {
assertThat(cursor.tryNext().getString("name")).isEqualTo("Jon Snow");
assertThat(cursor.tryNext()).isNull();
}
Testing.debug("Completed document to 'dbA.contacts' collection");
});
// Start the replicator on a background thread; it appends every event to 'records' ...
List<SourceRecord> records = new LinkedList<>();
Replicator replicator = new Replicator(context, replicaSet, records::add, (x) -> {
});
Thread thread = new Thread(replicator::run);
thread.start();
// Sleep for 2 seconds to let the replicator complete the initial sync ...
Thread.sleep(2000);
// ------------------------------------------------------------------------------
// ADD A SECOND DOCUMENT
// ------------------------------------------------------------------------------
// Add more documents to the 'contacts' collection while the replicator is running ...
final Object[] expectedNames = { "Jon Snow", "Sally Hamm" };
primary.execute("shouldCreateContactsDatabase", mongo -> {
Testing.debug("Populating the 'dbA.contacts' collection");
// Create a database and a collection in that database ...
MongoDatabase db = mongo.getDatabase("dbA");
MongoCollection<Document> contacts = db.getCollection("contacts");
InsertOneOptions insertOptions = new InsertOneOptions().bypassDocumentValidation(true);
contacts.insertOne(Document.parse("{ \"name\":\"Sally Hamm\"}"), insertOptions);
assertThat(db.getCollection("contacts").count()).isEqualTo(2);
// Read the collection to make sure we can find our documents ...
FindIterable<Document> movieResults = db.getCollection("contacts").find();
Set<String> foundNames = new HashSet<>();
try (MongoCursor<Document> cursor = movieResults.iterator()) {
while (cursor.hasNext()) {
String name = cursor.next().getString("name");
foundNames.add(name);
}
}
assertThat(foundNames).containsOnly(expectedNames);
Testing.debug("Completed document to 'dbA.contacts' collection");
});
// Wait for a minimum number of events or until the max time elapses ...
// both documents
int numEventsExpected = 2;
long stop = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(3);
while (records.size() < numEventsExpected && System.currentTimeMillis() < stop) {
Thread.sleep(100);
}
// ------------------------------------------------------------------------------
// STOP REPLICATOR AND VERIFY WE FOUND A TOTAL OF 2 EVENTS
// ------------------------------------------------------------------------------
replicator.stop();
// Verify each record is valid and that we found the two records we expect:
// one from the initial sync (READ) and one from streaming (CREATE) ...
final Set<String> foundNames = new HashSet<>();
records.forEach(record -> {
VerifyRecord.isValid(record);
Struct value = (Struct) record.value();
String after = value.getString("after");
Document afterDoc = Document.parse(after);
foundNames.add(afterDoc.getString("name"));
Operation op = Operation.forCode(value.getString("op"));
assertThat(op == Operation.READ || op == Operation.CREATE).isTrue();
});
assertThat(records.size()).isEqualTo(2);
assertThat(foundNames).containsOnly(expectedNames);
// ------------------------------------------------------------------------------
// RESTART REPLICATOR FROM SAME POSITION
// ------------------------------------------------------------------------------
reuseConfiguration(config);
// Start the replicator again with a fresh record list ...
records = new LinkedList<>();
replicator = new Replicator(context, replicaSet, records::add, (x) -> {
});
thread = new Thread(replicator::run);
thread.start();
// Sleep for 2 seconds ...
Thread.sleep(2000);
// Stop the replicator ...
replicator.stop();
// No data changed since the last stop, so we should not have found any new records ...
records.forEach(record -> {
VerifyRecord.isValid(record);
});
assertThat(records.isEmpty()).isTrue();
// ------------------------------------------------------------------------------
// START REPLICATOR AND ALSO REMOVE A DOCUMENT
// ------------------------------------------------------------------------------
// Update the configuration and don't use a collection filter ...
reuseConfiguration(config.edit().with(MongoDbConnectorConfig.MAX_FAILED_CONNECTIONS, 1).build());
// Start the replicator again ...
records = new LinkedList<>();
replicator = new Replicator(context, replicaSet, records::add, (x) -> {
});
thread = new Thread(replicator::run);
thread.start();
// Sleep for 2 seconds ...
Thread.sleep(2000);
// Remove Jon Snow while the replicator is running ...
AtomicReference<ObjectId> jonSnowId = new AtomicReference<>();
primary.execute("removeJonSnow", mongo -> {
MongoDatabase db = mongo.getDatabase("dbA");
MongoCollection<Document> contacts = db.getCollection("contacts");
// Read the collection to make sure we can find our document ...
Bson filter = Filters.eq("name", "Jon Snow");
FindIterable<Document> movieResults = db.getCollection("contacts").find(filter);
try (MongoCursor<Document> cursor = movieResults.iterator()) {
Document doc = cursor.tryNext();
assertThat(doc.getString("name")).isEqualTo("Jon Snow");
assertThat(cursor.tryNext()).isNull();
jonSnowId.set(doc.getObjectId("_id"));
assertThat(jonSnowId.get()).isNotNull();
}
// Remove the document by filter ...
contacts.deleteOne(Filters.eq("name", "Jon Snow"));
Testing.debug("Removed the Jon Snow document from 'dbA.contacts' collection");
});
// Wait for a minimum number of events or until the max time elapses ...
// just one delete event
numEventsExpected = 1;
stop = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(3);
while (records.size() < numEventsExpected && System.currentTimeMillis() < stop) {
Thread.sleep(100);
}
// Stop the replicator ...
replicator.stop();
// Verify each record is valid and that we found the one new DELETE record we expect ...
Set<ObjectId> foundIds = new HashSet<>();
records.forEach(record -> {
VerifyRecord.isValid(record);
Struct key = (Struct) record.key();
ObjectId id = (ObjectId) (JSON.parse(key.getString("id")));
foundIds.add(id);
// The tombstone record has a null value, so only check the op on the delete event ...
if (record.value() != null) {
Struct value = (Struct) record.value();
Operation op = Operation.forCode(value.getString("op"));
assertThat(op).isEqualTo(Operation.DELETE);
}
});
// 1 delete and 1 tombstone
assertThat(records.size()).isEqualTo(2);
// ------------------------------------------------------------------------------
// START REPLICATOR TO PERFORM SNAPSHOT
// ------------------------------------------------------------------------------
// Use the original filtered configuration again; presumably this discards the
// recorded position so the replicator performs a new snapshot — NOTE(review):
// confirm useConfiguration vs reuseConfiguration semantics against TestHelper.
useConfiguration(config);
// Start the replicator again ...
records = new LinkedList<>();
replicator = new Replicator(context, replicaSet, records::add, (x) -> {
});
thread = new Thread(replicator::run);
thread.start();
// Sleep for 2 seconds ...
Thread.sleep(2000);
// Stop the replicator ...
replicator.stop();
// Verify each record is valid and comes from the snapshot (READ) ...
foundNames.clear();
records.forEach(record -> {
VerifyRecord.isValid(record);
Struct value = (Struct) record.value();
String after = value.getString("after");
Document afterDoc = Document.parse(after);
foundNames.add(afterDoc.getString("name"));
Operation op = Operation.forCode(value.getString("op"));
assertThat(op).isEqualTo(Operation.READ);
});
// The snapshot should contain exactly one record: only Sally Hamm remains ...
assertThat(records.size()).isEqualTo(1);
Object[] allExpectedNames = { "Sally Hamm" };
assertThat(foundNames).containsOnly(allExpectedNames);
}
Use of com.mongodb.client.model.InsertOneOptions in the MongoDB Java driver project (mongodb/mongo-java-driver): class MongoCollectionImplTest, method testInsertOne.
/**
 * Verifies argument validation for every {@code insertOne} overload and that each
 * overload delegates to {@code mongoOperationPublisher.insertOne} with the
 * expected session and options.
 */
@Test
public void testInsertOne() {
    InsertOneOptions insertOptions = new InsertOneOptions().bypassDocumentValidation(true);
    Document document = new Document("_id", 1);
    assertAll("insertOne",
            // Every overload must reject null documents, sessions, and options ...
            () -> assertAll("check validation",
                    () -> assertThrows(IllegalArgumentException.class, () -> collection.insertOne(null)),
                    () -> assertThrows(IllegalArgumentException.class, () -> collection.insertOne(document, null)),
                    () -> assertThrows(IllegalArgumentException.class, () -> collection.insertOne(clientSession, null)),
                    () -> assertThrows(IllegalArgumentException.class, () -> collection.insertOne(clientSession, document, null)),
                    () -> assertThrows(IllegalArgumentException.class, () -> collection.insertOne(null, document)),
                    () -> assertThrows(IllegalArgumentException.class, () -> collection.insertOne(null, document, insertOptions))),
            // No session, default options ...
            () -> {
                Publisher<InsertOneResult> expected = mongoOperationPublisher.insertOne(null, document, new InsertOneOptions());
                assertPublisherIsTheSameAs(expected, collection.insertOne(document), "Default");
            },
            // No session, explicit options ...
            () -> {
                Publisher<InsertOneResult> expected = mongoOperationPublisher.insertOne(null, document, insertOptions);
                assertPublisherIsTheSameAs(expected, collection.insertOne(document, insertOptions), "With options");
            },
            // Explicit session, default options ...
            () -> {
                Publisher<InsertOneResult> expected = mongoOperationPublisher.insertOne(clientSession, document, new InsertOneOptions());
                assertPublisherIsTheSameAs(expected, collection.insertOne(clientSession, document), "With client session");
            },
            // Explicit session and options ...
            () -> {
                Publisher<InsertOneResult> expected = mongoOperationPublisher.insertOne(clientSession, document, insertOptions);
                assertPublisherIsTheSameAs(expected, collection.insertOne(clientSession, document, insertOptions), "With client session & options");
            });
}
Use of com.mongodb.client.model.InsertOneOptions in the MongoDB Java driver project (mongodb/mongo-java-driver): class JsonPoweredCrudTestHelper, method getInsertOneResult.
/**
 * Executes an insertOne operation described by a JSON-powered CRUD test case.
 *
 * @param collectionOptions options used to resolve the target collection
 * @param arguments         test arguments; must contain "document" and may contain
 *                          "bypassDocumentValidation"
 * @param clientSession     optional session; when null the sessionless overload is used
 * @return a result document of the form {"insertedId": ...}
 */
BsonDocument getInsertOneResult(final BsonDocument collectionOptions, final BsonDocument arguments, @Nullable final ClientSession clientSession) {
    final BsonDocument toInsert = arguments.getDocument("document");
    // Translate the optional test argument into driver options ...
    final InsertOneOptions insertOptions = new InsertOneOptions();
    if (arguments.containsKey("bypassDocumentValidation")) {
        insertOptions.bypassDocumentValidation(arguments.getBoolean("bypassDocumentValidation").getValue());
    }
    // Run the insert with or without an explicit session, as requested ...
    final InsertOneResult result = clientSession == null
            ? getCollection(collectionOptions).insertOne(toInsert, insertOptions)
            : getCollection(collectionOptions).insertOne(clientSession, toInsert, insertOptions);
    return toResult(new BsonDocument("insertedId", result.getInsertedId()));
}
Aggregations