use of com.mongodb.client.model.CreateCollectionOptions in project camel by apache.
the class MongoDbTailableCursorConsumerTest method testHundredThousandRecords.
@Test
public void testHundredThousandRecords() throws Exception {
assertEquals(0, cappedTestCollection.count());
final MockEndpoint mock = getMockEndpoint("mock:test");
mock.expectedMessageCount(1000);
// create a capped collection with max = 1000
// DocumentBuilder.start().add("capped", true).add("size",
// 1000000000).add("max", 1000).get())
db.createCollection(cappedTestCollectionName, new CreateCollectionOptions().capped(true).sizeInBytes(1000000000).maxDocuments(1000));
cappedTestCollection = db.getCollection(cappedTestCollectionName, Document.class);
addTestRoutes();
context.startRoute("tailableCursorConsumer1");
// continuous pump of 100000 records, asserting incrementally to reduce
// overhead on the mock endpoint
Thread t = new Thread(new Runnable() {
@Override
public void run() {
for (int i = 1; i <= 100000; i++) {
cappedTestCollection.insertOne(new Document("increasing", i).append("string", "value" + i));
// assert incrementally, as the mock endpoint stores all messages and otherwise the test would be sluggish
if (i % 1000 == 0) {
try {
MongoDbTailableCursorConsumerTest.this.assertAndResetMockEndpoint(mock);
} catch (Exception e) {
return;
}
}
}
}
});
// start the data pumping
t.start();
// before we stop the route, wait for the data pumping to end
t.join();
context.stopRoute("tailableCursorConsumer1");
}
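The route ids started and stopped above come from addTestRoutes(), which is not included in this snippet. As a rough, hypothetical sketch only (not the project's actual route definitions), a tailable-cursor consumer route in camel-mongodb could look roughly like the following; the connection bean name "myDb" and the database/collection values are placeholders, and the endpoint options assumed here are consumerType=tailable and tailTrackIncreasingField:
RouteBuilder tailableRoute = new RouteBuilder() {
    @Override
    public void configure() {
        // Hypothetical sketch: a tailable cursor on the capped collection feeds each
        // newly inserted document to the mock endpoint the assertions read from.
        from("mongodb:myDb?database=test&collection=cappedTestCollection"
                + "&consumerType=tailable&tailTrackIncreasingField=increasing")
            .routeId("tailableCursorConsumer1")
            .autoStartup(false)   // the test starts and stops the route explicitly
            .to("mock:test");
    }
};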
use of com.mongodb.client.model.CreateCollectionOptions in project camel by apache.
the class MongoDbTailableCursorConsumerTest method testPersistentTailTrackIncreasingDateField.
@Test
@Ignore
public void testPersistentTailTrackIncreasingDateField() throws Exception {
assertEquals(0, cappedTestCollection.count());
final MockEndpoint mock = getMockEndpoint("mock:test");
final Calendar startTimestamp = Calendar.getInstance();
// get default tracking collection
MongoCollection<Document> trackingCol = db.getCollection(MongoDbTailTrackingConfig.DEFAULT_COLLECTION, Document.class);
trackingCol.drop();
trackingCol = db.getCollection(MongoDbTailTrackingConfig.DEFAULT_COLLECTION, Document.class);
// create a capped collection with max = 1000
// DocumentBuilder.start().add("capped", true).add("size",
// 1000000000).add("max", 1000).get()
db.createCollection(cappedTestCollectionName, new CreateCollectionOptions().capped(true).sizeInBytes(1000000000).maxDocuments(1000));
cappedTestCollection = db.getCollection(cappedTestCollectionName, Document.class);
addTestRoutes();
context.startRoute("tailableCursorConsumer2");
mock.expectedMessageCount(300);
// pump 300 records
Thread t = new Thread(new Runnable() {
@Override
public void run() {
for (int i = 1; i <= 300; i++) {
Calendar c = (Calendar) (startTimestamp.clone());
c.add(Calendar.MINUTE, i);
cappedTestCollection.insertOne(new Document("increasing", c.getTime()).append("string", "value" + i));
}
}
});
// start the data pumping
t.start();
// before we continue wait for the data pump to end
t.join();
mock.assertIsSatisfied();
mock.reset();
// ensure that the persisted lastVal is startTimestamp + 300min
Calendar cal300 = (Calendar) startTimestamp.clone();
cal300.add(Calendar.MINUTE, 300);
context.stopRoute("tailableCursorConsumer2");
assertEquals(cal300.getTime(), trackingCol.find(eq("persistentId", "darwin")).first().get(MongoDbTailTrackingConfig.DEFAULT_FIELD));
context.startRoute("tailableCursorConsumer2");
// expect 300 messages and not 600
mock.expectedMessageCount(300);
// pump 300 records
t = new Thread(new Runnable() {
@Override
public void run() {
for (int i = 301; i <= 600; i++) {
Calendar c = (Calendar) (startTimestamp.clone());
c.add(Calendar.MINUTE, i);
cappedTestCollection.insertOne(new Document("increasing", c.getTime()).append("string", "value" + i));
}
}
});
// start the data pumping
t.start();
// before we continue wait for the data pump to end
t.join();
mock.assertIsSatisfied();
Object firstBody = mock.getExchanges().get(0).getIn().getBody();
assertTrue(firstBody instanceof Document);
Calendar cal301 = Calendar.class.cast(startTimestamp.clone());
cal301.add(Calendar.MINUTE, 301);
assertEquals(cal301.getTime(), Document.class.cast(firstBody).get("increasing"));
// check that the persisted lastVal after stopping the route is
// startTimestamp + 600min
context.stopRoute("tailableCursorConsumer2");
Calendar cal600 = (Calendar) startTimestamp.clone();
cal600.add(Calendar.MINUTE, 600);
assertEquals(cal600.getTime(), trackingCol.find(eq("persistentId", "darwin")).first().get(MongoDbTailTrackingConfig.DEFAULT_FIELD));
}
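This test depends on persistent tail tracking: the last seen value of the "increasing" field is stored under persistentId "darwin" (the id the tracking-collection assertions query for), so a restarted route resumes after the last processed document instead of replaying the capped collection. The actual route is defined in addTestRoutes(); as a hedged sketch only, with placeholder connection bean and database/collection names:
RouteBuilder persistentTailRoute = new RouteBuilder() {
    @Override
    public void configure() {
        // Hypothetical sketch: persistentTailTracking=true persists the last value of
        // "increasing" under persistentId=darwin between route restarts.
        from("mongodb:myDb?database=test&collection=cappedTestCollection"
                + "&consumerType=tailable&tailTrackIncreasingField=increasing"
                + "&persistentTailTracking=true&persistentId=darwin")
            .routeId("tailableCursorConsumer2")
            .autoStartup(false)
            .to("mock:test");
    }
};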
use of com.mongodb.client.model.CreateCollectionOptions in project camel by apache.
the class MongoDbTailableCursorConsumerTest method testCustomTailTrackLocation.
@Test
@Ignore
public void testCustomTailTrackLocation() throws Exception {
assertEquals(0, cappedTestCollection.count());
final MockEndpoint mock = getMockEndpoint("mock:test");
// get the custom tracking collection and drop it
// (tailTrackDb=einstein&tailTrackCollection=curie&tailTrackField=newton)
MongoCollection<Document> trackingCol = mongo.getDatabase("einstein").getCollection("curie", Document.class);
trackingCol.drop();
trackingCol = mongo.getDatabase("einstein").getCollection("curie", Document.class);
// create a capped collection with max = 1000
// DocumentBuilder.start().add("capped", true).add("size",
// 1000000000).add("max", 1000).get()
db.createCollection(cappedTestCollectionName, new CreateCollectionOptions().capped(true).sizeInBytes(1000000000).maxDocuments(1000));
cappedTestCollection = db.getCollection(cappedTestCollectionName, Document.class);
addTestRoutes();
context.startRoute("tailableCursorConsumer3");
mock.expectedMessageCount(300);
// pump 300 records
Thread t = new Thread(new Runnable() {
@Override
public void run() {
for (int i = 1; i <= 300; i++) {
cappedTestCollection.insertOne(new Document("increasing", i).append("string", "value" + i));
}
}
});
// start the data pumping
t.start();
// before we continue wait for the data pump to end
t.join();
mock.assertIsSatisfied();
mock.reset();
// stop the route to ensure that our lastVal is persisted, and check it
context.stopRoute("tailableCursorConsumer3");
// ensure that the persisted lastVal is 300, newton is the name of the
// trackingField we are using
assertEquals(300, trackingCol.find(eq("persistentId", "darwin")).first().get("newton"));
context.startRoute("tailableCursorConsumer3");
// expect 300 messages and not 600
mock.expectedMessageCount(300);
// pump 300 records
t = new Thread(new Runnable() {
@Override
public void run() {
for (int i = 301; i <= 600; i++) {
cappedTestCollection.insertOne(new Document("increasing", i).append("string", "value" + i));
}
}
});
// start the data pumping
t.start();
// before we continue wait for the data pump to end
t.join();
mock.assertIsSatisfied();
// check that the first received body contains increasing=301 and not
// increasing=1, i.e. it's not starting from the top
Object firstBody = mock.getExchanges().get(0).getIn().getBody();
assertTrue(firstBody instanceof Document);
assertEquals(301, (Document.class.cast(firstBody)).get("increasing"));
// check that the persisted lastVal after stopping the route is 600,
// newton is the name of the trackingField we are using
context.stopRoute("tailableCursorConsumer3");
assertEquals(600, trackingCol.find(eq("persistentId", "darwin")).first().get("newton"));
}
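The comment at the top of this test quotes the endpoint options it relies on (tailTrackDb=einstein, tailTrackCollection=curie, tailTrackField=newton), which redirect where the tracking record is persisted. The route itself lives in addTestRoutes(); a hedged sketch of that configuration, with placeholder connection bean and database/collection names:
RouteBuilder customTrackRoute = new RouteBuilder() {
    @Override
    public void configure() {
        // Hypothetical sketch: the tracking record is written to einstein.curie with the
        // last value stored in the "newton" field, matching the trackingCol lookups above.
        from("mongodb:myDb?database=test&collection=cappedTestCollection"
                + "&consumerType=tailable&tailTrackIncreasingField=increasing"
                + "&persistentTailTracking=true&persistentId=darwin"
                + "&tailTrackDb=einstein&tailTrackCollection=curie&tailTrackField=newton")
            .routeId("tailableCursorConsumer3")
            .autoStartup(false)
            .to("mock:test");
    }
};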
use of com.mongodb.client.model.CreateCollectionOptions in project camel by apache.
the class MongoDbTailableCursorConsumerTest method testPersistentTailTrack.
@Test
@Ignore
public void testPersistentTailTrack() throws Exception {
assertEquals(0, cappedTestCollection.count());
final MockEndpoint mock = getMockEndpoint("mock:test");
// drop the tracking collection
db.getCollection(MongoDbTailTrackingConfig.DEFAULT_COLLECTION).drop();
// create a capped collection with max = 1000
// DocumentBuilder.start().add("capped", true).add("size",
// 1000000000).add("max", 1000).get()
db.createCollection(cappedTestCollectionName, new CreateCollectionOptions().capped(true).sizeInBytes(1000000000).maxDocuments(1000));
cappedTestCollection = db.getCollection(cappedTestCollectionName, Document.class);
cappedTestCollection.createIndex(new Document("increasing", 1));
addTestRoutes();
context.startRoute("tailableCursorConsumer2");
mock.expectedMessageCount(300);
// pump 300 records
Thread t = new Thread(new Runnable() {
@Override
public void run() {
for (int i = 1; i <= 300; i++) {
cappedTestCollection.insertOne(new Document("increasing", i).append("string", "value" + i));
}
}
});
// start the data pumping
t.start();
// before we continue wait for the data pump to end
t.join();
mock.assertIsSatisfied();
mock.reset();
context.stopRoute("tailableCursorConsumer2");
while (context.getRouteStatus("tailableCursorConsumer2") != ServiceStatus.Stopped) {
}
context.startRoute("tailableCursorConsumer2");
// expect 300 messages and not 600
mock.expectedMessageCount(300);
// pump 300 records
t = new Thread(new Runnable() {
@Override
public void run() {
for (int i = 301; i <= 600; i++) {
cappedTestCollection.insertOne(new Document("increasing", i).append("string", "value" + i));
}
}
});
// start the data pumping
t.start();
// before we continue wait for the data pump to end
t.join();
mock.assertIsSatisfied();
// check that the first message received in this second batch
// corresponds to increasing=301
Object firstBody = mock.getExchanges().get(0).getIn().getBody();
assertTrue(firstBody instanceof Document);
assertEquals(301, Document.class.cast(firstBody).get("increasing"));
// check that the lastVal is persisted at the right time: check before
// and after stopping the route
assertEquals(300, db.getCollection(MongoDbTailTrackingConfig.DEFAULT_COLLECTION).find(eq("persistentId", "darwin")).first().get("lastTrackingValue"));
// stop the route and verify the last value has been updated
context.stopRoute("tailableCursorConsumer2");
while (context.getRouteStatus("tailableCursorConsumer2") != ServiceStatus.Stopped) {
}
assertEquals(600, db.getCollection(MongoDbTailTrackingConfig.DEFAULT_COLLECTION).find(eq("persistentId", "darwin")).first().get("lastTrackingValue"));
}
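Stripped of the test scaffolding, the CreateCollectionOptions usage all four tests share is the capped-collection creation, since tailable cursors only work on capped collections. A minimal standalone sketch of that call (the collection name is only illustrative; the size and document bounds mirror the tests above):
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.CreateCollectionOptions;
import org.bson.Document;

class CappedCollectionSketch {
    // Creates the fixed-size, insertion-ordered collection the tailable consumer reads from.
    static MongoCollection<Document> createCapped(MongoDatabase db, String name) {
        db.createCollection(name,
                new CreateCollectionOptions()
                        .capped(true)               // required for tailable cursors
                        .sizeInBytes(1000000000)    // size bound, mandatory for capped collections
                        .maxDocuments(1000));       // optional extra bound on document count
        return db.getCollection(name, Document.class);
    }
}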
use of com.mongodb.client.model.CreateCollectionOptions in project mongo-java-driver by mongodb.
the class AbstractClientSideEncryptionTest method setUp.
@Before
public void setUp() {
assumeTrue("Client side encryption tests disabled", hasEncryptionTestsEnabled());
assumeFalse("runOn requirements not satisfied", skipTest);
assumeFalse("Skipping count tests", filename.startsWith("count."));
assumeFalse(definition.getString("skipReason", new BsonString("")).getValue(), definition.containsKey("skipReason"));
String databaseName = specDocument.getString("database_name").getValue();
String collectionName = specDocument.getString("collection_name").getValue();
collectionHelper = new CollectionHelper<BsonDocument>(new BsonDocumentCodec(), new MongoNamespace(databaseName, collectionName));
MongoDatabase database = getMongoClient().getDatabase(databaseName);
MongoCollection<BsonDocument> collection = database.getCollection(collectionName, BsonDocument.class);
collection.drop();
/* Create the collection for auto encryption. */
if (specDocument.containsKey("json_schema")) {
database.createCollection(collectionName, new CreateCollectionOptions().validationOptions(new ValidationOptions().validator(new BsonDocument("$jsonSchema", specDocument.getDocument("json_schema")))));
}
/* Insert data into the collection */
List<BsonDocument> documents = new ArrayList<BsonDocument>();
if (!data.isEmpty()) {
for (BsonValue document : data) {
documents.add(document.asDocument());
}
database.getCollection(collectionName, BsonDocument.class).insertMany(documents);
}
/* Insert data into the "keyvault.datakeys" key vault. */
BsonArray data = specDocument.getArray("key_vault_data", new BsonArray());
collection = getMongoClient().getDatabase("keyvault").getCollection("datakeys", BsonDocument.class).withWriteConcern(WriteConcern.MAJORITY);
collection.drop();
if (!data.isEmpty()) {
documents = new ArrayList<BsonDocument>();
for (BsonValue document : data) {
documents.add(document.asDocument());
}
collection.insertMany(documents);
}
commandListener = new TestCommandListener();
BsonDocument clientOptions = definition.getDocument("clientOptions");
BsonDocument cryptOptions = clientOptions.getDocument("autoEncryptOpts");
BsonDocument kmsProviders = cryptOptions.getDocument("kmsProviders");
boolean bypassAutoEncryption = cryptOptions.getBoolean("bypassAutoEncryption", BsonBoolean.FALSE).getValue();
Map<String, BsonDocument> namespaceToSchemaMap = new HashMap<String, BsonDocument>();
if (cryptOptions.containsKey("schemaMap")) {
BsonDocument autoEncryptMapDocument = cryptOptions.getDocument("schemaMap");
for (Map.Entry<String, BsonValue> entries : autoEncryptMapDocument.entrySet()) {
final BsonDocument autoEncryptOptionsDocument = entries.getValue().asDocument();
namespaceToSchemaMap.put(entries.getKey(), autoEncryptOptionsDocument);
}
}
Map<String, Object> extraOptions = new HashMap<String, Object>();
if (cryptOptions.containsKey("extraOptions")) {
BsonDocument extraOptionsDocument = cryptOptions.getDocument("extraOptions");
if (extraOptionsDocument.containsKey("mongocryptdSpawnArgs")) {
List<String> mongocryptdSpawnArgsValue = new ArrayList<String>();
for (BsonValue cur : extraOptionsDocument.getArray("mongocryptdSpawnArgs")) {
mongocryptdSpawnArgsValue.add(cur.asString().getValue());
}
extraOptions.put("mongocryptdSpawnArgs", mongocryptdSpawnArgsValue);
}
if (extraOptionsDocument.containsKey("mongocryptdBypassSpawn")) {
extraOptions.put("mongocryptdBypassSpawn", extraOptionsDocument.getBoolean("mongocryptdBypassSpawn").getValue());
}
if (extraOptionsDocument.containsKey("mongocryptdURI")) {
extraOptions.put("mongocryptdURI", extraOptionsDocument.getString("mongocryptdURI").getValue());
}
}
Map<String, Map<String, Object>> kmsProvidersMap = new HashMap<>();
for (String kmsProviderKey : kmsProviders.keySet()) {
BsonDocument kmsProviderOptions = kmsProviders.get(kmsProviderKey).asDocument();
Map<String, Object> kmsProviderMap = new HashMap<>();
kmsProvidersMap.put(kmsProviderKey.startsWith("aws") ? "aws" : kmsProviderKey, kmsProviderMap);
switch(kmsProviderKey) {
case "aws":
kmsProviderMap.put("accessKeyId", System.getProperty("org.mongodb.test.awsAccessKeyId"));
kmsProviderMap.put("secretAccessKey", System.getProperty("org.mongodb.test.awsSecretAccessKey"));
break;
case "awsTemporary":
kmsProviderMap.put("accessKeyId", System.getProperty("org.mongodb.test.tmpAwsAccessKeyId"));
kmsProviderMap.put("secretAccessKey", System.getProperty("org.mongodb.test.tmpAwsSecretAccessKey"));
kmsProviderMap.put("sessionToken", System.getProperty("org.mongodb.test.tmpAwsSessionToken"));
break;
case "awsTemporaryNoSessionToken":
kmsProviderMap.put("accessKeyId", System.getProperty("org.mongodb.test.tmpAwsAccessKeyId"));
kmsProviderMap.put("secretAccessKey", System.getProperty("org.mongodb.test.tmpAwsSecretAccessKey"));
break;
case "azure":
kmsProviderMap.put("tenantId", System.getProperty("org.mongodb.test.azureTenantId"));
kmsProviderMap.put("clientId", System.getProperty("org.mongodb.test.azureClientId"));
kmsProviderMap.put("clientSecret", System.getProperty("org.mongodb.test.azureClientSecret"));
break;
case "gcp":
kmsProviderMap.put("email", System.getProperty("org.mongodb.test.gcpEmail"));
kmsProviderMap.put("privateKey", System.getProperty("org.mongodb.test.gcpPrivateKey"));
break;
case "kmip":
kmsProviderMap.put("endpoint", System.getProperty("org.mongodb.test.kmipEndpoint", "localhost:5698"));
break;
case "local":
kmsProviderMap.put("key", kmsProviderOptions.getBinary("key").getData());
break;
default:
throw new UnsupportedOperationException("Unsupported KMS provider: " + kmsProviderKey);
}
}
String keyVaultNamespace = "keyvault.datakeys";
if (cryptOptions.containsKey("keyVaultNamespace")) {
keyVaultNamespace = cryptOptions.getString("keyVaultNamespace").getValue();
}
createMongoClient(AutoEncryptionSettings.builder().keyVaultNamespace(keyVaultNamespace).kmsProviders(kmsProvidersMap).schemaMap(namespaceToSchemaMap).bypassAutoEncryption(bypassAutoEncryption).extraOptions(extraOptions).build(), commandListener);
database = getDatabase(databaseName);
helper = new JsonPoweredCrudTestHelper(description, database, database.getCollection("default", BsonDocument.class));
}
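For reference, the CreateCollectionOptions call in the middle of setUp() attaches the spec file's "json_schema" document as a $jsonSchema collection validator before the encryption test data is inserted. A minimal standalone sketch of the same pattern follows; the schema document and the "ssn" field are made-up placeholders, not taken from the spec files:
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.CreateCollectionOptions;
import com.mongodb.client.model.ValidationOptions;
import org.bson.BsonDocument;

class SchemaValidatedCollectionSketch {
    // Creates a collection whose documents are validated server-side against a
    // $jsonSchema document, the same shape of call the test builds from "json_schema".
    static void create(MongoDatabase database, String collectionName) {
        BsonDocument jsonSchema = BsonDocument.parse(
                "{ bsonType: 'object', properties: { ssn: { bsonType: 'string' } } }");
        database.createCollection(collectionName,
                new CreateCollectionOptions().validationOptions(
                        new ValidationOptions().validator(
                                new BsonDocument("$jsonSchema", jsonSchema))));
    }
}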