Use of com.mongodb.reactivestreams.client.MongoDatabase in project gravitee-access-management by gravitee-io.
The class MongodbProvider, method afterPropertiesSet:
@Override
public void afterPropertiesSet() {
    mongoDBContainer = new MongoDBContainer(DockerImageName.parse("mongo:4.0.10"));
    mongoDBContainer.withEnv("MONGO_INITDB_DATABASE", databaseName);
    mongoDBContainer.start();
    // cluster configuration
    ClusterSettings clusterSettings = ClusterSettings.builder()
            .hosts(Collections.singletonList(new ServerAddress(mongoDBContainer.getHost(), mongoDBContainer.getFirstMappedPort())))
            .build();
    // codec configuration
    CodecRegistry pojoCodecRegistry = fromRegistries(MongoClients.getDefaultCodecRegistry(),
            fromProviders(PojoCodecProvider.builder().automatic(true).build()));
    MongoClientSettings settings = MongoClientSettings.builder()
            .applyToClusterSettings(builder -> builder.applySettings(clusterSettings))
            .codecRegistry(pojoCodecRegistry)
            .writeConcern(WriteConcern.ACKNOWLEDGED)
            .build();
    mongoClient = MongoClients.create(settings);
    mongoDatabase = mongoClient.getDatabase(databaseName);
}
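The provider above only covers startup. If the class also implements Spring's DisposableBean, a matching teardown might look like the sketch below; this is an illustrative assumption, not code from the Gravitee project.

@Override
public void destroy() {
    // Close the reactive client first so no further commands are sent to the container.
    if (mongoClient != null) {
        mongoClient.close();
    }
    // Then stop and discard the Testcontainers MongoDB instance.
    if (mongoDBContainer != null) {
        mongoDBContainer.stop();
    }
}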
Use of com.mongodb.reactivestreams.client.MongoDatabase in project pulsar by apache.
The class MongoSink, method open:
@Override
public void open(Map<String, Object> config, SinkContext sinkContext) throws Exception {
    log.info("Open MongoDB Sink");
    mongoConfig = MongoConfig.load(config);
    mongoConfig.validate(true, true);
    if (clientProvider != null) {
        mongoClient = clientProvider.get();
    } else {
        mongoClient = MongoClients.create(mongoConfig.getMongoUri());
    }
    final MongoDatabase db = mongoClient.getDatabase(mongoConfig.getDatabase());
    collection = db.getCollection(mongoConfig.getCollection());
    incomingList = Lists.newArrayList();
    flushExecutor = Executors.newScheduledThreadPool(1);
    flushExecutor.scheduleAtFixedRate(() -> flush(),
            mongoConfig.getBatchTimeMs(), mongoConfig.getBatchTimeMs(), TimeUnit.MILLISECONDS);
}
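The scheduled task above only triggers flush(); the flush body itself is not shown. Below is a minimal sketch of such a flush, assuming incomingList holds Pulsar Record<byte[]> instances whose values are JSON, that write() and flush() synchronize on the sink instance, and that the 4.x reactive driver is in use (insertMany publishes a single InsertManyResult). It is an illustration, not the Pulsar implementation.

private void flush() {
    final List<Record<byte[]>> toFlush;
    synchronized (this) {
        if (incomingList.isEmpty()) {
            return;
        }
        // Swap the batch out under the lock; write() is assumed to add records under the same lock.
        toFlush = incomingList;
        incomingList = Lists.newArrayList();
    }
    // Assumed mapping: each record value is a UTF-8 JSON document.
    final List<Document> docs = toFlush.stream()
            .map(record -> Document.parse(new String(record.getValue(), StandardCharsets.UTF_8)))
            .collect(Collectors.toList());
    // insertMany returns a cold Publisher; nothing is sent until it is subscribed to.
    collection.insertMany(docs).subscribe(new Subscriber<InsertManyResult>() {
        @Override
        public void onSubscribe(Subscription s) {
            s.request(1);
        }

        @Override
        public void onNext(InsertManyResult result) {
            toFlush.forEach(Record::ack);
        }

        @Override
        public void onError(Throwable t) {
            log.error("MongoDB insert failed", t);
            toFlush.forEach(Record::fail);
        }

        @Override
        public void onComplete() {
            // single-element publisher: acknowledgement already handled in onNext
        }
    });
}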
Use of com.mongodb.reactivestreams.client.MongoDatabase in project pulsar by apache.
The class MongoSource, method open:
@Override
public void open(Map<String, Object> config, SourceContext sourceContext) throws Exception {
    log.info("Open MongoDB Source");
    mongoConfig = MongoConfig.load(config);
    mongoConfig.validate(false, false);
    if (clientProvider != null) {
        mongoClient = clientProvider.get();
    } else {
        mongoClient = MongoClients.create(mongoConfig.getMongoUri());
    }
    if (StringUtils.isEmpty(mongoConfig.getDatabase())) {
        // Watch all databases
        log.info("Watch all");
        stream = mongoClient.watch();
    } else {
        final MongoDatabase db = mongoClient.getDatabase(mongoConfig.getDatabase());
        if (StringUtils.isEmpty(mongoConfig.getCollection())) {
            // Watch all collections in a database
            log.info("Watch db: {}", db.getName());
            stream = db.watch();
        } else {
            // Watch a collection
            final MongoCollection<Document> collection = db.getCollection(mongoConfig.getCollection());
            log.info("Watch collection: {} {}", db.getName(), mongoConfig.getCollection());
            stream = collection.watch();
        }
    }
    stream.batchSize(mongoConfig.getBatchSize()).fullDocument(FullDocument.UPDATE_LOOKUP);
    stream.subscribe(new Subscriber<ChangeStreamDocument<Document>>() {

        private ObjectMapper mapper = new ObjectMapper();
        private Subscription subscription;

        @Override
        public void onSubscribe(Subscription subscription) {
            this.subscription = subscription;
            this.subscription.request(Integer.MAX_VALUE);
        }

        @Override
        public void onNext(ChangeStreamDocument<Document> doc) {
            try {
                log.info("New change doc: {}", doc);
                // Build a record with the essential information
                final Map<String, Object> recordValue = new HashMap<>();
                recordValue.put("fullDocument", doc.getFullDocument());
                recordValue.put("ns", doc.getNamespace());
                recordValue.put("operation", doc.getOperationType());
                consume(new DocRecord(
                        Optional.of(doc.getDocumentKey().toJson()),
                        mapper.writeValueAsString(recordValue).getBytes(StandardCharsets.UTF_8)));
            } catch (JsonProcessingException e) {
                log.error("Processing doc from mongo", e);
            }
        }

        @Override
        public void onError(Throwable error) {
            log.error("Subscriber error", error);
        }

        @Override
        public void onComplete() {
            log.info("Subscriber complete");
        }
    });
}
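consume(...) hands each change event to Pulsar as a DocRecord. The actual Pulsar class is not shown here; a plausible minimal shape, assuming it is a simple value type implementing Pulsar's Record<byte[]>, would be:

// Hypothetical sketch of DocRecord; the real Pulsar class may differ.
private static class DocRecord implements Record<byte[]> {

    private final Optional<String> key;  // the change event's document key, serialized as JSON
    private final byte[] value;          // the serialized { fullDocument, ns, operation } map

    DocRecord(Optional<String> key, byte[] value) {
        this.key = key;
        this.value = value;
    }

    @Override
    public Optional<String> getKey() {
        return key;
    }

    @Override
    public byte[] getValue() {
        return value;
    }
}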
Use of com.mongodb.reactivestreams.client.MongoDatabase in project MongoSK by Romitou.
The class ExprMongoDatabase, method get:
@Override
protected MongoSKDatabase[] get(@Nonnull final Event e) {
    String databaseName = exprDatabaseName.getSingle(e);
    MongoSKServer mongoSKServer = exprMongoSKServer.getSingle(e);
    if (databaseName == null || mongoSKServer == null)
        return new MongoSKDatabase[0];
    MongoDatabase mongoDatabase = mongoSKServer.getMongoClient().getDatabase(databaseName);
    if (mongoDatabase == null)
        return new MongoSKDatabase[0];
    return new MongoSKDatabase[] { new MongoSKDatabase(mongoDatabase) };
}
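MongoSKDatabase is MongoSK's wrapper around the reactive MongoDatabase handle. A minimal sketch of such a wrapper is shown below; the real MongoSK class may carry additional state.

// Illustrative sketch of a wrapper like MongoSKDatabase.
public class MongoSKDatabase {

    private final MongoDatabase mongoDatabase;

    public MongoSKDatabase(MongoDatabase mongoDatabase) {
        this.mongoDatabase = mongoDatabase;
    }

    public MongoDatabase getMongoDatabase() {
        return mongoDatabase;
    }

    public String getName() {
        return mongoDatabase.getName();
    }
}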
Use of com.mongodb.reactivestreams.client.MongoDatabase in project ditto by eclipse.
The class ThingPersistenceOperationsActor, method props:
/**
 * Create Props of this actor.
 *
 * @param pubSubMediator Akka pub-sub mediator.
 * @param mongoDbConfig the MongoDB configuration settings.
 * @param config Configuration with info about event journal, snapshot store and database.
 * @param persistenceOperationsConfig the persistence operations config.
 * @return a Props object.
 */
public static Props props(final ActorRef pubSubMediator, final MongoDbConfig mongoDbConfig,
        final Config config, final PersistenceOperationsConfig persistenceOperationsConfig) {
    return Props.create(ThingPersistenceOperationsActor.class, () -> {
        final MongoEventSourceSettings eventSourceSettings = MongoEventSourceSettings.fromConfig(config,
                ThingPersistenceActor.PERSISTENCE_ID_PREFIX, true,
                ThingPersistenceActor.JOURNAL_PLUGIN_ID, ThingPersistenceActor.SNAPSHOT_PLUGIN_ID);
        final MongoClientWrapper mongoClient = MongoClientWrapper.newInstance(mongoDbConfig);
        final MongoDatabase db = mongoClient.getDefaultDatabase();
        final NamespacePersistenceOperations namespaceOps = MongoNamespacePersistenceOperations.of(db, eventSourceSettings);
        return new ThingPersistenceOperationsActor(pubSubMediator, namespaceOps, mongoClient, persistenceOperationsConfig);
    });
}
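The Props returned here is only a recipe; the actor is created when the Props is handed to an actor factory. An illustrative usage sketch follows; the actor name and surrounding variables are assumptions, not Ditto's actual wiring.

// Illustrative only: variable names and the actor name are assumptions.
final Props props = ThingPersistenceOperationsActor.props(
        pubSubMediator, mongoDbConfig, config, persistenceOperationsConfig);
final ActorRef opsActor = actorSystem.actorOf(props, "thingPersistenceOperations");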