Use of com.mongodb.reactivestreams.client.MongoDatabase in project incubator-pulsar by apache.
The open method of the MongoSink class.
@Override
public void open(Map<String, Object> config, SinkContext sinkContext) throws Exception {
    log.info("Open MongoDB Sink");
    mongoConfig = MongoConfig.load(config);
    mongoConfig.validate(true, true);

    // Use the injected client provider if one is set, otherwise build a client
    // from the configured connection URI.
    if (clientProvider != null) {
        mongoClient = clientProvider.get();
    } else {
        mongoClient = MongoClients.create(mongoConfig.getMongoUri());
    }

    final MongoDatabase db = mongoClient.getDatabase(mongoConfig.getDatabase());
    collection = db.getCollection(mongoConfig.getCollection());

    // Buffer incoming records and flush them on a fixed schedule.
    incomingList = Lists.newArrayList();
    flushExecutor = Executors.newScheduledThreadPool(1);
    flushExecutor.scheduleAtFixedRate(() -> flush(),
            mongoConfig.getBatchTimeMs(), mongoConfig.getBatchTimeMs(), TimeUnit.MILLISECONDS);
}
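The scheduled task drains whatever has accumulated in incomingList and writes it to MongoDB in one batch. The connector's actual flush() is not part of this snippet; the following is only a minimal sketch of such a method, assuming incomingList holds Pulsar Record<byte[]> entries whose values are JSON documents and that the batch is written with insertMany.

// Illustrative sketch only; the connector's real flush logic may differ.
private void flush() {
    final List<Record<byte[]>> toFlush;
    synchronized (this) {
        if (incomingList.isEmpty()) {
            return;
        }
        // Swap out the buffer so new records can keep accumulating while we write.
        toFlush = incomingList;
        incomingList = Lists.newArrayList();
    }

    final List<Document> docs = toFlush.stream()
            .map(record -> Document.parse(new String(record.getValue(), StandardCharsets.UTF_8)))
            .collect(Collectors.toList());

    collection.insertMany(docs).subscribe(new Subscriber<InsertManyResult>() {
        @Override
        public void onSubscribe(Subscription s) {
            s.request(1);
        }

        @Override
        public void onNext(InsertManyResult result) {
            // Acknowledge the whole batch once the insert result arrives.
            toFlush.forEach(Record::ack);
        }

        @Override
        public void onError(Throwable t) {
            log.error("Batch insert failed", t);
            toFlush.forEach(Record::fail);
        }

        @Override
        public void onComplete() {
        }
    });
}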
Use of com.mongodb.reactivestreams.client.MongoDatabase in project incubator-pulsar by apache.
The open method of the MongoSource class.
@Override
public void open(Map<String, Object> config, SourceContext sourceContext) throws Exception {
    log.info("Open MongoDB Source");
    mongoConfig = MongoConfig.load(config);
    mongoConfig.validate(false, false);

    if (clientProvider != null) {
        mongoClient = clientProvider.get();
    } else {
        mongoClient = MongoClients.create(mongoConfig.getMongoUri());
    }

    if (StringUtils.isEmpty(mongoConfig.getDatabase())) {
        // Watch all databases
        log.info("Watch all");
        stream = mongoClient.watch();
    } else {
        final MongoDatabase db = mongoClient.getDatabase(mongoConfig.getDatabase());
        if (StringUtils.isEmpty(mongoConfig.getCollection())) {
            // Watch all collections in a database
            log.info("Watch db: {}", db.getName());
            stream = db.watch();
        } else {
            // Watch a collection
            final MongoCollection<Document> collection = db.getCollection(mongoConfig.getCollection());
            log.info("Watch collection: {} {}", db.getName(), mongoConfig.getCollection());
            stream = collection.watch();
        }
    }

    stream.batchSize(mongoConfig.getBatchSize()).fullDocument(FullDocument.UPDATE_LOOKUP);
    stream.subscribe(new Subscriber<ChangeStreamDocument<Document>>() {

        private ObjectMapper mapper = new ObjectMapper();

        private Subscription subscription;

        @Override
        public void onSubscribe(Subscription subscription) {
            this.subscription = subscription;
            this.subscription.request(Integer.MAX_VALUE);
        }

        @Override
        public void onNext(ChangeStreamDocument<Document> doc) {
            try {
                log.info("New change doc: {}", doc);
                // Build a record with the essential information
                final Map<String, Object> recordValue = new HashMap<>();
                recordValue.put("fullDocument", doc.getFullDocument());
                recordValue.put("ns", doc.getNamespace());
                recordValue.put("operation", doc.getOperationType());
                consume(new DocRecord(
                        Optional.of(doc.getDocumentKey().toJson()),
                        mapper.writeValueAsString(recordValue).getBytes(StandardCharsets.UTF_8)));
            } catch (JsonProcessingException e) {
                log.error("Processing doc from mongo", e);
            }
        }

        @Override
        public void onError(Throwable error) {
            log.error("Subscriber error", error);
        }

        @Override
        public void onComplete() {
            log.info("Subscriber complete");
        }
    });
}
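Each change event is forwarded to Pulsar through consume(new DocRecord(...)). The DocRecord class itself is not shown in this snippet; the sketch below is only an assumed minimal shape for it, a Record<byte[]> that pairs the change document's key with its serialized JSON payload, and the connector's actual class may carry more than this.

// Illustrative sketch only; assumes DocRecord just wraps a key and a byte[] value.
private static class DocRecord implements Record<byte[]> {
    private final Optional<String> key;
    private final byte[] value;

    DocRecord(Optional<String> key, byte[] value) {
        this.key = key;
        this.value = value;
    }

    @Override
    public Optional<String> getKey() {
        return key;
    }

    @Override
    public byte[] getValue() {
        return value;
    }
}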
Use of com.mongodb.reactivestreams.client.MongoDatabase in project pinpoint by pinpoint-apm.
The insertOn method of the MongoReactivePluginController class.
@GetMapping(value = "/mongodb/insertOne")
public String insertOn() throws Throwable {
    MongoDatabase db = getDatabase();
    MongoCollection<Document> collection = db.getCollection("test");

    Document canvas = new Document("item", "canvas")
            .append("qty", 100)
            .append("tags", Collections.singletonList("cotton"));
    Document size = new Document("h", 28).append("w", 35.5).append("uom", "cm");
    canvas.put("size", size);

    ObservableSubscriber<InsertOneResult> sub = new ObservableSubscriber<>();
    collection.insertOne(canvas).subscribe(sub);
    sub.waitForThenCancel(1);

    return "Insert=" + sub.getResults();
}
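The controller relies on an ObservableSubscriber helper with waitForThenCancel and getResults methods that this snippet does not show. The sketch below is only a guess at a minimal version of such a helper, built on the org.reactivestreams types: it collects emitted items and blocks until the requested number has arrived or the stream terminates. Pinpoint's actual class may be implemented quite differently.

// Illustrative sketch only: a blocking Reactive Streams subscriber that collects results.
public class ObservableSubscriber<T> implements Subscriber<T> {
    private final List<T> results = Collections.synchronizedList(new ArrayList<>());
    private final CountDownLatch completed = new CountDownLatch(1);
    private volatile Subscription subscription;

    @Override
    public void onSubscribe(Subscription s) {
        this.subscription = s;
        s.request(Long.MAX_VALUE);
    }

    @Override
    public void onNext(T t) {
        results.add(t);
    }

    @Override
    public void onError(Throwable t) {
        completed.countDown();
    }

    @Override
    public void onComplete() {
        completed.countDown();
    }

    public List<T> getResults() {
        return results;
    }

    public void waitForThenCancel(int expected) throws InterruptedException {
        // Poll until enough results have arrived or the publisher has terminated.
        while (results.size() < expected && completed.getCount() > 0) {
            completed.await(100, TimeUnit.MILLISECONDS);
        }
        if (subscription != null) {
            subscription.cancel();
        }
    }
}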
Use of com.mongodb.reactivestreams.client.MongoDatabase in project mongo-java-driver by mongodb.
The main method of the QuickTour class.
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;
    if (args.length == 0) {
        // connect to the local database server
        mongoClient = MongoClients.create();
    } else {
        mongoClient = MongoClients.create(args[0]);
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");

    // get a handle to the "test" collection
    final MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    ObservableSubscriber<Void> successSubscriber = new OperationSubscriber<>();
    collection.drop().subscribe(successSubscriber);
    successSubscriber.await();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB")
            .append("type", "database")
            .append("count", 1)
            .append("info", new Document("x", 203).append("y", 102));
    ObservableSubscriber<InsertOneResult> insertOneSubscriber = new OperationSubscriber<>();
    collection.insertOne(doc).subscribe(insertOneSubscriber);
    insertOneSubscriber.await();

    // get it (since it's the only one in there since we dropped the rest earlier on)
    ObservableSubscriber<Document> documentSubscriber = new PrintDocumentSubscriber();
    collection.find().first().subscribe(documentSubscriber);
    documentSubscriber.await();

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    ObservableSubscriber<InsertManyResult> insertManySubscriber = new OperationSubscriber<>();
    collection.insertMany(documents).subscribe(insertManySubscriber);
    insertManySubscriber.await();

    // find first
    documentSubscriber = new PrintDocumentSubscriber();
    collection.find().first().subscribe(documentSubscriber);
    documentSubscriber.await();

    // lets get all the documents in the collection and print them out
    documentSubscriber = new PrintDocumentSubscriber();
    collection.find().subscribe(documentSubscriber);
    documentSubscriber.await();

    // Query Filters
    // now use a query to get 1 document out
    documentSubscriber = new PrintDocumentSubscriber();
    collection.find(eq("i", 71)).first().subscribe(documentSubscriber);
    documentSubscriber.await();

    // now use a range query to get a larger subset
    documentSubscriber = new PrintDocumentSubscriber();
    collection.find(gt("i", 50)).subscribe(documentSubscriber);
    documentSubscriber.await();

    // range query with multiple constraints
    documentSubscriber = new PrintDocumentSubscriber();
    collection.find(and(gt("i", 50), lte("i", 100))).subscribe(documentSubscriber);
    documentSubscriber.await();

    // Sorting
    documentSubscriber = new PrintDocumentSubscriber();
    collection.find(exists("i")).sort(descending("i")).first().subscribe(documentSubscriber);
    documentSubscriber.await();

    // Projection
    documentSubscriber = new PrintDocumentSubscriber();
    collection.find().projection(excludeId()).first().subscribe(documentSubscriber);
    documentSubscriber.await();

    // Aggregation
    documentSubscriber = new PrintDocumentSubscriber();
    collection.aggregate(asList(match(gt("i", 0)), project(Document.parse("{ITimes10: {$multiply: ['$i', 10]}}")))).subscribe(documentSubscriber);
    documentSubscriber.await();

    documentSubscriber = new PrintDocumentSubscriber();
    collection.aggregate(singletonList(group(null, sum("total", "$i")))).first().subscribe(documentSubscriber);
    documentSubscriber.await();

    // Update One
    ObservableSubscriber<UpdateResult> updateSubscriber = new OperationSubscriber<>();
    collection.updateOne(eq("i", 10), set("i", 110)).subscribe(updateSubscriber);
    updateSubscriber.await();

    // Update Many
    updateSubscriber = new OperationSubscriber<>();
    collection.updateMany(lt("i", 100), inc("i", 100)).subscribe(updateSubscriber);
    updateSubscriber.await();

    // Delete One
    ObservableSubscriber<DeleteResult> deleteSubscriber = new OperationSubscriber<>();
    collection.deleteOne(eq("i", 110)).subscribe(deleteSubscriber);
    deleteSubscriber.await();

    // Delete Many
    deleteSubscriber = new OperationSubscriber<>();
    collection.deleteMany(gte("i", 100)).subscribe(deleteSubscriber);
    deleteSubscriber.await();

    // Create Index
    OperationSubscriber<String> createIndexSubscriber = new PrintSubscriber<>("Create Index Result: %s");
    collection.createIndex(new Document("i", 1)).subscribe(createIndexSubscriber);
    createIndexSubscriber.await();

    // Clean up
    successSubscriber = new OperationSubscriber<>();
    collection.drop().subscribe(successSubscriber);
    successSubscriber.await();

    // release resources
    mongoClient.close();
}
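The tour leans on subscriber helper classes from the driver's example sources (OperationSubscriber, PrintDocumentSubscriber, PrintSubscriber, all built on an ObservableSubscriber base). The condensed sketch below collapses that hierarchy and only illustrates the behavior the tour depends on, collecting results and blocking in await() until the publisher terminates; treat it as an approximation rather than the driver's actual helper implementation.

// Illustrative sketch only, not the driver's shipped SubscriberHelpers.
public static class OperationSubscriber<T> implements Subscriber<T> {
    private final List<T> received = new ArrayList<>();
    private final CountDownLatch latch = new CountDownLatch(1);
    private volatile RuntimeException error;

    @Override
    public void onSubscribe(Subscription s) {
        s.request(Long.MAX_VALUE);
    }

    @Override
    public void onNext(T t) {
        received.add(t);
    }

    @Override
    public void onError(Throwable t) {
        error = new RuntimeException(t);
        latch.countDown();
    }

    @Override
    public void onComplete() {
        latch.countDown();
    }

    // Block until the publisher completes (or errors), then return what was received.
    public List<T> await() {
        try {
            latch.await();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
        if (error != null) {
            throw error;
        }
        return received;
    }
}

// Prints each document as it arrives, in addition to collecting it.
public static class PrintDocumentSubscriber extends OperationSubscriber<Document> {
    @Override
    public void onNext(Document document) {
        super.onNext(document);
        System.out.println(document.toJson());
    }
}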
Use of com.mongodb.reactivestreams.client.MongoDatabase in project pulsar by yahoo.
The open method of the MongoSink class.
@Override
public void open(Map<String, Object> config, SinkContext sinkContext) throws Exception {
    log.info("Open MongoDB Sink");
    mongoConfig = MongoConfig.load(config);
    mongoConfig.validate(true, true);

    if (clientProvider != null) {
        mongoClient = clientProvider.get();
    } else {
        mongoClient = MongoClients.create(mongoConfig.getMongoUri());
    }

    final MongoDatabase db = mongoClient.getDatabase(mongoConfig.getDatabase());
    collection = db.getCollection(mongoConfig.getCollection());

    incomingList = Lists.newArrayList();
    flushExecutor = Executors.newScheduledThreadPool(1);
    flushExecutor.scheduleAtFixedRate(() -> flush(),
            mongoConfig.getBatchTimeMs(), mongoConfig.getBatchTimeMs(), TimeUnit.MILLISECONDS);
}
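A sink opened this way also needs to release the scheduled executor and the Mongo client when it shuts down. The close() sketch below is only an assumption of what that teardown could look like (the timeout value is hypothetical), not the connector's actual implementation.

// Illustrative sketch only; the real connector's close() may differ.
@Override
public void close() throws Exception {
    // Stop the periodic flush task before releasing the client.
    if (flushExecutor != null) {
        flushExecutor.shutdown();
        flushExecutor.awaitTermination(10, TimeUnit.SECONDS); // hypothetical timeout
        flushExecutor = null;
    }
    if (mongoClient != null) {
        mongoClient.close();
        mongoClient = null;
    }
}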