Example usage of com.mongodb.client.MongoCursor from the Apache NiFi project: the onTrigger method of the GetMongo class.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile input = null;
    if (context.hasIncomingConnection()) {
        input = session.get();
        // With no FlowFile we may still run if every incoming connection is a self-loop;
        // if any connection comes from another processor, a FlowFile is required.
        if (input == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    final ComponentLog logger = getLogger();

    // Every output FlowFile carries a JSON payload.
    final Map<String, String> attributes = new HashMap<>();
    attributes.put(CoreAttributes.MIME_TYPE.key(), "application/json");

    // Resolve the query, in priority order: explicit QUERY property,
    // match-all when there is no input FlowFile, otherwise the FlowFile content.
    final Document query;
    String queryStr;
    if (context.getProperty(QUERY).isSet()) {
        queryStr = context.getProperty(QUERY).evaluateAttributeExpressions(input).getValue();
        query = Document.parse(queryStr);
    } else if (!context.getProperty(QUERY).isSet() && input == null) {
        queryStr = "{}";
        query = Document.parse("{}");
    } else {
        try {
            final ByteArrayOutputStream out = new ByteArrayOutputStream();
            session.exportTo(input, out);
            out.close();
            // Decode explicitly as UTF-8 instead of relying on the platform default charset.
            queryStr = out.toString("UTF-8");
            query = Document.parse(queryStr);
        } catch (Exception ex) {
            logger.error("Error reading flowfile", ex);
            if (input != null) {
                // Likely culprit is a bad query
                session.transfer(input, REL_FAILURE);
                return;
            } else {
                throw new ProcessException(ex);
            }
        }
    }

    // Optionally record the executed query as an attribute on the output FlowFiles.
    if (context.getProperty(QUERY_ATTRIBUTE).isSet()) {
        final String queryAttr = context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(input).getValue();
        attributes.put(queryAttr, queryStr);
    }

    final Document projection = context.getProperty(PROJECTION).isSet()
            ? Document.parse(context.getProperty(PROJECTION).evaluateAttributeExpressions(input).getValue()) : null;
    final Document sort = context.getProperty(SORT).isSet()
            ? Document.parse(context.getProperty(SORT).evaluateAttributeExpressions(input).getValue()) : null;
    final String jsonTypeSetting = context.getProperty(JSON_TYPE).getValue();
    final String usePrettyPrint = context.getProperty(USE_PRETTY_PRINTING).getValue();
    configureMapper(jsonTypeSetting);

    final MongoCollection<Document> collection = getCollection(context);

    try {
        final FindIterable<Document> it = query != null ? collection.find(query) : collection.find();
        if (projection != null) {
            it.projection(projection);
        }
        if (sort != null) {
            it.sort(sort);
        }
        if (context.getProperty(LIMIT).isSet()) {
            it.limit(context.getProperty(LIMIT).evaluateAttributeExpressions(input).asInteger());
        }
        if (context.getProperty(BATCH_SIZE).isSet()) {
            it.batchSize(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions(input).asInteger());
        }

        final MongoCursor<Document> cursor = it.iterator();
        try {
            if (context.getProperty(RESULTS_PER_FLOWFILE).isSet()) {
                // Batch mode: accumulate up to "ceiling" documents per output FlowFile.
                final int ceiling = context.getProperty(RESULTS_PER_FLOWFILE).evaluateAttributeExpressions(input).asInteger();
                List<Document> batch = new ArrayList<>();
                while (cursor.hasNext()) {
                    batch.add(cursor.next());
                    if (batch.size() == ceiling) {
                        try {
                            if (logger.isDebugEnabled()) {
                                logger.debug("Writing batch...");
                            }
                            final String payload = buildBatch(batch, jsonTypeSetting, usePrettyPrint);
                            writeBatch(payload, null, context, session, attributes, REL_SUCCESS);
                            batch = new ArrayList<>();
                        } catch (Exception ex) {
                            logger.error("Error building batch", ex);
                        }
                    }
                }
                // Flush any documents that did not fill a complete batch.
                if (!batch.isEmpty()) {
                    try {
                        writeBatch(buildBatch(batch, jsonTypeSetting, usePrettyPrint), null, context, session, attributes, REL_SUCCESS);
                    } catch (Exception ex) {
                        logger.error("Error sending remainder of batch", ex);
                    }
                }
            } else {
                // One document per output FlowFile.
                while (cursor.hasNext()) {
                    FlowFile flowFile = session.create();
                    flowFile = session.write(flowFile, out -> {
                        final String json;
                        if (jsonTypeSetting.equals(JSON_TYPE_STANDARD)) {
                            json = getObjectWriter(mapper, usePrettyPrint).writeValueAsString(cursor.next());
                        } else {
                            json = cursor.next().toJson();
                        }
                        out.write(json.getBytes(context.getProperty(CHARSET).evaluateAttributeExpressions().getValue()));
                    });

                    flowFile = session.putAllAttributes(flowFile, attributes);
                    session.getProvenanceReporter().receive(flowFile, getURI(context));
                    session.transfer(flowFile, REL_SUCCESS);
                }
            }

            // Route the triggering FlowFile, if any, to the "original" relationship.
            if (input != null) {
                session.transfer(input, REL_ORIGINAL);
            }
        } finally {
            cursor.close();
        }
    } catch (final RuntimeException e) {
        if (input != null) {
            session.transfer(input, REL_FAILURE);
        }
        context.yield();
        logger.error("Failed to execute query {} due to {}", new Object[] { query, e }, e);
    }
}
Example usage of com.mongodb.client.MongoCursor from the Apache NiFi project: the onTrigger method of the RunMongoAggregation class.
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = null;
    if (context.hasIncomingConnection()) {
        flowFile = session.get();
        // With no FlowFile we may still run if every incoming connection is a self-loop;
        // if any connection comes from another processor, a FlowFile is required.
        if (flowFile == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    final String query = context.getProperty(QUERY).evaluateAttributeExpressions(flowFile).getValue();
    final String queryAttr = context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(flowFile).getValue();
    final Integer batchSize = context.getProperty(BATCH_SIZE).asInteger();
    final Integer resultsPerFlowfile = context.getProperty(RESULTS_PER_FLOWFILE).asInteger();

    // Optionally record the executed query as an attribute on the result FlowFiles.
    final Map<String, String> attrs = new HashMap<>();
    if (queryAttr != null && queryAttr.trim().length() > 0) {
        attrs.put(queryAttr, query);
    }

    final MongoCollection<Document> collection = getCollection(context);
    MongoCursor<Document> iter = null;
    try {
        final List<Bson> aggQuery = buildAggregationQuery(query);
        final AggregateIterable<Document> it = collection.aggregate(aggQuery);
        it.batchSize(batchSize != null ? batchSize : 1);
        iter = it.iterator();

        List<Document> batch = new ArrayList<>();
        while (iter.hasNext()) {
            batch.add(iter.next());
            // Null-check before comparing: "batch.size() == resultsPerFlowfile" would
            // throw an NPE on unboxing when RESULTS_PER_FLOWFILE is not set.
            if (resultsPerFlowfile != null && batch.size() == resultsPerFlowfile) {
                writeBatch(buildBatch(batch), flowFile, context, session, attrs, REL_RESULTS);
                batch = new ArrayList<>();
            }
        }
        // Flush any documents that did not fill a complete batch.
        if (!batch.isEmpty()) {
            writeBatch(buildBatch(batch), flowFile, context, session, attrs, REL_RESULTS);
        }

        // Route the triggering FlowFile, if any, to the "original" relationship.
        if (flowFile != null) {
            session.transfer(flowFile, REL_ORIGINAL);
        }
    } catch (Exception e) {
        getLogger().error("Error running MongoDB aggregation query.", e);
        if (flowFile != null) {
            session.transfer(flowFile, REL_FAILURE);
        }
    } finally {
        if (iter != null) {
            iter.close();
        }
    }
}
Example usage of com.mongodb.client.MongoCursor from the LuckPerms project (by lucko): the savePlayerData method of the MongoDao class.
@Override
public PlayerSaveResult savePlayerData(UUID uuid, String username) {
    // Usernames are stored lowercased for case-insensitive lookup.
    username = username.toLowerCase();
    MongoCollection<Document> c = this.database.getCollection(this.prefix + "uuid");

    // find any existing mapping
    String oldUsername = getPlayerName(uuid);

    // do the insert (upsert so a missing mapping is created)
    if (!username.equalsIgnoreCase(oldUsername)) {
        c.replaceOne(new Document("_id", uuid), new Document("_id", uuid).append("name", username), new UpdateOptions().upsert(true));
    }

    PlayerSaveResult result = PlayerSaveResult.determineBaseResult(username, oldUsername);

    // Collect ALL uuids currently mapped to this username - not just the first match -
    // so that every conflicting mapping can be removed below.
    Set<UUID> conflicting = new HashSet<>();
    try (MongoCursor<Document> cursor = c.find(new Document("name", username)).iterator()) {
        while (cursor.hasNext()) {
            conflicting.add(cursor.next().get("_id", UUID.class));
        }
    }
    conflicting.remove(uuid);

    if (!conflicting.isEmpty()) {
        // remove the mappings for conflicting uuids
        // Filters.or is required here: and-ing several "_id" equality filters
        // can never match more than one document, since _id is unique.
        c.deleteMany(Filters.or(conflicting.stream().map(u -> Filters.eq("_id", u)).collect(Collectors.toList())));
        result = result.withOtherUuidsPresent(conflicting);
    }

    return result;
}
Example usage of com.mongodb.client.MongoCursor from the Apache Jackrabbit Oak project: the getIteratorOverDocsWithBinaries method of the MongoBlobReferenceIterator class.
@Override
public Iterator<NodeDocument> getIteratorOverDocsWithBinaries() {
    // Select only documents flagged as containing binary content.
    final Bson query = Filters.eq(NodeDocument.HAS_BINARY_FLAG, NodeDocument.HAS_BINARY_VAL);

    // TODO It currently prefers secondary. Would that be Ok?
    final MongoCursor<BasicDBObject> cursor = documentStore.getDBCollection(NODES)
            .withReadPreference(documentStore.getConfiguredReadPreference(NODES))
            .find(query)
            .iterator();

    // Convert raw DB objects to NodeDocuments lazily, and tie the cursor's
    // lifetime to the returned iterator so it is closed with it.
    final Iterator<NodeDocument> docs = transform(cursor, raw -> documentStore.convertFromDBObject(NODES, raw));
    return CloseableIterator.wrap(docs, cursor);
}
Example usage of com.mongodb.client.MongoCursor from the Apache Jackrabbit Oak project: the isMajorityReadConcernEnabled method of the MongoStatus class.
/**
 * Check if the majority read concern is enabled and can be used for queries.
 *
 * @return true if the majority read concern is enabled
 */
public boolean isMajorityReadConcernEnabled() {
    // Lazily probed once; the result is cached in majorityReadConcernEnabled.
    if (majorityReadConcernEnabled == null) {
        // Mongo API doesn't seem to provide an option to check whether the
        // majority read concern has been enabled, so we have to try to use
        // it and optionally catch the exception.
        MongoCollection<?> emptyCollection = client.getDatabase(dbName)
                .getCollection("emptyCollection-" + System.currentTimeMillis());
        // Parameterize the cursor (wildcard) instead of using the raw type;
        // try-with-resources guarantees the probe cursor is closed.
        try (MongoCursor<?> cursor = emptyCollection
                .withReadConcern(ReadConcern.MAJORITY)
                .find(new BasicDBObject())
                .iterator()) {
            // Force the query to actually execute on the server.
            cursor.hasNext();
            majorityReadConcernEnabled = true;
        } catch (MongoQueryException | IllegalArgumentException e) {
            majorityReadConcernEnabled = false;
        }
    }
    return majorityReadConcernEnabled;
}
Aggregations