Use of com.mongodb.DBCollection in project GNS by MobilityFirst — class MongoRecords, method lookupEntireRecord.
/**
 * Looks up the entire record stored under {@code guid} in the given collection.
 *
 * @param collectionName name of the mongo collection to query
 * @param guid           value of the collection's primary key to look up
 * @param explain        if true, prints the query plan to stdout before reading
 * @return the record as a {@link JSONObject}, or {@code null} if the stored
 *         record could not be parsed as JSON (parse failures are logged, not thrown)
 * @throws RecordNotFoundException    if no record exists for {@code guid}
 * @throws FailedDBOperationException if the underlying mongo operation fails
 */
private JSONObject lookupEntireRecord(String collectionName, String guid, boolean explain) throws RecordNotFoundException, FailedDBOperationException {
    long startTime = System.currentTimeMillis();
    db.requestStart();
    try {
        String primaryKey = mongoCollectionSpecs.getCollectionSpec(collectionName).getPrimaryKey().getName();
        db.requestEnsureConnection();
        DBCollection collection = db.getCollection(collectionName);
        BasicDBObject query = new BasicDBObject(primaryKey, guid);
        // Fix: the original leaked the DBCursor; close it deterministically.
        try (DBCursor cursor = collection.find(query)) {
            if (explain) {
                System.out.println(cursor.explain().toString());
            }
            if (!cursor.hasNext()) {
                throw new RecordNotFoundException(guid);
            }
            DBObject obj = cursor.next();
            // arun: optimized for the common case of Map — copy the map directly
            // instead of round-tripping through a JSON string.
            @SuppressWarnings("unchecked") JSONObject json = obj instanceof Map ? DiskMapRecords.recursiveCopyMap((Map<String, ?>) obj) : new JSONObject(obj.toString());
            // instrumentation
            DelayProfiler.updateDelay("lookupEntireRecord", startTime);
            // older style: log lookups slower than 20 ms
            int lookupTime = (int) (System.currentTimeMillis() - startTime);
            if (lookupTime > 20) {
                DatabaseConfig.getLogger().log(Level.FINE, "{0} mongoLookup Long delay {1}", new Object[] { dbName, lookupTime });
            }
            return json;
        }
    } catch (JSONException e) {
        // NOTE(review): callers apparently tolerate null on parse failure — the
        // exception is logged and swallowed rather than rethrown. Confirm callers
        // handle a null return.
        DatabaseConfig.getLogger().log(Level.WARNING, "{0} Unable to parse JSON: {1}", new Object[] { dbName, e.getMessage() });
        return null;
    } catch (MongoException e) {
        DatabaseConfig.getLogger().log(Level.FINE, "{0} lookupEntireRecord failed: {1}", new Object[] { dbName, e.getMessage() });
        throw new FailedDBOperationException(collectionName, guid, "Original mongo exception:" + e.getMessage());
    } finally {
        db.requestDone();
    }
}
Use of com.mongodb.DBCollection in project jackrabbit-oak by apache — class MongoDocumentStore, method remove.
/**
 * Removes the documents whose ids appear in {@code toRemove}, each guarded by
 * its per-id conditions. Deletes are issued in $or batches of at most
 * IN_CLAUSE_BATCH_SIZE; the node cache is invalidated for each batch even when
 * the delete fails.
 *
 * @return the number of documents actually removed
 */
@Override
public <T extends Document> int remove(Collection<T> collection, Map<String, Map<Key, Condition>> toRemove) {
    log("remove", toRemove);
    int removedCount = 0;
    DBCollection dbCollection = getDBCollection(collection);
    Stopwatch watch = startWatch();
    try {
        List<String> idsInBatch = Lists.newArrayList();
        List<DBObject> clauses = Lists.newArrayList();
        Iterator<Entry<String, Map<Key, Condition>>> entries = toRemove.entrySet().iterator();
        while (entries.hasNext()) {
            Entry<String, Map<Key, Condition>> e = entries.next();
            idsInBatch.add(e.getKey());
            clauses.add(createQueryForUpdate(e.getKey(), e.getValue()).get());
            // Flush when the batch is full or this was the last entry.
            if (!entries.hasNext() || clauses.size() == IN_CLAUSE_BATCH_SIZE) {
                DBObject orQuery = new BasicDBObject();
                orQuery.put(QueryOperators.OR, clauses);
                try {
                    removedCount += dbCollection.remove(orQuery).getN();
                } catch (Exception ex) {
                    throw DocumentStoreException.convert(ex, "Remove failed for " + clauses);
                } finally {
                    // Invalidate even on failure: we don't know which ids were removed.
                    if (collection == Collection.NODES) {
                        invalidateCache(idsInBatch);
                    }
                }
                idsInBatch.clear();
                clauses.clear();
            }
        }
    } finally {
        stats.doneRemove(watch.elapsed(TimeUnit.NANOSECONDS), collection, removedCount);
    }
    return removedCount;
}
Use of com.mongodb.DBCollection in project jackrabbit-oak by apache — class MongoDocumentStore, method sendBulkUpdate.
/**
 * Sends the given update operations as a single unordered bulk write, each one
 * as a conditional upsert guarded by the document's known _modCount (or its
 * absence, for documents not previously seen).
 *
 * @param collection the store collection being updated
 * @param updateOps  the updates to apply
 * @param oldDocs    previously read versions of the documents, keyed by id
 * @return the ids that failed their guard and the ids that were upserted
 */
private <T extends Document> BulkUpdateResult sendBulkUpdate(Collection<T> collection, java.util.Collection<UpdateOp> updateOps, Map<String, T> oldDocs) {
    DBCollection dbCollection = getDBCollection(collection);
    BulkWriteOperation bulkOp = dbCollection.initializeUnorderedBulkOperation();
    // Bulk write errors report a positional index; remember which id sat where.
    String[] idsByPosition = new String[updateOps.size()];
    int pos = 0;
    for (UpdateOp op : updateOps) {
        String id = op.getId();
        QueryBuilder selector = createQueryForUpdate(id, op.getConditions());
        T previous = oldDocs.get(id);
        boolean isNew = previous == null || previous == NodeDocument.NULL;
        if (isNew) {
            // Unknown document: only match if nobody created it meanwhile.
            selector.and(Document.MOD_COUNT).exists(false);
        } else {
            // Known document: only match if it is unchanged since we read it.
            selector.and(Document.MOD_COUNT).is(previous.getModCount());
        }
        DBObject modification = createUpdate(op, isNew);
        bulkOp.find(selector.get()).upsert().updateOne(modification);
        idsByPosition[pos++] = id;
    }
    Set<String> failedUpdates = new HashSet<String>();
    Set<String> upserts = new HashSet<String>();
    BulkWriteResult outcome;
    try {
        outcome = bulkOp.execute();
    } catch (BulkWriteException e) {
        // Partial failure: keep the successful part, record the failed ids.
        outcome = e.getWriteResult();
        for (BulkWriteError error : e.getWriteErrors()) {
            failedUpdates.add(idsByPosition[error.getIndex()]);
        }
    }
    for (BulkWriteUpsert upsert : outcome.getUpserts()) {
        upserts.add(idsByPosition[upsert.getIndex()]);
    }
    return new BulkUpdateResult(failedUpdates, upserts);
}
Use of com.mongodb.DBCollection in project jackrabbit-oak by apache — class MongoDocumentStoreHelper, method repair.
/**
 * Repairs the _commitRoot map of the node document at {@code path} by dropping
 * entries for revisions that changed nothing on the document. The write is
 * guarded by the document's _modCount so a concurrent update aborts the repair.
 *
 * @param store the document store to repair in
 * @param path  the node path whose document is repaired
 */
public static void repair(MongoDocumentStore store, String path) {
    DBCollection col = store.getDBCollection(NODES);
    String id = Utils.getIdFromPath(path);
    NodeDocument doc = store.find(NODES, id);
    if (doc == null) {
        System.out.println("No document for path " + path);
        return;
    }
    // Gather every revision that actually touched a property or deletion entry.
    Set<Revision> touched = Sets.newHashSet();
    for (String name : doc.keySet()) {
        if (Utils.isPropertyName(name) || NodeDocument.isDeletedEntry(name)) {
            touched.addAll(NodeDocumentHelper.getLocalMap(doc, name).keySet());
        }
    }
    // Keep only _commitRoot entries whose revision changed something.
    SortedMap<Revision, String> keptCommitRoot = Maps.newTreeMap(NodeDocumentHelper.getLocalCommitRoot(doc));
    boolean hadStaleEntries = keptCommitRoot.keySet().retainAll(touched);
    if (!hadStaleEntries) {
        System.out.println("Nothing to repair on " + path);
        return;
    }
    Number modCount = doc.getModCount();
    if (modCount == null) {
        System.err.println("Document does not have a modCount " + path);
        return;
    }
    // Match on id AND the modCount we read, so a concurrent writer wins.
    DBObject selector = QueryBuilder.start(Document.ID).is(id).and(Document.MOD_COUNT).is(modCount).get();
    DBObject cleanedCommitRoot = new BasicDBObject();
    for (Map.Entry<Revision, String> e : keptCommitRoot.entrySet()) {
        cleanedCommitRoot.put(e.getKey().toString(), e.getValue());
    }
    DBObject update = new BasicDBObject();
    update.put("$set", new BasicDBObject(NodeDocumentHelper.commitRoot(), cleanedCommitRoot));
    update.put("$inc", new BasicDBObject(Document.MOD_COUNT, 1L));
    WriteResult outcome = col.update(selector, update);
    if (outcome.getN() == 1) {
        int removed = NodeDocumentHelper.getLocalCommitRoot(doc).size() - keptCommitRoot.size();
        System.out.println("Removed " + removed + " _commitRoot entries on " + path);
    } else {
        System.out.println("Unable to repair " + path + " (concurrent update).");
    }
}
Use of com.mongodb.DBCollection in project jackrabbit-oak by apache — class MongoBlobStore, method countDeleteChunks.
/**
 * Deletes blob chunks and returns how many were removed.
 *
 * @param chunkIds            the chunk ids to delete, or {@code null} to
 *                            consider all chunks
 * @param maxLastModifiedTime if positive, only chunks last modified strictly
 *                            before this time are deleted
 * @return the number of chunks removed
 * @throws Exception on a database failure
 */
@Override
public long countDeleteChunks(List<String> chunkIds, long maxLastModifiedTime) throws Exception {
    DBCollection collection = getBlobCollection();
    QueryBuilder queryBuilder = new QueryBuilder();
    if (chunkIds != null) {
        queryBuilder = queryBuilder.and(MongoBlob.KEY_ID).in(chunkIds.toArray(new String[0]));
    }
    // Fix: apply the age filter independently of chunkIds. The original only
    // honored maxLastModifiedTime when chunkIds was non-null, so a null
    // chunkIds with a positive time bound removed every chunk regardless of age.
    if (maxLastModifiedTime > 0) {
        queryBuilder = queryBuilder.and(MongoBlob.KEY_LAST_MOD).lessThan(maxLastModifiedTime);
    }
    WriteResult result = collection.remove(queryBuilder.get());
    return result.getN();
}
Aggregations