Use of com.mongodb.MongoException in project GNS by MobilityFirst:
the class MongoRecords, method doUpdate.
/**
 * Applies the given field updates to the record identified by {@code guid} in the
 * named collection, using a single Mongo {@code $set} update. No-op when
 * {@code updates} is empty. Updates of the whole operation are timed via
 * {@code DelayProfiler}, and unusually slow updates (&gt; 10 ms) are logged at FINE.
 *
 * @param collectionName name of the Mongo collection to update
 * @param guid           value of the collection's primary key identifying the record
 * @param updates        field-name → new-value pairs to {@code $set} on the record
 * @throws FailedDBOperationException if the underlying Mongo update throws
 */
private void doUpdate(String collectionName, String guid, BasicDBObject updates) throws FailedDBOperationException {
    String primaryKey = mongoCollectionSpecs.getCollectionSpec(collectionName).getPrimaryKey().getName();
    DBCollection collection = db.getCollection(collectionName);
    BasicDBObject query = new BasicDBObject(primaryKey, guid);
    // BasicDBObject is a Map: isEmpty() says the same thing as keySet().size() > 0
    // without materializing the key set just to count it.
    if (!updates.isEmpty()) {
        long startTime = System.currentTimeMillis();
        try {
            collection.update(query, new BasicDBObject("$set", updates));
        } catch (MongoException e) {
            DatabaseConfig.getLogger().log(Level.SEVERE, "{0} doUpdate failed: {1}", new Object[] { dbName, e.getMessage() });
            throw new FailedDBOperationException(collectionName, updates.toString(), "Original mongo exception:" + e.getMessage());
        }
        DelayProfiler.updateDelay("mongoSetUpdate", startTime);
        long finishTime = System.currentTimeMillis();
        // 10 ms is the project's threshold for flagging a slow single-record update.
        if (finishTime - startTime > 10) {
            DatabaseConfig.getLogger().log(Level.FINE, "{0} Long latency mongoUpdate {1}", new Object[] { dbName, (finishTime - startTime) });
        }
    }
}
Use of com.mongodb.MongoException in project GNS by MobilityFirst:
the class MongoRecords, method selectRecords.
/**
 * Returns a cursor over all records in {@code collectionName} whose
 * {@code valuesMapField.key} field equals (or, for array-valued fields,
 * contains) {@code value}.
 *
 * @param collectionName name of the Mongo collection to query
 * @param valuesMapField the top-level field holding the values map
 * @param key            sub-key inside the values map to match on
 * @param value          value to match; for list-valued fields this matches any element
 * @param explain        if true, prints the query plan to stdout before returning
 * @return a cursor positioned over the matching records
 * @throws FailedDBOperationException if the underlying Mongo find throws
 */
private MongoRecordCursor selectRecords(String collectionName, ColumnField valuesMapField, String key, Object value, boolean explain) throws FailedDBOperationException {
    db.requestEnsureConnection();
    DBCollection collection = db.getCollection(collectionName);
    // note that if the value of the key in the database is a list (which it is) this
    // query will find all records where the value (a list) *contains* an element whose value is the value
    //
    //FROM MONGO DOC: Match an Array Element
    //Equality matches can specify a single element in the array to match. These specifications match
    //if the array contains at least one element with the specified value.
    //In the following example, the query matches all documents where the value of the field tags is
    //an array that contains 'fruit' as one of its elements:
    //db.inventory.find( { tags: 'fruit' } )
    String fieldName = valuesMapField.getName() + "." + key;
    BasicDBObject query = new BasicDBObject(fieldName, value);
    DBCursor cursor;
    try {
        cursor = collection.find(query);
    } catch (MongoException e) {
        // Log at SEVERE (was FINE) for consistency with the other failure paths in
        // this class (e.g. doUpdate); a failed query is an error, not trace output.
        DatabaseConfig.getLogger().log(Level.SEVERE, "{0} selectRecords failed: {1}", new Object[] { dbName, e.getMessage() });
        throw new FailedDBOperationException(collectionName, fieldName, "Original mongo exception:" + e.getMessage());
    }
    if (explain) {
        System.out.println(cursor.explain().toString());
    }
    return new MongoRecordCursor(cursor, mongoCollectionSpecs.getCollectionSpec(collectionName).getPrimaryKey());
}
Use of com.mongodb.MongoException in project graylog2-server by Graylog2:
the class ContentPackLoaderPeriodical, method doRun.
@Override
public void doRun() {
    // Cluster-wide bookkeeping: which packs were already loaded/applied, and file checksums.
    final ContentPackLoaderConfig contentPackLoaderConfig = clusterConfigService.getOrDefault(ContentPackLoaderConfig.class, ContentPackLoaderConfig.EMPTY);
    final List<Path> files = getFiles(contentPacksDir, FILENAME_GLOB);
    final Map<String, ConfigurationBundle> contentPacks = new HashMap<>(files.size());
    // Mutable working copies of the stored config; written back only if anything changed.
    final Set<String> loadedContentPacks = new HashSet<>(contentPackLoaderConfig.loadedContentPacks());
    final Set<String> appliedContentPacks = new HashSet<>(contentPackLoaderConfig.appliedContentPacks());
    final Map<String, String> checksums = new HashMap<>(contentPackLoaderConfig.checksums());
    // Phase 1: read, checksum, parse and insert each content pack file into the database.
    // Each failure path logs and skips the file rather than aborting the whole run.
    for (Path file : files) {
        final String fileName = file.getFileName().toString();
        LOG.debug("Reading content pack from {}", file);
        final byte[] bytes;
        try {
            bytes = Files.readAllBytes(file);
        } catch (IOException e) {
            LOG.warn("Couldn't read " + file + ". Skipping.", e);
            continue;
        }
        // NOTE(review): presumably escapes characters Mongo forbids in map keys — see encodeFileNameForMongo.
        final String encodedFileName = encodeFileNameForMongo(fileName);
        final String checksum = HASH_FUNCTION.hashBytes(bytes).toString();
        final String storedChecksum = checksums.get(encodedFileName);
        if (storedChecksum == null) {
            // First time we see this file: record its checksum.
            checksums.put(encodedFileName, checksum);
        } else if (!checksum.equals(storedChecksum)) {
            // File changed on disk since first recorded; refuse to reload it silently.
            LOG.info("Checksum of {} changed (expected: {}, actual: {})", file, storedChecksum, checksum);
            continue;
        }
        if (contentPackLoaderConfig.loadedContentPacks().contains(fileName)) {
            LOG.debug("Skipping already loaded content pack {} (SHA-256: {})", file, storedChecksum);
            continue;
        }
        LOG.debug("Parsing content pack from {}", file);
        final ConfigurationBundle contentPack;
        try {
            contentPack = objectMapper.readValue(bytes, ConfigurationBundle.class);
        } catch (IOException e) {
            LOG.warn("Couldn't parse content pack in file " + file + ". Skipping", e);
            continue;
        }
        // Reuse a pack with the same name/category if one already exists in the database.
        final ConfigurationBundle existingContentPack = bundleService.findByNameAndCategory(contentPack.getName(), contentPack.getCategory());
        if (existingContentPack != null) {
            LOG.debug("Content pack {}/{} already exists in database. Skipping.", contentPack.getCategory(), contentPack.getName());
            contentPacks.put(fileName, existingContentPack);
            continue;
        }
        final ConfigurationBundle insertedContentPack;
        try {
            insertedContentPack = bundleService.insert(contentPack);
            LOG.debug("Successfully inserted content pack {} into database with ID {}", file, insertedContentPack.getId());
        } catch (MongoException e) {
            LOG.error("Error while inserting content pack " + file + " into database. Skipping.", e);
            continue;
        }
        contentPacks.put(fileName, insertedContentPack);
        loadedContentPacks.add(fileName);
    }
    // Phase 2: apply packs configured for auto-load that have not been applied yet.
    LOG.debug("Applying selected content packs");
    for (Map.Entry<String, ConfigurationBundle> entry : contentPacks.entrySet()) {
        final String fileName = entry.getKey();
        final ConfigurationBundle contentPack = entry.getValue();
        if (contentPacksAutoLoad.contains(fileName) && appliedContentPacks.contains(fileName)) {
            LOG.debug("Content pack {}/{} ({}) already applied. Skipping.", contentPack.getName(), contentPack.getCategory(), fileName);
            continue;
        }
        if (contentPacksAutoLoad.contains(fileName)) {
            LOG.debug("Applying content pack {}/{} ({})", contentPack.getName(), contentPack.getCategory(), fileName);
            bundleService.applyConfigurationBundle(contentPack, userService.getAdminUser());
            appliedContentPacks.add(fileName);
        }
    }
    // Persist the updated bookkeeping only when something actually changed this run.
    final ContentPackLoaderConfig changedContentPackLoaderConfig = ContentPackLoaderConfig.create(loadedContentPacks, appliedContentPacks, checksums);
    if (!contentPackLoaderConfig.equals(changedContentPackLoaderConfig)) {
        clusterConfigService.write(changedContentPackLoaderConfig);
    }
}
Use of com.mongodb.MongoException in project graylog2-server by Graylog2:
the class Server, method annotateInjectorExceptions.
/**
 * Inspects injector startup error messages; when one is caused by a
 * {@link MongoException}, prints a prominent wall message explaining that
 * MongoDB is unreachable and terminates the process with exit code -1.
 */
@Override
protected void annotateInjectorExceptions(Collection<Message> messages) {
    super.annotateInjectorExceptions(messages);
    for (Message message : messages) {
        final Throwable cause = message.getCause();
        if (!(cause instanceof MongoException)) {
            continue; // only MongoDB connectivity failures are fatal here
        }
        final MongoException mongoError = (MongoException) cause;
        LOG.error(UI.wallString("Unable to connect to MongoDB. Is it running and the configuration correct?\n" + "Details: " + mongoError.getMessage()));
        System.exit(-1);
    }
}
Use of com.mongodb.MongoException in project jackrabbit-oak by apache:
the class MongoDocumentStore, method createOrUpdate.
/**
 * Try to apply all the {@link UpdateOp}s with as few MongoDB requests as
 * possible. The return value is the list of the old documents (before
 * applying changes). The mechanism is as follows:
 *
 * <ol>
 * <li>For each UpdateOp try to read the assigned document from the cache.
 * Add them to {@code oldDocs}.</li>
 * <li>Prepare a list of all UpdateOps that doesn't have their documents and
 * read them in one find() call. Add results to {@code oldDocs}.</li>
 * <li>Prepare a bulk update. For each remaining UpdateOp add following
 * operation:
 * <ul>
 * <li>Find document with the same id and the same mod_count as in the
 * {@code oldDocs}.</li>
 * <li>Apply changes from the UpdateOps.</li>
 * </ul>
 * </li>
 * <li>Execute the bulk update.</li>
 * </ol>
 *
 * If some other process modifies the target documents between points 2 and
 * 3, the mod_count will be increased as well and the bulk update will fail
 * for the concurrently modified docs. The method will then remove the
 * failed documents from the {@code oldDocs} and restart the process from
 * point 2. It will stop after 3rd iteration.
 */
@SuppressWarnings("unchecked")
@CheckForNull
@Override
public <T extends Document> List<T> createOrUpdate(Collection<T> collection, List<UpdateOp> updateOps) {
    log("createOrUpdate", updateOps);
    // Ops keyed by document id; duplicates (second+ op on the same id) are set
    // aside and applied one-by-one at the end to preserve per-document ordering.
    Map<String, UpdateOp> operationsToCover = new LinkedHashMap<String, UpdateOp>();
    List<UpdateOp> duplicates = new ArrayList<UpdateOp>();
    // Insertion-ordered so the returned list matches the order ops were seen.
    Map<UpdateOp, T> results = new LinkedHashMap<UpdateOp, T>();
    final Stopwatch watch = startWatch();
    try {
        for (UpdateOp updateOp : updateOps) {
            UpdateUtils.assertUnconditional(updateOp);
            // Work on copies so callers' UpdateOp instances are never mutated.
            UpdateOp clone = updateOp.copy();
            if (operationsToCover.containsKey(updateOp.getId())) {
                duplicates.add(clone);
            } else {
                operationsToCover.put(updateOp.getId(), clone);
            }
            results.put(clone, null);
        }
        Map<String, T> oldDocs = new HashMap<String, T>();
        if (collection == Collection.NODES) {
            // Step 1 of the javadoc: seed oldDocs from the cache where possible.
            oldDocs.putAll((Map<String, T>) getCachedNodes(operationsToCover.keySet()));
        }
        // Steps 2-4: retry the bulk update up to bulkRetries times; each pass
        // removes the ops that succeeded, leaving concurrently-modified ones.
        for (int i = 0; i <= bulkRetries; i++) {
            if (operationsToCover.size() <= 2) {
                // in bulk mode wouldn't result in any performance gain
                break;
            }
            for (List<UpdateOp> partition : Lists.partition(Lists.newArrayList(operationsToCover.values()), bulkSize)) {
                Map<UpdateOp, T> successfulUpdates = bulkUpdate(collection, partition, oldDocs);
                results.putAll(successfulUpdates);
                operationsToCover.values().removeAll(successfulUpdates.keySet());
            }
        }
        // if there are some changes left, we'll apply them one after another
        Iterator<UpdateOp> it = Iterators.concat(operationsToCover.values().iterator(), duplicates.iterator());
        while (it.hasNext()) {
            UpdateOp op = it.next();
            it.remove();
            T oldDoc = createOrUpdate(collection, op);
            if (oldDoc != null) {
                results.put(op, oldDoc);
            }
        }
    } catch (MongoException e) {
        // Translate the driver exception, attaching all affected document ids.
        throw handleException(e, collection, Iterables.transform(updateOps, new Function<UpdateOp, String>() {
            @Override
            public String apply(UpdateOp input) {
                return input.getId();
            }
        }));
    } finally {
        stats.doneCreateOrUpdate(watch.elapsed(TimeUnit.NANOSECONDS), collection, Lists.transform(updateOps, new Function<UpdateOp, String>() {
            @Override
            public String apply(UpdateOp input) {
                return input.getId();
            }
        }));
    }
    List<T> resultList = new ArrayList<T>(results.values());
    log("createOrUpdate returns", resultList);
    return resultList;
}
Aggregations