Example 16 with DBObject

use of com.mongodb.DBObject in project mongomvcc by igd-geo.

the class MongoDBVMaintenance method doFindDanglingCommits.

private long[] doFindDanglingCommits(long expiry, TimeUnit unit) {
    long maxTime = getMaxTime(expiry, unit);
    //load all commits which are older than the expiry time. mark them as dangling
    DBCollection collCommits = _db.getDB().getCollection(MongoDBConstants.COLLECTION_COMMITS);
    // also include commits without a timestamp
    DBCursor commits = collCommits.find(
            new BasicDBObject(MongoDBConstants.TIMESTAMP,
                    new BasicDBObject("$not", new BasicDBObject("$gte", maxTime))),
            new BasicDBObject(MongoDBConstants.ID, 1));
    IdSet danglingCommits = new IdHashSet();
    for (DBObject o : commits) {
        long cid = (Long) o.get(MongoDBConstants.ID);
        danglingCommits.add(cid);
    }
    //walk through all branches and eliminate commits which are not dangling
    DBCollection collBranches = _db.getDB().getCollection(MongoDBConstants.COLLECTION_BRANCHES);
    DBCursor branches = collBranches.find(new BasicDBObject(), new BasicDBObject(MongoDBConstants.CID, 1));
    VHistory history = _db.getHistory();
    IdSet alreadyCheckedCommits = new IdHashSet();
    for (DBObject o : branches) {
        long cid = (Long) o.get(MongoDBConstants.CID);
        while (cid != 0) {
            if (alreadyCheckedCommits.contains(cid)) {
                break;
            }
            alreadyCheckedCommits.add(cid);
            danglingCommits.remove(cid);
            cid = history.getParent(cid);
        }
    }
    //all remaining commits must be dangling
    return danglingCommits.toArray();
}
Also used : DBCollection(com.mongodb.DBCollection) BasicDBObject(com.mongodb.BasicDBObject) DBCursor(com.mongodb.DBCursor) VHistory(de.fhg.igd.mongomvcc.VHistory) IdSet(de.fhg.igd.mongomvcc.helper.IdSet) IdHashSet(de.fhg.igd.mongomvcc.helper.IdHashSet) BasicDBObject(com.mongodb.BasicDBObject) DBObject(com.mongodb.DBObject)
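
The $not/$gte combination in the query above matters: it matches commits whose timestamp is below maxTime and also commits that have no timestamp field at all, which a plain $lt would miss. Below is a minimal, self-contained sketch of the same query pattern with the legacy driver API; the database, collection, and field names ("example", "commits", "timestamp") are placeholders, not part of the mongomvcc code.

import java.util.concurrent.TimeUnit;

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;

public class ExpiredCommitsQuery {
    public static void main(String[] args) {
        // Anything older than one hour counts as expired in this sketch.
        long maxTime = System.currentTimeMillis() - TimeUnit.HOURS.toMillis(1);
        MongoClient client = new MongoClient();
        try {
            DBCollection commits = client.getDB("example").getCollection("commits");
            // $not/$gte also matches documents that lack the timestamp field entirely.
            DBObject query = new BasicDBObject("timestamp",
                    new BasicDBObject("$not", new BasicDBObject("$gte", maxTime)));
            // Project only the _id field, mirroring the projection on MongoDBConstants.ID above.
            DBObject projection = new BasicDBObject("_id", 1);
            DBCursor cursor = commits.find(query, projection);
            try {
                for (DBObject doc : cursor) {
                    System.out.println(doc.get("_id"));
                }
            } finally {
                cursor.close();
            }
        } finally {
            client.close();
        }
    }
}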

Example 17 with DBObject

use of com.mongodb.DBObject in project mongo-hadoop by mongodb.

the class GridFSInputFormat method getSplits.

@Override
public List<InputSplit> getSplits(final JobContext context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    DBCollection inputCollection = MongoConfigUtil.getInputCollection(conf);
    MongoClientURI inputURI = MongoConfigUtil.getInputURI(conf);
    GridFS gridFS = new GridFS(inputCollection.getDB(), inputCollection.getName());
    DBObject query = MongoConfigUtil.getQuery(conf);
    List<InputSplit> splits = new LinkedList<InputSplit>();
    for (GridFSDBFile file : gridFS.find(query)) {
        if (MongoConfigUtil.isGridFSWholeFileSplit(conf)) {
            // One split per file.
            splits.add(new GridFSSplit(inputURI, (ObjectId) file.getId(), (int) file.getChunkSize(), file.getLength()));
        } else {
            // One split per file chunk.
            for (int chunk = 0; chunk < file.numChunks(); ++chunk) {
                splits.add(new GridFSSplit(inputURI, (ObjectId) file.getId(), (int) file.getChunkSize(), file.getLength(), chunk));
            }
        }
    }
    LOG.debug("Found GridFS splits: " + splits);
    return splits;
}
Also used : DBCollection(com.mongodb.DBCollection) GridFSSplit(com.mongodb.hadoop.input.GridFSSplit) Configuration(org.apache.hadoop.conf.Configuration) ObjectId(org.bson.types.ObjectId) GridFSDBFile(com.mongodb.gridfs.GridFSDBFile) MongoClientURI(com.mongodb.MongoClientURI) GridFS(com.mongodb.gridfs.GridFS) DBObject(com.mongodb.DBObject) InputSplit(org.apache.hadoop.mapreduce.InputSplit) LinkedList(java.util.LinkedList)
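
For reference, the per-chunk branch above can be exercised outside Hadoop with the plain GridFS API. The sketch below simply counts how many chunk-sized splits a bucket would produce; the database name and bucket name ("example", "fs") are placeholders, and a real job takes them from the input collection configured through MongoConfigUtil.

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.MongoClient;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;

public class GridFSSplitEstimate {
    public static void main(String[] args) {
        MongoClient client = new MongoClient();
        try {
            DB db = client.getDB("example");
            GridFS gridFS = new GridFS(db, "fs");
            int splitCount = 0;
            // An empty query matches every file in the bucket.
            for (GridFSDBFile file : gridFS.find(new BasicDBObject())) {
                // One split per chunk, mirroring the non-whole-file branch in getSplits.
                splitCount += file.numChunks();
            }
            System.out.println("Estimated GridFS splits: " + splitCount);
        } finally {
            client.close();
        }
    }
}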

Example 18 with DBObject

use of com.mongodb.DBObject in project mongo-hadoop by mongodb.

the class StandaloneMongoSplitter method calculateSplits.

@Override
public List<InputSplit> calculateSplits() throws SplitFailedException {
    final DBObject splitKey = MongoConfigUtil.getInputSplitKey(getConfiguration());
    final DBObject splitKeyMax = MongoConfigUtil.getMaxSplitKey(getConfiguration());
    final DBObject splitKeyMin = MongoConfigUtil.getMinSplitKey(getConfiguration());
    final int splitSize = MongoConfigUtil.getSplitSize(getConfiguration());
    final MongoClientURI inputURI;
    DBCollection inputCollection = null;
    final ArrayList<InputSplit> returnVal;
    try {
        inputURI = MongoConfigUtil.getInputURI(getConfiguration());
        MongoClientURI authURI = MongoConfigUtil.getAuthURI(getConfiguration());
        if (authURI != null) {
            inputCollection = MongoConfigUtil.getCollectionWithAuth(inputURI, authURI);
        } else {
            inputCollection = MongoConfigUtil.getCollection(inputURI);
        }
        returnVal = new ArrayList<InputSplit>();
        final String ns = inputCollection.getFullName();
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("Running splitVector on namespace: %s.%s; hosts: %s", inputURI.getDatabase(), inputURI.getCollection(), inputURI.getHosts()));
        }
        final DBObject cmd = BasicDBObjectBuilder.start("splitVector", ns)
                .add("keyPattern", splitKey)
                .add("min", splitKeyMin)
                .add("max", splitKeyMax)
                .add("force", false)
                .add("maxChunkSize", splitSize)
                .get();
        CommandResult data;
        boolean ok = true;
        try {
            data = inputCollection.getDB().getSisterDB(inputURI.getDatabase()).command(cmd, ReadPreference.primary());
        } catch (final MongoException e) {
            // 2.0 servers throw exceptions rather than info in a CommandResult
            data = null;
            LOG.info(e.getMessage(), e);
            if (e.getMessage().contains("unrecognized command: splitVector")) {
                ok = false;
            } else {
                throw e;
            }
        }
        if (data != null) {
            if (data.containsField("$err")) {
                throw new SplitFailedException("Error calculating splits: " + data);
            } else if (!data.get("ok").equals(1.0)) {
                ok = false;
            }
        }
        if (!ok) {
            final CommandResult stats = inputCollection.getStats();
            if (stats.containsField("primary")) {
                final DBCursor shards = inputCollection.getDB().getSisterDB("config")
                        .getCollection("shards")
                        .find(new BasicDBObject("_id", stats.getString("primary")));
                try {
                    if (shards.hasNext()) {
                        final DBObject shard = shards.next();
                        final String host = ((String) shard.get("host")).replace(shard.get("_id") + "/", "");
                        final MongoClientURI shardHost;
                        if (authURI != null) {
                            shardHost = new MongoClientURIBuilder(authURI).host(host).build();
                        } else {
                            shardHost = new MongoClientURIBuilder(inputURI).host(host).build();
                        }
                        MongoClient shardClient = null;
                        try {
                            shardClient = new MongoClient(shardHost);
                            data = shardClient.getDB(shardHost.getDatabase()).command(cmd, ReadPreference.primary());
                        } catch (final Exception e) {
                            LOG.error(e.getMessage(), e);
                        } finally {
                            if (shardClient != null) {
                                shardClient.close();
                            }
                        }
                    }
                } finally {
                    shards.close();
                }
            }
            if (data != null && !data.get("ok").equals(1.0)) {
                throw new SplitFailedException("Unable to calculate input splits: " + data.get("errmsg"));
            }
        }
        // Comes in a format where "min" and "max" are implicit
        // and each entry is just a boundary key; not ranged
        final BasicDBList splitData = (BasicDBList) data.get("splitKeys");
        if (splitData.size() == 0) {
            LOG.warn("WARNING: No Input Splits were calculated by the split code. Proceeding with a *single* split. Data may be too" + " small, try lowering 'mongo.input.split_size' if this is undesirable.");
        }
        // Lower boundary of the first min split
        BasicDBObject lastKey = null;
        // If splitKeyMin was given, use it as first boundary.
        if (!splitKeyMin.toMap().isEmpty()) {
            lastKey = new BasicDBObject(splitKeyMin.toMap());
        }
        for (final Object aSplitData : splitData) {
            final BasicDBObject currentKey = (BasicDBObject) aSplitData;
            returnVal.add(createSplitFromBounds(lastKey, currentKey));
            lastKey = currentKey;
        }
        BasicDBObject maxKey = null;
        // If splitKeyMax was given, use it as last boundary.
        if (!splitKeyMax.toMap().isEmpty()) {
            maxKey = new BasicDBObject(splitKeyMax.toMap());
        }
        // Last max split
        final MongoInputSplit lastSplit = createSplitFromBounds(lastKey, maxKey);
        returnVal.add(lastSplit);
    } finally {
        if (inputCollection != null) {
            MongoConfigUtil.close(inputCollection.getDB().getMongo());
        }
    }
    if (MongoConfigUtil.isFilterEmptySplitsEnabled(getConfiguration())) {
        return filterEmptySplits(returnVal);
    }
    return returnVal;
}
Also used : MongoException(com.mongodb.MongoException) MongoInputSplit(com.mongodb.hadoop.input.MongoInputSplit) MongoClientURI(com.mongodb.MongoClientURI) BasicDBObject(com.mongodb.BasicDBObject) DBObject(com.mongodb.DBObject) MongoException(com.mongodb.MongoException) CommandResult(com.mongodb.CommandResult) DBCollection(com.mongodb.DBCollection) BasicDBObject(com.mongodb.BasicDBObject) MongoClient(com.mongodb.MongoClient) BasicDBList(com.mongodb.BasicDBList) DBCursor(com.mongodb.DBCursor) MongoClientURIBuilder(com.mongodb.hadoop.util.MongoClientURIBuilder) BasicDBObject(com.mongodb.BasicDBObject) DBObject(com.mongodb.DBObject) InputSplit(org.apache.hadoop.mapreduce.InputSplit) MongoInputSplit(com.mongodb.hadoop.input.MongoInputSplit)
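
The core of this method is the splitVector server command, which returns a list of boundary keys under "splitKeys"; everything else is fallback handling for sharded clusters and for old servers that do not support the command. A minimal standalone sketch of issuing the command directly is shown below; the namespace "example.collection", the _id key pattern, and the 8 MB chunk size are placeholders, and the connected user needs sufficient privileges to run splitVector.

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.CommandResult;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;
import com.mongodb.ReadPreference;

public class SplitVectorProbe {
    public static void main(String[] args) {
        MongoClient client = new MongoClient();
        try {
            DBObject cmd = BasicDBObjectBuilder.start("splitVector", "example.collection")
                    .add("keyPattern", new BasicDBObject("_id", 1))
                    .add("min", new BasicDBObject())
                    .add("max", new BasicDBObject())
                    .add("force", false)
                    // target chunk size in megabytes, like mongo.input.split_size
                    .add("maxChunkSize", 8)
                    .get();
            CommandResult result = client.getDB("example").command(cmd, ReadPreference.primary());
            if (result.ok()) {
                BasicDBList splitKeys = (BasicDBList) result.get("splitKeys");
                System.out.println("splitVector returned " + splitKeys.size() + " boundary keys");
            } else {
                System.out.println("splitVector failed: " + result.getErrorMessage());
            }
        } finally {
            client.close();
        }
    }
}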

Example 19 with DBObject

use of com.mongodb.DBObject in project mongo-hadoop by mongodb.

the class MongoConfigUtil method getDBObject.

public static DBObject getDBObject(final Configuration conf, final String key) {
    try {
        final String json = conf.get(key);
        final DBObject obj = (DBObject) JSON.parse(json);
        if (obj == null) {
            return new BasicDBObject();
        } else {
            return obj;
        }
    } catch (final Exception e) {
        throw new IllegalArgumentException("Provided JSON String is not representable/parseable as a DBObject.", e);
    }
}
Also used : BasicDBObject(com.mongodb.BasicDBObject) DBObject(com.mongodb.DBObject) BasicDBObject(com.mongodb.BasicDBObject) IOException(java.io.IOException) UnknownHostException(java.net.UnknownHostException)
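
A quick standalone illustration of the JSON.parse round trip this helper relies on; the query string and field names are made up for the example.

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.util.JSON;

public class JsonToDBObjectDemo {
    public static void main(String[] args) {
        String json = "{ \"status\": \"ACTIVE\", \"age\": { \"$gte\": 21 } }";
        DBObject parsed = (DBObject) JSON.parse(json);
        // Mirror getDBObject: fall back to an empty document if nothing was parsed.
        DBObject query = parsed != null ? parsed : new BasicDBObject();
        System.out.println(query.get("status"));   // prints ACTIVE
    }
}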

Example 20 with DBObject

use of com.mongodb.DBObject in project mongo-hadoop by mongodb.

the class MongoConfigUtil method setJSON.

/**
 * Helper for providing a JSON string as a value for a setting.
 *
 * @param conf the Configuration
 * @param key the key for the setting
 * @param value the JSON string value
 */
public static void setJSON(final Configuration conf, final String key, final String value) {
    try {
        final Object dbObj = JSON.parse(value);
        setDBObject(conf, key, (DBObject) dbObj);
    } catch (final Exception e) {
        LOG.error("Cannot parse JSON...", e);
        throw new IllegalArgumentException("Provided JSON String is not representable/parseable as a DBObject.", e);
    }
}
Also used : DBObject(com.mongodb.DBObject) BasicDBObject(com.mongodb.BasicDBObject) IOException(java.io.IOException) UnknownHostException(java.net.UnknownHostException)
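
Taken together with getDBObject above, setJSON lets a job pass an arbitrary query document through the Hadoop Configuration as a JSON string. A small round-trip sketch, assuming both helpers are on the classpath; the key name and query below are illustrative only, since mongo-hadoop defines its own constants for the real settings.

import org.apache.hadoop.conf.Configuration;

import com.mongodb.DBObject;
import com.mongodb.hadoop.util.MongoConfigUtil;

public class QueryConfigRoundTrip {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Store the query as JSON, then read it back as a DBObject.
        MongoConfigUtil.setJSON(conf, "example.query", "{ \"year\": { \"$gte\": 2015 } }");
        DBObject query = MongoConfigUtil.getDBObject(conf, "example.query");
        System.out.println(query);   // prints the parsed query document
    }
}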

Aggregations

DBObject (com.mongodb.DBObject) 545
BasicDBObject (com.mongodb.BasicDBObject) 386
Test (org.junit.Test) 214
DBCollection (com.mongodb.DBCollection) 83
YearFilterPagingRequest (org.devgateway.ocds.web.rest.controller.request.YearFilterPagingRequest) 54
Aggregation (org.springframework.data.mongodb.core.aggregation.Aggregation) 52
ApiOperation (io.swagger.annotations.ApiOperation) 47
RequestMapping (org.springframework.web.bind.annotation.RequestMapping) 46
Aggregation.newAggregation (org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation) 41
DBCursor (com.mongodb.DBCursor) 40
ArrayList (java.util.ArrayList) 38
HashMap (java.util.HashMap) 38
List (java.util.List) 31
CustomProjectionOperation (org.devgateway.toolkit.persistence.mongo.aggregate.CustomProjectionOperation) 31
Map (java.util.Map) 26
ObjectId (org.bson.types.ObjectId) 26
BasicDBList (com.mongodb.BasicDBList) 24
BasicDBObjectBuilder (com.mongodb.BasicDBObjectBuilder) 20
BSONObject (org.bson.BSONObject) 19
MongoException (com.mongodb.MongoException) 18