Use of com.mongodb.DBCollection in project mongomvcc by igd-geo.
The class MongoDBVMaintenance, method pruneDanglingCommits.
@Override
public long pruneDanglingCommits(long expiry, TimeUnit unit) {
    long[] cids = findDanglingCommits(expiry, unit);
    DBCollection collCommits = _db.getDB().getCollection(MongoDBConstants.COLLECTION_COMMITS);
    // delete commits in chunks, so we avoid sending an array that is
    // larger than the maximum document size
    final int sliceCount = 1000;
    for (int i = 0; i < cids.length; i += sliceCount) {
        int maxSliceCount = Math.min(sliceCount, cids.length - i);
        long[] slice = new long[maxSliceCount];
        System.arraycopy(cids, i, slice, 0, maxSliceCount);
        collCommits.remove(new BasicDBObject(MongoDBConstants.ID, new BasicDBObject("$in", slice)));
    }
    return cids.length;
}
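With, say, 2,500 dangling commit ids and a slice size of 1,000, the loop above issues three remove calls covering 1,000, 1,000, and 500 ids, keeping each $in query document well under MongoDB's maximum BSON document size. A hypothetical caller (the maintenance variable is assumed, not part of the excerpt):

// Remove all commits that have been dangling for at least one hour;
// the return value is the number of commits removed.
long removed = maintenance.pruneDanglingCommits(1, TimeUnit.HOURS);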
Use of com.mongodb.DBCollection in project camel by apache.
The class GridFsConsumer, method run.
@Override
public void run() {
    DBCursor c = null;
    java.util.Date fromDate = null;
    // derive what the configured strategy needs: a timestamp window,
    // a persisted timestamp, and/or a per-file marker attribute
    QueryStrategy s = endpoint.getQueryStrategy();
    boolean usesTimestamp = s != QueryStrategy.FileAttribute;
    boolean persistsTimestamp = s == QueryStrategy.PersistentTimestamp || s == QueryStrategy.PersistentTimestampAndFileAttribute;
    boolean usesAttribute = s == QueryStrategy.FileAttribute || s == QueryStrategy.TimeStampAndFileAttribute || s == QueryStrategy.PersistentTimestampAndFileAttribute;
    DBCollection ptsCollection = null;
    DBObject persistentTimestamp = null;
    if (persistsTimestamp) {
        ptsCollection = endpoint.getDB().getCollection(endpoint.getPersistentTSCollection());
        // ensure standard indexes as long as collections are small
        try {
            if (ptsCollection.count() < 1000) {
                ptsCollection.createIndex(new BasicDBObject("id", 1));
            }
        } catch (MongoException e) {
            //TODO: Logging
        }
        persistentTimestamp = ptsCollection.findOne(new BasicDBObject("id", endpoint.getPersistentTSObject()));
        if (persistentTimestamp == null) {
            persistentTimestamp = new BasicDBObject("id", endpoint.getPersistentTSObject());
            fromDate = new java.util.Date();
            persistentTimestamp.put("timestamp", fromDate);
            ptsCollection.save(persistentTimestamp);
        }
        fromDate = (java.util.Date) persistentTimestamp.get("timestamp");
    } else if (usesTimestamp) {
        fromDate = new java.util.Date();
    }
    try {
        Thread.sleep(endpoint.getInitialDelay());
        while (isStarted()) {
            // (re)issue the query whenever there is no open cursor left
            if (c == null || c.getCursorId() == 0) {
                if (c != null) {
                    c.close();
                }
                String queryString = endpoint.getQuery();
                DBObject query;
                if (queryString == null) {
                    query = new BasicDBObject();
                } else {
                    query = (DBObject) JSON.parse(queryString);
                }
                if (usesTimestamp) {
                    query.put("uploadDate", new BasicDBObject("$gt", fromDate));
                }
                if (usesAttribute) {
                    query.put(endpoint.getFileAttributeName(), null);
                }
                c = endpoint.getFilesCollection().find(query);
            }
            boolean dateModified = false;
            while (c.hasNext() && isStarted()) {
                GridFSDBFile file = (GridFSDBFile) c.next();
                GridFSDBFile forig = file;
                if (usesAttribute) {
                    // atomically claim the file: findAndModify only matches
                    // while the marker attribute is still unset, so two
                    // consumers cannot both pick up the same file
                    file.put(endpoint.getFileAttributeName(), "processing");
                    DBObject q = BasicDBObjectBuilder.start("_id", file.getId()).append(endpoint.getFileAttributeName(), null).get();
                    forig = (GridFSDBFile) endpoint.getFilesCollection().findAndModify(q, null, null, false, file, true, false);
                }
                if (forig != null) {
                    file = endpoint.getGridFs().findOne(new BasicDBObject("_id", file.getId()));
                    Exchange exchange = endpoint.createExchange();
                    exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
                    exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
                    exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
                    exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
                    exchange.getIn().setBody(file.getInputStream(), InputStream.class);
                    try {
                        getProcessor().process(exchange);
                        if (usesAttribute) {
                            forig.put(endpoint.getFileAttributeName(), "done");
                            endpoint.getFilesCollection().save(forig);
                        }
                        if (usesTimestamp && file.getUploadDate().compareTo(fromDate) > 0) {
                            fromDate = file.getUploadDate();
                            dateModified = true;
                        }
                    } catch (Exception e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                }
            }
            // persist the advanced timestamp so the window survives a restart
            if (persistsTimestamp && dateModified) {
                persistentTimestamp.put("timestamp", fromDate);
                ptsCollection.save(persistentTimestamp);
            }
            Thread.sleep(endpoint.getDelay());
        }
    } catch (Throwable e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    if (c != null) {
        c.close();
    }
}
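The findAndModify call is what makes the file-attribute strategy safe under concurrency: the query only matches while the marker attribute is still unset, and the update flips it in a single server-side step. A minimal sketch of the same claim pattern with the legacy driver (collection, field name, and the claim helper are illustrative, not Camel API):

// Atomically claims a work item: matches only when "state" is still
// unset and sets it to "processing" in one server-side operation.
// Returns the updated document, or null if another worker won the race.
// Legacy-driver signature:
// findAndModify(query, fields, sort, remove, update, returnNew, upsert)
static DBObject claim(DBCollection coll, Object id) {
    DBObject query = new BasicDBObject("_id", id).append("state", null);
    DBObject update = new BasicDBObject("$set", new BasicDBObject("state", "processing"));
    return coll.findAndModify(query, null, null, false, update, true, false);
}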
Use of com.mongodb.DBCollection in project Mycat-Server by MyCATApache.
The class MongoSQLParser, method dropTable.
private int dropTable(SQLDropTableStatement state) {
    for (SQLTableSource table : state.getTableSources()) {
        DBCollection coll = this._db.getCollection(table.toString());
        coll.drop();
    }
    return 1;
}
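Each SQL table source maps one-to-one onto a MongoDB collection, so a multi-table drop removes several collections. A hypothetical illustration of what DROP TABLE orders, customers reduces to (collection names assumed):

// One getCollection(...).drop() call per table source:
db.getCollection("orders").drop();
db.getCollection("customers").drop();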
Use of com.mongodb.DBCollection in project Mycat-Server by MyCATApache.
The class MongoSQLParser, method UpData.
private int UpData(SQLUpdateStatement state) {
    SQLTableSource table = state.getTableSource();
    DBCollection coll = this._db.getCollection(table.toString());
    SQLExpr expr = state.getWhere();
    // translate the WHERE clause into a query document and the SET
    // items into a single $set modifier
    DBObject query = parserWhere(expr);
    BasicDBObject set = new BasicDBObject();
    for (SQLUpdateSetItem col : state.getItems()) {
        set.put(getFieldName2(col.getColumn()), getExpValue(col.getValue()));
    }
    DBObject mod = new BasicDBObject("$set", set);
    coll.updateMulti(query, mod);
    return 1;
}
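To make the translation concrete, a statement such as UPDATE users SET age = 30 WHERE name = 'bob' would reduce to a multi-document $set update roughly like the following (collection and field names are hypothetical, and coll is assumed to be the DBCollection for the target table):

// WHERE name = 'bob'  ->  { name: "bob" }
DBObject query = new BasicDBObject("name", "bob");
// SET age = 30        ->  { $set: { age: 30 } }
DBObject mod = new BasicDBObject("$set", new BasicDBObject("age", 30));
// updateMulti applies the modifier to every matching document,
// mirroring SQL UPDATE's whole-result-set semantics.
coll.updateMulti(query, mod);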
Use of com.mongodb.DBCollection in project qi4j-sdk by Qi4j.
The class MongoMapEntityStoreMixin, method applyChanges.
@Override
public void applyChanges(MapChanges changes) throws IOException {
    db.requestStart();
    final DBCollection entities = db.getCollection(collectionName);
    changes.visitMap(new MapChanger() {

        @Override
        public Writer newEntity(final EntityReference ref, EntityDescriptor entityDescriptor) throws IOException {
            return new StringWriter(1000) {

                @Override
                public void close() throws IOException {
                    super.close();
                    String jsonState = toString();
                    DBObject bsonState = (DBObject) JSON.parse(jsonState);
                    BasicDBObject entity = new BasicDBObject();
                    entity.put(IDENTITY_COLUMN, ref.identity());
                    entity.put(STATE_COLUMN, bsonState);
                    entities.insert(entity, writeConcern);
                }
            };
        }

        @Override
        public Writer updateEntity(final EntityReference ref, EntityDescriptor entityDescriptor) throws IOException {
            return new StringWriter(1000) {

                @Override
                public void close() throws IOException {
                    super.close();
                    DBObject bsonState = (DBObject) JSON.parse(toString());
                    BasicDBObject entity = new BasicDBObject();
                    entity.put(IDENTITY_COLUMN, ref.identity());
                    entity.put(STATE_COLUMN, bsonState);
                    entities.update(byIdentity(ref), entity, false, false, writeConcern);
                }
            };
        }

        @Override
        public void removeEntity(EntityReference ref, EntityDescriptor entityDescriptor) throws EntityNotFoundException {
            DBObject entity = entities.findOne(byIdentity(ref));
            if (entity == null) {
                throw new EntityNotFoundException(ref);
            }
            entities.remove(entity, writeConcern);
        }
    });
    db.requestDone();
}
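The byIdentity(ref) helper is not shown in this excerpt; judging from how IDENTITY_COLUMN is written in newEntity and updateEntity above, a plausible implementation (an assumption, not the project's verified code) is a simple identity lookup:

// Assumed shape of the helper used above: a query matching the
// entity's identity column.
private DBObject byIdentity(EntityReference ref) {
    return new BasicDBObject(IDENTITY_COLUMN, ref.identity());
}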