Use of com.mongodb.DBCursor in project jetty.project by eclipse.
The class MongoSessionDataStore, method doGetExpired.
/**
 * @see org.eclipse.jetty.server.session.SessionDataStore#getExpired(Set)
 */
@Override
public Set<String> doGetExpired(Set<String> candidates) {
    long now = System.currentTimeMillis();
    long upperBound = now;
    Set<String> expiredSessions = new HashSet<>();
    //firstly ask mongo to verify if these candidate ids have expired - all of
    //these candidates will be for our node
    BasicDBObject query = new BasicDBObject();
    query.append(__ID, new BasicDBObject("$in", candidates));
    query.append(__EXPIRY, new BasicDBObject("$gt", 0).append("$lt", upperBound));
    DBCursor verifiedExpiredSessions = null;
    try {
        verifiedExpiredSessions = _dbSessions.find(query, new BasicDBObject(__ID, 1));
        for (DBObject session : verifiedExpiredSessions) {
            String id = (String) session.get(__ID);
            if (LOG.isDebugEnabled())
                LOG.debug("{} Mongo confirmed expired session {}", _context, id);
            expiredSessions.add(id);
        }
    } finally {
        if (verifiedExpiredSessions != null)
            verifiedExpiredSessions.close();
    }
    //if this is our first expiry check, make sure that we only grab really old sessions
    if (_lastExpiryCheckTime <= 0)
        upperBound = (now - (3 * (1000L * _gracePeriodSec)));
    else
        upperBound = _lastExpiryCheckTime - (1000L * _gracePeriodSec);
    query = new BasicDBObject();
    BasicDBObject gt = new BasicDBObject(__EXPIRY, new BasicDBObject("$gt", 0));
    BasicDBObject lt = new BasicDBObject(__EXPIRY, new BasicDBObject("$lt", upperBound));
    BasicDBList list = new BasicDBList();
    list.add(gt);
    list.add(lt);
    query.append("$and", list); //$and operator so that both expiry conditions must match
    DBCursor oldExpiredSessions = null;
    try {
        BasicDBObject bo = new BasicDBObject(__ID, 1);
        bo.append(__EXPIRY, 1);
        oldExpiredSessions = _dbSessions.find(query, bo);
        for (DBObject session : oldExpiredSessions) {
            String id = (String) session.get(__ID);
            if (LOG.isDebugEnabled())
                LOG.debug("{} Mongo found old expired session {}", _context, id + " exp=" + session.get(__EXPIRY));
            expiredSessions.add(id);
        }
    } finally {
        if (oldExpiredSessions != null)
            oldExpiredSessions.close();
    }
    return expiredSessions;
}
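Boiled down, the pattern above is: build a BasicDBObject query, project just the fields you need, iterate the DBCursor, and close it in a finally block. The following is a standalone sketch of that pattern with the plain legacy driver; the collection name "jettySessions", the field names "id" and "expiry", and the database name are assumptions for illustration, not Jetty's actual schema.

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;

import java.util.HashSet;
import java.util.Set;

public class ExpiredIdScanSketch {

    //Collect the ids of documents whose "expiry" is positive and already in the past,
    //fetching only the "id" field via a projection. Names are placeholders.
    public static Set<String> findExpiredIds(DBCollection sessions, long now) {
        Set<String> expired = new HashSet<>();
        BasicDBObject query = new BasicDBObject("expiry",
                new BasicDBObject("$gt", 0).append("$lt", now));
        DBCursor cursor = sessions.find(query, new BasicDBObject("id", 1));
        try {
            for (DBObject doc : cursor) {
                expired.add((String) doc.get("id"));
            }
        } finally {
            cursor.close(); //always release the server-side cursor
        }
        return expired;
    }

    public static void main(String[] args) {
        MongoClient client = new MongoClient("localhost");
        try {
            DB db = client.getDB("sessiondb"); //database name is a placeholder
            System.out.println(findExpiredIds(db.getCollection("jettySessions"), System.currentTimeMillis()));
        } finally {
            client.close();
        }
    }
}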
Use of com.mongodb.DBCursor in project mongo-java-driver by mongodb.
The class CLI, method main.
// CHECKSTYLE:OFF
public static void main(final String[] args) throws Exception {
    if (args.length < 1) {
        printUsage();
        return;
    }
    for (int i = 0; i < args.length; i++) {
        String s = args[i];
        if (s.equals("--db")) {
            db = args[i + 1];
            i++;
            continue;
        }
        if (s.equals("--host")) {
            host = args[i + 1];
            i++;
            continue;
        }
        if (s.equals("help")) {
            printUsage();
            return;
        }
        if (s.equals("list")) {
            GridFS fs = getGridFS();
            System.out.printf("%-60s %-10s%n", "Filename", "Length");
            DBCursor fileListCursor = fs.getFileList();
            try {
                while (fileListCursor.hasNext()) {
                    DBObject o = fileListCursor.next();
                    System.out.printf("%-60s %-10d%n", o.get("filename"), ((Number) o.get("length")).longValue());
                }
            } finally {
                fileListCursor.close();
            }
            return;
        }
        if (s.equals("get")) {
            GridFS fs = getGridFS();
            String fn = args[i + 1];
            GridFSDBFile f = fs.findOne(fn);
            if (f == null) {
                System.err.println("can't find file: " + fn);
                return;
            }
            f.writeTo(f.getFilename());
            return;
        }
        if (s.equals("put")) {
            GridFS fs = getGridFS();
            String fn = args[i + 1];
            GridFSInputFile f = fs.createFile(new File(fn));
            f.save();
            f.validate();
            return;
        }
        if (s.equals("md5")) {
            GridFS fs = getGridFS();
            String fn = args[i + 1];
            GridFSDBFile f = fs.findOne(fn);
            if (f == null) {
                System.err.println("can't find file: " + fn);
                return;
            }
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            md5.reset();
            int read = 0;
            DigestInputStream is = new DigestInputStream(f.getInputStream(), md5);
            try {
                while (is.read() >= 0) {
                    read++;
                    int r = is.read(new byte[17]);
                    if (r < 0) {
                        break;
                    }
                    read += r;
                }
            } finally {
                is.close();
            }
            byte[] digest = md5.digest();
            System.out.println("length: " + read + " md5: " + Util.toHex(digest));
            return;
        }
        System.err.println("unknown option: " + s);
        return;
    }
}
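The snippet relies on a getGridFS() helper that is not shown here. A minimal hypothetical version, assuming the legacy driver's MongoClient and GridFS classes (com.mongodb.MongoClient, com.mongodb.gridfs.GridFS) and the host/db fields parsed above, could look like the sketch below; the driver's actual helper may differ, and the defaults are assumptions.

private static MongoClient mongoClient;

//Hypothetical helper for this sketch: connect lazily and hand back a GridFS bucket
//for the configured database. The "localhost" and "test" defaults are assumptions.
private static GridFS getGridFS() throws Exception {
    if (mongoClient == null) {
        mongoClient = new MongoClient(host != null ? host : "localhost");
    }
    return new GridFS(mongoClient.getDB(db != null ? db : "test"));
}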
Use of com.mongodb.DBCursor in project morphia by mongodb.
The class AggregationTest, method testOutNamedCollection.
@Test
public void testOutNamedCollection() {
    checkMinServerVersion(2.6);
    getDs().save(asList(new Book("The Banquet", "Dante", 2, "Italian", "Sophomore Slump"),
                        new Book("Divine Comedy", "Dante", 1, "Not Very Funny", "I mean for a 'comedy'", "Ironic"),
                        new Book("Eclogues", "Dante", 2, "Italian", ""),
                        new Book("The Odyssey", "Homer", 10, "Classic", "Mythology", "Sequel"),
                        new Book("Iliad", "Homer", 10, "Mythology", "Trojan War", "No Sequel")));
    getDs().createAggregation(Book.class)
           .match(getDs().getQueryFactory().createQuery(getDs()).field("author").equal("Homer"))
           .group("author", grouping("copies", sum("copies")))
           .out("testAverage", Author.class);
    DBCursor testAverage = getDb().getCollection("testAverage").find();
    Assert.assertNotNull(testAverage);
    try {
        Assert.assertEquals(20, testAverage.next().get("copies"));
    } finally {
        testAverage.close();
    }
}
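For reference, the $out collection produced by the aggregation can also be read back with the plain driver, independent of Morphia. The group stage keys its output on the author, so _id holds the author name and "copies" the summed count. A minimal sketch, where the database name is an assumption for illustration:

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;

public class ReadOutCollectionSketch {
    public static void main(String[] args) {
        MongoClient client = new MongoClient("localhost");
        try {
            DB db = client.getDB("morphia_example"); //database name is a placeholder
            //the $group stage keyed output by author, so filter on _id
            DBCursor cursor = db.getCollection("testAverage").find(new BasicDBObject("_id", "Homer"));
            try {
                while (cursor.hasNext()) {
                    DBObject doc = cursor.next();
                    System.out.println(doc.get("_id") + " -> " + doc.get("copies"));
                }
            } finally {
                cursor.close();
            }
        } finally {
            client.close();
        }
    }
}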
Use of com.mongodb.DBCursor in project javaee7-samples by javaee-samples.
The class PersonSessionBean, method getPersons.
public List<Person> getPersons() {
    List<Person> persons = new ArrayList<>();
    DBCursor cur = personCollection.find();
    try {
        System.out.println("getPersons: Found " + cur.length() + " person(s)");
        for (DBObject dbo : cur.toArray()) {
            persons.add(Person.fromDBObject(dbo));
        }
    } finally {
        //DBCursor holds server-side resources, so close it when done
        cur.close();
    }
    return persons;
}
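As a side note, cur.length() above pulls every matching document into memory just to report a count, and toArray() materializes the whole result list; DBCollection.count() gets the same number from the server without fetching the documents, and iterating the cursor directly streams the results. A minimal alternative sketch (hypothetical method name, reusing the sample's Person type and injected personCollection):

public List<Person> getPersonsStreaming() {
    List<Person> persons = new ArrayList<>();
    //count() issues a server-side count instead of buffering all results like DBCursor.length()
    System.out.println("getPersons: Found " + personCollection.count() + " person(s)");
    DBCursor cur = personCollection.find();
    try {
        //iterate the cursor directly instead of materializing it with toArray()
        for (DBObject dbo : cur) {
            persons.add(Person.fromDBObject(dbo));
        }
    } finally {
        cur.close();
    }
    return persons;
}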
Use of com.mongodb.DBCursor in project camel by apache.
The class GridFsConsumer, method run.
@Override
public void run() {
    DBCursor c = null;
    java.util.Date fromDate = null;
    QueryStrategy s = endpoint.getQueryStrategy();
    boolean usesTimestamp = s != QueryStrategy.FileAttribute;
    boolean persistsTimestamp = s == QueryStrategy.PersistentTimestamp || s == QueryStrategy.PersistentTimestampAndFileAttribute;
    boolean usesAttribute = s == QueryStrategy.FileAttribute || s == QueryStrategy.TimeStampAndFileAttribute || s == QueryStrategy.PersistentTimestampAndFileAttribute;
    DBCollection ptsCollection = null;
    DBObject persistentTimestamp = null;
    if (persistsTimestamp) {
        ptsCollection = endpoint.getDB().getCollection(endpoint.getPersistentTSCollection());
        // ensure standard indexes as long as collections are small
        try {
            if (ptsCollection.count() < 1000) {
                ptsCollection.createIndex(new BasicDBObject("id", 1));
            }
        } catch (MongoException e) {
            //TODO: Logging
        }
        persistentTimestamp = ptsCollection.findOne(new BasicDBObject("id", endpoint.getPersistentTSObject()));
        if (persistentTimestamp == null) {
            persistentTimestamp = new BasicDBObject("id", endpoint.getPersistentTSObject());
            fromDate = new java.util.Date();
            persistentTimestamp.put("timestamp", fromDate);
            ptsCollection.save(persistentTimestamp);
        }
        fromDate = (java.util.Date) persistentTimestamp.get("timestamp");
    } else if (usesTimestamp) {
        fromDate = new java.util.Date();
    }
    try {
        Thread.sleep(endpoint.getInitialDelay());
        while (isStarted()) {
            if (c == null || c.getCursorId() == 0) {
                if (c != null) {
                    c.close();
                }
                String queryString = endpoint.getQuery();
                DBObject query;
                if (queryString == null) {
                    query = new BasicDBObject();
                } else {
                    query = (DBObject) JSON.parse(queryString);
                }
                if (usesTimestamp) {
                    query.put("uploadDate", new BasicDBObject("$gt", fromDate));
                }
                if (usesAttribute) {
                    query.put(endpoint.getFileAttributeName(), null);
                }
                c = endpoint.getFilesCollection().find(query);
            }
            boolean dateModified = false;
            while (c.hasNext() && isStarted()) {
                GridFSDBFile file = (GridFSDBFile) c.next();
                GridFSDBFile forig = file;
                if (usesAttribute) {
                    file.put(endpoint.getFileAttributeName(), "processing");
                    DBObject q = BasicDBObjectBuilder.start("_id", file.getId()).append("camel-processed", null).get();
                    forig = (GridFSDBFile) endpoint.getFilesCollection().findAndModify(q, null, null, false, file, true, false);
                }
                if (forig != null) {
                    file = endpoint.getGridFs().findOne(new BasicDBObject("_id", file.getId()));
                    Exchange exchange = endpoint.createExchange();
                    exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
                    exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
                    exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
                    exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
                    exchange.getIn().setBody(file.getInputStream(), InputStream.class);
                    try {
                        getProcessor().process(exchange);
                        //System.out.println("Processing " + file.getFilename());
                        if (usesAttribute) {
                            forig.put(endpoint.getFileAttributeName(), "done");
                            endpoint.getFilesCollection().save(forig);
                        }
                        if (usesTimestamp) {
                            if (file.getUploadDate().compareTo(fromDate) > 0) {
                                fromDate = file.getUploadDate();
                                dateModified = true;
                            }
                        }
                    } catch (Exception e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                }
            }
            if (persistsTimestamp && dateModified) {
                persistentTimestamp.put("timestamp", fromDate);
                ptsCollection.save(persistentTimestamp);
            }
            Thread.sleep(endpoint.getDelay());
        }
    } catch (Throwable e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    if (c != null) {
        c.close();
    }
}
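Stripped of the Camel plumbing, the consumer's core polling step is a DBCursor over the GridFS files collection filtered by uploadDate. Below is a minimal sketch of just that query, assuming the default "fs.files" bucket, a placeholder database name, and a caller-supplied timestamp.

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;

import java.util.Date;

public class GridFsPollSketch {

    //Print the files uploaded after the given timestamp, oldest first.
    public static void pollNewFiles(DB db, Date since) {
        DBObject query = new BasicDBObject("uploadDate", new BasicDBObject("$gt", since));
        DBCursor cursor = db.getCollection("fs.files").find(query).sort(new BasicDBObject("uploadDate", 1));
        try {
            while (cursor.hasNext()) {
                DBObject file = cursor.next();
                System.out.println(file.get("_id") + " " + file.get("filename") + " " + file.get("uploadDate"));
            }
        } finally {
            cursor.close();
        }
    }

    public static void main(String[] args) {
        MongoClient client = new MongoClient("localhost");
        try {
            pollNewFiles(client.getDB("test"), new Date(0)); //database name and start time are placeholders
        } finally {
            client.close();
        }
    }
}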