Use of com.mongodb.DBCursor in project Twister by NicolasBizzozzero: class MessagesTools, method listerMessagesToutLeMonde.
public static JSONObject listerMessagesToutLeMonde(String id_utilisateur, String recherche, String id_max, String id_min, String limite) throws UnknownHostException, InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException {
    // Connect to the database, then get the messages collection
    DBCollection messages = getCollectionMessages();
    // Retrieve the ids of the user's friends
    ArrayList<String> id_amis = bd.tools.AmitiesTools.getAmisEnArrayList(id_utilisateur);
    id_amis.add(id_utilisateur);
    // Build the query
    BasicDBObject requete = new BasicDBObject();
    ArrayList<BasicDBObject> listeAnd = new ArrayList<BasicDBObject>();
    if (!id_min.equals("-1")) {
        listeAnd.add(new BasicDBObject(Noms.CHAMP_ID_MESSAGE, new BasicDBObject("$gt", Integer.parseInt(id_min))));
    }
    if (!id_max.equals("-1")) {
        listeAnd.add(new BasicDBObject(Noms.CHAMP_ID_MESSAGE, new BasicDBObject("$lt", Integer.parseInt(id_max))));
    }
    if (listeAnd.size() != 0) {
        requete.put("$and", listeAnd);
    }
    requete.put(String.format("%s.%s", Noms.CHAMP_AUTEUR, Noms.CHAMP_ID_AUTEUR), new BasicDBObject("$in", id_amis));
    System.out.println(requete.toString());
    // Iterate over the results
    JSONObject reponse = new JSONObject();
    reponse.put(Noms.CHAMP_MESSAGES, new JSONArray());
    DBCursor curseur = messages.find(requete).sort(new BasicDBObject(Noms.CHAMP_ID_MESSAGE, -1)).limit(Integer.parseInt(limite));
    while (curseur.hasNext()) {
        reponse.accumulate(Noms.CHAMP_MESSAGES, curseur.next());
    }
    return reponse;
}
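A minimal usage sketch for the method above. The caller and the concrete argument values are hypothetical and only follow what the code itself implies: "-1" disables an id bound, limite caps the number of returned messages, and the JSONObject/JSONArray accessors assume the org.json-style API that accumulate() suggests.

// Hypothetical caller: fetch up to 20 of the newest messages visible to user "42",
// with no id bounds; the recherche argument is not used in the snippet shown above.
JSONObject page = MessagesTools.listerMessagesToutLeMonde("42", "", "-1", "-1", "20");
JSONArray fil = page.getJSONArray(Noms.CHAMP_MESSAGES);
for (int i = 0; i < fil.length(); i++) {
    System.out.println(fil.get(i));
}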
Use of com.mongodb.DBCursor in project jackrabbit-oak by apache: class MongoDbTest, method manyChildNodes.
@Test
@Ignore
public void manyChildNodes() {
    DB db = MongoUtils.getConnection().getDB();
    MongoUtils.dropCollections(db);
    DBCollection nodes = db.getCollection(Collection.NODES.toString());
    DBObject index = new BasicDBObject();
    // modification time (descending)
    index.put("_mod", -1L);
    // and then id (ascending)
    index.put("_id", 1L);
    DBObject options = new BasicDBObject();
    // options.put("unique", Boolean.TRUE);
    nodes.createIndex(index, options);
    // index on (_id, _mod):
    // Query plan: { "cursor" : "BtreeCursor _id_1__mod_-1" ,
    // "isMultiKey" : false , "n" : 2000 , "nscannedObjects" : 2000 ,
    // "nscanned" : 954647 , "nscannedObjectsAllPlans" : 1907080 ,
    // "nscannedAllPlans" : 2859727 , "scanAndOrder" : false ,
    // "indexOnly" : true , "nYields" : 5 , "nChunkSkips" : 0 ,
    // "millis" : 5112 ,...
    // Time: 2229 ms
    // Count: 2000
    // index on (_mod, _id):
    // Query plan: { "cursor" : "BtreeCursor _mod_-1__id_1" ,
    // "isMultiKey" : false , "n" : 2000 , "nscannedObjects" : 2000 ,
    // "nscanned" : 2000 , "nscannedObjectsAllPlans" : 2203 ,
    // "nscannedAllPlans" : 2203 , "scanAndOrder" : false ,
    // "indexOnly" : true , "nYields" : 0 , "nChunkSkips" : 0 ,
    // "millis" : 3 ,...
    // Time: 43 ms
    // Count: 2000
    int children = 1000000;
    int perInsert = 1000;
    int group = 0;
    String parent = "/parent/node/abc";
    for (int i = 0; i < children; ) {
        DBObject[] inserts = new DBObject[perInsert];
        group++;
        for (int j = 0; j < perInsert; j++, i++) {
            BasicDBObject doc = new BasicDBObject();
            inserts[j] = doc;
            doc.put("_id", parent + "/node" + i);
            doc.put("_mod", group);
        }
        nodes.insert(inserts, WriteConcern.SAFE);
        log("inserted " + i + "/" + children);
    }
    QueryBuilder queryBuilder = QueryBuilder.start("_mod");
    queryBuilder.greaterThanEquals(group - 1);
    queryBuilder.and("_id").greaterThan(parent + "/");
    queryBuilder.and("_id").lessThanEquals(parent + "0");
    DBObject query = queryBuilder.get();
    BasicDBObject keys = new BasicDBObject();
    keys.put("_id", 1);
    DBCursor cursor = nodes.find(query, keys);
    int count = 0;
    log("Query plan: " + cursor.explain());
    long time = System.currentTimeMillis();
    while (cursor.hasNext()) {
        DBObject obj = cursor.next();
        // dummy read operation (to ensure we did get the data)
        obj.get("_id");
        count++;
        // log(" read " + obj);
    }
    time = System.currentTimeMillis() - time;
    log("Time: " + time + " ms");
    log("Count: " + count);
    db.getMongo().close();
}
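The query-plan comments above are the point of this test: with the compound index ordered (_id, _mod) the server scans roughly 950,000 index entries to return 2,000 documents, while (_mod, _id) scans only the 2,000 it returns. A hedged sketch of a helper (hypothetical name and signature) that builds both candidate indexes and prints the plan the server actually picks, reusing only driver calls that already appear in the test:

// Hypothetical helper; prints which of the two compound indexes the planner chooses
// and the "nscanned" counts quoted in the comments above.
private static void compareIndexOrderings(DBCollection nodes, String parent, int group) {
    BasicDBObject idFirst = new BasicDBObject();
    idFirst.put("_id", 1L);
    idFirst.put("_mod", -1L);
    nodes.createIndex(idFirst, new BasicDBObject());   // the slow ordering measured above
    BasicDBObject modFirst = new BasicDBObject();
    modFirst.put("_mod", -1L);
    modFirst.put("_id", 1L);
    nodes.createIndex(modFirst, new BasicDBObject());  // the fast ordering measured above
    DBObject query = QueryBuilder.start("_mod").greaterThanEquals(group - 1)
            .and("_id").greaterThan(parent + "/")
            .and("_id").lessThanEquals(parent + "0").get();
    System.out.println("Query plan: " + nodes.find(query, new BasicDBObject("_id", 1)).explain());
}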
Use of com.mongodb.DBCursor in project teiid by teiid: class MongoDBMetadataProcessor, method process.
@Override
public void process(MetadataFactory metadataFactory, MongoDBConnection connection) throws TranslatorException {
    DB db = connection.getDatabase();
    for (String tableName : db.getCollectionNames()) {
        if (getExcludeTables() != null && shouldExclude(tableName)) {
            continue;
        }
        if (getIncludeTables() != null && !shouldInclude(tableName)) {
            continue;
        }
        try {
            DBCollection collection = db.getCollection(tableName);
            DBCursor cursor = collection.find();
            while (cursor.hasNext()) {
                BasicDBObject row = (BasicDBObject) cursor.next();
                if (row == null) {
                    continue;
                }
                Table table = addTable(metadataFactory, tableName, row, null);
                if (table != null) {
                    // top level documents can not be seen as merged
                    table.setProperty(TOP_LEVEL_DOC, String.valueOf(Boolean.TRUE));
                }
                if (cursor.numSeen() >= sampleSize) {
                    break;
                }
            }
            cursor.close();
        } catch (MongoException e) {
            LogManager.logWarning(LogConstants.CTX_CONNECTOR, MongoDBPlugin.Util.gs(MongoDBPlugin.Event.TEIID18037, e));
        }
    }
    for (Table table : metadataFactory.getSchema().getTables().values()) {
        String merge = table.getProperty(MERGE, false);
        if (merge != null) {
            addForeignKey(metadataFactory, table, metadataFactory.getSchema().getTable(merge));
        }
    }
    for (Table table : metadataFactory.getSchema().getTables().values()) {
        String top = table.getProperty(TOP_LEVEL_DOC, false);
        String merge = table.getProperty(MERGE, false);
        if (top != null) {
            table.setProperty(TOP_LEVEL_DOC, null);
            if (merge != null) {
                table.setProperty(MERGE, null);
                table.setProperty(EMBEDDABLE, "true"); // $NON-NLS-1$
            }
        }
    }
}
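The sampling loop above exists because MongoDB collections are schemaless: every sampled document may contribute columns the previous ones did not, so the processor keeps reading until sampleSize documents have been seen. A rough, hypothetical sketch of that idea in isolation; this is not Teiid's addTable(), which additionally infers column types, nested documents and merge relationships:

// Hypothetical helper, not part of Teiid: the union of key sets across a bounded
// sample approximates the column set of a schemaless collection.
static Set<String> sampleColumnNames(DBCollection collection, int sampleSize) {
    Set<String> columns = new LinkedHashSet<String>();
    DBCursor cursor = collection.find().limit(sampleSize);
    try {
        while (cursor.hasNext()) {
            DBObject row = cursor.next();
            if (row != null) {
                columns.addAll(row.keySet());
            }
        }
    } finally {
        cursor.close();
    }
    return columns;
}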
Use of com.mongodb.DBCursor in project teiid by teiid: class TestMongoDBMetadataProcessor, method processExampleMetadata.
private MetadataFactory processExampleMetadata(MongoDBMetadataProcessor mp) throws TranslatorException {
    MetadataFactory mf = new MetadataFactory("vdb", 1, "mongodb", SystemMetadata.getInstance().getRuntimeTypeMap(), new Properties(), null);
    MongoDBConnection conn = Mockito.mock(MongoDBConnection.class);
    DBCollection tableDBCollection = Mockito.mock(DBCollection.class);
    DBCollection embeddedDBCollection = Mockito.mock(DBCollection.class);
    DBCollection emptyDBCollection = Mockito.mock(DBCollection.class);
    DBCollection emptyFirstDBCollection = Mockito.mock(DBCollection.class);
    LinkedHashSet<String> tables = new LinkedHashSet<String>();
    tables.add("table");
    tables.add("embedded");
    tables.add("empty");
    tables.add("emptyFirst");
    DB db = Mockito.mock(DB.class);
    BasicDBList array = new BasicDBList();
    array.add("one");
    array.add("two");
    BasicDBObject row = new BasicDBObject();
    row.append("_id", new Integer(1));
    row.append("col2", new Double(2.0));
    row.append("col3", new Long(3L));
    row.append("col5", Boolean.TRUE);
    row.append("col6", new Date(0L));
    row.append("col6", new DBRef(db.getName(), "ns", "one"));
    row.append("col7", array);
    row.append("col8", new Binary("binary".getBytes()));
    BasicDBObject child = new BasicDBObject();
    child.append("col1", "one");
    child.append("col2", "two");
    row.append("child", child);
    BasicDBObject emptyFirstRow = new BasicDBObject();
    emptyFirstRow.append("_id", new ObjectId("5835a598944716c40d2f26ae"));
    emptyFirstRow.append("col2", new Double(2.0));
    emptyFirstRow.append("col3", new Long(3L));
    BasicDBObject embedded = new BasicDBObject();
    embedded.append("col1", 1);
    embedded.append("col2", new byte[0]);
    row.append("embedded", embedded);
    Mockito.stub(db.getCollectionNames()).toReturn(tables);
    Mockito.stub(db.getCollection(Mockito.eq("table"))).toReturn(tableDBCollection);
    Mockito.stub(db.getCollection(Mockito.eq("embedded"))).toReturn(embeddedDBCollection);
    Mockito.stub(db.getCollection(Mockito.eq("empty"))).toReturn(emptyDBCollection);
    Mockito.stub(db.getCollection(Mockito.eq("emptyFirst"))).toReturn(emptyFirstDBCollection);
    BasicDBObject nextRow = new BasicDBObject();
    nextRow.append("_id", new Integer(2));
    nextRow.append("col2", new Double(3.0));
    nextRow.append("col3", "A");
    nextRow.append("col5", Boolean.TRUE);
    nextRow.append("col9", "another");
    DBCursor tableCursor = Mockito.mock(DBCursor.class);
    Mockito.when(tableCursor.numSeen()).thenReturn(1).thenReturn(2);
    Mockito.when(tableCursor.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false);
    Mockito.when(tableCursor.next()).thenReturn(row).thenReturn(nextRow);
    Mockito.when(tableDBCollection.find()).thenReturn(tableCursor);
    DBCursor embeddedCursor = Mockito.mock(DBCursor.class);
    Mockito.when(embeddedCursor.hasNext()).thenReturn(true).thenReturn(false);
    Mockito.when(embeddedCursor.next()).thenReturn(child);
    Mockito.when(embeddedDBCollection.find()).thenReturn(embeddedCursor);
    DBCursor emptyFirstCursor = Mockito.mock(DBCursor.class);
    Mockito.when(emptyFirstCursor.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false);
    Mockito.when(emptyFirstCursor.next()).thenReturn(null).thenReturn(emptyFirstRow);
    Mockito.when(emptyFirstDBCollection.find()).thenReturn(emptyFirstCursor);
    DBCursor emptyCursor = Mockito.mock(DBCursor.class);
    Mockito.when(emptyCursor.hasNext()).thenReturn(true).thenReturn(false);
    Mockito.when(emptyCursor.next()).thenReturn(null);
    Mockito.when(emptyDBCollection.find()).thenReturn(emptyCursor);
    Mockito.stub(conn.getDatabase()).toReturn(db);
    mp.process(mf, conn);
    return mf;
}
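A minimal sketch of driving the helper above from a test. The expectation that each mocked collection returning a usable document surfaces as a table is an assumption of this sketch, and it relies only on accessors already used elsewhere on this page (getSchema(), getTables()) plus the standard Table.getName().

MongoDBMetadataProcessor mp = new MongoDBMetadataProcessor();
MetadataFactory mf = processExampleMetadata(mp);
// Print the tables derived from the mocks; this sketch assumes "table", "embedded"
// and "emptyFirst" appear, while "empty" never yields a non-null document.
for (Table t : mf.getSchema().getTables().values()) {
    System.out.println(t.getName());
}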
Use of com.mongodb.DBCursor in project xDrip-plus by jamorham: class MongoWrapper, method ReadFromMongo.
// records will be marked by their timestamp
public List<TransmitterRawData> ReadFromMongo(int numberOfRecords) {
System.out.println("Starting to read from mongodb");
List<TransmitterRawData> trd_list = new LinkedList<TransmitterRawData>();
DBCollection coll;
TransmitterRawData lastTrd = null;
try {
coll = openMongoDb();
BasicDBObject query = new BasicDBObject("RawValue", new BasicDBObject("$exists", true));
DBCursor cursor = coll.find(query);
cursor.sort(new BasicDBObject("CaptureDateTime", -1));
try {
while (cursor.hasNext() && trd_list.size() < numberOfRecords) {
// System.out.println(cursor.next());
Log.d(TAG, "Read an object from mongodb");
TransmitterRawData trd = new TransmitterRawData((BasicDBObject) cursor.next());
// Do our best to fix the relative time...
trd.RelativeTime = new Date().getTime() - trd.CaptureDateTime;
// since we are reading it from the db, it was uploaded...
trd.Uploaded = 1;
if (lastTrd == null) {
trd_list.add(0, trd);
lastTrd = trd;
System.out.println(trd.toTableString());
} else if (!WixelReader.almostEquals(lastTrd, trd)) {
lastTrd = trd;
trd_list.add(0, trd);
System.out.println(trd.toTableString());
}
}
} finally {
cursor.close();
}
} catch (UnknownHostException e) {
Log.e(TAG, "ReadFromMongo: caught UnknownHostException! ", e);
return null;
} catch (MongoException e) {
Log.e(TAG, "ReadFromMongo: caught MongoException! ", e);
return trd_list;
} catch (Exception e) {
Log.e(TAG, "ReadFromMongo: caught Exception! ", e);
closeMongoDb();
return null;
} finally {
closeMongoDb();
}
return trd_list;
}
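The error handling above yields three distinct outcomes: null when the host is unknown or an unexpected exception occurs, a possibly partial list when a MongoException interrupts the read, and a complete list otherwise. A minimal, hypothetical caller sketch reflecting that contract; the wrapper construction is project-specific and omitted, and the method name and log tag are illustrative only.

// Hypothetical caller for MongoWrapper.ReadFromMongo.
void dumpLatestRecords(MongoWrapper mongo) {
    List<TransmitterRawData> records = mongo.ReadFromMongo(50);
    if (records == null) {
        // Unknown host or unexpected failure: nothing usable was read.
        Log.e("MongoWrapperDemo", "Mongo read failed completely");
    } else {
        // On a MongoException the method returns what it managed to read,
        // so the list may be shorter than requested.
        Log.d("MongoWrapperDemo", "Read " + records.size() + " raw records from mongo");
    }
}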