Search in sources :

Example 46 with DBObject

use of com.mongodb.DBObject in project mongo-hadoop by mongodb.

The class TestSharded, method testDirectAccess:

public void testDirectAccess() {
    DBCollection output = getMongos().getDB("mongo_hadoop").getCollection("yield_historical.out");
    output.drop();
    // HADOOP61 - reproduce a failed chunk migration: copy the documents of one
    // chunk onto a shard that does not own that chunk, creating duplicates.
    DB configDb = getMongos().getDB("config");
    DBObject ownedChunk = configDb.getCollection("chunks").findOne(new BasicDBObject("shard", "sh01"));
    // Range query selecting exactly the documents covered by that chunk.
    BasicDBObject idRange = new BasicDBObject("$gte", ((DBObject) ownedChunk.get("min")).get("_id"));
    idRange.append("$lt", ((DBObject) ownedChunk.get("max")).get("_id"));
    DBObject chunkQuery = new BasicDBObject("_id", idRange);
    List<DBObject> chunkDocs = toList(getMongos().getDB("mongo_hadoop").getCollection("yield_historical.in").find(chunkQuery));
    // Insert the chunk's documents directly on a shard, bypassing mongos.
    DBCollection directShard = getShard().getDB("mongo_hadoop").getCollection("yield_historical.in");
    for (DBObject document : chunkDocs) {
        directShard.insert(document, WriteConcern.UNACKNOWLEDGED);
    }
    // First run: split per shard, ignoring chunk boundaries.
    MapReduceJob shardsJob = new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
        .jar(JOBJAR_PATH)
        .param(SPLITS_SLAVE_OK, "true")
        .param(SPLITS_USE_SHARDS, "true")
        .param(SPLITS_USE_CHUNKS, "false")
        .inputUris(new MongoClientURIBuilder(getInputUri()).readPreference(ReadPreference.secondary()).build());
    if (isHadoopV1()) {
        shardsJob.outputCommitter(MongoOutputCommitter.class);
    }
    shardsJob.execute(isRunTestInVm());
    compareResults(output, getReference());
    output.drop();
    // Second run: split per chunk, which must not pick up the duplicates.
    MapReduceJob chunksJob = new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
        .jar(JOBJAR_PATH)
        .inputUris(new MongoClientURIBuilder(getInputUri()).readPreference(ReadPreference.secondary()).build())
        .param(SPLITS_SLAVE_OK, "true")
        .param(SPLITS_USE_SHARDS, "true")
        .param(SPLITS_USE_CHUNKS, "true");
    if (isHadoopV1()) {
        chunksJob.outputCommitter(MongoOutputCommitter.class);
    }
    chunksJob.execute(isRunTestInVm());
    compareResults(output, getReference());
}
Also used : DBCollection(com.mongodb.DBCollection) BasicDBObject(com.mongodb.BasicDBObject) MongoClientURIBuilder(com.mongodb.hadoop.util.MongoClientURIBuilder) MapReduceJob(com.mongodb.hadoop.testutils.MapReduceJob) BasicDBObject(com.mongodb.BasicDBObject) DBObject(com.mongodb.DBObject) DB(com.mongodb.DB)

Example 47 with DBObject

use of com.mongodb.DBObject in project mongo-hadoop by mongodb.

The class HiveMongoInputFormat, method getRecordReader:

@Override
public RecordReader<BSONWritable, BSONWritable> getRecordReader(final InputSplit split, final JobConf conf, final Reporter reporter) throws IOException {
    // The framework hands us a MongoHiveInputSplit wrapping a MongoInputSplit;
    // all pushdown settings go onto the wrapped (delegate) split.
    MongoHiveInputSplit hiveSplit = (MongoHiveInputSplit) split;
    MongoInputSplit wrapped = (MongoInputSplit) hiveSplit.getDelegate();
    // Hive column name -> MongoDB field name mapping from the table properties.
    Map<String, String> columnMap = columnMapping(conf);
    // Push Hive's column projection down to MongoDB.
    DBObject projection = getProjection(conf, columnMap);
    if (projection != null) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Adding MongoDB projection : " + projection);
        }
        wrapped.setFields(projection);
    }
    // Push Hive's predicate down as a MongoDB query; if the table also has a
    // configured query, merge the two with $and so neither is overwritten.
    DBObject query = getFilter(conf, columnMap);
    if (conf.get(MongoConfigUtil.INPUT_QUERY) != null) {
        DBObject tableQuery = MongoConfigUtil.getQuery(conf);
        if (query == null) {
            query = tableQuery;
        } else {
            BasicDBList clauses = new BasicDBList();
            clauses.add(query);
            clauses.add(tableQuery);
            query = new BasicDBObject("$and", clauses);
        }
    }
    if (query != null) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Adding MongoDB query: " + query);
        }
        wrapped.setQuery(query);
    }
    // The reader iterates the delegate MongoInputSplit directly.
    return new MongoRecordReader(wrapped);
}
Also used : BasicDBList(com.mongodb.BasicDBList) BasicDBObject(com.mongodb.BasicDBObject) MongoInputSplit(com.mongodb.hadoop.input.MongoInputSplit) MongoRecordReader(com.mongodb.hadoop.mapred.input.MongoRecordReader) DBObject(com.mongodb.DBObject) BasicDBObject(com.mongodb.BasicDBObject)

Example 48 with DBObject

use of com.mongodb.DBObject in project mongo-hadoop by mongodb.

The class TestHDFSToMongoDBWithOptions, method testMongoMapping:

@Test
@SuppressWarnings("unchecked")
public void testMongoMapping() {
    // Fetch one document so its keys can be checked against the declared
    // 'mongo.columns.mapping' serde property.
    DBObject doc = getCollection(MONGO_COLLECTION).findOne();
    String[] propsSplit = SERDE_PROPERTIES.split("=");
    int propsSplitLen = propsSplit.length;
    // SERDE_PROPERTIES must be an even-length key=value token sequence.
    // (JUnit convention: expected value first.)
    assertEquals(0, propsSplitLen % 2);
    // Locate the 'mongo.columns.mapping' key; its value is the next token.
    String colsMap = null;
    for (int i = 0; i < propsSplitLen && colsMap == null; i++) {
        final String entry = propsSplit[i];
        // BUGFIX: the guard was `i - 1 < propsSplitLen`, which is always true
        // inside this loop; it must be `i + 1 < propsSplitLen` so that
        // propsSplit[i + 1] stays in bounds when the key is the last token.
        if (entry.toLowerCase().equals("'mongo.columns.mapping'") && i + 1 < propsSplitLen) {
            colsMap = propsSplit[i + 1];
        }
    }
    assertNotNull(colsMap);
    // Strip the surrounding single quotes before parsing the JSON mapping.
    colsMap = colsMap.substring(1, colsMap.length() - 1);
    Set<String> docKeys = doc.keySet();
    // Every mapped MongoDB field name must exist in the sample document.
    for (String s : ((Map<String, String>) JSON.parse(colsMap)).values()) {
        assertTrue(docKeys.contains(s));
    }
}
Also used : DBObject(com.mongodb.DBObject) Map(java.util.Map) Test(org.junit.Test)

Example 49 with DBObject

use of com.mongodb.DBObject in project mongo-hadoop by mongodb.

The class HiveMongoInputFormatTest, method testProjectionWithColumnMapping:

@Test
public void testProjectionWithColumnMapping() {
    // Hive columns i, j, id map onto MongoDB fields mongo_i, mongo_j, _id.
    DBObject hiveToMongo = new BasicDBObjectBuilder().add("i", "mongo_i").add("j", "mongo_j").add("id", "_id").get();
    JobConf jobConf = new JobConf();
    jobConf.set(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR, "id,i");
    jobConf.set(BSONSerDe.MONGO_COLS, JSON.serialize(hiveToMongo));
    jobConf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
    // Selecting Hive columns "id,i" must project the mapped MongoDB fields;
    // the _id field is implicitly mapped to the id column in Hive.
    DBObject expected = new BasicDBObjectBuilder().add("mongo_i", 1).add("_id", 1).get();
    assertEquals(expected, inputFormat.getProjection(jobConf, colNameMapping));
}
Also used : BasicDBObjectBuilder(com.mongodb.BasicDBObjectBuilder) DBObject(com.mongodb.DBObject) BasicDBObject(com.mongodb.BasicDBObject) JobConf(org.apache.hadoop.mapred.JobConf) HiveTest(com.mongodb.hadoop.hive.HiveTest) Test(org.junit.Test)

Example 50 with DBObject

use of com.mongodb.DBObject in project mongo-hadoop by mongodb.

The class MongoStorageTest, method testMap:

@Test
public void testMap() throws Exception {
    MongoStorage storage = new MongoStorage();
    BasicDBObjectBuilder docBuilder = BasicDBObjectBuilder.start();
    // A Pig schema declaring a single untyped map field named "m".
    ResourceSchema pigSchema = new ResourceSchema(Utils.getSchemaFromString("m:map[]"));
    Map<String, Object> fieldValues = new HashMap<String, Object>();
    fieldValues.put("f1", 1);
    fieldValues.put("f2", "2");
    storage.writeField(docBuilder, pigSchema.getFields()[0], fieldValues);
    // The map's entries must come out as top-level fields of the document,
    // with their original types preserved.
    DBObject written = docBuilder.get();
    assertEquals(2, written.keySet().size());
    assertEquals(1, written.get("f1"));
    assertEquals("2", written.get("f2"));
}
Also used : BasicDBObjectBuilder(com.mongodb.BasicDBObjectBuilder) ResourceSchema(org.apache.pig.ResourceSchema) HashMap(java.util.HashMap) DBObject(com.mongodb.DBObject) DBObject(com.mongodb.DBObject) Test(org.junit.Test)

Aggregations

DBObject (com.mongodb.DBObject)646 BasicDBObject (com.mongodb.BasicDBObject)445 Test (org.junit.Test)267 YearFilterPagingRequest (org.devgateway.ocds.web.rest.controller.request.YearFilterPagingRequest)95 DBCollection (com.mongodb.DBCollection)84 Aggregation (org.springframework.data.mongodb.core.aggregation.Aggregation)79 ApiOperation (io.swagger.annotations.ApiOperation)71 RequestMapping (org.springframework.web.bind.annotation.RequestMapping)70 Aggregation.newAggregation (org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation)63 CustomProjectionOperation (org.devgateway.toolkit.persistence.mongo.aggregate.CustomProjectionOperation)53 ArrayList (java.util.ArrayList)44 DBCursor (com.mongodb.DBCursor)42 HashMap (java.util.HashMap)40 List (java.util.List)35 ObjectId (org.bson.types.ObjectId)30 BasicDBList (com.mongodb.BasicDBList)29 Map (java.util.Map)28 BasicDBObjectBuilder (com.mongodb.BasicDBObjectBuilder)20 CustomGroupingOperation (org.devgateway.toolkit.persistence.mongo.aggregate.CustomGroupingOperation)20 BSONObject (org.bson.BSONObject)19