Search in sources :

Example 21 with DBCollection

use of com.mongodb.DBCollection in project mongo-hadoop by mongodb.

From the class TestStreaming, method testBasicStreamingJob:

@Test
@Ignore
public void testBasicStreamingJob() {
    // Restrict the streaming job's input to documents whose _id is newer
    // than the given $date cutoff (epoch millis 883440000000 ≈ 1997-12-30).
    Map<String, String> jobParams = new TreeMap<String, String>();
    jobParams.put(MongoConfigUtil.INPUT_QUERY, "{_id:{$gt:{$date:883440000000}}}");

    // Configure and run the streaming job end to end.
    StreamingJob streamingJob = new StreamingJob();
    streamingJob.params(jobParams)
                .inputUris(getInputUri())
                .outputUris(getOutputUri());
    streamingJob.execute();

    // The job writes its results into mongo_hadoop.yield_historical.out;
    // with the query filter applied, exactly 14 documents should remain.
    DBCollection output = getClient(getInputUri())
                              .getDB("mongo_hadoop")
                              .getCollection("yield_historical.out");
    assertEquals(14, output.count());
}
Also used : DBCollection(com.mongodb.DBCollection) TreeMap(java.util.TreeMap) Ignore(org.junit.Ignore) Test(org.junit.Test)

Example 22 with DBCollection

use of com.mongodb.DBCollection in project mongo-hadoop by mongodb.

From the class TestSharded, method testRangeQueries:

@Test
public void testRangeQueries() {
    // Work against the collection named by the configured output URI.
    DBCollection results = getMongos()
                               .getDB(getOutputUri().getDatabase())
                               .getCollection(getOutputUri().getCollection());
    results.drop();

    // First run: range-query splitting enabled, no input filter.
    MapReduceJob rangeJob = new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
                                .jar(JOBJAR_PATH)
                                .inputUris(getInputUri())
                                .outputUri(getOutputUri())
                                .param(SPLITS_USE_RANGEQUERY, "true");
    if (isHadoopV1()) {
        rangeJob.outputCommitter(MongoOutputCommitter.class);
    }
    rangeJob.execute(isRunTestInVm());
    compareResults(results, getReference());

    results.drop();
    // Second run: add an INPUT_QUERY that conflicts with range-query splitting.
    rangeJob.param(INPUT_QUERY, "{\"_id\":{\"$gt\":{\"$date\":1182470400000}}}")
            .execute(isRunTestInVm());
    // Make sure that this fails when rangequery is used with a query that conflicts
    assertFalse("This collection shouldn't exist because of the failure",
                getMongos().getDB("mongo_hadoop").getCollectionNames().contains("yield_historical.out"));
}
Also used : DBCollection(com.mongodb.DBCollection) MapReduceJob(com.mongodb.hadoop.testutils.MapReduceJob) TreasuryYieldXMLConfig(com.mongodb.hadoop.examples.treasury.TreasuryYieldXMLConfig) Test(org.junit.Test)

Example 23 with DBCollection

use of com.mongodb.DBCollection in project mongo-hadoop by mongodb.

From the class TestSharded, method testDirectAccess:

// NOTE(review): unlike the sibling tests, no @Test annotation is visible on this
// method in the excerpt — confirm whether it is registered with the test runner.
public void testDirectAccess() {
    DBCollection outputCollection = getMongos().getDB("mongo_hadoop").getCollection("yield_historical.out");
    outputCollection.drop();

    // HADOOP61 - simulate a failed migration by having some docs from one chunk
    // also exist on another shard who does not own that chunk(duplicates)
    DB config = getMongos().getDB("config");
    DBObject chunk = config.getCollection("chunks").findOne(new BasicDBObject("shard", "sh01"));
    DBObject min = (DBObject) chunk.get("min");
    DBObject max = (DBObject) chunk.get("max");
    DBObject idRange = new BasicDBObject("$gte", min.get("_id")).append("$lt", max.get("_id"));
    DBObject query = new BasicDBObject("_id", idRange);

    // Copy every document in the chunk's _id range directly onto a shard
    // that does not own the chunk, creating orphaned duplicates.
    List<DBObject> chunkDocs = toList(
        getMongos().getDB("mongo_hadoop").getCollection("yield_historical.in").find(query));
    DBCollection shardInput = getShard().getDB("mongo_hadoop").getCollection("yield_historical.in");
    for (DBObject orphan : chunkDocs) {
        shardInput.insert(orphan, WriteConcern.UNACKNOWLEDGED);
    }

    // Run reading shards directly (no chunk awareness); duplicates must not
    // corrupt the results.
    MapReduceJob shardsJob = new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
                                 .jar(JOBJAR_PATH)
                                 .param(SPLITS_SLAVE_OK, "true")
                                 .param(SPLITS_USE_SHARDS, "true")
                                 .param(SPLITS_USE_CHUNKS, "false")
                                 .inputUris(new MongoClientURIBuilder(getInputUri())
                                                .readPreference(ReadPreference.secondary())
                                                .build());
    if (isHadoopV1()) {
        shardsJob.outputCommitter(MongoOutputCommitter.class);
    }
    shardsJob.execute(isRunTestInVm());
    compareResults(outputCollection, getReference());

    outputCollection.drop();
    // Re-run with chunk-based splitting enabled; chunk filtering should skip
    // the orphaned duplicates and still match the reference output.
    MapReduceJob chunksJob = new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
                                 .jar(JOBJAR_PATH)
                                 .inputUris(new MongoClientURIBuilder(getInputUri())
                                                .readPreference(ReadPreference.secondary())
                                                .build())
                                 .param(SPLITS_SLAVE_OK, "true")
                                 .param(SPLITS_USE_SHARDS, "true")
                                 .param(SPLITS_USE_CHUNKS, "true");
    if (isHadoopV1()) {
        chunksJob.outputCommitter(MongoOutputCommitter.class);
    }
    chunksJob.execute(isRunTestInVm());
    compareResults(outputCollection, getReference());
}
Also used : DBCollection(com.mongodb.DBCollection) BasicDBObject(com.mongodb.BasicDBObject) MongoClientURIBuilder(com.mongodb.hadoop.util.MongoClientURIBuilder) MapReduceJob(com.mongodb.hadoop.testutils.MapReduceJob) BasicDBObject(com.mongodb.BasicDBObject) DBObject(com.mongodb.DBObject) DB(com.mongodb.DB)

Example 24 with DBCollection

use of com.mongodb.DBCollection in project mongo-hadoop by mongodb.

From the class HiveMappingTest, method queryBasedHiveTable:

@Test
public void queryBasedHiveTable() throws SQLException {
    String tableName = "filtered";
    DBCollection collection = getCollection(tableName);
    collection.drop();
    dropTable(tableName);

    // Seed 1000 documents; even/odd alternation drives the boolean/string fields.
    int size = 1000;
    for (int i = 0; i < size; i++) {
        boolean even = i % 2 == 0; // hoisted: was computed twice per iteration
        collection.insert(new BasicDBObject("_id", i)
                              .append("intField", i % 10)
                              .append("booleanField", even)
                              .append("stringField", "" + even));
    }

    MongoClientURI uri = authCheck(new MongoClientURIBuilder()
                                       .collection("mongo_hadoop", collection.getName())).build();
    ColumnMapping map = new ColumnMapping()
                            .map("id", "_id", "INT")
                            .map("ints", "intField", "INT")
                            .map("booleans", "booleanField", "BOOLEAN")
                            .map("strings", "stringField", "STRING");
    // The table is created with a query filter: only _id >= 900 is visible to Hive.
    HiveTableBuilder builder = new HiveTableBuilder()
                                   .mapping(map)
                                   .name(tableName)
                                   .uri(uri)
                                   .tableProperty(MongoConfigUtil.INPUT_QUERY, "{_id : {\"$gte\" : 900 }}");
    execute(builder.toString());

    // FIX: JUnit assertEquals is (message, expected, actual); the original had
    // the expected and actual arguments swapped, yielding misleading failure output.
    assertEquals(format("Should find %d items", size), size, collection.count());

    // id=1 is filtered out by the $gte:900 table query.
    Results execute = query(format("SELECT * from %s where id=1", tableName));
    assertEquals(0, execute.size());

    // Only the 100 documents with _id in [900, 1000) pass the filter.
    int expected = size - 900;
    // FIX: same expected/actual swap corrected here.
    assertEquals(format("Should find only %d items", expected),
                 "" + expected,
                 query("SELECT count(*) as count from " + tableName).iterator().next().get(0));
}
Also used : DBCollection(com.mongodb.DBCollection) BasicDBObject(com.mongodb.BasicDBObject) MongoClientURIBuilder(com.mongodb.hadoop.util.MongoClientURIBuilder) MongoClientURI(com.mongodb.MongoClientURI) Test(org.junit.Test)

Example 25 with DBCollection

use of com.mongodb.DBCollection in project mongo-hadoop by mongodb.

From the class HiveMappingTest, method nestedObjects:

@Test
public void nestedObjects() throws SQLException {
    // Map nested Mongo sub-document fields (address.city / address.state)
    // onto flat Hive columns and verify they round-trip through a query.
    DBCollection users = getCollection("hive_addresses");
    users.drop();
    dropTable("hive_addresses");

    users.insert(user(1, "Jim", "Beam", "Clermont", "KY"));
    users.insert(user(2, "Don", "Draper", "New York", "NY"));
    users.insert(user(3, "John", "Elway", "Denver", "CO"));

    MongoClientURI uri = authCheck(new MongoClientURIBuilder()
                                       .collection("mongo_hadoop", users.getName())).build();
    ColumnMapping map = new ColumnMapping()
                            .map("id", "_id", "INT")
                            .map("firstName", "firstName", "STRING")
                            .map("lastName", "lastName", "STRING")
                            .map("city", "address.city", "STRING")
                            .map("state", "address.state", "STRING");
    //, lastName STRING
    execute(format("CREATE TABLE hive_addresses (id INT, firstName STRING, lastName STRING, city STRING, state STRING)\n" + "STORED BY '%s'\n" + "WITH SERDEPROPERTIES('mongo.columns.mapping'='%s')\n" + "TBLPROPERTIES ('mongo.uri'='%s')", MongoStorageHandler.class.getName(), map.toSerDePropertiesString(), uri));

    Results execute = query("SELECT * from hive_addresses");
    assertEquals("KY", execute.getRow(0).get("state"));
    // NOTE(review): lowercase "firstname" key — presumably Hive lowercases
    // column names in result rows; confirm before "fixing" the casing.
    assertEquals("Don", execute.getRow(1).get("firstname"));
    assertEquals("Denver", execute.getRow(2).get("city"));
}
Also used : DBCollection(com.mongodb.DBCollection) MongoClientURIBuilder(com.mongodb.hadoop.util.MongoClientURIBuilder) MongoClientURI(com.mongodb.MongoClientURI) Test(org.junit.Test)

Aggregations

DBCollection (com.mongodb.DBCollection)165 DBObject (com.mongodb.DBObject)90 BasicDBObject (com.mongodb.BasicDBObject)86 Test (org.junit.Test)69 UsingDataSet (com.lordofthejars.nosqlunit.annotation.UsingDataSet)29 DBCursor (com.mongodb.DBCursor)23 MongoException (com.mongodb.MongoException)22 DB (com.mongodb.DB)20 BasicDBObjectBuilder (com.mongodb.BasicDBObjectBuilder)17 FailedDBOperationException (edu.umass.cs.gnscommon.exceptions.server.FailedDBOperationException)12 JSONObject (org.json.JSONObject)12 MongoClientURI (com.mongodb.MongoClientURI)11 QueryBuilder (com.mongodb.QueryBuilder)10 List (java.util.List)10 Map (java.util.Map)10 Stopwatch (com.google.common.base.Stopwatch)9 WriteResult (com.mongodb.WriteResult)9 HashMap (java.util.HashMap)9 IOException (java.io.IOException)8 ArrayList (java.util.ArrayList)8