Example usage of com.mongodb.hadoop.mapred.input.MongoRecordReader in the mongo-hadoop project (by MongoDB), taken from the getRecordReader method of the HiveMongoInputFormat class.
@Override
public RecordReader<BSONWritable, BSONWritable> getRecordReader(final InputSplit split, final JobConf conf, final Reporter reporter) throws IOException {
    // Hive hands us a MongoHiveInputSplit; unwrap the underlying
    // MongoInputSplit so we can attach the projection and query to it.
    final MongoInputSplit mongoSplit =
        (MongoInputSplit) ((MongoHiveInputSplit) split).getDelegate();

    // Mapping of Hive column names to MongoDB field names.
    final Map<String, String> hiveToMongo = columnMapping(conf);

    // Push the Hive column projection down to MongoDB, if one was derived.
    final DBObject projection = getProjection(conf, hiveToMongo);
    if (projection != null) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Adding MongoDB projection : " + projection);
        }
        mongoSplit.setFields(projection);
    }

    // Predicate pushed down from Hive, possibly combined with the
    // table-level query configured under MongoConfigUtil.INPUT_QUERY.
    DBObject query = getFilter(conf, hiveToMongo);
    if (conf.get(MongoConfigUtil.INPUT_QUERY) != null) {
        final DBObject tableQuery = MongoConfigUtil.getQuery(conf);
        if (query == null) {
            query = tableQuery;
        } else {
            // Wrap both in an $and clause so the Hive filter does not
            // overwrite any condition from the table-level query.
            final BasicDBList clauses = new BasicDBList();
            clauses.add(query);
            clauses.add(tableQuery);
            query = new BasicDBObject("$and", clauses);
        }
    }
    if (query != null) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Adding MongoDB query: " + query);
        }
        mongoSplit.setQuery(query);
    }

    // Wrap the fully-configured MongoInputSplit in a record reader for Hive.
    return new MongoRecordReader(mongoSplit);
}
Aggregations