Use of org.apache.hadoop.io.DoubleWritable in project mongo-hadoop by mongodb.
The class BSONWritable, method toBSON.
/**
 * Unwrap a (usually Writable) Object, getting back a value suitable for
 * putting into a BSONObject. If the given object is not Writable, then
 * simply return the Object back.
 *
 * @param x the Object to turn into BSON.
 * @return the BSON representation of the Object.
 */
@SuppressWarnings("unchecked")
public static Object toBSON(final Object x) {
    if (x == null) {
        return null;
    }
    if (x instanceof Text) {
        return x.toString();
    }
    if (x instanceof BSONWritable) {
        return ((BSONWritable) x).getDoc();
    }
    if (x instanceof Writable) {
        if (x instanceof AbstractMapWritable) {
            if (!(x instanceof Map)) {
                throw new IllegalArgumentException(String.format(
                    "Cannot turn %s into BSON, since it does not implement java.util.Map.",
                    x.getClass().getName()));
            }
            Map<Writable, Writable> map = (Map<Writable, Writable>) x;
            BasicBSONObject bson = new BasicBSONObject();
            for (Map.Entry<Writable, Writable> entry : map.entrySet()) {
                bson.put(entry.getKey().toString(), toBSON(entry.getValue()));
            }
            return bson;
        }
        if (x instanceof ArrayWritable) {
            Writable[] o = ((ArrayWritable) x).get();
            Object[] a = new Object[o.length];
            for (int i = 0; i < o.length; i++) {
                a[i] = toBSON(o[i]);
            }
            return a;
        }
        if (x instanceof NullWritable) {
            return null;
        }
        if (x instanceof BooleanWritable) {
            return ((BooleanWritable) x).get();
        }
        if (x instanceof BytesWritable) {
            return ((BytesWritable) x).getBytes();
        }
        if (x instanceof ByteWritable) {
            return ((ByteWritable) x).get();
        }
        if (x instanceof DoubleWritable) {
            return ((DoubleWritable) x).get();
        }
        if (x instanceof FloatWritable) {
            return ((FloatWritable) x).get();
        }
        if (x instanceof LongWritable) {
            return ((LongWritable) x).get();
        }
        if (x instanceof IntWritable) {
            return ((IntWritable) x).get();
        }
        // TODO - Support counters
    }
    return x;
}
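As a quick illustration of the recursive unwrapping above, the following hypothetical snippet (field names and values are invented, not taken from the project) converts a MapWritable into a BasicBSONObject by calling the static helper directly:

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.bson.BasicBSONObject;

// Illustrative only: build a MapWritable of Writable values and unwrap it.
public void demoToBSON() {
    MapWritable fields = new MapWritable();
    fields.put(new Text("year"), new IntWritable(1990));
    fields.put(new Text("label"), new Text("treasury"));

    // MapWritable extends AbstractMapWritable and implements Map, so the map
    // branch above applies and each value is converted recursively.
    BasicBSONObject doc = (BasicBSONObject) BSONWritable.toBSON(fields);
    // doc now holds "year" -> 1990 and "label" -> "treasury" as plain Java values.
}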
Use of org.apache.hadoop.io.DoubleWritable in project mongo-hadoop by mongodb.
The class TreasuryYieldUpdateReducer, method reduce.
@Override
public void reduce(final IntWritable pKey, final Iterable<DoubleWritable> pValues, final Context pContext)
    throws IOException, InterruptedException {
    int count = 0;
    double sum = 0;
    for (final DoubleWritable value : pValues) {
        sum += value.get();
        count++;
    }
    final double avg = sum / count;
    if (LOG.isDebugEnabled()) {
        LOG.debug("Average 10 Year Treasury for " + pKey.get() + " was " + avg);
    }
    BasicBSONObject query = new BasicBSONObject("_id", pKey.get());
    BasicBSONObject modifiers = new BasicBSONObject();
    modifiers.put("$set", BasicDBObjectBuilder.start().add("count", count).add("avg", avg).add("sum", sum).get());
    modifiers.put("$push", new BasicBSONObject("calculatedAt", new Date()));
    modifiers.put("$inc", new BasicBSONObject("numCalculations", 1));
    reduceResult.setQuery(query);
    reduceResult.setModifiers(modifiers);
    pContext.write(null, reduceResult);
}
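The reduceResult field is declared outside the snippet. A plausible skeleton of the enclosing class, with the generic parameters and the MongoUpdateWritable output type inferred from the setQuery()/setModifiers() calls and the write(null, reduceResult) line (so treat the exact signature as an assumption), might look like this:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Reducer;
import com.mongodb.hadoop.io.MongoUpdateWritable;

// Sketch only: generic parameters are inferred, not copied from the project.
public class TreasuryYieldUpdateReducer
    extends Reducer<IntWritable, DoubleWritable, NullWritable, MongoUpdateWritable> {

    private static final Log LOG = LogFactory.getLog(TreasuryYieldUpdateReducer.class);

    // Reused for every key; setQuery()/setModifiers() overwrite it on each reduce() call.
    private final MongoUpdateWritable reduceResult = new MongoUpdateWritable();

    // reduce(...) as shown above: emits an update (query plus modifiers) per year.
}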
Use of org.apache.hadoop.io.DoubleWritable in project mongo-hadoop by mongodb.
The class TreasuryYieldReducer, method reduce.
@Override
public void reduce(final IntWritable pKey, final Iterable<DoubleWritable> pValues, final Context pContext)
    throws IOException, InterruptedException {
    int count = 0;
    double sum = 0;
    for (final DoubleWritable value : pValues) {
        sum += value.get();
        count++;
    }
    final double avg = sum / count;
    if (LOG.isDebugEnabled()) {
        LOG.debug("Average 10 Year Treasury for " + pKey.get() + " was " + avg);
    }
    BasicBSONObject output = new BasicBSONObject();
    output.put("count", count);
    output.put("avg", avg);
    output.put("sum", sum);
    reduceResult.setDoc(output);
    pContext.write(pKey, reduceResult);
}
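This variant emits a full result document keyed by year instead of an update. Assuming the output types implied by setDoc() and write(pKey, reduceResult), a sketch of the enclosing class could be:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Reducer;
import com.mongodb.hadoop.io.BSONWritable;

// Sketch only: generic parameters are inferred from the reduce() signature above.
public class TreasuryYieldReducer
    extends Reducer<IntWritable, DoubleWritable, IntWritable, BSONWritable> {

    private static final Log LOG = LogFactory.getLog(TreasuryYieldReducer.class);

    // Created once and refilled via setDoc() for each key.
    private final BSONWritable reduceResult = new BSONWritable();

    // reduce(...) as shown above: emits {count, avg, sum} per year.
}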
Use of org.apache.hadoop.io.DoubleWritable in project mongo-hadoop by mongodb.
The class BSONWritableTest, method testToBSON.
@Test
public void testToBSON() {
    assertEquals(null, toBSON(null));
    assertEquals(null, toBSON(NullWritable.get()));
    assertEquals("hello", toBSON(new Text("hello")));

    DBObject obj = new BasicDBObject("hello", "world");
    assertEquals(obj, toBSON(new BSONWritable(obj)));

    final BasicBSONObject bsonResult = new BasicBSONObject("one", 1);
    SortedMapWritable smw = new SortedMapWritable();
    smw.put(new Text("one"), new IntWritable(1));
    assertEquals(bsonResult, toBSON(smw));

    MapWritable mw = new MapWritable();
    mw.put(new Text("one"), new IntWritable(1));
    assertEquals(bsonResult, toBSON(mw));

    String[] expectedObjects = new String[]{"one", "two"};
    Writable[] writableObjects = new Writable[]{new Text("one"), new Text("two")};
    ArrayWritable aw = new ArrayWritable(Text.class, writableObjects);
    Object[] actual = (Object[]) toBSON(aw);
    assertTrue(Arrays.equals(expectedObjects, actual));

    assertEquals(false, toBSON(new BooleanWritable(false)));

    byte[] bytes = new byte[]{'0', '1', '2'};
    assertEquals(bytes, toBSON(new BytesWritable(bytes)));

    byte b = (byte) 'c';
    assertEquals(b, toBSON(new ByteWritable(b)));

    assertEquals(3.14159, toBSON(new DoubleWritable(3.14159)));
    assertEquals(3.14159f, toBSON(new FloatWritable(3.14159f)));
    assertEquals(42L, toBSON(new LongWritable(42L)));
    assertEquals(42, toBSON(new IntWritable(42)));

    // Catchall
    assertEquals("hi", toBSON("hi"));
}
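The test exercises flat values; because toBSON recurses into map entries and array elements, nested Writables convert as well. A hypothetical additional test (names and values invented, not from the project) would be:

@Test
public void testToBSONNested() {
    // A MapWritable whose value is itself an ArrayWritable of DoubleWritables.
    MapWritable nested = new MapWritable();
    nested.put(new Text("rates"), new ArrayWritable(DoubleWritable.class,
        new Writable[]{new DoubleWritable(1.5), new DoubleWritable(2.25)}));

    BasicBSONObject doc = (BasicBSONObject) toBSON(nested);
    Object[] rates = (Object[]) doc.get("rates");
    assertEquals(1.5, rates[0]);
    assertEquals(2.25, rates[1]);
}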
Use of org.apache.hadoop.io.DoubleWritable in project hive by apache.
The class TestETypeConverter, method testGetDoubleConverter.
@Test
public void testGetDoubleConverter() throws Exception {
    MyConverterParent converterParent = new MyConverterParent();
    PrimitiveType primitiveType = Types.optional(PrimitiveTypeName.DOUBLE).named("value");
    PrimitiveConverter converter = ETypeConverter.getNewConverter(primitiveType, 1, converterParent, null);
    converter.addDouble(3276);
    Writable writable = converterParent.getValue();
    DoubleWritable doubleWritable = (DoubleWritable) writable;
    assertEquals(3276, doubleWritable.get(), 0);
}
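For reference, org.apache.hadoop.io.DoubleWritable is just a Writable box around a primitive double; a minimal, self-contained round trip through Hadoop's serialization (not part of either project, shown only to illustrate the wrapper) looks like this:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.DoubleWritable;

public void doubleWritableRoundTrip() throws IOException {
    DoubleWritable original = new DoubleWritable(3276);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    original.write(new DataOutputStream(buffer));    // serializes the 8-byte double

    DoubleWritable copy = new DoubleWritable();
    copy.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
    assert copy.get() == 3276;                       // value survives the round trip
}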