Use of org.apache.hadoop.io.MapWritable in the Apache Hive project:
the class JdbcSerDe, method deserialize().
@Override
public Object deserialize(Writable blob) throws SerDeException {
  LOGGER.debug("Deserializing from SerDe");
  // Validate the input type and the SerDe's initialization state up front.
  if (!(blob instanceof MapWritable)) {
    throw new SerDeException("Expected MapWritable. Got " + blob.getClass().getName());
  }
  if ((row == null) || (columnNames == null)) {
    throw new SerDeException("JDBC SerDe hasn't been initialized properly");
  }
  // Rebuild the reusable row list from the column-name -> value map.
  row.clear();
  final MapWritable columnMap = (MapWritable) blob;
  final Text lookupKey = new Text(); // reused across iterations to avoid per-column allocation
  for (int col = 0; col < numColumns; col++) {
    lookupKey.set(columnNames.get(col));
    final Writable cell = columnMap.get(lookupKey);
    // Missing columns become SQL NULLs; everything else is carried as its string form.
    row.add(cell == null ? null : cell.toString());
  }
  return row;
}
Use of org.apache.hadoop.io.MapWritable in the wonderdog project by infochimps-labs:
the class ElasticSearchIndex, method putNext().
/**
 * Map a tuple object into a map-writable object for elasticsearch.
 * <p>
 * Fields beyond the configured field-name list are ignored, and null tuple
 * fields are skipped rather than indexed.
 *
 * @param t the Pig tuple to index
 * @throws IOException if the underlying record writer fails or is interrupted
 */
@SuppressWarnings("unchecked")
@Override
public void putNext(Tuple t) throws IOException {
    UDFContext context = UDFContext.getUDFContext();
    Properties property = context.getUDFProperties(ResourceSchema.class);
    MapWritable record = new MapWritable();
    String[] fieldNames = property.getProperty(PIG_ES_FIELD_NAMES).split(COMMA);
    for (int i = 0; i < t.size(); i++) {
        if (i < fieldNames.length) {
            // Explicit null check instead of catching NullPointerException:
            // null fields are intentionally omitted from the indexed record.
            Object field = t.get(i);
            if (field != null) {
                record.put(new Text(fieldNames[i]), new Text(field.toString()));
            }
        }
    }
    try {
        writer.write(NullWritable.get(), record);
    } catch (InterruptedException e) {
        // Restore the interrupt status before translating to IOException,
        // so callers up the stack can still observe the interruption.
        Thread.currentThread().interrupt();
        throw new IOException(e);
    }
}
Use of org.apache.hadoop.io.MapWritable in the mongo-hadoop project by mongodb:
the class BSONWritableTest, method testToBSON().
@Test
public void testToBSON() {
    // Nulls and null-like writables map to null.
    assertEquals(null, toBSON(null));
    assertEquals(null, toBSON(NullWritable.get()));
    // Text unwraps to a plain String.
    assertEquals("hello", toBSON(new Text("hello")));
    // BSONWritable unwraps to the wrapped DBObject.
    DBObject wrapped = new BasicDBObject("hello", "world");
    assertEquals(wrapped, toBSON(new BSONWritable(wrapped)));
    // Both map flavors convert to an equivalent BSON object.
    final BasicBSONObject expectedMap = new BasicBSONObject("one", 1);
    SortedMapWritable sortedMap = new SortedMapWritable();
    sortedMap.put(new Text("one"), new IntWritable(1));
    assertEquals(expectedMap, toBSON(sortedMap));
    MapWritable plainMap = new MapWritable();
    plainMap.put(new Text("one"), new IntWritable(1));
    assertEquals(expectedMap, toBSON(plainMap));
    // ArrayWritable becomes an Object[] of unwrapped elements.
    String[] expectedElements = new String[] { "one", "two" };
    Writable[] textElements = new Writable[] { new Text("one"), new Text("two") };
    ArrayWritable arrayWritable = new ArrayWritable(Text.class, textElements);
    Object[] converted = (Object[]) toBSON(arrayWritable);
    assertTrue(Arrays.equals(expectedElements, converted));
    // Primitive wrappers unwrap to their boxed values.
    assertEquals(false, toBSON(new BooleanWritable(false)));
    byte[] raw = new byte[] { '0', '1', '2' };
    assertEquals(raw, toBSON(new BytesWritable(raw)));
    byte single = (byte) 'c';
    assertEquals(single, toBSON(new ByteWritable(single)));
    assertEquals(3.14159, toBSON(new DoubleWritable(3.14159)));
    assertEquals(3.14159f, toBSON(new FloatWritable(3.14159f)));
    assertEquals(42L, toBSON(new LongWritable(42L)));
    assertEquals(42, toBSON(new IntWritable(42)));
    // Catchall: anything else passes through unchanged.
    assertEquals("hi", toBSON("hi"));
}
Use of org.apache.hadoop.io.MapWritable in the Apache Gora project:
the class WritableUtils, method writeProperties().
/**
 * Serializes the given properties to the output stream as a MapWritable
 * whose keys and values are the Text form of each property entry.
 *
 * @param out   destination stream
 * @param props properties to serialize
 * @throws IOException if writing to the stream fails
 */
public static final void writeProperties(DataOutput out, Properties props) throws IOException {
  MapWritable serialized = new MapWritable();
  // Properties is a Hashtable, so keys and values are never null here.
  props.forEach((key, value) ->
      serialized.put(new Text(key.toString()), new Text(value.toString())));
  serialized.write(out);
}
Aggregations