Use of org.apache.hadoop.hive.serde2.lazy.LazyMap in project cdap by caskdata.
The fromLazyObject method of the ObjectSerializer class.
private Object fromLazyObject(TypeInfo type, Object data) {
  if (data == null) {
    return null;
  }

  switch (type.getCategory()) {
    case PRIMITIVE:
      // Primitives are unwrapped from their Hadoop Writable representation.
      Writable writable = ((LazyPrimitive) data).getWritableObject();
      return fromWritable(writable);

    case LIST:
      // Recursively convert each element of the LazyArray.
      ListTypeInfo listType = (ListTypeInfo) type;
      TypeInfo listElementType = listType.getListElementTypeInfo();

      List<Object> list = ((LazyArray) data).getList();
      if (list.isEmpty()) {
        return ImmutableList.of();
      }

      Object[] arrayContent = new Object[list.size()];
      for (int i = 0; i < arrayContent.length; i++) {
        arrayContent[i] = fromLazyObject(listElementType, list.get(i));
      }
      return arrayContent;

    case MAP:
      // Recursively convert the keys and values of the LazyMap.
      MapTypeInfo mapType = (MapTypeInfo) type;

      Map<Object, Object> mapContent = Maps.newConcurrentMap();
      Map<Object, Object> map = ((LazyMap) data).getMap();
      for (Map.Entry<Object, Object> entry : map.entrySet()) {
        mapContent.put(fromLazyObject(mapType.getMapKeyTypeInfo(), entry.getKey()),
                       fromLazyObject(mapType.getMapValueTypeInfo(), entry.getValue()));
      }
      return mapContent;

    case STRUCT:
      // Struct fields become a map from field name to converted field value.
      StructTypeInfo structType = (StructTypeInfo) type;
      List<TypeInfo> info = structType.getAllStructFieldTypeInfos();
      List<String> names = structType.getAllStructFieldNames();

      Map<String, Object> structMap = Maps.newConcurrentMap();
      List<Object> struct = ((LazyStruct) data).getFieldsAsList();
      for (int structIndex = 0; structIndex < info.size(); structIndex++) {
        structMap.put(names.get(structIndex),
                      fromLazyObject(info.get(structIndex), struct.get(structIndex)));
      }
      return structMap;

    case UNION:
      throw new UnsupportedOperationException("union not yet supported");

    default:
      return data.toString();
  }
}
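For orientation, below is a minimal, hedged sketch that is not part of the CDAP source. It shows how the TypeInfo argument consumed by fromLazyObject is typically resolved from a Hive type string using the standard Hive serde2 helper TypeInfoUtils.getTypeInfoFromTypeString; the TypeInfoExample class and the printed values in the comments are illustrative assumptions only.

import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeInfoExample {
  public static void main(String[] args) {
    // A Hive column declared as map<string,int> resolves to a MapTypeInfo;
    // its key and value TypeInfos are what the MAP branch above recurses on.
    TypeInfo type = TypeInfoUtils.getTypeInfoFromTypeString("map<string,int>");
    MapTypeInfo mapType = (MapTypeInfo) type;
    System.out.println(mapType.getMapKeyTypeInfo().getTypeName());   // string
    System.out.println(mapType.getMapValueTypeInfo().getTypeName()); // int
    // For such a column, fromLazyObject would unwrap a LazyMap into a plain
    // java.util.Map of already-deserialized keys and values.
  }
}

A struct column (for example struct<a:string,b:int>) resolves to a StructTypeInfo in the same way, which is what the STRUCT branch above iterates over field by field.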