Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
The class AvroSerializer, method serializeList.
private Object serializeList(ListTypeInfo typeInfo, ListObjectInspector fieldOI, Object structFieldData, Schema schema) throws AvroSerdeException {
    List<?> list = fieldOI.getList(structFieldData);
    List<Object> deserialized = new GenericData.Array<Object>(list.size(), schema);
    TypeInfo listElementTypeInfo = typeInfo.getListElementTypeInfo();
    ObjectInspector listElementObjectInspector = fieldOI.getListElementObjectInspector();
    Schema elementType = schema.getElementType();
    for (int i = 0; i < list.size(); i++) {
        deserialized.add(i, serialize(listElementTypeInfo, listElementObjectInspector, list.get(i), elementType));
    }
    return deserialized;
}
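For context, here is a minimal standalone sketch of the same pattern for an array<string> column: the element Schema comes from the array schema and the result is an Avro GenericData.Array sized to the incoming list. The class name AvroListSketch and the hard-coded schema are illustrative only, not part of the Hive source.

import java.util.Arrays;
import java.util.List;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class AvroListSketch {
    public static void main(String[] args) {
        // Avro schema for array<string> and the matching Hive list inspector.
        Schema arraySchema = Schema.createArray(Schema.create(Schema.Type.STRING));
        ListObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector);

        List<String> row = Arrays.asList("a", "b", "c");

        // Same moves as serializeList: pull the Java list through the inspector,
        // then copy the elements into a GenericData.Array tied to the array schema.
        List<?> list = listOI.getList(row);
        GenericData.Array<Object> avroArray =
                new GenericData.Array<Object>(list.size(), arraySchema);
        for (Object element : list) {
            avroArray.add(element); // plain strings need no further conversion here
        }
        System.out.println(avroArray); // [a, b, c]
    }
}

Note that the Hive method names its result deserialized even though it is building the serialized Avro value; the indexed add(i, ...) preserves element order.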
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project mongo-hadoop by mongodb.
The class BSONSerDeTest, method testList.
@Test
public void testList() throws SerDeException {
    String columnNames = "a";
    String columnTypes = "array<string>";
    String inner = "inside";
    ArrayList<String> value = new ArrayList<String>();
    value.add(inner);
    BasicBSONList b = new BasicBSONList();
    b.add(inner);
    BSONSerDe serde = new BSONSerDe();
    Object result = helpDeserialize(serde, columnNames, columnTypes, b);
    assertThat(value.toArray(), equalTo(result));
    // Since objectid is currently taken to be a string
    ObjectInspector innerInspector = PrimitiveObjectInspectorFactory.getPrimitiveObjectInspectorFromClass(String.class);
    ListObjectInspector listInspector = ObjectInspectorFactory.getStandardListObjectInspector(innerInspector);
    BasicBSONObject bObject = new BasicBSONObject();
    Object serialized = helpSerialize(columnNames, listInspector, bObject, value, serde);
    assertThat(new BSONWritable(bObject), equalTo(serialized));
}
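The helpDeserialize and helpSerialize helpers are not shown in this excerpt. Below is a rough sketch of the round trip such helpers presumably perform, using the standard SerDe lifecycle: initialize with the columns and columns.types table properties, then deserialize a BSONWritable and serialize back through the SerDe's row inspector. The property handling and the BSONWritable wrapping are assumptions about the helpers, not code from the mongo-hadoop tests.

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.bson.BasicBSONObject;
import org.bson.types.BasicBSONList;

import com.mongodb.hadoop.hive.BSONSerDe;
import com.mongodb.hadoop.io.BSONWritable;

public class BsonListRoundTripSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("columns", "a");                    // serdeConstants.LIST_COLUMNS
        props.setProperty("columns.types", "array<string>");  // serdeConstants.LIST_COLUMN_TYPES

        BSONSerDe serde = new BSONSerDe();
        serde.initialize(new Configuration(), props);

        // Deserialize: wrap the BSON document in a BSONWritable, as MapReduce would.
        BasicBSONList names = new BasicBSONList();
        names.add("inside");
        BasicBSONObject doc = new BasicBSONObject("a", names);
        Object row = serde.deserialize(new BSONWritable(doc));

        // Serialize back through the SerDe's own row inspector.
        ObjectInspector rowOI = serde.getObjectInspector();
        Object writable = serde.serialize(row, rowOI);
        System.out.println(writable); // expected to hold { "a" : [ "inside" ] }
    }
}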
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project mongo-hadoop by mongodb.
The class BSONSerDe, method serializeList.
private Object serializeList(final Object obj, final ListObjectInspector oi, final String ext) {
    BasicBSONList list = new BasicBSONList();
    List<?> field = oi.getList(obj);
    if (field == null) {
        return list;
    }
    ObjectInspector elemOI = oi.getListElementObjectInspector();
    for (Object elem : field) {
        list.add(serializeObject(elem, elemOI, ext));
    }
    return list;
}
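serializeObject is BSONSerDe's private recursive dispatcher, so nested lists are handled by recursing through getListElementObjectInspector(). A small standalone sketch of just that inspector plumbing for an array<array<string>> value follows; the class name is illustrative and the BSON construction is omitted.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class NestedListInspectorSketch {
    public static void main(String[] args) {
        // Inspector for array<array<string>>
        ListObjectInspector innerOI = ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        ListObjectInspector outerOI = ObjectInspectorFactory.getStandardListObjectInspector(innerOI);

        List<List<String>> data = Arrays.asList(Arrays.asList("a", "b"), Arrays.asList("c"));

        // getListElementObjectInspector() hands back the inner ListObjectInspector,
        // which is what lets a recursive serializer descend into nested lists.
        ObjectInspector elemOI = outerOI.getListElementObjectInspector();
        System.out.println(elemOI.getCategory());           // LIST
        System.out.println(outerOI.getList(data));          // [[a, b], [c]]
        System.out.println(((ListObjectInspector) elemOI)
                .getList(outerOI.getListElement(data, 0))); // [a, b]
    }
}

One design note readable from the method above: a null Hive field is serialized as an empty BasicBSONList rather than null, so the column still appears as a (empty) BSON array in the output document.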
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project asterixdb by apache.
The class HiveRecordParser, method parseUnorderedList.
private void parseUnorderedList(AUnorderedListType uoltype, Object obj, ListObjectInspector oi) throws IOException {
    UnorderedListBuilder unorderedListBuilder = getUnorderedListBuilder();
    IAType itemType = null;
    if (uoltype != null)
        itemType = uoltype.getItemType();
    byte tagByte = itemType.getTypeTag().serialize();
    unorderedListBuilder.reset(uoltype);
    int n = oi.getListLength(obj);
    for (int i = 0; i < n; i++) {
        Object element = oi.getListElement(obj, i);
        ObjectInspector eoi = oi.getListElementObjectInspector();
        if (element == null) {
            throw new RuntimeDataException(ErrorCode.PARSER_HIVE_NULL_VALUE_IN_LIST);
        }
        listItemBuffer.reset();
        final DataOutput dataOutput = listItemBuffer.getDataOutput();
        dataOutput.writeByte(tagByte);
        parseItem(itemType, element, eoi, dataOutput, true);
        unorderedListBuilder.addItem(listItemBuffer);
    }
    unorderedListBuilder.write(fieldValueBuffer.getDataOutput(), true);
}
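On the Hive side, this parser only needs the index-based list accessors. Below is a minimal sketch of that access pattern, including the null-element rejection; the AsterixDB builders (UnorderedListBuilder, listItemBuffer, parseItem) are internal to HiveRecordParser and are left out.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class IndexedListAccessSketch {
    public static void main(String[] args) {
        ListObjectInspector oi = ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        List<String> values = Arrays.asList("x", null, "y");

        // The element inspector is the same for every element, so it can be fetched once.
        ObjectInspector elementOI = oi.getListElementObjectInspector();

        int n = oi.getListLength(values);
        for (int i = 0; i < n; i++) {
            Object element = oi.getListElement(values, i);
            if (element == null) {
                // HiveRecordParser throws RuntimeDataException(PARSER_HIVE_NULL_VALUE_IN_LIST) here.
                System.out.println("null element at index " + i);
                continue;
            }
            System.out.println(i + " -> " + element + " (" + elementOI.getTypeName() + ")");
        }
    }
}

The original method calls getListElementObjectInspector() inside the loop; since the element inspector does not change per element, the sketch hoists it out of the loop.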
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
The class DataWritableWriter, method createWriter.
/**
 * Creates a writer for the specific object inspector. The returned writer will be used
 * to call the Parquet API for the specific data type.
 * @param inspector The object inspector used to get the correct value type.
 * @param type Type that contains information about the type schema.
 * @return A DataWriter object used to call the Parquet API for the specific data type.
 */
private DataWriter createWriter(ObjectInspector inspector, Type type) {
    if (type.isPrimitive()) {
        checkInspectorCategory(inspector, ObjectInspector.Category.PRIMITIVE);
        PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector) inspector;
        switch (primitiveInspector.getPrimitiveCategory()) {
            case BOOLEAN:
                return new BooleanDataWriter((BooleanObjectInspector) inspector);
            case BYTE:
                return new ByteDataWriter((ByteObjectInspector) inspector);
            case SHORT:
                return new ShortDataWriter((ShortObjectInspector) inspector);
            case INT:
                return new IntDataWriter((IntObjectInspector) inspector);
            case LONG:
                return new LongDataWriter((LongObjectInspector) inspector);
            case FLOAT:
                return new FloatDataWriter((FloatObjectInspector) inspector);
            case DOUBLE:
                return new DoubleDataWriter((DoubleObjectInspector) inspector);
            case STRING:
                return new StringDataWriter((StringObjectInspector) inspector);
            case CHAR:
                return new CharDataWriter((HiveCharObjectInspector) inspector);
            case VARCHAR:
                return new VarcharDataWriter((HiveVarcharObjectInspector) inspector);
            case BINARY:
                return new BinaryDataWriter((BinaryObjectInspector) inspector);
            case TIMESTAMP:
                return new TimestampDataWriter((TimestampObjectInspector) inspector);
            case DECIMAL:
                return new DecimalDataWriter((HiveDecimalObjectInspector) inspector);
            case DATE:
                return new DateDataWriter((DateObjectInspector) inspector);
            default:
                throw new IllegalArgumentException("Unsupported primitive data type: " + primitiveInspector.getPrimitiveCategory());
        }
    } else {
        GroupType groupType = type.asGroupType();
        OriginalType originalType = type.getOriginalType();
        if (originalType != null && originalType.equals(OriginalType.LIST)) {
            checkInspectorCategory(inspector, ObjectInspector.Category.LIST);
            return new ListDataWriter((ListObjectInspector) inspector, groupType);
        } else if (originalType != null && originalType.equals(OriginalType.MAP)) {
            checkInspectorCategory(inspector, ObjectInspector.Category.MAP);
            return new MapDataWriter((MapObjectInspector) inspector, groupType);
        } else {
            checkInspectorCategory(inspector, ObjectInspector.Category.STRUCT);
            return new StructDataWriter((StructObjectInspector) inspector, groupType);
        }
    }
}
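To show which branch of this dispatch fires for a list column, here is a hedged sketch that builds a Parquet group annotated as OriginalType.LIST next to the matching Hive inspector. The field names ("names", "bag", "array_element") and the group layout are illustrative, and createWriter itself is private, so the sketch only mirrors its originalType / inspector-category check.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.apache.parquet.schema.Type.Repetition;

public class ListBranchSketch {
    public static void main(String[] args) {
        // Parquet schema for an optional list of strings (a LIST-annotated group).
        PrimitiveType element = new PrimitiveType(
                Repetition.OPTIONAL, PrimitiveTypeName.BINARY, "array_element", OriginalType.UTF8);
        GroupType bag = new GroupType(Repetition.REPEATED, "bag", element);
        GroupType listType = new GroupType(Repetition.OPTIONAL, "names", OriginalType.LIST, bag);

        // The corresponding Hive inspector for array<string>.
        ObjectInspector inspector = ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector);

        // createWriter's non-primitive path: a LIST OriginalType pairs with a LIST-category
        // inspector, which is the precondition for constructing a ListDataWriter.
        boolean takesListBranch = !listType.isPrimitive()
                && OriginalType.LIST.equals(listType.getOriginalType())
                && inspector.getCategory() == ObjectInspector.Category.LIST;
        System.out.println(takesListBranch); // true
        System.out.println(listType);        // prints the group schema
    }
}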