Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
Class MetaStoreUtils, method getFieldsFromDeserializer.
/**
 * @param tableName dot-separated name; the first component is the table name and any
 *                  remaining components drill into nested fields ($elem$, $key$, $value$)
 * @param deserializer deserializer whose ObjectInspector describes the rows
 * @return the list of fields
 * @throws SerDeException
 * @throws MetaException
 */
public static List<FieldSchema> getFieldsFromDeserializer(String tableName, Deserializer deserializer) throws SerDeException, MetaException {
  ObjectInspector oi = deserializer.getObjectInspector();
  String[] names = tableName.split("\\.");
  String last_name = names[names.length - 1];
  for (int i = 1; i < names.length; i++) {
    if (oi instanceof StructObjectInspector) {
      StructObjectInspector soi = (StructObjectInspector) oi;
      StructField sf = soi.getStructFieldRef(names[i]);
      if (sf == null) {
        throw new MetaException("Invalid Field " + names[i]);
      } else {
        oi = sf.getFieldObjectInspector();
      }
    } else if (oi instanceof ListObjectInspector && names[i].equalsIgnoreCase("$elem$")) {
      ListObjectInspector loi = (ListObjectInspector) oi;
      oi = loi.getListElementObjectInspector();
    } else if (oi instanceof MapObjectInspector && names[i].equalsIgnoreCase("$key$")) {
      MapObjectInspector moi = (MapObjectInspector) oi;
      oi = moi.getMapKeyObjectInspector();
    } else if (oi instanceof MapObjectInspector && names[i].equalsIgnoreCase("$value$")) {
      MapObjectInspector moi = (MapObjectInspector) oi;
      oi = moi.getMapValueObjectInspector();
    } else {
      throw new MetaException("Unknown type for " + names[i]);
    }
  }
  ArrayList<FieldSchema> str_fields = new ArrayList<FieldSchema>();
  // rules on how to recurse the ObjectInspector based on its type
  if (oi.getCategory() != Category.STRUCT) {
    str_fields.add(new FieldSchema(last_name, oi.getTypeName(), FROM_SERIALIZER));
  } else {
    List<? extends StructField> fields = ((StructObjectInspector) oi).getAllStructFieldRefs();
    for (int i = 0; i < fields.size(); i++) {
      StructField structField = fields.get(i);
      String fieldName = structField.getFieldName();
      String fieldTypeName = structField.getFieldObjectInspector().getTypeName();
      String fieldComment = determineFieldComment(structField.getFieldComment());
      str_fields.add(new FieldSchema(fieldName, fieldTypeName, fieldComment));
    }
  }
  return str_fields;
}
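A minimal usage sketch (not from the Hive sources): it assumes LazySimpleSerDe's two-argument initialize and the standard "columns"/"columns.types" serde properties, and the package location of MetaStoreUtils varies across Hive releases. It shows how a dot-separated name drills into nested types via the $elem$/$key$/$value$ conventions handled above.

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.serdeConstants;

public class FieldsFromDeserializerSketch {
  public static void main(String[] args) throws Exception {
    // Describe a two-column row layout: a string and an array<int>.
    Properties props = new Properties();
    props.setProperty(serdeConstants.LIST_COLUMNS, "name,scores");
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,array<int>");

    LazySimpleSerDe serde = new LazySimpleSerDe();
    serde.initialize(new Configuration(), props);

    // "t" alone lists the struct's columns; "t.scores.$elem$" would instead drill
    // into the array's element type using the $elem$ convention handled above.
    for (FieldSchema field : MetaStoreUtils.getFieldsFromDeserializer("t", serde)) {
      System.out.println(field.getName() + " : " + field.getType());
    }
  }
}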
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project hive by apache.
Class MultiDelimitSerDe, method serializeNoEncode.
// This is basically the same as LazySimpleSerDe.serialize. Except that we don't use
// Base64 to encode binary data because we're using printable string as delimiter.
// Consider such a row "strAQ==\1", str is a string, AQ== is the delimiter and \1
// is the binary data.
private static void serializeNoEncode(ByteStream.Output out, Object obj, ObjectInspector objInspector, byte[] separators, int level, Text nullSequence, boolean escaped, byte escapeChar, boolean[] needsEscape) throws IOException, SerDeException {
  if (obj == null) {
    out.write(nullSequence.getBytes(), 0, nullSequence.getLength());
    return;
  }
  char separator;
  List<?> list;
  switch (objInspector.getCategory()) {
    case PRIMITIVE:
      PrimitiveObjectInspector oi = (PrimitiveObjectInspector) objInspector;
      if (oi.getPrimitiveCategory() == PrimitiveCategory.BINARY) {
        BytesWritable bw = ((BinaryObjectInspector) oi).getPrimitiveWritableObject(obj);
        byte[] toWrite = new byte[bw.getLength()];
        System.arraycopy(bw.getBytes(), 0, toWrite, 0, bw.getLength());
        out.write(toWrite, 0, toWrite.length);
      } else {
        LazyUtils.writePrimitiveUTF8(out, obj, oi, escaped, escapeChar, needsEscape);
      }
      return;
    case LIST:
      separator = (char) separators[level];
      ListObjectInspector loi = (ListObjectInspector) objInspector;
      list = loi.getList(obj);
      ObjectInspector eoi = loi.getListElementObjectInspector();
      if (list == null) {
        out.write(nullSequence.getBytes(), 0, nullSequence.getLength());
      } else {
        for (int i = 0; i < list.size(); i++) {
          if (i > 0) {
            out.write(separator);
          }
          serializeNoEncode(out, list.get(i), eoi, separators, level + 1, nullSequence, escaped, escapeChar, needsEscape);
        }
      }
      return;
    case MAP:
      separator = (char) separators[level];
      char keyValueSeparator = (char) separators[level + 1];
      MapObjectInspector moi = (MapObjectInspector) objInspector;
      ObjectInspector koi = moi.getMapKeyObjectInspector();
      ObjectInspector voi = moi.getMapValueObjectInspector();
      Map<?, ?> map = moi.getMap(obj);
      if (map == null) {
        out.write(nullSequence.getBytes(), 0, nullSequence.getLength());
      } else {
        boolean first = true;
        for (Map.Entry<?, ?> entry : map.entrySet()) {
          if (first) {
            first = false;
          } else {
            out.write(separator);
          }
          serializeNoEncode(out, entry.getKey(), koi, separators, level + 2, nullSequence, escaped, escapeChar, needsEscape);
          out.write(keyValueSeparator);
          serializeNoEncode(out, entry.getValue(), voi, separators, level + 2, nullSequence, escaped, escapeChar, needsEscape);
        }
      }
      return;
    case STRUCT:
      separator = (char) separators[level];
      StructObjectInspector soi = (StructObjectInspector) objInspector;
      List<? extends StructField> fields = soi.getAllStructFieldRefs();
      list = soi.getStructFieldsDataAsList(obj);
      if (list == null) {
        out.write(nullSequence.getBytes(), 0, nullSequence.getLength());
      } else {
        for (int i = 0; i < list.size(); i++) {
          if (i > 0) {
            out.write(separator);
          }
          serializeNoEncode(out, list.get(i), fields.get(i).getFieldObjectInspector(), separators, level + 1, nullSequence, escaped, escapeChar, needsEscape);
        }
      }
      return;
  }
  throw new RuntimeException("Unknown category type: " + objInspector.getCategory());
}
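The non-obvious part of the LIST, MAP, and STRUCT branches is that each nesting level consumes the next byte in the separators array (map keys and values drop two levels because one separator is reserved for the key/value delimiter). The following standalone sketch, which is illustrative only and not part of MultiDelimitSerDe, reproduces that separator-per-level scheme for plain Java nested lists.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

public class NestedSeparatorSketch {
  // Write nested lists using separators[level] between siblings at each depth,
  // mirroring how the LIST case above passes level + 1 into the recursive call.
  static void writeNestedList(ByteArrayOutputStream out, Object value, byte[] separators, int level)
      throws IOException {
    if (value instanceof List) {
      List<?> list = (List<?>) value;
      for (int i = 0; i < list.size(); i++) {
        if (i > 0) {
          out.write(separators[level]);
        }
        writeNestedList(out, list.get(i), separators, level + 1);
      }
    } else {
      out.write(String.valueOf(value).getBytes(StandardCharsets.UTF_8));
    }
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    byte[] separators = {(byte) 1, (byte) 2}; // ^A at level 0, ^B at level 1
    writeNestedList(out, Arrays.asList(Arrays.asList("a", "b"), Arrays.asList("c")), separators, 0);
    // Produces: a^Bb^Ac
  }
}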
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project druid by druid-io.
Class OrcHadoopInputRowParser, method parse.
@Override
public InputRow parse(OrcStruct input) {
  Map<String, Object> map = Maps.newHashMap();
  List<? extends StructField> fields = oip.getAllStructFieldRefs();
  for (StructField field : fields) {
    ObjectInspector objectInspector = field.getFieldObjectInspector();
    switch (objectInspector.getCategory()) {
      case PRIMITIVE:
        PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector) objectInspector;
        map.put(field.getFieldName(), primitiveObjectInspector.getPrimitiveJavaObject(oip.getStructFieldData(input, field)));
        break;
      case LIST:
        // array case - only 1-depth array supported yet
        ListObjectInspector listObjectInspector = (ListObjectInspector) objectInspector;
        map.put(field.getFieldName(), getListObject(listObjectInspector, oip.getStructFieldData(input, field)));
        break;
      default:
        break;
    }
  }
  TimestampSpec timestampSpec = parseSpec.getTimestampSpec();
  DateTime dateTime = timestampSpec.extractTimestamp(map);
  return new MapBasedInputRow(dateTime, dimensions, map);
}
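parse() delegates list columns to a getListObject helper that is not shown here. A plausible shape for such a helper, consistent with the call above and with the one-level-deep limitation noted in the comment, would sit alongside parse() and might look like this (a sketch, not Druid's actual implementation):

// Unwrap a Hive LIST value and convert primitive elements to plain Java objects
// via their element inspector; deeper nesting is passed through untouched,
// matching the "1-depth" note above.
private static List<Object> getListObject(ListObjectInspector listInspector, Object listData) {
  if (listData == null) {
    return null;
  }
  List<?> raw = listInspector.getList(listData);
  ObjectInspector elementInspector = listInspector.getListElementObjectInspector();
  List<Object> result = new ArrayList<>(raw.size());
  for (Object element : raw) {
    if (elementInspector.getCategory() == ObjectInspector.Category.PRIMITIVE) {
      result.add(((PrimitiveObjectInspector) elementInspector).getPrimitiveJavaObject(element));
    } else {
      result.add(element);
    }
  }
  return result;
}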
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project presto by prestodb.
Class OrcTester, method hasType.
private static boolean hasType(ObjectInspector objectInspector, PrimitiveCategory... types) {
  if (objectInspector instanceof PrimitiveObjectInspector) {
    PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector) objectInspector;
    PrimitiveCategory primitiveCategory = primitiveInspector.getPrimitiveCategory();
    for (PrimitiveCategory type : types) {
      if (primitiveCategory == type) {
        return true;
      }
    }
    return false;
  }
  if (objectInspector instanceof ListObjectInspector) {
    ListObjectInspector listInspector = (ListObjectInspector) objectInspector;
    return hasType(listInspector.getListElementObjectInspector(), types);
  }
  if (objectInspector instanceof MapObjectInspector) {
    MapObjectInspector mapInspector = (MapObjectInspector) objectInspector;
    return hasType(mapInspector.getMapKeyObjectInspector(), types) || hasType(mapInspector.getMapValueObjectInspector(), types);
  }
  if (objectInspector instanceof StructObjectInspector) {
    for (StructField field : ((StructObjectInspector) objectInspector).getAllStructFieldRefs()) {
      if (hasType(field.getFieldObjectInspector(), types)) {
        return true;
      }
    }
    return false;
  }
  throw new IllegalArgumentException("Unknown object inspector type " + objectInspector);
}
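A hypothetical caller, with illustrative field names: build a standard list-of-struct inspector using Hive's ObjectInspectorFactory and ask whether any leaf anywhere in the nested type is a DATE or TIMESTAMP.

// Hypothetical usage of hasType; the inspector factories are Hive's standard ones.
ObjectInspector rowInspector = ObjectInspectorFactory.getStandardListObjectInspector(
    ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("id", "created_at"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaLongObjectInspector,
            PrimitiveObjectInspectorFactory.javaTimestampObjectInspector)));

// true: the nested struct has a TIMESTAMP leaf ("created_at")
boolean hasTemporal = hasType(rowInspector, PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP);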
Use of org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector in project presto by prestodb.
Class SerDeUtils, method serializeList.
private static Block serializeList(Type type, BlockBuilder builder, Object object, ListObjectInspector inspector) {
  List<?> list = inspector.getList(object);
  if (list == null) {
    requireNonNull(builder, "parent builder is null").appendNull();
    return null;
  }
  List<Type> typeParameters = type.getTypeParameters();
  checkArgument(typeParameters.size() == 1, "list must have exactly 1 type parameter");
  Type elementType = typeParameters.get(0);
  ObjectInspector elementInspector = inspector.getListElementObjectInspector();
  BlockBuilder currentBuilder;
  if (builder != null) {
    currentBuilder = builder.beginBlockEntry();
  } else {
    currentBuilder = elementType.createBlockBuilder(new BlockBuilderStatus(), list.size());
  }
  for (Object element : list) {
    serializeObject(elementType, currentBuilder, element, elementInspector);
  }
  if (builder != null) {
    builder.closeEntry();
    return null;
  }
  return currentBuilder.build();
}
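A sketch of a top-level call, purely illustrative: serializeList is private to SerDeUtils, so this would live inside that class, and arrayOfBigint stands in for a Presto array(bigint) Type obtained elsewhere (e.g. from the type manager). With a null parent builder the method allocates its own BlockBuilder and returns the finished Block; with a non-null parent it appends a nested entry and returns null.

// Illustrative only; 'arrayOfBigint' is a placeholder for an array(bigint) Type.
ListObjectInspector longListInspector =
    ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaLongObjectInspector);
// Top-level: no parent builder, so the built Block is returned.
Block topLevel = serializeList(arrayOfBigint, null, Arrays.asList(1L, 2L, 3L), longListInspector);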