use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project presto by prestodb.
the class OrcTester, method preprocessWriteValueOld.
private static Object preprocessWriteValueOld(TypeInfo typeInfo, Object value)
{
    if (value == null) {
        return null;
    }
    switch (typeInfo.getCategory()) {
        case PRIMITIVE:
            PrimitiveObjectInspector.PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
            switch (primitiveCategory) {
                case BOOLEAN:
                    return value;
                case BYTE:
                    return ((Number) value).byteValue();
                case SHORT:
                    return ((Number) value).shortValue();
                case INT:
                    return ((Number) value).intValue();
                case LONG:
                    return ((Number) value).longValue();
                case FLOAT:
                    return ((Number) value).floatValue();
                case DOUBLE:
                    return ((Number) value).doubleValue();
                case DECIMAL:
                    return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
                case STRING:
                    return value;
                case CHAR:
                    return new HiveChar(value.toString(), ((CharTypeInfo) typeInfo).getLength());
                case DATE:
                    int days = ((SqlDate) value).getDays();
                    LocalDate localDate = LocalDate.ofEpochDay(days);
                    ZonedDateTime zonedDateTime = localDate.atStartOfDay(ZoneId.systemDefault());
                    long millis = zonedDateTime.toEpochSecond() * 1000;
                    Date date = new Date(0);
                    // millis must be set separately to avoid masking
                    date.setTime(millis);
                    return date;
                case TIMESTAMP:
                    long millisUtc = ((SqlTimestamp) value).getMillisUtc();
                    return new Timestamp(millisUtc);
                case BINARY:
                    return ((SqlVarbinary) value).getBytes();
            }
            break;
        case MAP:
            MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
            TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
            Map<Object, Object> newMap = new HashMap<>();
            for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
                newMap.put(preprocessWriteValueOld(keyTypeInfo, entry.getKey()), preprocessWriteValueOld(valueTypeInfo, entry.getValue()));
            }
            return newMap;
        case LIST:
            ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
            TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
            List<Object> newList = new ArrayList<>(((Collection<?>) value).size());
            for (Object element : (Iterable<?>) value) {
                newList.add(preprocessWriteValueOld(elementTypeInfo, element));
            }
            return newList;
        case STRUCT:
            StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
            List<?> fieldValues = (List<?>) value;
            List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
            List<Object> newStruct = new ArrayList<>();
            for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
                newStruct.add(preprocessWriteValueOld(fieldTypeInfos.get(fieldId), fieldValues.get(fieldId)));
            }
            return newStruct;
    }
    throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type: %s", typeInfo));
}
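To see the LIST branch in action, here is a minimal, hypothetical driver; TypeInfoUtils.getTypeInfoFromTypeString is a real Hive helper, but the sample data and the call site are illustrative and not part of OrcTester:

    // Hypothetical usage sketch: preprocess an array<int> value for the old writer path.
    TypeInfo listOfInt = TypeInfoUtils.getTypeInfoFromTypeString("array<int>");
    List<Object> raw = Arrays.asList(1L, 2L, null);
    Object prepared = preprocessWriteValueOld(listOfInt, raw);
    // The LIST case recursed into the PRIMITIVE INT case for each element,
    // so 'prepared' is a new ArrayList of Integer (nulls are passed through).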
use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project cdap by caskdata.
the class ObjectSerializer, method serializeList.
private Object serializeList(List<Object> list, ListTypeInfo typeInfo) {
  // need to recurse since it may contain structs
  TypeInfo elementType = typeInfo.getListElementTypeInfo();
  List<Object> serialized = Lists.newArrayListWithCapacity(list.size());
  for (int i = 0; i < list.size(); i++) {
    serialized.add(i, serialize(list.get(i), elementType));
  }
  return serialized;
}
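For a nested array the recursion unwinds one level per call; a hedged sketch of how such a type and value could be built for testing (getTypeInfoFromTypeString is a real Hive helper, the sample data is made up):

    ListTypeInfo nested = (ListTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("array<array<string>>");
    List<Object> value = Lists.newArrayList((Object) Lists.newArrayList("a", "b"));
    // serializeList(value, nested) calls serialize(...) once per inner list;
    // serialize would dispatch back into serializeList with element type array<string>.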
use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project cdap by caskdata.
the class ObjectDeserializer, method deserializeList.
private Object deserializeList(Object listField, ListTypeInfo typeInfo,
                               Schema elementSchema) throws NoSuchFieldException, IllegalAccessException {
  TypeInfo listElementType = typeInfo.getListElementTypeInfo();
  List<Object> hiveList = Lists.newArrayList();
  if (listField instanceof Collection) {
    for (Object obj : (Collection<?>) listField) {
      hiveList.add(deserializeField(obj, listElementType, elementSchema));
    }
  } else {
    // not a Collection, so it is assumed to be a Java array; walk it reflectively
    for (int i = 0; i < Array.getLength(listField); i++) {
      hiveList.add(deserializeField(Array.get(listField, i), listElementType, elementSchema));
    }
  }
  return hiveList;
}
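The else branch is what lets primitive arrays such as int[] flow through, since they are not Collections and cannot be cast to Object[]; a small standalone illustration of that reflective walk (the loop body is a placeholder, not CDAP's deserializeField):

    Object listField = new int[] {1, 2, 3};        // primitive arrays are not Collections
    for (int i = 0; i < Array.getLength(listField); i++) {
      Object element = Array.get(listField, i);    // boxes each int to an Integer
      // hand 'element' to the per-element deserializer here
    }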
use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project hive by apache.
the class TestAvroObjectInspectorGenerator, method canHandleArrays.
@Test
public void canHandleArrays() throws SerDeException {
  Schema s = AvroSerdeUtils.getSchemaFor(ARRAY_WITH_PRIMITIVE_ELEMENT_TYPE);
  AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);

  // Column names
  assertEquals(1, aoig.getColumnNames().size());
  assertEquals("anArray", aoig.getColumnNames().get(0));

  // Column types
  assertEquals(1, aoig.getColumnTypes().size());
  TypeInfo typeInfo = aoig.getColumnTypes().get(0);
  assertEquals(ObjectInspector.Category.LIST, typeInfo.getCategory());
  assertTrue(typeInfo instanceof ListTypeInfo);
  ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
  assertEquals("string", listTypeInfo.getListElementTypeInfo().getTypeName());
}
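The exact ARRAY_WITH_PRIMITIVE_ELEMENT_TYPE constant is defined elsewhere in the test class; a record schema of roughly this shape would satisfy the assertions above (the record name and layout here are inferred, only the field name "anArray" and the string element type are fixed by the test):

    // Inferred shape only: a record with one field "anArray" of type array<string>.
    private static final String ARRAY_WITH_PRIMITIVE_ELEMENT_TYPE =
        "{\"type\": \"record\", \"name\": \"oneArray\", \"fields\": [" +
        "  {\"name\": \"anArray\", \"type\": {\"type\": \"array\", \"items\": \"string\"}}]}";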
use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project hive by apache.
the class TestTypeInfoToSchema, method createAvroListSchema.
@Test
public void createAvroListSchema() {
  ListTypeInfo listTypeInfo = new ListTypeInfo();
  listTypeInfo.setListElementTypeInfo(STRING);
  final String specificSchema = Schema.createArray(
      Schema.createUnion(Arrays.asList(
          Schema.create(Schema.Type.NULL),
          Schema.create(Schema.Type.STRING)))).toString();
  String expectedSchema = genSchema(specificSchema);
  Assert.assertEquals("Test for list's avro schema failed",
      expectedSchema, getAvroSchemaString(listTypeInfo));
}
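The union with NULL is there because TypeInfoToSchema emits nullable element types; evaluated on its own, the array portion of the expected schema serializes as follows (STRING and genSchema are helpers defined elsewhere in the test class):

    Schema array = Schema.createArray(Schema.createUnion(Arrays.asList(
        Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.STRING))));
    // array.toString() yields: {"type":"array","items":["null","string"]}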