use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project hive by apache.
the class ExprNodeConverter method visitFieldAccess.
/**
 * Handles expressions like struct(key,value).key.
 * Follows the same rules as TypeCheckProcFactory::getXpathOrFuncExprNodeDesc(),
 * which is the equivalent path when such an expression is parsed from the AST.
 *
 * TODO: Handle 1) cast, 2) windowing agg call
 */
@Override
public ExprNodeDesc visitFieldAccess(RexFieldAccess fieldAccess) {
  ExprNodeDesc parent = fieldAccess.getReferenceExpr().accept(this);
  String child = fieldAccess.getField().getName();
  TypeInfo parentType = parent.getTypeInfo();
  // Allow accessing a field of list element structs directly from a list
  boolean isList = (parentType.getCategory() == ObjectInspector.Category.LIST);
  if (isList) {
    parentType = ((ListTypeInfo) parentType).getListElementTypeInfo();
  }
  TypeInfo t = ((StructTypeInfo) parentType).getStructFieldTypeInfo(child);
  return new ExprNodeFieldDesc(t, parent, child, isList);
}
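To see the type arithmetic this conversion relies on, here is a small self-contained sketch (the array<struct<...>> type string and the field name "key" are illustrative, not taken from the snippet above) that unwraps a list type to its element struct and looks up a field's type, mirroring the isList branch:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class FieldAccessSketch {
  public static void main(String[] args) {
    // The type of a column declared as array<struct<key:int,value:string>>.
    TypeInfo parentType = TypeInfoUtils.getTypeInfoFromTypeString(
        "array<struct<key:int,value:string>>");

    boolean isList = parentType.getCategory() == ObjectInspector.Category.LIST;
    if (isList) {
      // Unwrap the list to its element type, as visitFieldAccess does.
      parentType = ((ListTypeInfo) parentType).getListElementTypeInfo();
    }
    // Resolve the type of the accessed field: int for "key".
    TypeInfo fieldType = ((StructTypeInfo) parentType).getStructFieldTypeInfo("key");
    System.out.println(fieldType.getTypeName()); // prints "int"
  }
}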
use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project hive by apache.
the class FunctionRegistry method matchCost.
/**
 * Returns -1 if the passed type does not match the accepted type; otherwise
 * returns the cost (usually 0 for no conversion and 1 for conversion).
 */
public static int matchCost(TypeInfo argumentPassed, TypeInfo argumentAccepted, boolean exact) {
  if (argumentAccepted.equals(argumentPassed)
      || TypeInfoUtils.doPrimitiveCategoriesMatch(argumentPassed, argumentAccepted)) {
    // matches
    return 0;
  }
  if (argumentPassed.equals(TypeInfoFactory.voidTypeInfo)) {
    // passing null matches everything
    return 0;
  }
  if (argumentPassed.getCategory().equals(Category.LIST) && argumentAccepted.getCategory().equals(Category.LIST)) {
    // lists are compatible if and only if their elements are compatible
    TypeInfo argumentPassedElement = ((ListTypeInfo) argumentPassed).getListElementTypeInfo();
    TypeInfo argumentAcceptedElement = ((ListTypeInfo) argumentAccepted).getListElementTypeInfo();
    return matchCost(argumentPassedElement, argumentAcceptedElement, exact);
  }
  if (argumentPassed.getCategory().equals(Category.MAP) && argumentAccepted.getCategory().equals(Category.MAP)) {
    // maps are compatible if and only if their keys and values are compatible
    TypeInfo argumentPassedKey = ((MapTypeInfo) argumentPassed).getMapKeyTypeInfo();
    TypeInfo argumentAcceptedKey = ((MapTypeInfo) argumentAccepted).getMapKeyTypeInfo();
    TypeInfo argumentPassedValue = ((MapTypeInfo) argumentPassed).getMapValueTypeInfo();
    TypeInfo argumentAcceptedValue = ((MapTypeInfo) argumentAccepted).getMapValueTypeInfo();
    int cost1 = matchCost(argumentPassedKey, argumentAcceptedKey, exact);
    int cost2 = matchCost(argumentPassedValue, argumentAcceptedValue, exact);
    if (cost1 < 0 || cost2 < 0) {
      return -1;
    }
    return Math.max(cost1, cost2);
  }
  if (argumentAccepted.equals(TypeInfoFactory.unknownTypeInfo)) {
    // accepting "unknown" means accepting everything,
    // but there is a conversion cost.
    return 1;
  }
  if (!exact && TypeInfoUtils.implicitConvertible(argumentPassed, argumentAccepted)) {
    return 1;
  }
  return -1;
}
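A quick illustration of how the LIST branch recurses into element types; the type strings are illustrative, and the expected costs assume Hive's usual implicit numeric-to-string conversion:

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class MatchCostSketch {
  public static void main(String[] args) {
    TypeInfo listOfInt = TypeInfoUtils.getTypeInfoFromTypeString("array<int>");
    TypeInfo listOfString = TypeInfoUtils.getTypeInfoFromTypeString("array<string>");

    // Identical element types: no conversion needed.
    System.out.println(FunctionRegistry.matchCost(listOfInt, listOfInt, false)); // 0

    // int -> string is implicitly convertible, so the lists match at cost 1.
    System.out.println(FunctionRegistry.matchCost(listOfInt, listOfString, false)); // 1

    // With exact = true the implicit conversion is disallowed: no match.
    System.out.println(FunctionRegistry.matchCost(listOfInt, listOfString, true)); // -1
  }
}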
use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project hive by apache.
the class HCatSchemaUtils method getHCatFieldSchema.
private static HCatFieldSchema getHCatFieldSchema(String fieldName, TypeInfo fieldTypeInfo, String comment) throws HCatException {
  Category typeCategory = fieldTypeInfo.getCategory();
  HCatFieldSchema hCatFieldSchema;
  if (Category.PRIMITIVE == typeCategory) {
    hCatFieldSchema = new HCatFieldSchema(fieldName, (PrimitiveTypeInfo) fieldTypeInfo, comment);
  } else if (Category.STRUCT == typeCategory) {
    HCatSchema subSchema = constructHCatSchema((StructTypeInfo) fieldTypeInfo);
    hCatFieldSchema = new HCatFieldSchema(fieldName, HCatFieldSchema.Type.STRUCT, subSchema, comment);
  } else if (Category.LIST == typeCategory) {
    HCatSchema subSchema = getHCatSchema(((ListTypeInfo) fieldTypeInfo).getListElementTypeInfo());
    hCatFieldSchema = new HCatFieldSchema(fieldName, HCatFieldSchema.Type.ARRAY, subSchema, comment);
  } else if (Category.MAP == typeCategory) {
    HCatSchema subSchema = getHCatSchema(((MapTypeInfo) fieldTypeInfo).getMapValueTypeInfo());
    hCatFieldSchema = HCatFieldSchema.createMapTypeFieldSchema(fieldName, (PrimitiveTypeInfo) ((MapTypeInfo) fieldTypeInfo).getMapKeyTypeInfo(), subSchema, comment);
  } else {
    throw new TypeNotPresentException(fieldTypeInfo.getTypeName(), null);
  }
  return hCatFieldSchema;
}
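A minimal usage sketch of driving this conversion for a list column, assuming the getHCatSchema(TypeInfo) overload called above is publicly accessible (the array<struct<...>> type string is illustrative):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;

public class HCatSchemaSketch {
  public static void main(String[] args) throws HCatException {
    // A Hive list-of-struct column type; the LIST branch above wraps the
    // element schema in an ARRAY field.
    TypeInfo t = TypeInfoUtils.getTypeInfoFromTypeString(
        "array<struct<id:int,name:string>>");
    HCatSchema schema = HCatSchemaUtils.getHCatSchema(t);
    System.out.println(schema);
  }
}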
use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project mongo-hadoop by mongodb.
the class BSONSerDe method deserializeList.
/**
 * Deserializes a list whose elements all share the same listElemTypeInfo.
 * @param value the value for which to get the Hive representation
 * @param valueTypeInfo a description of the value's type
 * @param ext the field name
 * @return the Hive representation of the value
 */
private Object deserializeList(final Object value, final ListTypeInfo valueTypeInfo, final String ext) {
  BasicBSONList list = (BasicBSONList) value;
  TypeInfo listElemTypeInfo = valueTypeInfo.getListElementTypeInfo();
  for (int i = 0; i < list.size(); i++) {
    list.set(i, deserializeField(list.get(i), listElemTypeInfo, ext));
  }
  return list.toArray();
}
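As a standalone sketch of the same pattern, element-wise conversion driven by the list's single element TypeInfo looks like this; the trivial deserializePrimitive helper is hypothetical and stands in for the SerDe's real deserializeField dispatch:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class ListDeserializeSketch {

  // Stand-in for BSONSerDe.deserializeField: here it just passes values
  // through; a real SerDe would convert based on elemType's category.
  static Object deserializePrimitive(Object value, TypeInfo elemType) {
    return value;
  }

  static Object[] deserializeList(List<Object> list, ListTypeInfo listType) {
    // Every element shares the one element TypeInfo; convert in place.
    TypeInfo elemType = listType.getListElementTypeInfo();
    for (int i = 0; i < list.size(); i++) {
      list.set(i, deserializePrimitive(list.get(i), elemType));
    }
    return list.toArray();
  }

  public static void main(String[] args) {
    ListTypeInfo t = (ListTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("array<int>");
    List<Object> values = new ArrayList<>(List.of(1, 2, 3));
    Object[] hiveList = deserializeList(values, t);
    System.out.println(hiveList.length); // 3
  }
}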
use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project phoenix by apache.
the class PhoenixObjectInspectorFactory method createObjectInspector.
public static ObjectInspector createObjectInspector(TypeInfo type, LazySerDeParameters serdeParams) {
  ObjectInspector oi = null;
  if (LOG.isDebugEnabled()) {
    LOG.debug("Type : " + type);
  }
  switch (type.getCategory()) {
    case PRIMITIVE:
      switch (((PrimitiveTypeInfo) type).getPrimitiveCategory()) {
        case BOOLEAN:
          oi = new PhoenixBooleanObjectInspector();
          break;
        case BYTE:
          oi = new PhoenixByteObjectInspector();
          break;
        case SHORT:
          oi = new PhoenixShortObjectInspector();
          break;
        case INT:
          oi = new PhoenixIntObjectInspector();
          break;
        case LONG:
          oi = new PhoenixLongObjectInspector();
          break;
        case FLOAT:
          oi = new PhoenixFloatObjectInspector();
          break;
        case DOUBLE:
          oi = new PhoenixDoubleObjectInspector();
          break;
        case VARCHAR:
          // fall through: VARCHAR is handled the same way as STRING
        case STRING:
          oi = new PhoenixStringObjectInspector(serdeParams.isEscaped(), serdeParams.getEscapeChar());
          break;
        case CHAR:
          oi = new PhoenixCharObjectInspector((PrimitiveTypeInfo) type);
          break;
        case DATE:
          oi = new PhoenixDateObjectInspector();
          break;
        case TIMESTAMP:
          oi = new PhoenixTimestampObjectInspector();
          break;
        case DECIMAL:
          oi = new PhoenixDecimalObjectInspector((PrimitiveTypeInfo) type);
          break;
        default:
          throw new RuntimeException("Hive internal error. not supported data type: " + type);
      }
      break;
    case LIST:
      if (LOG.isDebugEnabled()) {
        LOG.debug("List type started");
      }
      ObjectInspector listElementObjectInspector =
          createObjectInspector(((ListTypeInfo) type).getListElementTypeInfo(), serdeParams);
      if (LOG.isDebugEnabled()) {
        LOG.debug("List type ended");
      }
      oi = new PhoenixListObjectInspector(listElementObjectInspector, serdeParams.getSeparators()[0], serdeParams);
      break;
    default:
      throw new RuntimeException("Hive internal error. not supported data type : " + type);
  }
  return oi;
}
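A hedged usage sketch for the LIST case; the table properties and SerDe name are illustrative, the PhoenixObjectInspectorFactory import path is assumed to be the phoenix-hive module's, and LazySerDeParameters is assumed to be constructible from a bare Configuration plus column properties:

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.phoenix.hive.objectinspector.PhoenixObjectInspectorFactory;

public class PhoenixOiSketch {
  public static void main(String[] args) throws Exception {
    Properties tbl = new Properties();
    tbl.setProperty(serdeConstants.LIST_COLUMNS, "tags");
    tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "array<string>");
    LazySerDeParameters serdeParams =
        new LazySerDeParameters(new Configuration(), tbl, "PhoenixSerDe");

    // The LIST branch above recurses once for the string element inspector,
    // then wraps it in a PhoenixListObjectInspector.
    ObjectInspector oi = PhoenixObjectInspectorFactory.createObjectInspector(
        TypeInfoUtils.getTypeInfoFromTypeString("array<string>"), serdeParams);
    System.out.println(oi.getCategory()); // LIST
  }
}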