Search in sources :

Example 21 with ListTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project hive by apache.

From the class ExprNodeConverter, method visitFieldAccess.

/**
 * Converts a Calcite field access such as struct(key,value).key into a Hive
 * {@link ExprNodeFieldDesc}. Follows the same rules as
 * TypeCheckProcFactory::getXpathOrFuncExprNodeDesc(), which is the equivalent
 * version of parsing such an expression from the AST.
 *
 * TODO: Handle 1) cast, 2) Windowing Agg Call
 */
@Override
public ExprNodeDesc visitFieldAccess(RexFieldAccess fieldAccess) {
    ExprNodeDesc referenced = fieldAccess.getReferenceExpr().accept(this);
    String fieldName = fieldAccess.getField().getName();
    TypeInfo sourceType = referenced.getTypeInfo();
    // A field of a list of structs may be accessed directly from the list;
    // in that case the struct lookup happens on the list's element type.
    final boolean fromList = sourceType.getCategory() == ObjectInspector.Category.LIST;
    if (fromList) {
        sourceType = ((ListTypeInfo) sourceType).getListElementTypeInfo();
    }
    TypeInfo fieldType = ((StructTypeInfo) sourceType).getStructFieldTypeInfo(fieldName);
    return new ExprNodeFieldDesc(fieldType, referenced, fieldName, fromList);
}
Also used : ExprNodeFieldDesc(org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)

Example 22 with ListTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project hive by apache.

From the class FunctionRegistry, method matchCost.

/**
   * Computes the cost of matching a passed argument type against an accepted
   * (declared) argument type.
   *
   * @param argumentPassed   the type of the argument actually supplied
   * @param argumentAccepted the type the function declares it accepts
   * @param exact            if true, only exact matches are allowed; implicit
   *                         conversions are not considered
   * @return -1 if passed does not match accepted. Otherwise return the cost
   *         (usually 0 for no conversion and 1 for conversion).
   */
public static int matchCost(TypeInfo argumentPassed, TypeInfo argumentAccepted, boolean exact) {
    if (argumentAccepted.equals(argumentPassed) || TypeInfoUtils.doPrimitiveCategoriesMatch(argumentPassed, argumentAccepted)) {
        // exact type match (or equivalent primitive categories): no conversion needed
        return 0;
    }
    if (argumentPassed.equals(TypeInfoFactory.voidTypeInfo)) {
        // passing null matches everything
        return 0;
    }
    if (argumentPassed.getCategory().equals(Category.LIST) && argumentAccepted.getCategory().equals(Category.LIST)) {
        // lists are compatible if and only if their element types are compatible
        TypeInfo argumentPassedElement = ((ListTypeInfo) argumentPassed).getListElementTypeInfo();
        TypeInfo argumentAcceptedElement = ((ListTypeInfo) argumentAccepted).getListElementTypeInfo();
        return matchCost(argumentPassedElement, argumentAcceptedElement, exact);
    }
    if (argumentPassed.getCategory().equals(Category.MAP) && argumentAccepted.getCategory().equals(Category.MAP)) {
        // maps are compatible if and only if both their key and value types are compatible
        TypeInfo argumentPassedKey = ((MapTypeInfo) argumentPassed).getMapKeyTypeInfo();
        TypeInfo argumentAcceptedKey = ((MapTypeInfo) argumentAccepted).getMapKeyTypeInfo();
        TypeInfo argumentPassedValue = ((MapTypeInfo) argumentPassed).getMapValueTypeInfo();
        TypeInfo argumentAcceptedValue = ((MapTypeInfo) argumentAccepted).getMapValueTypeInfo();
        int cost1 = matchCost(argumentPassedKey, argumentAcceptedKey, exact);
        int cost2 = matchCost(argumentPassedValue, argumentAcceptedValue, exact);
        if (cost1 < 0 || cost2 < 0) {
            return -1;
        }
        // the overall map cost is the worse of the key and value costs
        return Math.max(cost1, cost2);
    }
    if (argumentAccepted.equals(TypeInfoFactory.unknownTypeInfo)) {
        // the accepted "unknown" type matches anything,
        // but there is a conversion cost.
        return 1;
    }
    if (!exact && TypeInfoUtils.implicitConvertible(argumentPassed, argumentAccepted)) {
        // an implicit conversion exists; charge a cost of 1 for it
        return 1;
    }
    return -1;
}
Also used : ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)

Example 23 with ListTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project hive by apache.

From the class HCatSchemaUtils, method getHCatFieldSchema.

/**
 * Builds an {@link HCatFieldSchema} for a single field described by a Hive
 * {@link TypeInfo}, recursing into nested struct/list/map types.
 *
 * @param fieldName     name of the field
 * @param fieldTypeInfo Hive type information for the field
 * @param comment       optional field comment
 * @return the corresponding HCatalog field schema
 * @throws HCatException         if schema construction fails
 * @throws TypeNotPresentException if the type category is not supported
 */
private static HCatFieldSchema getHCatFieldSchema(String fieldName, TypeInfo fieldTypeInfo, String comment) throws HCatException {
    switch (fieldTypeInfo.getCategory()) {
        case PRIMITIVE:
            return new HCatFieldSchema(fieldName, (PrimitiveTypeInfo) fieldTypeInfo, comment);
        case STRUCT:
            // nested struct: recursively build the sub-schema from its fields
            return new HCatFieldSchema(fieldName, HCatFieldSchema.Type.STRUCT,
                    constructHCatSchema((StructTypeInfo) fieldTypeInfo), comment);
        case LIST:
            // list: the sub-schema describes the element type
            return new HCatFieldSchema(fieldName, HCatFieldSchema.Type.ARRAY,
                    getHCatSchema(((ListTypeInfo) fieldTypeInfo).getListElementTypeInfo()), comment);
        case MAP:
            // map: key must be primitive; the sub-schema describes the value type
            MapTypeInfo mapTypeInfo = (MapTypeInfo) fieldTypeInfo;
            return HCatFieldSchema.createMapTypeFieldSchema(fieldName,
                    (PrimitiveTypeInfo) mapTypeInfo.getMapKeyTypeInfo(),
                    getHCatSchema(mapTypeInfo.getMapValueTypeInfo()), comment);
        default:
            throw new TypeNotPresentException(fieldTypeInfo.getTypeName(), null);
    }
}
Also used : Category(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)

Example 24 with ListTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project mongo-hadoop by mongodb.

From the class BSONSerDe, method deserializeList.

/**
     * Deserializes a BSON list whose elements all share the same element type,
     * converting each element in place to its Hive representation.
     * @param value the value for which to get the Hive representation
     * @param valueTypeInfo a description of the value's type
     * @param ext the field name
     * @return the Hive representation of the value, as an Object array
     */
private Object deserializeList(final Object value, final ListTypeInfo valueTypeInfo, final String ext) {
    BasicBSONList bsonList = (BasicBSONList) value;
    TypeInfo elementTypeInfo = valueTypeInfo.getListElementTypeInfo();
    // Replace each raw BSON element with its deserialized Hive counterpart.
    int idx = 0;
    while (idx < bsonList.size()) {
        bsonList.set(idx, deserializeField(bsonList.get(idx), elementTypeInfo, ext));
        idx++;
    }
    return bsonList.toArray();
}
Also used : BasicBSONList(org.bson.types.BasicBSONList) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)

Example 25 with ListTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo in project phoenix by apache.

From the class PhoenixObjectInspectorFactory, method createObjectInspector.

/**
 * Creates a Hive {@link ObjectInspector} for the given Hive type, backed by
 * Phoenix-specific inspector implementations.
 *
 * @param type        Hive type information for the column
 * @param serdeParams SerDe parameters (escaping, separators) used by the
 *                    string and list inspectors
 * @return an ObjectInspector matching {@code type}
 * @throws RuntimeException if the type category or primitive category is not
 *                          supported
 */
public static ObjectInspector createObjectInspector(TypeInfo type, LazySerDeParameters serdeParams) {
    ObjectInspector oi = null;
    if (LOG.isDebugEnabled()) {
        LOG.debug("Type : " + type);
    }
    switch(type.getCategory()) {
        case PRIMITIVE:
            oi = createPrimitiveObjectInspector((PrimitiveTypeInfo) type, serdeParams);
            break;
        case LIST:
            if (LOG.isDebugEnabled()) {
                LOG.debug("List type started");
            }
            // Recurse to build the element inspector first.
            ObjectInspector listElementObjectInspector = createObjectInspector(((ListTypeInfo) type).getListElementTypeInfo(), serdeParams);
            if (LOG.isDebugEnabled()) {
                LOG.debug("List type ended");
            }
            oi = new PhoenixListObjectInspector(listElementObjectInspector, serdeParams.getSeparators()[0], serdeParams);
            break;
        default:
            throw new RuntimeException("Hive internal error. not supported data type : " + type);
    }
    return oi;
}

/**
 * Maps a primitive Hive type to its Phoenix-specific ObjectInspector.
 *
 * @param type        primitive type information
 * @param serdeParams SerDe parameters, used for string escaping
 * @return the matching primitive ObjectInspector
 * @throws RuntimeException if the primitive category is not supported
 */
private static ObjectInspector createPrimitiveObjectInspector(PrimitiveTypeInfo type, LazySerDeParameters serdeParams) {
    switch (type.getPrimitiveCategory()) {
        case BOOLEAN:
            return new PhoenixBooleanObjectInspector();
        case BYTE:
            return new PhoenixByteObjectInspector();
        case SHORT:
            return new PhoenixShortObjectInspector();
        case INT:
            return new PhoenixIntObjectInspector();
        case LONG:
            return new PhoenixLongObjectInspector();
        case FLOAT:
            return new PhoenixFloatObjectInspector();
        case DOUBLE:
            return new PhoenixDoubleObjectInspector();
        case VARCHAR:
        // VARCHAR is handled the same as STRING
        case STRING:
            return new PhoenixStringObjectInspector(serdeParams.isEscaped(), serdeParams.getEscapeChar());
        case CHAR:
            return new PhoenixCharObjectInspector(type);
        case DATE:
            return new PhoenixDateObjectInspector();
        case TIMESTAMP:
            return new PhoenixTimestampObjectInspector();
        case DECIMAL:
            return new PhoenixDecimalObjectInspector(type);
        case BINARY:
            return new PhoenixBinaryObjectInspector();
        default:
            throw new RuntimeException("Hive internal error. not supported data type " + ": " + type);
    }
}
Also used : LazySimpleStructObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)

Aggregations

ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo)24 StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo)21 MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo)19 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)19 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)19 ArrayList (java.util.ArrayList)9 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)7 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)5 UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)5 HashMap (java.util.HashMap)3 List (java.util.List)3 Map (java.util.Map)3 Schema (org.apache.avro.Schema)3 CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)3 PrestoException (com.facebook.presto.spi.PrestoException)2 ImmutableList (com.google.common.collect.ImmutableList)2 Collection (java.util.Collection)2 UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException)2 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)2 DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable)2