Search in sources :

Example 46 with PRIMITIVE

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

Source: method getEvaluator of class GenericUDAFSumList.

/**
 * Resolves the aggregation evaluator for summing the elements of a list column.
 *
 * Validates that exactly one argument was supplied, that it is a LIST, and that
 * the list's elements are PRIMITIVE, then hands back a {@code GenericUDAFSumLong}
 * evaluator.
 *
 * @param info parameter metadata for the UDAF call site
 * @return evaluator that sums list elements as longs
 * @throws SemanticException (as UDFArgumentTypeException) when the argument
 *         count or argument type does not match the contract above
 */
@Override
public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo info) throws SemanticException {
    ObjectInspector[] inspectors = info.getParameterObjectInspectors();
    if (inspectors.length != 1) {
        throw new UDFArgumentTypeException(inspectors.length - 1, "Exactly one argument is expected.");
    }
    if (inspectors[0].getCategory() != ObjectInspector.Category.LIST) {
        throw new UDFArgumentTypeException(0, "Argument should be a list type");
    }
    ListObjectInspector listOI = (ListObjectInspector) inspectors[0];
    ObjectInspector elementOI = listOI.getListElementObjectInspector();
    if (elementOI.getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + elementOI.getTypeName() + " is passed.");
    }
    // Fixed: the original fetched the element's PrimitiveCategory into a local
    // that was never read; the dead local has been removed.
    return new GenericUDAFSumLong();
}
Also used : ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)

Example 47 with PRIMITIVE

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

Source: method getObjectInspector of class InternalUtil.

/**
 * Recursively builds a standard (Java) ObjectInspector mirroring the given
 * Hive type: PRIMITIVE, MAP, LIST, and STRUCT are supported; anything else
 * is rejected.
 *
 * @param type the Hive type to build an inspector for
 * @return an ObjectInspector whose structure matches {@code type}
 * @throws IOException if the type's category is not one of the supported kinds
 */
private static ObjectInspector getObjectInspector(TypeInfo type) throws IOException {
    switch(type.getCategory()) {
        case PRIMITIVE:
            PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) type;
            return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(primitiveType);
        case MAP:
            // Key and value inspectors are built recursively.
            MapTypeInfo mapType = (MapTypeInfo) type;
            MapObjectInspector mapInspector = ObjectInspectorFactory.getStandardMapObjectInspector(getObjectInspector(mapType.getMapKeyTypeInfo()), getObjectInspector(mapType.getMapValueTypeInfo()));
            return mapInspector;
        case LIST:
            ListTypeInfo listType = (ListTypeInfo) type;
            ListObjectInspector listInspector = ObjectInspectorFactory.getStandardListObjectInspector(getObjectInspector(listType.getListElementTypeInfo()));
            return listInspector;
        case STRUCT:
            // Build one inspector per field, preserving field order.
            StructTypeInfo structType = (StructTypeInfo) type;
            List<TypeInfo> fieldTypes = structType.getAllStructFieldTypeInfos();
            List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
            for (TypeInfo fieldType : fieldTypes) {
                fieldInspectors.add(getObjectInspector(fieldType));
            }
            StructObjectInspector structInspector = ObjectInspectorFactory.getStandardStructObjectInspector(structType.getAllStructFieldNames(), fieldInspectors);
            return structInspector;
        default:
            // Fixed: include the offending type so failures are diagnosable;
            // the original message gave no clue which schema entry was bad.
            throw new IOException("Unknown field schema type: " + type);
    }
}
Also used : ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ArrayList(java.util.ArrayList) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) IOException(java.io.IOException) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)

Example 48 with PRIMITIVE

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

Source: method createRecord of class ColumnarStorageBench.

/**
 * Builds a synthetic ArrayWritable record with one Writable per column type.
 * Complex types recurse: LIST wraps its element type, MAP wraps its key/value
 * pair (as a nested record), STRUCT recurses over its field types.
 *
 * @param columnTypes the schema to generate a record for
 * @return a record whose i-th field matches columnTypes.get(i)
 * @throws IllegalStateException on an unsupported type category
 */
private ArrayWritable createRecord(final List<TypeInfo> columnTypes) {
    final int columnCount = columnTypes.size();
    final Writable[] fields = new Writable[columnCount];
    for (int i = 0; i < columnCount; i++) {
        final TypeInfo type = columnTypes.get(i);
        switch(type.getCategory()) {
            case PRIMITIVE:
                fields[i] = getPrimitiveWritable((PrimitiveTypeInfo) type);
                break;
            case LIST:
                // Recurse on a single-element schema holding the list's element type.
                List<TypeInfo> listElement = new ArrayList<TypeInfo>();
                listElement.add(((ListTypeInfo) type).getListElementTypeInfo());
                fields[i] = createRecord(listElement);
                break;
            case MAP:
                // A map is modeled as a nested record of (key, value) entries.
                List<TypeInfo> keyAndValue = new ArrayList<TypeInfo>();
                keyAndValue.add(((MapTypeInfo) type).getMapKeyTypeInfo());
                keyAndValue.add(((MapTypeInfo) type).getMapValueTypeInfo());
                fields[i] = record(createRecord(keyAndValue));
                break;
            case STRUCT:
                fields[i] = createRecord(((StructTypeInfo) type).getAllStructFieldTypeInfos());
                break;
            default:
                throw new IllegalStateException("Invalid column type: " + type);
        }
    }
    return record(fields);
}
Also used : ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) Writable(org.apache.hadoop.io.Writable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) IntWritable(org.apache.hadoop.io.IntWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) List(java.util.List) ArrayList(java.util.ArrayList) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)

Example 49 with PRIMITIVE

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

Source: method deserialize of class KafkaJsonSerDe.

/**
 * Deserializes one Kafka record (JSON bytes) into a row: a List with one
 * entry per declared column, looked up by column name in the parsed JSON.
 * Missing JSON keys become nulls; only PRIMITIVE column types are supported.
 * Also updates the rowCount / rawDataSize statistics counters.
 *
 * @param blob the raw record, expected to be a BytesWritable
 * @return a List&lt;Object&gt; row aligned with the declared columns
 * @throws SerDeException if the payload is not parseable JSON or a column
 *         has a non-primitive type
 */
@Override
public Object deserialize(Writable blob) throws SerDeException {
    BytesWritable record = (BytesWritable) blob;
    Map<String, JsonNode> payload;
    try {
        // NOTE(review): BytesWritable.getBytes() returns the padded backing
        // array, which may be longer than getLength(); assumes parseAsJson
        // tolerates (or bounds itself to) the valid prefix — TODO confirm.
        payload = parseAsJson(record.getBytes());
        rowCount += 1;
        rawDataSize += record.getLength();
    } catch (IOException e) {
        throw new SerDeException(e);
    }
    // Fixed: hoist the repeated getColumnNames()/getColumnTypes() calls and
    // reuse the already-fetched typeInfo instead of re-indexing the type list
    // for the switch.
    final List<String> columnNames = getColumnNames();
    final List<TypeInfo> columnTypes = getColumnTypes();
    final List<Object> output = new ArrayList<>(columnNames.size());
    for (int i = 0; i < columnNames.size(); i++) {
        final String name = columnNames.get(i);
        final TypeInfo typeInfo = columnTypes.get(i);
        final JsonNode value = payload.get(name);
        if (value == null) {
            output.add(null);
        } else {
            switch(typeInfo.getCategory()) {
                case PRIMITIVE:
                    output.add(parseAsPrimitive(value, typeInfo));
                    break;
                case MAP:
                case LIST:
                case UNION:
                case STRUCT:
                default:
                    throw new SerDeException("not supported yet");
            }
        }
    }
    return output;
}
Also used : ArrayList(java.util.ArrayList) BytesWritable(org.apache.hadoop.io.BytesWritable) JsonNode(com.fasterxml.jackson.databind.JsonNode) IOException(java.io.IOException) TimestampLocalTZTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Example 50 with PRIMITIVE

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache.

Source: method getCommonClassForComparison of class FunctionRegistry.

/**
 * Find a common class that objects of both TypeInfo a and TypeInfo b can
 * convert to. This is used for comparing objects of type a and type b.
 *
 * When we are comparing string and double, we will always convert both of
 * them to double and then compare.
 *
 * NOTE: the order of the checks below is significant — each rule assumes the
 * earlier ones did not match (e.g. the string/date rules only fire once the
 * same-category and VOID cases are excluded).
 *
 * @return null if no common class could be found.
 */
public static TypeInfo getCommonClassForComparison(TypeInfo a, TypeInfo b) {
    // If same return one of them
    if (a.equals(b)) {
        return a;
    }
    if (a.getCategory() != Category.PRIMITIVE || b.getCategory() != Category.PRIMITIVE) {
        // It is not primitive; check if it is a struct and we can infer a common class
        if (a.getCategory() == Category.STRUCT && b.getCategory() == Category.STRUCT) {
            // Recurse field-by-field over the two structs.
            return getCommonClassForStruct((StructTypeInfo) a, (StructTypeInfo) b, (type1, type2) -> getCommonClassForComparison(type1, type2));
        }
        return null;
    }
    PrimitiveCategory pcA = ((PrimitiveTypeInfo) a).getPrimitiveCategory();
    PrimitiveCategory pcB = ((PrimitiveTypeInfo) b).getPrimitiveCategory();
    if (pcA == pcB) {
        // Rely on getTypeInfoForPrimitiveCategory() to sort out the type params.
        return getTypeInfoForPrimitiveCategory((PrimitiveTypeInfo) a, (PrimitiveTypeInfo) b, pcA);
    }
    if (pcA == PrimitiveCategory.VOID) {
        // Handle NULL, we return the type of pcB
        return b;
    }
    if (pcB == PrimitiveCategory.VOID) {
        // Handle NULL, we return the type of pcA
        return a;
    }
    PrimitiveGrouping pgA = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(pcA);
    PrimitiveGrouping pgB = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(pcB);
    if (pgA == pgB) {
        // grouping is same, but category is not.
        if (pgA == PrimitiveGrouping.DATE_GROUP) {
            // NOTE(review): assumes both date-group categories appear in
            // TypeInfoUtils.dateTypes — a missing entry would NPE on unboxing.
            Integer ai = TypeInfoUtils.dateTypes.get(pcA);
            Integer bi = TypeInfoUtils.dateTypes.get(pcB);
            // Pick the category with the higher precedence rank.
            return (ai > bi) ? a : b;
        }
        // Same grouping but not DATE_GROUP: fall through to the rules below.
    }
    // handle string types properly
    if (pgA == PrimitiveGrouping.STRING_GROUP && pgB == PrimitiveGrouping.STRING_GROUP) {
        // Compare as strings. Char comparison semantics may be different if/when implemented.
        return getTypeInfoForPrimitiveCategory((PrimitiveTypeInfo) a, (PrimitiveTypeInfo) b, PrimitiveCategory.STRING);
    }
    // timestamp/date is higher precedence than String_GROUP
    if (pgA == PrimitiveGrouping.STRING_GROUP && pgB == PrimitiveGrouping.DATE_GROUP) {
        return b;
    }
    // date/timestamp is higher precedence than String_GROUP
    if (pgB == PrimitiveGrouping.STRING_GROUP && pgA == PrimitiveGrouping.DATE_GROUP) {
        return a;
    }
    // Another special case, because timestamp is not implicitly convertible to numeric types.
    if ((pgA == PrimitiveGrouping.NUMERIC_GROUP || pgB == PrimitiveGrouping.NUMERIC_GROUP) && (pcA == PrimitiveCategory.TIMESTAMP || pcB == PrimitiveCategory.TIMESTAMP)) {
        return TypeInfoFactory.doubleTypeInfo;
    }
    // Walk the numeric widening order and return the first type both sides
    // can implicitly convert to.
    for (PrimitiveCategory t : TypeInfoUtils.numericTypeList) {
        if (TypeInfoUtils.implicitConvertible(pcA, t) && TypeInfoUtils.implicitConvertible(pcB, t)) {
            return getTypeInfoForPrimitiveCategory((PrimitiveTypeInfo) a, (PrimitiveTypeInfo) b, t);
        }
    }
    return null;
}
Also used : UDFToInteger(org.apache.hadoop.hive.ql.udf.UDFToInteger) UDFXPathInteger(org.apache.hadoop.hive.ql.udf.xml.UDFXPathInteger) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) PrimitiveGrouping(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping)

Aggregations

PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)83 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)75 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)74 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)64 ArrayList (java.util.ArrayList)54 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)52 ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo)47 StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo)46 MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo)45 DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)44 PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)43 ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector)38 BytesWritable (org.apache.hadoop.io.BytesWritable)36 Text (org.apache.hadoop.io.Text)35 MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector)34 List (java.util.List)30 Map (java.util.Map)30 CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)30 UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)30 UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException)27