Example 91 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

the class GenericUDFRound method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 1 || arguments.length > 2) {
        throw new UDFArgumentLengthException("ROUND requires one or two argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(0, "ROUND input only takes primitive types, got " + arguments[0].getTypeName());
    }
    inputOI = (PrimitiveObjectInspector) arguments[0];
    if (arguments.length == 2) {
        if (arguments[1].getCategory() != Category.PRIMITIVE) {
            throw new UDFArgumentTypeException(1, "ROUND second argument only takes primitive types, got " + arguments[1].getTypeName());
        }
        scaleOI = (PrimitiveObjectInspector) arguments[1];
        switch(scaleOI.getPrimitiveCategory()) {
            case VOID:
                break;
            case BYTE:
                if (scaleOI instanceof WritableConstantByteObjectInspector) {
                    scale = ((WritableConstantByteObjectInspector) scaleOI).getWritableConstantValue().get();
                } else {
                    constantScale = false;
                }
                break;
            case SHORT:
                if (scaleOI instanceof WritableConstantShortObjectInspector) {
                    scale = ((WritableConstantShortObjectInspector) scaleOI).getWritableConstantValue().get();
                } else {
                    constantScale = false;
                }
                break;
            case INT:
                if (scaleOI instanceof WritableConstantIntObjectInspector) {
                    scale = ((WritableConstantIntObjectInspector) scaleOI).getWritableConstantValue().get();
                } else {
                    constantScale = false;
                }
                break;
            case LONG:
                if (scaleOI instanceof WritableConstantLongObjectInspector) {
                    long l = ((WritableConstantLongObjectInspector) scaleOI).getWritableConstantValue().get();
                    if (l < Integer.MIN_VALUE || l > Integer.MAX_VALUE) {
                        throw new UDFArgumentException(getFuncName().toUpperCase() + " scale argument out of allowed range");
                    }
                    scale = (int) l;
                } else {
                    constantScale = false;
                }
                break;
            default:
                throw new UDFArgumentTypeException(1, getFuncName().toUpperCase() + " second argument only takes numeric types");
        }
    }
    inputType = inputOI.getPrimitiveCategory();
    ObjectInspector outputOI = null;
    switch(inputType) {
        case DECIMAL:
            DecimalTypeInfo inputTypeInfo = (DecimalTypeInfo) inputOI.getTypeInfo();
            DecimalTypeInfo typeInfo = getOutputTypeInfo(inputTypeInfo, scale);
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
            if (!constantScale) {
                throw new UDFArgumentTypeException(1, getFuncName().toUpperCase() + " scale argument for " + "decimal must be constant");
            }
            break;
        case VOID:
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputType);
            break;
        case STRING:
        case VARCHAR:
        case CHAR:
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.DOUBLE);
            converterFromString = ObjectInspectorConverters.getConverter(inputOI, outputOI);
            break;
        default:
            throw new UDFArgumentTypeException(0, "Only numeric or string group data types are allowed for ROUND function. Got " + inputType.name());
    }
    return outputOI;
}
Also used :
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException)
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException)
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException)
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)
WritableConstantByteObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector)
WritableConstantShortObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector)
WritableConstantIntObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector)
WritableConstantLongObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector)
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)
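For orientation, here is a minimal sketch (not from the Hive sources) of how this initialize method can be driven by hand. The decimal(10,4) input and the constant scale 2 are illustrative assumptions; a constant object inspector for the second argument is what makes the WritableConstantIntObjectInspector branch above match. IntWritable is org.apache.hadoop.io.IntWritable.

// Hypothetical driver for ROUND(x, 2) over a decimal(10,4) column:
DecimalTypeInfo inputTypeInfo = new DecimalTypeInfo(10, 4);
ObjectInspector inputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo);
// A constant OI triggers the WritableConstantIntObjectInspector branch in initialize.
ObjectInspector scaleOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, new IntWritable(2));
GenericUDFRound udf = new GenericUDFRound();
ObjectInspector outputOI = udf.initialize(new ObjectInspector[] { inputOI, scaleOI });
// outputOI is a writable decimal inspector whose type info reflects the constant scale 2.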

Example 92 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

the class GenericUDFToTimestampLocalTZ method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 1) {
        throw new UDFArgumentLengthException("The function CAST as TIMESTAMP WITH LOCAL TIME ZONE requires at least one argument, got " + arguments.length);
    }
    try {
        argumentOI = (PrimitiveObjectInspector) arguments[0];
        switch(argumentOI.getPrimitiveCategory()) {
            case CHAR:
            case VARCHAR:
            case STRING:
            case DATE:
            case TIMESTAMP:
            case TIMESTAMPLOCALTZ:
                break;
            default:
                throw new UDFArgumentException("CAST as TIMESTAMP WITH LOCAL TIME ZONE only allows" + "string/date/timestamp/timestamp with time zone types");
        }
    } catch (ClassCastException e) {
        throw new UDFArgumentException("The function CAST as TIMESTAMP WITH LOCAL TIME ZONE takes only primitive types");
    }
    SettableTimestampLocalTZObjectInspector outputOI = (SettableTimestampLocalTZObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
    converter = new TimestampLocalTZConverter(argumentOI, outputOI);
    return outputOI;
}
Also used :
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException)
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException)
SettableTimestampLocalTZObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampLocalTZObjectInspector)
TimestampLocalTZConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampLocalTZConverter)
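The same plumbing can be sketched outside the UDF. This assumes Hive 3.x, where TIMESTAMP WITH LOCAL TIME ZONE and TypeInfoFactory.getTimestampTZTypeInfo exist; the zone and the input literal are illustrative, and Text is org.apache.hadoop.io.Text.

// Build the target type info and fetch its settable writable inspector (assumed Hive 3.x APIs):
TimestampLocalTZTypeInfo typeInfo = TypeInfoFactory.getTimestampTZTypeInfo(ZoneId.of("UTC"));
SettableTimestampLocalTZObjectInspector outputOI = (SettableTimestampLocalTZObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
// Convert a string the way the UDF's converter does; the literal is illustrative.
TimestampLocalTZConverter converter = new TimestampLocalTZConverter(PrimitiveObjectInspectorFactory.writableStringObjectInspector, outputOI);
Object tstz = converter.convert(new Text("2021-06-01 12:00:00"));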

Example 93 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

the class PartExprEvalUtils method evalExprWithPart.

/**
 * Evaluate an expression against a partition's column values.
 *
 * @param expr the expression to evaluate
 * @param p the partition supplying the partition spec and schema
 * @param vcs virtual columns to expose to the expression, may be null
 * @param rowObjectInspector object inspector for the non-partition row
 * @return the value returned by the expression
 * @throws HiveException
 */
public static synchronized Object evalExprWithPart(ExprNodeDesc expr, Partition p, List<VirtualColumn> vcs, StructObjectInspector rowObjectInspector) throws HiveException {
    LinkedHashMap<String, String> partSpec = p.getSpec();
    Properties partProps = p.getSchema();
    String pcolTypes = partProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
    String[] partKeyTypes = pcolTypes.trim().split(":");
    if (partSpec.size() != partKeyTypes.length) {
        throw new HiveException("Internal error : Partition Spec size, " + partSpec.size() + " doesn't match partition key definition size, " + partKeyTypes.length);
    }
    boolean hasVC = vcs != null && !vcs.isEmpty();
    Object[] rowWithPart = new Object[hasVC ? 3 : 2];
    // Create the row object
    ArrayList<String> partNames = new ArrayList<String>();
    ArrayList<Object> partValues = new ArrayList<Object>();
    ArrayList<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>();
    int i = 0;
    for (Map.Entry<String, String> entry : partSpec.entrySet()) {
        partNames.add(entry.getKey());
        ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(partKeyTypes[i++]));
        partValues.add(ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi).convert(entry.getValue()));
        partObjectInspectors.add(oi);
    }
    StructObjectInspector partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors);
    rowWithPart[1] = partValues;
    ArrayList<StructObjectInspector> ois = new ArrayList<StructObjectInspector>(2);
    ois.add(rowObjectInspector);
    ois.add(partObjectInspector);
    if (hasVC) {
        ois.add(VirtualColumn.getVCSObjectInspector(vcs));
    }
    StructObjectInspector rowWithPartObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(ois);
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(expr);
    ObjectInspector evaluateResultOI = evaluator.initialize(rowWithPartObjectInspector);
    Object evaluateResultO = evaluator.evaluate(rowWithPart);
    return ((PrimitiveObjectInspector) evaluateResultOI).getPrimitiveJavaObject(evaluateResultO);
}
Also used :
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
ExprNodeEvaluator (org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator)
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
ArrayList (java.util.ArrayList)
LinkedHashMap (java.util.LinkedHashMap)
Map (java.util.Map)
Properties (java.util.Properties)
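The per-column conversion inside the loop is worth seeing in isolation. A short sketch with a hypothetical date partition column; the type name and value are invented for illustration:

// Hypothetical: a partition column of type date with spec value "2021-06-01".
ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo("date"));
Object typed = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi).convert("2021-06-01");
// typed is now a date writable that the expression evaluator can consume directly.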

Example 94 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

the class TypeInfoUtils method getExtendedTypeInfoFromJavaType.

/**
 * Return the extended TypeInfo from a Java type. By extended TypeInfo, we
 * allow unknownType for java.lang.Object.
 *
 * @param t
 *          The Java type.
 * @param m
 *          The method, only used for generating error messages.
 */
private static TypeInfo getExtendedTypeInfoFromJavaType(Type t, Method m) {
    if (t == Object.class) {
        return TypeInfoFactory.unknownTypeInfo;
    }
    if (t instanceof ParameterizedType) {
        ParameterizedType pt = (ParameterizedType) t;
        // List?
        if (List.class == (Class<?>) pt.getRawType() || ArrayList.class == (Class<?>) pt.getRawType()) {
            return TypeInfoFactory.getListTypeInfo(getExtendedTypeInfoFromJavaType(pt.getActualTypeArguments()[0], m));
        }
        // Map?
        if (Map.class == (Class<?>) pt.getRawType() || HashMap.class == (Class<?>) pt.getRawType()) {
            return TypeInfoFactory.getMapTypeInfo(getExtendedTypeInfoFromJavaType(pt.getActualTypeArguments()[0], m), getExtendedTypeInfoFromJavaType(pt.getActualTypeArguments()[1], m));
        }
        // Otherwise convert t to RawType so we will fall into the following if
        // block.
        t = pt.getRawType();
    }
    // Must be a class.
    if (!(t instanceof Class)) {
        throw new RuntimeException("Hive does not understand type " + t + " from " + m);
    }
    Class<?> c = (Class<?>) t;
    // Java Primitive Type?
    if (PrimitiveObjectInspectorUtils.isPrimitiveJavaType(c)) {
        return TypeInfoUtils.getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveJavaType(c).primitiveCategory));
    }
    // Java Primitive Class?
    if (PrimitiveObjectInspectorUtils.isPrimitiveJavaClass(c)) {
        return TypeInfoUtils.getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveJavaClass(c).primitiveCategory));
    }
    // Primitive Writable class?
    if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(c)) {
        return TypeInfoUtils.getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveWritableClass(c).primitiveCategory));
    }
    // Must be a struct
    Field[] fields = ObjectInspectorUtils.getDeclaredNonStaticFields(c);
    ArrayList<String> fieldNames = new ArrayList<String>(fields.length);
    ArrayList<TypeInfo> fieldTypeInfos = new ArrayList<TypeInfo>(fields.length);
    for (Field field : fields) {
        fieldNames.add(field.getName());
        fieldTypeInfos.add(getExtendedTypeInfoFromJavaType(field.getGenericType(), m));
    }
    return TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos);
}
Also used :
ParameterizedType (java.lang.reflect.ParameterizedType)
Field (java.lang.reflect.Field)
StructField (org.apache.hadoop.hive.serde2.objectinspector.StructField)
ArrayList (java.util.ArrayList)
LinkedList (java.util.LinkedList)
List (java.util.List)
HashMap (java.util.HashMap)
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap)
EnumMap (java.util.EnumMap)
Map (java.util.Map)
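To see the primitive-Writable branch on its own, a small sketch; IntWritable (org.apache.hadoop.io.IntWritable) is chosen arbitrarily as the writable class:

// Map a Hadoop writable class back to its Hive type info via the writable inspector.
PrimitiveCategory cat = PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveWritableClass(IntWritable.class).primitiveCategory;
TypeInfo ti = TypeInfoUtils.getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(cat));
// ti.getTypeName() yields "int".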

Example 95 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

the class SerdeRandomRowSource method chooseSchema.

private void chooseSchema(SupportedTypes supportedTypes, int maxComplexDepth) {
    HashSet<Integer> hashSet = null;
    final boolean allTypes;
    final boolean onlyOne = (r.nextInt(100) == 7);
    if (onlyOne) {
        columnCount = 1;
        allTypes = false;
    } else {
        allTypes = r.nextBoolean();
        if (allTypes) {
            switch(supportedTypes) {
                case ALL:
                    columnCount = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length;
                    break;
                case ALL_EXCEPT_MAP:
                    columnCount = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length - 1;
                    break;
                case PRIMITIVE:
                    columnCount = possibleHivePrimitiveTypeNames.length;
                    break;
            }
            hashSet = new HashSet<Integer>();
        } else {
            columnCount = 1 + r.nextInt(20);
        }
    }
    typeNames = new ArrayList<String>(columnCount);
    categories = new Category[columnCount];
    typeInfos = new TypeInfo[columnCount];
    objectInspectorList = new ArrayList<ObjectInspector>(columnCount);
    primitiveCategories = new PrimitiveCategory[columnCount];
    primitiveTypeInfos = new PrimitiveTypeInfo[columnCount];
    primitiveObjectInspectorList = new ArrayList<ObjectInspector>(columnCount);
    final List<String> columnNames = new ArrayList<String>(columnCount);
    for (int c = 0; c < columnCount; c++) {
        columnNames.add(String.format("col%d", c));
        String typeName;
        if (onlyOne) {
            typeName = getRandomTypeName(supportedTypes);
        } else {
            int typeNum;
            if (allTypes) {
                int maxTypeNum = 0;
                switch(supportedTypes) {
                    case PRIMITIVE:
                        maxTypeNum = possibleHivePrimitiveTypeNames.length;
                        break;
                    case ALL_EXCEPT_MAP:
                        maxTypeNum = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length - 1;
                        break;
                    case ALL:
                        maxTypeNum = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length;
                        break;
                }
                while (true) {
                    typeNum = r.nextInt(maxTypeNum);
                    final Integer typeNumInteger = Integer.valueOf(typeNum);
                    if (!hashSet.contains(typeNumInteger)) {
                        hashSet.add(typeNumInteger);
                        break;
                    }
                }
            } else {
                if (supportedTypes == SupportedTypes.PRIMITIVE || r.nextInt(10) != 0) {
                    typeNum = r.nextInt(possibleHivePrimitiveTypeNames.length);
                } else {
                    typeNum = possibleHivePrimitiveTypeNames.length + r.nextInt(possibleHiveComplexTypeNames.length);
                    if (supportedTypes == SupportedTypes.ALL_EXCEPT_MAP) {
                        typeNum--;
                    }
                }
            }
            if (typeNum < possibleHivePrimitiveTypeNames.length) {
                typeName = possibleHivePrimitiveTypeNames[typeNum];
            } else {
                typeName = possibleHiveComplexTypeNames[typeNum - possibleHivePrimitiveTypeNames.length];
            }
        }
        final String decoratedTypeName = getDecoratedTypeName(typeName, supportedTypes, 0, maxComplexDepth);
        final TypeInfo typeInfo;
        try {
            typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(decoratedTypeName);
        } catch (Exception e) {
            throw new RuntimeException("Cannot convert type name " + decoratedTypeName + " to a type " + e);
        }
        typeInfos[c] = typeInfo;
        final Category category = typeInfo.getCategory();
        categories[c] = category;
        ObjectInspector objectInspector = getObjectInspector(typeInfo);
        switch(category) {
            case PRIMITIVE:
                {
                    final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
                    final PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
                    objectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveTypeInfo);
                    primitiveTypeInfos[c] = primitiveTypeInfo;
                    primitiveCategories[c] = primitiveCategory;
                    primitiveObjectInspectorList.add(objectInspector);
                }
                break;
            case LIST:
            case MAP:
            case STRUCT:
            case UNION:
                primitiveObjectInspectorList.add(null);
                break;
            default:
                throw new RuntimeException("Unexpected catagory " + category);
        }
        objectInspectorList.add(objectInspector);
        typeNames.add(decoratedTypeName);
    }
    rowStructObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, objectInspectorList);
    alphabets = new String[columnCount];
}
Also used :
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector)
MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector)
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
UnionObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector)
StandardListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector)
StandardMapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StandardMapObjectInspector)
StandardStructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector)
StandardUnionObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector)
WritableBooleanObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBooleanObjectInspector)
WritableByteObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableByteObjectInspector)
WritableShortObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector)
WritableIntObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector)
WritableLongObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjectInspector)
WritableFloatObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableFloatObjectInspector)
WritableDoubleObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDoubleObjectInspector)
WritableStringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector)
WritableHiveCharObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector)
WritableHiveVarcharObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector)
WritableHiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector)
WritableDateObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector)
WritableTimestampObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector)
WritableHiveIntervalDayTimeObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalDayTimeObjectInspector)
WritableHiveIntervalYearMonthObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalYearMonthObjectInspector)
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category)
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo)
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)
ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo)
MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo)
StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo)
UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)
ArrayList (java.util.ArrayList)
HashSet (java.util.HashSet)
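The PRIMITIVE branch above also handles parameterized types; a short sketch with an illustrative type string:

// The factory keys the inspector on the full PrimitiveTypeInfo, so type parameters survive.
PrimitiveTypeInfo pti = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("varchar(20)");
ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(pti);
// oi is a WritableHiveVarcharObjectInspector whose type info carries maxLength 20.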

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 77 uses
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 63 uses
Test (org.junit.Test): 45 uses
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 35 uses
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 35 uses
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 24 uses
ArrayList (java.util.ArrayList): 20 uses
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 20 uses
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 18 uses
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 17 uses
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 14 uses
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 13 uses
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 12 uses
WritableHiveCharObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector): 9 uses
WritableHiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector): 9 uses
WritableHiveVarcharObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector): 9 uses
WritableStringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector): 9 uses
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo): 9 uses
StandardStructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector): 8 uses
WritableBooleanObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBooleanObjectInspector): 8 uses