
Example 71 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache, from the class GenericUDFBaseNwayCompare, method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Validate the argument count and reject non-primitive inputs.
    if (arguments.length < 2) {
        throw new UDFArgumentLengthException(getFuncName() + " requires at least 2 arguments, got " + arguments.length);
    }
    if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentException(getFuncName() + " only takes primitive types, got " + arguments[0].getTypeName());
    }
    argumentOIs = arguments;
    converters = new Converter[arguments.length];
    // Fold all argument types into a single common comparison type.
    TypeInfo commonInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[0]);
    for (int i = 1; i < arguments.length; i++) {
        PrimitiveTypeInfo currInfo = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[i]);
        commonInfo = FunctionRegistry.getCommonClassForComparison(commonInfo, currInfo);
    }
    // Fall back to double when no common type could be resolved, then build a
    // converter from each argument to the resulting writable inspector.
    resultOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo((commonInfo == null) ? TypeInfoFactory.doubleTypeInfo : commonInfo);
    for (int i = 0; i < arguments.length; i++) {
        converters[i] = ObjectInspectorConverters.getConverter(arguments[i], resultOI);
    }
    return resultOI;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
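
To see this initialization in action, here is a minimal sketch (not from the Hive sources) that drives it through GenericUDFGreatest, a concrete subclass built on GenericUDFBaseNwayCompare. The class name NwayCompareInitSketch and the expected outputs in the comments are illustrative assumptions, not part of the project.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFGreatest;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class NwayCompareInitSketch {
    public static void main(String[] args) throws UDFArgumentException {
        GenericUDFGreatest udf = new GenericUDFGreatest();

        // Mixed int/double arguments: initialize() resolves a common comparison
        // type via FunctionRegistry.getCommonClassForComparison and returns the
        // matching writable ObjectInspector (expected: double).
        ObjectInspector[] ok = {
            PrimitiveObjectInspectorFactory.writableIntObjectInspector,
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector
        };
        System.out.println(udf.initialize(ok).getTypeName());

        // A single argument violates the "at least 2 arguments" check and should
        // raise UDFArgumentLengthException, a subclass of UDFArgumentException.
        ObjectInspector[] tooFew = {
            PrimitiveObjectInspectorFactory.writableIntObjectInspector
        };
        try {
            udf.initialize(tooFew);
        } catch (UDFArgumentException e) {
            System.out.println("Rejected as expected: " + e.getMessage());
        }
    }
}

Run against Hive's ql and serde jars, the first call should print the resolved common type and the second should report the length check failure.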

Example 72 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache, from the class GenericUDFAdd10, method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Exactly one primitive argument is accepted.
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException("ADD10() requires 1 argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentException("ADD10 only takes primitive types, got " + arguments[0].getTypeName());
    }
    argumentOI = (PrimitiveObjectInspector) arguments[0];
    inputType = argumentOI.getPrimitiveCategory();
    ObjectInspector outputOI = null;
    // Pick an output inspector and a matching input converter per primitive category.
    switch (inputType) {
        case SHORT:
        case BYTE:
        case INT:
            // Narrow integer types are widened to int.
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
            break;
        case LONG:
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableLongObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
            break;
        case FLOAT:
        case STRING:
        case DOUBLE:
            // Float, string, and double inputs are all handled as double.
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
            break;
        case DECIMAL:
            // Decimals keep the precision and scale declared by the input type.
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(((PrimitiveObjectInspector) arguments[0]).getTypeInfo());
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], outputOI);
            break;
        default:
            throw new UDFArgumentException("ADD10 only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
    }
    return outputOI;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), HiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector)
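
The switch above pairs each primitive category with a writable output inspector and a converter targeting it. The sketch below (illustrative only, not part of Hive; the class name Add10ConverterSketch is an assumption) replays the STRING branch outside the UDF and then applies the "+10" step that the UDF's evaluate() would perform on the converted value.

import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class Add10ConverterSketch {
    public static void main(String[] args) {
        // STRING input falls into the FLOAT/STRING/DOUBLE branch of the switch:
        // both the converter target and the output inspector are writable doubles.
        ObjectInspector stringOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        ObjectInspector doubleOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        Converter toDouble = ObjectInspectorConverters.getConverter(stringOI, doubleOI);

        // Convert the raw Text writable, then add 10 as the UDF would.
        DoubleWritable converted = (DoubleWritable) toDouble.convert(new Text("32.5"));
        System.out.println(converted.get() + 10); // expected: 42.5
    }
}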

Aggregations

UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 72 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 31 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 27 usages
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException): 24 usages
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 18 usages
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 11 usages
ArrayList (java.util.ArrayList): 9 usages
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category): 7 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 7 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 6 usages
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 6 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 6 usages
Test (org.junit.Test): 6 usages
PrimitiveObjectInspectorConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter): 5 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 5 usages
StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector): 4 usages
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 3 usages
DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2): 3 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 3 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 3 usages