
Example 56 with UDFArgumentException

use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

the class GenericUDFUpper method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException("UPPER requires 1 argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentException("UPPER only takes primitive types, got " + arguments[0].getTypeName());
    }
    argumentOI = (PrimitiveObjectInspector) arguments[0];
    stringConverter = new PrimitiveObjectInspectorConverter.StringConverter(argumentOI);
    PrimitiveCategory inputType = argumentOI.getPrimitiveCategory();
    ObjectInspector outputOI = null;
    BaseCharTypeInfo typeInfo;
    switch(inputType) {
        case CHAR:
            // return type should have the same length as the input.
            returnType = inputType;
            typeInfo = TypeInfoFactory.getCharTypeInfo(GenericUDFUtils.StringHelper.getFixedStringSizeForType(argumentOI));
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
            break;
        case VARCHAR:
            // return type should have the same length as the input.
            returnType = inputType;
            typeInfo = TypeInfoFactory.getVarcharTypeInfo(GenericUDFUtils.StringHelper.getFixedStringSizeForType(argumentOI));
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
            break;
        default:
            returnType = PrimitiveCategory.STRING;
            outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
            break;
    }
    returnHelper = new GenericUDFUtils.StringHelper(returnType);
    return outputOI;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) BaseCharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) PrimitiveObjectInspectorConverter(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter) StringConverter(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.StringConverter) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)
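
The char/varchar branch above can be exercised with a minimal sketch like the following. It is not part of the Hive sources: the driver class UpperInitializeSketch and its main method are hypothetical, while the factory calls are the standard Hive serde2 APIs. Feeding initialize a writable varchar(10) inspector should yield an output inspector of the same length.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class UpperInitializeSketch {
    public static void main(String[] args) throws UDFArgumentException {
        // Hypothetical driver: a writable varchar(10) inspector stands in for the column argument.
        ObjectInspector varchar10 = PrimitiveObjectInspectorFactory
            .getPrimitiveWritableObjectInspector(TypeInfoFactory.getVarcharTypeInfo(10));
        ObjectInspector out = new GenericUDFUpper().initialize(new ObjectInspector[] { varchar10 });
        // The VARCHAR case preserves the input length in the return type.
        System.out.println(out.getTypeName()); // varchar(10)
    }
}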

Example 57 with UDFArgumentException

use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

the class GenericUDFTrunc method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length == 2) {
        inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
        inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
        if ((PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType1) == PrimitiveGrouping.DATE_GROUP || PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType1) == PrimitiveGrouping.STRING_GROUP) && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) == PrimitiveGrouping.STRING_GROUP) {
            dateTypeArg = true;
            return initializeDate(arguments);
        } else if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType1) == PrimitiveGrouping.NUMERIC_GROUP && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) == PrimitiveGrouping.NUMERIC_GROUP) {
            dateTypeArg = false;
            return initializeNumber(arguments);
        }
        throw new UDFArgumentException("Got wrong argument types : first argument type : " + arguments[0].getTypeName() + ", second argument type : " + arguments[1].getTypeName());
    } else if (arguments.length == 1) {
        inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
        if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType1) == PrimitiveGrouping.NUMERIC_GROUP) {
            dateTypeArg = false;
            return initializeNumber(arguments);
        } else {
            throw new UDFArgumentException("Only primitive type arguments are accepted, when arguments length is one, got " + arguments[0].getTypeName());
        }
    }
    throw new UDFArgumentException("TRUNC requires one or two argument, got " + arguments.length);
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)
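
The dispatch above can be sketched as follows (the driver class TruncDispatchSketch is hypothetical; the inspectors and GenericUDFTrunc are real Hive classes). A single numeric argument routes to initializeNumber, while a boolean first argument fits neither the date nor the numeric branch and is rejected.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrunc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class TruncDispatchSketch {
    public static void main(String[] args) throws UDFArgumentException {
        ObjectInspector doubleOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        // Single numeric argument: NUMERIC_GROUP, so initializeNumber runs.
        ObjectInspector out = new GenericUDFTrunc().initialize(new ObjectInspector[] { doubleOI });
        System.out.println(out.getTypeName()); // double
        try {
            // boolean + double matches neither branch, so initialize throws.
            ObjectInspector boolOI = PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
            new GenericUDFTrunc().initialize(new ObjectInspector[] { boolOI, doubleOI });
        } catch (UDFArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}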

Example 58 with UDFArgumentException

use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

the class GenericUDFTrunc method initializeNumber.

private ObjectInspector initializeNumber(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 1 || arguments.length > 2) {
        throw new UDFArgumentLengthException("TRUNC requires one or two argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(0, "TRUNC input only takes primitive types, got " + arguments[0].getTypeName());
    }
    inputOI = (PrimitiveObjectInspector) arguments[0];
    if (arguments.length == 2) {
        if (arguments[1].getCategory() != Category.PRIMITIVE) {
            throw new UDFArgumentTypeException(1, "TRUNC second argument only takes primitive types, got " + arguments[1].getTypeName());
        }
        inputScaleOI = (PrimitiveObjectInspector) arguments[1];
        inputScaleConst = arguments[1] instanceof ConstantObjectInspector;
        if (inputScaleConst) {
            try {
                Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
                fmtInput = obj != null ? obj.toString() : null;
                scale = Integer.parseInt(fmtInput);
            } catch (Exception e) {
                throw new UDFArgumentException("TRUNC input only takes integer values, got " + fmtInput);
            }
        } else {
            switch(inputScaleOI.getPrimitiveCategory()) {
                case BYTE:
                    byteConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableByteObjectInspector);
                    break;
                case SHORT:
                    shortConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableShortObjectInspector);
                    break;
                case INT:
                    intConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
                    break;
                case LONG:
                    longConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableLongObjectInspector);
                    break;
                default:
                    throw new UDFArgumentTypeException(1, getFuncName().toUpperCase() + " second argument only takes integer values");
            }
        }
    }
    inputType1 = inputOI.getPrimitiveCategory();
    ObjectInspector outputOI = null;
    switch(inputType1) {
        case DECIMAL:
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputType1);
            break;
        case VOID:
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputType1);
            break;
        default:
            throw new UDFArgumentTypeException(0, "Only numeric or string group data types are allowed for TRUNC function. Got " + inputType1.name());
    }
    return outputOI;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)
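
The constant-scale path can be driven like this (the TruncScaleSketch class is hypothetical; getPrimitiveWritableConstantObjectInspector is the regular Hive factory method). The constant int is parsed once during initialize, and the output type follows the first argument.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrunc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.IntWritable;

public class TruncScaleSketch {
    public static void main(String[] args) throws UDFArgumentException {
        ObjectInspector doubleOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        // Constant int scale: taken by the inputScaleConst branch above and parsed up front.
        ObjectInspector scaleOI = PrimitiveObjectInspectorFactory
            .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.intTypeInfo, new IntWritable(1));
        ObjectInspector out = new GenericUDFTrunc().initialize(new ObjectInspector[] { doubleOI, scaleOI });
        System.out.println(out.getTypeName()); // double in, double out
    }
}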

Example 59 with UDFArgumentException

use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

the class GenericUDTFJSONTuple method initialize.

@Override
public StructObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
    inputOIs = args;
    numCols = args.length - 1;
    jsonObjectCache = new HashCache<>();
    if (numCols < 1) {
        throw new UDFArgumentException("json_tuple() takes at least two arguments: " + "the json string and a path expression");
    }
    for (int i = 0; i < args.length; ++i) {
        if (args[i].getCategory() != ObjectInspector.Category.PRIMITIVE || !args[i].getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
            throw new UDFArgumentException("json_tuple()'s arguments have to be string type");
        }
    }
    seenErrors = false;
    pathParsed = false;
    paths = new String[numCols];
    cols = new Text[numCols];
    retCols = new Text[numCols];
    nullCols = new Object[numCols];
    for (int i = 0; i < numCols; ++i) {
        cols[i] = new Text();
        retCols[i] = cols[i];
        nullCols[i] = null;
    }
    // construct output object inspector
    ArrayList<String> fieldNames = new ArrayList<String>(numCols);
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(numCols);
    for (int i = 0; i < numCols; ++i) {
        // column names can be anything since they are renamed by the UDTF's AS clause
        fieldNames.add("c" + i);
        // all returned types will be Text
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ArrayList(java.util.ArrayList) Text(org.apache.hadoop.io.Text)
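
A short driver for the UDTF above (the JsonTupleSketch class is hypothetical; the inspectors are standard Hive ones): three string inspectors, one JSON column plus two path expressions, produce a two-column struct named c0 and c1.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFJSONTuple;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class JsonTupleSketch {
    public static void main(String[] args) throws UDFArgumentException {
        ObjectInspector stringOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        // JSON column + two path expressions, so numCols = 2.
        StructObjectInspector soi = new GenericUDTFJSONTuple()
            .initialize(new ObjectInspector[] { stringOI, stringOI, stringOI });
        System.out.println(soi.getTypeName()); // struct<c0:string,c1:string>
    }
}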

Example 60 with UDFArgumentException

use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

the class GenericUDTFInline method initialize.

@Override
public StructObjectInspector initialize(ObjectInspector[] ois) throws UDFArgumentException {
    // There should be one argument that is an array of structs
    if (ois.length != 1) {
        throw new UDFArgumentException("inline() takes only one argument");
    }
    if (ois[0].getCategory() != Category.LIST) {
        throw new UDFArgumentException("Top level object must be an array but " + "was " + ois[0].getTypeName());
    }
    li = (ListObjectInspector) ois[0];
    ObjectInspector sub = li.getListElementObjectInspector();
    if (sub.getCategory() != Category.STRUCT) {
        throw new UDFArgumentException("The sub element must be struct, but was " + sub.getTypeName());
    }
    return (StructObjectInspector) sub;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
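
And a matching sketch for inline() (the InlineSketch class is hypothetical; the factory methods are the usual Hive ones): wrap a struct inspector in a standard list inspector, and initialize hands back the element struct unchanged.

import java.util.Arrays;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFInline;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class InlineSketch {
    public static void main(String[] args) throws UDFArgumentException {
        // Element type: struct<id:int,name:string>
        StructObjectInspector elem = ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("id", "name"),
            Arrays.asList(
                (ObjectInspector) PrimitiveObjectInspectorFactory.javaIntObjectInspector,
                PrimitiveObjectInspectorFactory.javaStringObjectInspector));
        // The single argument must be array<struct<...>>.
        ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(elem);
        StructObjectInspector out = new GenericUDTFInline().initialize(new ObjectInspector[] { listOI });
        System.out.println(out.getTypeName()); // struct<id:int,name:string>
    }
}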

Aggregations

UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 72
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 31
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 27
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException): 24
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 18
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 11
ArrayList (java.util.ArrayList): 9
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category): 7
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 7
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 6
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 6
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 6
Test (org.junit.Test): 6
PrimitiveObjectInspectorConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter): 5
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 5
StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector): 4
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 3
DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2): 3
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 3
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 3