
Example 46 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The class GenericUDFAbs, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException("ABS() requires 1 argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentException("ABS only takes primitive types, got " + arguments[0].getTypeName());
    }
    argumentOI = (PrimitiveObjectInspector) arguments[0];
    inputType = argumentOI.getPrimitiveCategory();
    ObjectInspector outputOI = null;
    switch(inputType) {
        case SHORT:
        case BYTE:
        case INT:
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
            break;
        case LONG:
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableLongObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
            break;
        case FLOAT:
        case STRING:
        case DOUBLE:
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
            break;
        case DECIMAL:
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(((PrimitiveObjectInspector) arguments[0]).getTypeInfo());
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], outputOI);
            break;
        default:
            throw new UDFArgumentException("ABS only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
    }
    return outputOI;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), HiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException)
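
For orientation, here is a minimal standalone sketch (not part of the Hive sources above; the class name AbsInitializeSketch and the direct main() invocation are illustrative assumptions) of how the type resolution in this initialize() plays out for a few input inspectors:

import org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class AbsInitializeSketch {
    public static void main(String[] args) throws Exception {
        ObjectInspector[] oneArg = new ObjectInspector[1];

        // SHORT/BYTE/INT all widen to an int result
        oneArg[0] = PrimitiveObjectInspectorFactory.writableShortObjectInspector;
        System.out.println(new GenericUDFAbs().initialize(oneArg).getTypeName()); // int

        // FLOAT/STRING/DOUBLE all resolve to a double result
        oneArg[0] = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        System.out.println(new GenericUDFAbs().initialize(oneArg).getTypeName()); // double

        // DECIMAL keeps the input's precision and scale
        oneArg[0] = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
                TypeInfoFactory.getDecimalTypeInfo(5, 2));
        System.out.println(new GenericUDFAbs().initialize(oneArg).getTypeName()); // decimal(5,2)
    }
}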

Example 47 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The class GenericUDFAbs, method evaluate:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object valObject = arguments[0].get();
    if (valObject == null) {
        return null;
    }
    switch(inputType) {
        case SHORT:
        case BYTE:
        case INT:
            valObject = inputConverter.convert(valObject);
            resultInt.set(Math.abs(((IntWritable) valObject).get()));
            return resultInt;
        case LONG:
            valObject = inputConverter.convert(valObject);
            resultLong.set(Math.abs(((LongWritable) valObject).get()));
            return resultLong;
        case FLOAT:
        case STRING:
        case DOUBLE:
            valObject = inputConverter.convert(valObject);
            if (valObject == null) {
                return null;
            }
            resultDouble.set(Math.abs(((DoubleWritable) valObject).get()));
            return resultDouble;
        case DECIMAL:
            HiveDecimalObjectInspector decimalOI = (HiveDecimalObjectInspector) argumentOI;
            HiveDecimalWritable val = decimalOI.getPrimitiveWritableObject(valObject);
            if (val != null) {
                resultDecimal.set(val);
                resultDecimal.mutateAbs();
                val = resultDecimal;
            }
            return val;
        default:
            throw new UDFArgumentException("ABS only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
    }
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), HiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector), LongWritable (org.apache.hadoop.io.LongWritable), IntWritable (org.apache.hadoop.io.IntWritable)
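
A companion sketch wiring initialize() and evaluate() together, following the pattern of Hive's UDF unit tests (the class name AbsEvaluateSketch is illustrative; the expected outputs in the comments assume the converter behaviour shown above):

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class AbsEvaluateSketch {
    public static void main(String[] args) throws Exception {
        // INT path: the converter yields an IntWritable and Math.abs is applied to it
        GenericUDFAbs intUdf = new GenericUDFAbs();
        intUdf.initialize(new ObjectInspector[] {
                PrimitiveObjectInspectorFactory.writableIntObjectInspector });
        System.out.println(intUdf.evaluate(new DeferredObject[] {
                new DeferredJavaObject(new IntWritable(-7)) })); // 7

        // STRING path: converted to double first; an unparseable string converts
        // to null, which evaluate() passes through as a null result
        GenericUDFAbs strUdf = new GenericUDFAbs();
        strUdf.initialize(new ObjectInspector[] {
                PrimitiveObjectInspectorFactory.writableStringObjectInspector });
        System.out.println(strUdf.evaluate(new DeferredObject[] {
                new DeferredJavaObject(new Text("-1.5")) })); // 1.5
        System.out.println(strUdf.evaluate(new DeferredObject[] {
                new DeferredJavaObject(new Text("not a number")) })); // null
    }
}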

Example 48 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The class GenericUDFBaseArithmetic, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentException(getClass().getSimpleName() + " requires two arguments.");
    }
    // Lookup values needed for numeric arithmetic UDFs
    if (confLookupNeeded) {
        CompatLevel compatLevel = HiveCompat.getCompatLevel(SessionState.get().getConf());
        ansiSqlArithmetic = compatLevel.ordinal() > CompatLevel.HIVE_0_12.ordinal();
        confLookupNeeded = false;
    }
    // Determine if we are dealing with a numeric or date arithmetic operation
    boolean isDateTimeOp = false;
    for (int idx = 0; idx < 2; ++idx) {
        switch(((PrimitiveObjectInspector) arguments[idx]).getPrimitiveCategory()) {
            case DATE:
            case TIMESTAMP:
            case INTERVAL_YEAR_MONTH:
            case INTERVAL_DAY_TIME:
                isDateTimeOp = true;
                break;
            default:
                break;
        }
    }
    if (isDateTimeOp) {
        arithmeticOperation = instantiateDTIUDF();
    } else {
        GenericUDFBaseNumeric numericUDF = instantiateNumericUDF();
        // Set values needed for numeric arithmetic UDFs
        numericUDF.setAnsiSqlArithmetic(ansiSqlArithmetic);
        numericUDF.setConfLookupNeeded(confLookupNeeded);
        arithmeticOperation = numericUDF;
    }
    return arithmeticOperation.initialize(arguments);
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), CompatLevel (org.apache.hive.common.HiveCompat.CompatLevel), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)
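
GenericUDFBaseArithmetic is abstract; the sketch below assumes GenericUDFOPPlus (the + operator) as one concrete subclass and assumes an active SessionState is required because of the compat-level lookup above. The class name PlusDispatchSketch and the expected type names in the comments are illustrative, not taken from the Hive sources:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class PlusDispatchSketch {
    public static void main(String[] args) throws Exception {
        // initialize() reads SessionState.get().getConf(), so register a session first
        SessionState.setCurrentSessionState(new SessionState(new HiveConf()));

        // Two numeric operands: the numeric arithmetic UDF is instantiated
        ObjectInspector intOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        System.out.println(new GenericUDFOPPlus()
                .initialize(new ObjectInspector[] { intOI, intOI }).getTypeName()); // e.g. int

        // A DATE/INTERVAL operand: the date-time (DTI) UDF is instantiated instead
        ObjectInspector dateOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
        ObjectInspector ymOI =
                PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector;
        System.out.println(new GenericUDFOPPlus()
                .initialize(new ObjectInspector[] { dateOI, ymOI }).getTypeName()); // e.g. date
    }
}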

Example 49 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The class GenericUDFTimestamp, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 1, 1);
    checkArgPrimitive(arguments, 0);
    checkArgGroups(arguments, 0, tsInputTypes, STRING_GROUP, DATE_GROUP, NUMERIC_GROUP, VOID_GROUP, BOOLEAN_GROUP);
    strict = SessionState.get() != null ? SessionState.get().getConf().getBoolVar(ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION) : new HiveConf().getBoolVar(ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION);
    intToTimestampInSeconds = SessionState.get() != null ? SessionState.get().getConf().getBoolVar(ConfVars.HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS) : new HiveConf().getBoolVar(ConfVars.HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS);
    if (strict) {
        if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(tsInputTypes[0]) == PrimitiveGrouping.NUMERIC_GROUP) {
            throw new UDFArgumentException("Casting NUMERIC types to TIMESTAMP is prohibited (" + ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION + ")");
        }
    }
    obtainTimestampConverter(arguments, 0, tsInputTypes, tsConvertors);
    return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), HiveConf (org.apache.hadoop.hive.conf.HiveConf)
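
A hedged sketch (the class name TimestampCastSketch is illustrative; it assumes the strict-conversion setting defaults to enabled in recent releases) showing both the string-to-timestamp path and the UDFArgumentException raised for numeric inputs:

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class TimestampCastSketch {
    public static void main(String[] args) throws Exception {
        // String input is accepted and converted to a writable timestamp
        GenericUDFTimestamp udf = new GenericUDFTimestamp();
        udf.initialize(new ObjectInspector[] {
                PrimitiveObjectInspectorFactory.javaStringObjectInspector });
        System.out.println(udf.evaluate(new DeferredObject[] {
                new DeferredJavaObject("2023-06-01 12:34:56") }));

        // Numeric input is rejected at initialize() time when strict conversion is on
        try {
            new GenericUDFTimestamp().initialize(new ObjectInspector[] {
                    PrimitiveObjectInspectorFactory.javaIntObjectInspector });
            System.out.println("numeric cast allowed (strict conversion disabled)");
        } catch (UDFArgumentException e) {
            System.out.println("numeric cast rejected: " + e.getMessage());
        }
    }
}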

Example 50 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The class GenericUDFToDecimal, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 1) {
        throw new UDFArgumentLengthException("The function DECIMAL requires at least one argument, got " + arguments.length);
    }
    try {
        argumentOI = (PrimitiveObjectInspector) arguments[0];
    } catch (ClassCastException e) {
        throw new UDFArgumentException("The function DECIMAL takes only primitive types");
    }
    // Check if this UDF has been provided with type params for the output varchar type
    SettableHiveDecimalObjectInspector outputOI;
    outputOI = (SettableHiveDecimalObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
    bdConverter = new HiveDecimalConverter(argumentOI, outputOI);
    return outputOI;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), HiveDecimalConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.HiveDecimalConverter), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), SettableHiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveDecimalObjectInspector)
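
A hedged sketch (the class name ToDecimalSketch is illustrative) of how the target decimal type is supplied through setTypeInfo() before initialize(), assuming standalone use outside a query plan:

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ToDecimalSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFToDecimal udf = new GenericUDFToDecimal();
        // The planner normally supplies the target type; here it is set directly
        udf.setTypeInfo(TypeInfoFactory.getDecimalTypeInfo(10, 2));

        ObjectInspector outputOI = udf.initialize(new ObjectInspector[] {
                PrimitiveObjectInspectorFactory.javaDoubleObjectInspector });
        System.out.println(outputOI.getTypeName()); // decimal(10,2)

        // The HiveDecimalConverter enforces the precision/scale of the target type
        System.out.println(udf.evaluate(new DeferredObject[] {
                new DeferredJavaObject(1.5d) })); // 1.5
    }
}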

Aggregations

UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException) 72
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) 31
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) 27
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) 24
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) 18
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) 11
ArrayList (java.util.ArrayList) 9
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) 7
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) 7
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 6
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) 6
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) 6
Test (org.junit.Test) 6
PrimitiveObjectInspectorConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter) 5
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) 5
StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) 4
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 3
DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2) 3
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable) 3
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) 3