Example 31 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

From the class GenericUDFIn, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 2) {
        throw new UDFArgumentLengthException("The function IN requires at least two arguments, got " + arguments.length);
    }
    argumentOIs = arguments;
    // We want to use the ReturnObjectInspectorResolver because otherwise
    // ObjectInspectorUtils.compare() will return != for two objects that have
    // different object inspectors, e.g. 238 and "238". The ROIR will help convert
    // both values to a common type so that they can be compared reasonably.
    conversionHelper = new GenericUDFUtils.ReturnObjectInspectorResolver(true);
    for (ObjectInspector oi : arguments) {
        if (!conversionHelper.updateForComparison(oi)) {
            StringBuilder sb = new StringBuilder();
            sb.append("Type mismatch: {");
            sb.append(arguments[0].getTypeName());
            sb.append(" IN (");
            for (int i = 1; i < arguments.length; i++) {
                if (i != 1) {
                    sb.append(", ");
                }
                sb.append(arguments[i].getTypeName());
            }
            sb.append(")}");
            throw new UDFArgumentException(sb.toString());
        }
    }
    compareOI = conversionHelper.get();
    checkIfInSetConstant();
    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
}
Also used : ReturnObjectInspectorResolver(org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ReturnObjectInspectorResolver) UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException)
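
To see this check fire, hand initialize() two inspectors with no common comparison type. The driver below is a minimal sketch of ours (the class name and setup are not from the page); it assumes hive-exec is on the classpath and that a primitive paired with a list resolves to no common comparison type, so updateForComparison() returns false:

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class InTypeMismatchDemo {
    public static void main(String[] args) throws Exception {
        GenericUDFIn in = new GenericUDFIn();
        // int IN (array<int>): a primitive and a list share no comparison
        // type, so initialize() should throw the "Type mismatch"
        // UDFArgumentException assembled above.
        ObjectInspector[] ois = {
            PrimitiveObjectInspectorFactory.javaIntObjectInspector,
            ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaIntObjectInspector)
        };
        try {
            in.initialize(ois);
        } catch (UDFArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}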

Example 32 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

From the class GenericUDTFStack, method initialize:

@Override
public StructObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
    if (args.length < 2) {
        throw new UDFArgumentException("STACK() expects at least two arguments.");
    }
    if (!(args[0] instanceof ConstantObjectInspector)) {
        throw new UDFArgumentException("The first argument to STACK() must be a constant integer (got " + args[0].getTypeName() + " instead).");
    }
    numRows = (IntWritable) ((ConstantObjectInspector) args[0]).getWritableConstantValue();
    if (numRows == null || numRows.get() < 1) {
        throw new UDFArgumentException("STACK() expects its first argument to be >= 1.");
    }
    // Divide and round up.
    numCols = (args.length - 1 + numRows.get() - 1) / numRows.get();
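    // Worked example (ours, not from the source): STACK(2, 'A', 10, 'B', 20, 'C')
    // has 5 value arguments and numRows = 2, so numCols = (5 + 2 - 1) / 2 = 3;
    // the unfilled sixth cell of the second row comes out as null.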
    for (int jj = 0; jj < numCols; ++jj) {
        returnOIResolvers.add(new ReturnObjectInspectorResolver());
        for (int ii = 0; ii < numRows.get(); ++ii) {
            int index = ii * numCols + jj + 1;
            if (index < args.length && !returnOIResolvers.get(jj).update(args[index])) {
                throw new UDFArgumentException("Argument " + (jj + 1) + "'s type (" + args[jj + 1].getTypeName() + ") should be equal to argument " + index + "'s type (" + args[index].getTypeName() + ")");
            }
        }
    }
    forwardObj = new Object[numCols];
    for (int ii = 0; ii < args.length; ++ii) {
        argOIs.add(args[ii]);
    }
    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    for (int ii = 0; ii < numCols; ++ii) {
        fieldNames.add("col" + ii);
        fieldOIs.add(returnOIResolvers.get(ii).get());
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) WritableConstantIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ReturnObjectInspectorResolver(org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ReturnObjectInspectorResolver) ArrayList(java.util.ArrayList)
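
The constant-first-argument check is easy to exercise directly. A minimal sketch of ours (not from the page), assuming hive-exec on the classpath: a plain, non-constant int inspector for the row count fails the instanceof ConstantObjectInspector test before anything else runs.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFStack;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StackConstantCheckDemo {
    public static void main(String[] args) throws Exception {
        GenericUDTFStack stack = new GenericUDTFStack();
        // javaIntObjectInspector is not a ConstantObjectInspector, so the
        // "must be a constant integer" UDFArgumentException is thrown.
        ObjectInspector[] ois = {
            PrimitiveObjectInspectorFactory.javaIntObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector
        };
        try {
            stack.initialize(ois);
        } catch (UDFArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}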

Example 33 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

From the class GenericUDFTrunc, method evaluateNumber:

private Object evaluateNumber(DeferredObject[] arguments) throws HiveException, UDFArgumentTypeException {
    if (arguments[0] == null) {
        return null;
    }
    Object input = arguments[0].get();
    if (input == null) {
        return null;
    }
    if (arguments.length == 2 && arguments[1] != null && arguments[1].get() != null && !inputSacleConst) {
        Object scaleObj = null;
        switch(inputScaleOI.getPrimitiveCategory()) {
            case BYTE:
                scaleObj = byteConverter.convert(arguments[1].get());
                scale = ((ByteWritable) scaleObj).get();
                break;
            case SHORT:
                scaleObj = shortConverter.convert(arguments[1].get());
                scale = ((ShortWritable) scaleObj).get();
                break;
            case INT:
                scaleObj = intConverter.convert(arguments[1].get());
                scale = ((IntWritable) scaleObj).get();
                break;
            case LONG:
                scaleObj = longConverter.convert(arguments[1].get());
                long l = ((LongWritable) scaleObj).get();
                if (l < Integer.MIN_VALUE || l > Integer.MAX_VALUE) {
                    throw new UDFArgumentException(getFuncName().toUpperCase() + " scale argument out of allowed range");
                }
                scale = (int) l;
                break;
            default:
                break;
        }
    }
    switch(inputType1) {
        case VOID:
            return null;
        case DECIMAL:
            HiveDecimalWritable decimalWritable = (HiveDecimalWritable) inputOI.getPrimitiveWritableObject(input);
            HiveDecimal dec = trunc(decimalWritable.getHiveDecimal(), scale);
            if (dec == null) {
                return null;
            }
            return new HiveDecimalWritable(dec);
        case BYTE:
            ByteWritable byteWritable = (ByteWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return byteWritable;
            } else {
                return new ByteWritable((byte) trunc(byteWritable.get(), scale));
            }
        case SHORT:
            ShortWritable shortWritable = (ShortWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return shortWritable;
            } else {
                return new ShortWritable((short) trunc(shortWritable.get(), scale));
            }
        case INT:
            IntWritable intWritable = (IntWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return intWritable;
            } else {
                return new IntWritable((int) trunc(intWritable.get(), scale));
            }
        case LONG:
            LongWritable longWritable = (LongWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return longWritable;
            } else {
                return new LongWritable(trunc(longWritable.get(), scale));
            }
        case FLOAT:
            float f = ((FloatWritable) inputOI.getPrimitiveWritableObject(input)).get();
            return new FloatWritable((float) trunc(f, scale));
        case DOUBLE:
            return trunc(((DoubleWritable) inputOI.getPrimitiveWritableObject(input)), scale);
        default:
            throw new UDFArgumentTypeException(0, "Only numeric or string group data types are allowed for TRUNC function. Got " + inputType1.name());
    }
}
Also used : HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable)
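
For the integer branches above, truncation only does work when the scale is negative: the last |scale| decimal digits are zeroed out. A standalone sketch of that arithmetic (the helper name and shape are ours, not Hive's trunc()):

// Sketch of the negative-scale truncation performed by the
// BYTE/SHORT/INT/LONG branches; not Hive's actual trunc() helper.
static long truncToScale(long value, int scale) {
    if (scale >= 0) {
        // Integers have no fractional digits, so a non-negative scale
        // returns the value unchanged, matching the branches above.
        return value;
    }
    long pow = (long) Math.pow(10, -scale);
    return (value / pow) * pow; // truncToScale(1234, -2) == 1200
}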

Example 34 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

From the class GenericUDFSurrogateKey, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length == 0) {
        writeIdBits = DEFAULT_WRITE_ID_BITS;
        taskIdBits = DEFAULT_TASK_ID_BITS;
        rowIdBits = DEFAULT_ROW_ID_BITS;
    } else if (arguments.length == 2) {
        for (int i = 0; i < 2; i++) {
            if (arguments[i].getCategory() != Category.PRIMITIVE) {
                throw new UDFArgumentTypeException(i, "SURROGATE_KEY input only takes primitive types, got " + arguments[i].getTypeName());
            }
        }
        writeIdBits = ((WritableConstantIntObjectInspector) arguments[0]).getWritableConstantValue().get();
        taskIdBits = ((WritableConstantIntObjectInspector) arguments[1]).getWritableConstantValue().get();
        rowIdBits = 64 - (writeIdBits + taskIdBits);
        if (writeIdBits < 1 || writeIdBits > 62) {
            throw new UDFArgumentException("Write ID bits must be between 1 and 62 (value: " + writeIdBits + ")");
        }
        if (taskIdBits < 1 || taskIdBits > 62) {
            throw new UDFArgumentException("Task ID bits must be between 1 and 62 (value: " + taskIdBits + ")");
        }
        if (writeIdBits + taskIdBits > 63) {
            throw new UDFArgumentException("Write ID bits + Task ID bits must be less than 63 (value: " + (writeIdBits + taskIdBits) + ")");
        }
    } else {
        throw new UDFArgumentLengthException("The function SURROGATE_KEY takes 0 or 2 integer arguments (write id bits, taks id bits), but found " + arguments.length);
    }
    maxWriteId = (1L << writeIdBits) - 1;
    maxTaskId = (1L << taskIdBits) - 1;
    maxRowId = (1L << rowIdBits) - 1;
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException)
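
The three bit widths carve one 64-bit key into write-id, task-id, and row-id fields, which is why writeIdBits + taskIdBits must leave at least one bit for the row id. A sketch of the packing this layout implies (the helper is ours; the actual key assembly happens in evaluate(), which this page does not show):

// Assumes the layout [writeId | taskId | rowId] from high to low bits;
// callers must have validated the widths as initialize() does above.
static long surrogateKey(long writeId, long taskId, long rowId,
                         int taskIdBits, int rowIdBits) {
    return (writeId << (taskIdBits + rowIdBits))
         | (taskId << rowIdBits)
         | rowId;
}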

Example 35 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

From the class GenericUDFToUnixTimeStamp, method initializeInput:

protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 1) {
        throw new UDFArgumentLengthException("The function " + getName().toUpperCase() + "requires at least one argument");
    }
    for (ObjectInspector argument : arguments) {
        if (argument.getCategory() != Category.PRIMITIVE) {
            throw new UDFArgumentException(getName().toUpperCase() + " only takes string/date/timestamp types, got " + argument.getTypeName());
        }
    }
    PrimitiveObjectInspector arg1OI = (PrimitiveObjectInspector) arguments[0];
    switch(arg1OI.getPrimitiveCategory()) {
        case CHAR:
        case VARCHAR:
        case STRING:
            inputTextConverter = ObjectInspectorConverters.getConverter(arg1OI, PrimitiveObjectInspectorFactory.javaStringObjectInspector);
            if (arguments.length > 1) {
                PrimitiveObjectInspector arg2OI = (PrimitiveObjectInspector) arguments[1];
                if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(arg2OI.getPrimitiveCategory()) != PrimitiveGrouping.STRING_GROUP) {
                    throw new UDFArgumentException("The time pattern for " + getName().toUpperCase() + " should be string type");
                }
                patternConverter = ObjectInspectorConverters.getConverter(arg2OI, PrimitiveObjectInspectorFactory.javaStringObjectInspector);
            }
            break;
        case DATE:
            inputDateOI = (DateObjectInspector) arguments[0];
            break;
        case TIMESTAMP:
            inputTimestampOI = (TimestampObjectInspector) arguments[0];
            break;
        case TIMESTAMPLOCALTZ:
            inputTimestampLocalTzOI = (TimestampLocalTZObjectInspector) arguments[0];
            break;
        default:
            throw new UDFArgumentException("The function " + getName().toUpperCase() + " takes only string/date/timestamp/timestampwltz types. Got Type:" + arg1OI.getPrimitiveCategory().name());
    }
    timeZone = SessionState.get() == null ? new HiveConf().getLocalTimeZone() : SessionState.get().getConf().getLocalTimeZone();
    formatter = getFormatter(lasPattern);
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) TimestampLocalTZObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) HiveConf(org.apache.hadoop.hive.conf.HiveConf)
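
Because every argument must be primitive, a complex type fails before any converter is built. A minimal driver sketch of ours, assuming (as in current Hive) that the public initialize() delegates to initializeInput():

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ToUnixTimestampArgCheckDemo {
    public static void main(String[] args) throws Exception {
        GenericUDFToUnixTimeStamp udf = new GenericUDFToUnixTimeStamp();
        // map<string,string> is not Category.PRIMITIVE, so the loop at the
        // top of initializeInput() rejects it before the switch runs.
        ObjectInspector mapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        try {
            udf.initialize(new ObjectInspector[] { mapOI });
        } catch (UDFArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}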

Aggregations

UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException) 72
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) 31
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) 27
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) 24
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) 18
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) 11
ArrayList (java.util.ArrayList) 9
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) 7
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) 7
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 6
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) 6
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) 6
Test (org.junit.Test) 6
PrimitiveObjectInspectorConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter) 5
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) 5
StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) 4
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 3
DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2) 3
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable) 3
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) 3