
Example 1 with UDFArgumentTypeException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.

From the class GenericUDFTrunc, method evaluateDate.

private Object evaluateDate(DeferredObject[] arguments) throws UDFArgumentLengthException, HiveException, UDFArgumentTypeException, UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("trunc() requires 2 arguments, got " + arguments.length);
    }
    if (arguments[0].get() == null || arguments[1].get() == null) {
        return null;
    }
    if (textConverter2 != null) {
        fmtInput = textConverter2.convert(arguments[1].get()).toString();
    }
    Date date;
    switch(inputType1) {
        case STRING:
            String dateString = textConverter1.convert(arguments[0].get()).toString();
            try {
                date = formatter.parse(dateString);
            } catch (ParseException e) {
                return null;
            }
            break;
        case TIMESTAMP:
            Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get())).getTimestamp();
            date = ts;
            break;
        case DATE:
            DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
            date = dw.get();
            break;
        default:
            throw new UDFArgumentTypeException(0, "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
    }
    if (evalDate(date) == null) {
        return null;
    }
    Date newDate = calendar.getTime();
    output.set(formatter.format(newDate));
    return output;
}
Also used: UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), ParseException (java.text.ParseException), Timestamp (java.sql.Timestamp), Date (java.util.Date)
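
For orientation, here is a minimal test-style sketch of driving this date path through the standard GenericUDF lifecycle (initialize, then evaluate). The class name and literal values are illustrative, not part of Hive:

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrunc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class TruncDateCallSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFTrunc udf = new GenericUDFTrunc();
        // Two string arguments select the date variant of trunc().
        ObjectInspector[] argOIs = {
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector };
        udf.initialize(argOIs);
        // trunc('2017-03-15', 'MM') truncates the date to the start of its month.
        DeferredObject[] row = {
            new DeferredJavaObject("2017-03-15"),
            new DeferredJavaObject("MM") };
        System.out.println(udf.evaluate(row)); // expected: 2017-03-01
    }
}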

Example 2 with UDFArgumentTypeException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.

From the class GenericUDFTrunc, method initializeDate.

private ObjectInspector initializeDate(ObjectInspector[] arguments) throws UDFArgumentLengthException, UDFArgumentTypeException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("trunc() requires 2 arguments, got " + arguments.length);
    }
    if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + arguments[0].getTypeName() + " is passed as the first argument");
    }
    if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but " + arguments[1].getTypeName() + " is passed as the second argument");
    }
    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
    switch(inputType1) {
        case STRING:
        case VARCHAR:
        case CHAR:
        case VOID:
            inputType1 = PrimitiveCategory.STRING;
            textConverter1 = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
            break;
        case TIMESTAMP:
            timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
            break;
        case DATE:
            dateWritableConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableDateObjectInspector);
            break;
        default:
            throw new UDFArgumentTypeException(0, "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + inputType1);
    }
    inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
    if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.STRING_GROUP && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.VOID_GROUP) {
        throw new UDFArgumentTypeException(1, "trunk() only takes STRING/CHAR/VARCHAR types as second argument, got " + inputType2);
    }
    inputType2 = PrimitiveCategory.STRING;
    if (arguments[1] instanceof ConstantObjectInspector) {
        Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
        fmtInput = obj != null ? obj.toString() : null;
    } else {
        textConverter2 = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }
    return outputOI;
}
Also used: PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), TimestampConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter)
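
The ConstantObjectInspector branch above is a recurring Hive pattern: when an argument is a query constant, its value is read once during initialize instead of being converted on every row. A stripped-down sketch of the same pattern in a standalone UDF follows; the class, function, and field names are hypothetical:

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

// Hypothetical UDF demonstrating the constant-argument caching pattern.
public class GenericUDFConstantArgSketch extends GenericUDF {
    private transient boolean argIsConstant;
    private transient String cachedValue;      // set once when the argument is constant
    private transient Converter textConverter; // per-row fallback otherwise

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
            throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
                + arguments[0].getTypeName() + " is passed");
        }
        if (arguments[0] instanceof ConstantObjectInspector) {
            // Constant folding: fetch the value once, skip per-row conversion.
            Object constant = ((ConstantObjectInspector) arguments[0]).getWritableConstantValue();
            argIsConstant = true;
            cachedValue = constant == null ? null : constant.toString();
        } else {
            textConverter = ObjectInspectorConverters.getConverter(arguments[0],
                PrimitiveObjectInspectorFactory.writableStringObjectInspector);
        }
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        if (argIsConstant) {
            return cachedValue == null ? null : new Text(cachedValue);
        }
        Object raw = arguments[0].get();
        return raw == null ? null : new Text(textConverter.convert(raw).toString());
    }

    @Override
    public String getDisplayString(String[] children) {
        return "constant_arg_sketch(" + String.join(", ", children) + ")";
    }
}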

Example 3 with UDFArgumentTypeException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.

From the class GenericUDFTrunc, method evaluateNumber.

private Object evaluateNumber(DeferredObject[] arguments) throws HiveException, UDFArgumentTypeException {
    if (arguments[0] == null) {
        return null;
    }
    Object input = arguments[0].get();
    if (input == null) {
        return null;
    }
    if (arguments.length == 2 && arguments[1] != null && arguments[1].get() != null && !inputSacleConst) {
        Object scaleObj = null;
        switch(inputScaleOI.getPrimitiveCategory()) {
            case BYTE:
                scaleObj = byteConverter.convert(arguments[1].get());
                scale = ((ByteWritable) scaleObj).get();
                break;
            case SHORT:
                scaleObj = shortConverter.convert(arguments[1].get());
                scale = ((ShortWritable) scaleObj).get();
                break;
            case INT:
                scaleObj = intConverter.convert(arguments[1].get());
                scale = ((IntWritable) scaleObj).get();
                break;
            case LONG:
                scaleObj = longConverter.convert(arguments[1].get());
                long l = ((LongWritable) scaleObj).get();
                if (l < Integer.MIN_VALUE || l > Integer.MAX_VALUE) {
                    throw new UDFArgumentException(getFuncName().toUpperCase() + " scale argument out of allowed range");
                }
                scale = (int) l;
                break;
            default:
                break;
        }
    }
    switch(inputType1) {
        case VOID:
            return null;
        case DECIMAL:
            HiveDecimalWritable decimalWritable = (HiveDecimalWritable) inputOI.getPrimitiveWritableObject(input);
            HiveDecimal dec = trunc(decimalWritable.getHiveDecimal(), scale);
            if (dec == null) {
                return null;
            }
            return new HiveDecimalWritable(dec);
        case BYTE:
            ByteWritable byteWritable = (ByteWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return byteWritable;
            } else {
                return new ByteWritable((byte) trunc(byteWritable.get(), scale));
            }
        case SHORT:
            ShortWritable shortWritable = (ShortWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return shortWritable;
            } else {
                return new ShortWritable((short) trunc(shortWritable.get(), scale));
            }
        case INT:
            IntWritable intWritable = (IntWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return intWritable;
            } else {
                return new IntWritable((int) trunc(intWritable.get(), scale));
            }
        case LONG:
            LongWritable longWritable = (LongWritable) inputOI.getPrimitiveWritableObject(input);
            if (scale >= 0) {
                return longWritable;
            } else {
                return new LongWritable(trunc(longWritable.get(), scale));
            }
        case FLOAT:
            float f = ((FloatWritable) inputOI.getPrimitiveWritableObject(input)).get();
            return new FloatWritable((float) trunc(f, scale));
        case DOUBLE:
            return trunc(((DoubleWritable) inputOI.getPrimitiveWritableObject(input)), scale);
        default:
            throw new UDFArgumentTypeException(0, "Only numeric or string group data types are allowed for TRUNC function. Got " + inputType1.name());
    }
}
Also used: HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), FloatWritable (org.apache.hadoop.io.FloatWritable), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), LongWritable (org.apache.hadoop.io.LongWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable)
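
The trunc(...) helpers invoked above live elsewhere in GenericUDFTrunc and are not shown here. As a rough illustration of the arithmetic (an assumption-laden sketch, not the actual Hive implementation), truncation toward zero at a given decimal scale can be written as:

import java.math.BigDecimal;
import java.math.RoundingMode;

public class TruncArithmeticSketch {
    // Truncate toward zero at the given scale: scale >= 0 keeps that many fractional
    // digits, scale < 0 zeroes out digits to the left of the decimal point.
    static double trunc(double input, int scale) {
        return BigDecimal.valueOf(input).setScale(scale, RoundingMode.DOWN).doubleValue();
    }

    // Integral variant; only negative scales change the value, which matches the
    // "scale >= 0 returns the writable unchanged" shortcut in evaluateNumber above.
    static long trunc(long input, int scale) {
        if (scale >= 0) {
            return input; // nothing to the right of the point to drop
        }
        long pow = (long) Math.pow(10, -scale);
        return (input / pow) * pow;
    }

    public static void main(String[] args) {
        System.out.println(trunc(123.456, 1));  // 123.4
        System.out.println(trunc(123.456, -2)); // 100.0
        System.out.println(trunc(1234L, -2));   // 1200
    }
}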

Example 4 with UDFArgumentTypeException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.

From the class GenericUDTFGetSplits, method initialize.

@Override
public StructObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    LOG.debug("initializing GenericUDFGetSplits");
    if (SessionState.get() == null || SessionState.get().getConf() == null) {
        throw new IllegalStateException("Cannot run get splits outside HS2");
    }
    LOG.debug("Initialized conf, jc and metastore connection");
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("The function GET_SPLITS accepts 2 arguments.");
    } else if (!(arguments[0] instanceof StringObjectInspector)) {
        LOG.error("Got " + arguments[0].getTypeName() + " instead of string.");
        throw new UDFArgumentTypeException(0, "\"" + "string\" is expected at function GET_SPLITS, " + "but \"" + arguments[0].getTypeName() + "\" is found");
    } else if (!(arguments[1] instanceof IntObjectInspector)) {
        LOG.error("Got " + arguments[1].getTypeName() + " instead of int.");
        throw new UDFArgumentTypeException(1, "\"" + "int\" is expected at function GET_SPLITS, " + "but \"" + arguments[1].getTypeName() + "\" is found");
    }
    stringOI = (StringObjectInspector) arguments[0];
    intOI = (IntObjectInspector) arguments[1];
    List<String> names = Arrays.asList("split");
    List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector);
    StructObjectInspector outputOI = ObjectInspectorFactory.getStandardStructObjectInspector(names, fieldOIs);
    LOG.debug("done initializing GenericUDFGetSplits");
    return outputOI;
}
Also used: ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector), IntObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException)
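
The validate-arguments-then-build-a-struct shape above generalizes to any table-generating function. A minimal hypothetical sketch (echo_sketch is illustrative and not a Hive builtin):

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;

// Hypothetical UDTF: one string argument in, one string column out.
public class GenericUDTFEchoSketch extends GenericUDTF {
    private StringObjectInspector stringOI;

    @Override
    public StructObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        if (arguments.length != 1) {
            throw new UDFArgumentLengthException("echo_sketch() accepts exactly 1 argument");
        }
        if (!(arguments[0] instanceof StringObjectInspector)) {
            // Position-aware type error, same convention as GET_SPLITS above.
            throw new UDFArgumentTypeException(0, "\"string\" is expected at function echo_sketch, but \""
                + arguments[0].getTypeName() + "\" is found");
        }
        stringOI = (StringObjectInspector) arguments[0];
        List<String> names = Arrays.asList("echoed");
        List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        return ObjectInspectorFactory.getStandardStructObjectInspector(names, fieldOIs);
    }

    @Override
    public void process(Object[] args) throws HiveException {
        // Each forward() emits one row shaped by the struct inspector built above.
        forward(new Object[] { stringOI.getPrimitiveJavaObject(args[0]) });
    }

    @Override
    public void close() throws HiveException {
        // no buffered rows to flush
    }
}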

Example 5 with UDFArgumentTypeException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.

From the class GenericUDFEncode, method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("Encode() requires exactly two arguments");
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE || PrimitiveGrouping.STRING_GROUP != PrimitiveObjectInspectorUtils.getPrimitiveGrouping(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory())) {
        throw new UDFArgumentTypeException(0, "The first argument to Encode() must be a string/varchar");
    }
    stringOI = (PrimitiveObjectInspector) arguments[0];
    if (arguments[1].getCategory() != Category.PRIMITIVE || PrimitiveGrouping.STRING_GROUP != PrimitiveObjectInspectorUtils.getPrimitiveGrouping(((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory())) {
        throw new UDFArgumentTypeException(1, "The second argument to Encode() must be a string/varchar");
    }
    charsetOI = (PrimitiveObjectInspector) arguments[1];
    // If the character set for encoding is constant, we can optimize that
    if (charsetOI instanceof ConstantObjectInspector) {
        String charSetName = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue().toString();
        encoder = Charset.forName(charSetName).newEncoder().onMalformedInput(CodingErrorAction.REPORT).onUnmappableCharacter(CodingErrorAction.REPORT);
    }
    result = new BytesWritable();
    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
}
Also used: PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), BytesWritable (org.apache.hadoop.io.BytesWritable)
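
For completeness, a hedged sketch of the per-row work that pairs with the cached encoder above; it shows the plain JDK CharsetEncoder-to-BytesWritable handoff rather than quoting GenericUDFEncode's actual evaluate method:

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CodingErrorAction;
import org.apache.hadoop.io.BytesWritable;

public class EncodeRowSketch {
    public static void main(String[] args) throws CharacterCodingException {
        // Built once, as in the constant-charset branch of initialize() above.
        CharsetEncoder encoder = Charset.forName("UTF-8").newEncoder()
            .onMalformedInput(CodingErrorAction.REPORT)
            .onUnmappableCharacter(CodingErrorAction.REPORT);
        // Per-row: encode the value and copy the bytes into a reusable writable.
        ByteBuffer encoded = encoder.encode(CharBuffer.wrap("héllo"));
        BytesWritable result = new BytesWritable();
        result.setSize(encoded.limit());
        encoded.get(result.getBytes(), 0, encoded.limit());
        System.out.println(result.getLength() + " bytes"); // 6 (the é takes two bytes in UTF-8)
    }
}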

Aggregations

UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 56 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 29 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 28 usages
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException): 20 usages
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 15 usages
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 14 usages
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 12 usages
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category): 8 usages
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector): 5 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 5 usages
Converter (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter): 4 usages
ParseException (java.text.ParseException): 3 usages
ArrayList (java.util.ArrayList): 3 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 3 usages
MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector): 3 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 3 usages
TimestampConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter): 3 usages
VoidObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector): 3 usages
Timestamp (java.sql.Timestamp): 2 usages
Date (java.util.Date): 2 usages