Search in sources:

Example 1 with UDFArgumentException

use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The following example shows the method convertToDate from the class GenericUDFDateDiff.

/**
 * Converts one DATEDIFF argument to a {@link java.sql.Date} based on its primitive category.
 *
 * @param inputType primitive category of the argument (STRING/VARCHAR/CHAR/TIMESTAMP/DATE supported)
 * @param converter converter prepared during initialize for this argument
 * @param argument  the deferred argument value
 * @return the converted date, or {@code null} when the value is SQL null or an unparseable string
 * @throws UDFArgumentException when the primitive category is not supported
 * @throws HiveException        when evaluating the deferred argument fails
 */
private Date convertToDate(PrimitiveCategory inputType, Converter converter, DeferredObject argument) throws HiveException {
    assert (converter != null);
    assert (argument != null);
    // SQL null in, null out.
    if (argument.get() == null) {
        return null;
    }
    Date date = new Date(0);
    switch(inputType) {
        case STRING:
        case VARCHAR:
        case CHAR:
            String dateString = converter.convert(argument.get()).toString();
            try {
                date.setTime(formatter.parse(dateString).getTime());
            } catch (ParseException e) {
                // Unparseable date strings are treated as SQL null, not as an error.
                return null;
            }
            break;
        case TIMESTAMP:
            Timestamp ts = ((TimestampWritable) converter.convert(argument.get())).getTimestamp();
            date.setTime(ts.getTime());
            break;
        case DATE:
            DateWritable dw = (DateWritable) converter.convert(argument.get());
            date = dw.get();
            break;
        default:
            // Fixed message: it previously said "TO_DATE()" (copy-paste from GenericUDFDate),
            // but this conversion belongs to DATEDIFF.
            throw new UDFArgumentException("DATEDIFF() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
    }
    return date;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) ParseException(java.text.ParseException) Timestamp(java.sql.Timestamp) Date(java.sql.Date)

Example 2 with UDFArgumentException

use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The following example shows the method initialize from the class GenericUDFToDate.

@Override
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // CAST(expr AS DATE) needs at least the expression being cast.
    if (arguments.length < 1) {
        throw new UDFArgumentLengthException("The function CAST as DATE requires at least one argument, got " + arguments.length);
    }
    try {
        argumentOI = (PrimitiveObjectInspector) arguments[0];
        PrimitiveCategory category = argumentOI.getPrimitiveCategory();
        PrimitiveGrouping grouping = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(category);
        // Only date-like, string-like, or void inputs can be cast to DATE.
        boolean supported = grouping == PrimitiveGrouping.DATE_GROUP
                || grouping == PrimitiveGrouping.STRING_GROUP
                || grouping == PrimitiveGrouping.VOID_GROUP;
        if (!supported) {
            throw new UDFArgumentException("CAST as DATE only allows date,string, or timestamp types");
        }
    } catch (ClassCastException e) {
        // Non-primitive inspectors (struct, list, map, union) fail the cast above.
        throw new UDFArgumentException("The function CAST as DATE takes only primitive types");
    }
    dc = new DateConverter(argumentOI, PrimitiveObjectInspectorFactory.writableDateObjectInspector);
    return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) DateConverter(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.DateConverter) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) PrimitiveGrouping(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping)

Example 3 with UDFArgumentException

use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The following example shows the method evaluate from the class GenericUDFSQCountCheck.

@Override
/**
 * Validates the count produced by a subquery and passes it through.
 *
 * With one argument (scalar subquery) the count must be at most 1; with two
 * arguments (IN/NOT IN subquery with aggregate) a null value or zero count is
 * rejected as unsupported.
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Long val = getLongValue(arguments, 0, converters);
    switch(arguments.length) {
        case 1:
            // Scalar subquery: must return at most one row.
            if (val > 1) {
                throw new UDFArgumentException(" Scalar subquery expression returns more than one row.");
            }
            break;
        case 2:
            // Reuse 'val' instead of calling getLongValue a second time, and merge the
            // two branches that threw the identical message (null value or zero count).
            Object valObject = arguments[0].get();
            if (valObject == null || val == 0) {
                throw new UDFArgumentException(" IN/NOT IN subquery with aggregate returning zero result. Currently this is not supported.");
            }
            break;
    }
    resultLong.set(val);
    return resultLong;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) FuncAbsLongToLong(org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncAbsLongToLong)

Example 4 with UDFArgumentException

use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project flink by apache.

The following example shows the method initialize from the class GenericUDFLegacyGroupingID.

@Override
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Expects exactly two inputs: the new-style GROUPING__ID value and the
    // (constant) number of GROUP BY expressions.
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("Expect 2 arguments but actually got " + arguments.length);
    }
    if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(0, "First argument should be primitive type");
    }
    if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(1, "Second argument should be primitive type");
    }
    // First argument: the long-valued grouping id.
    groupingIdOI = (PrimitiveObjectInspector) arguments[0];
    if (groupingIdOI.getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.LONG) {
        throw new UDFArgumentTypeException(0, "First argument should be a long");
    }
    // Second argument: a constant int giving the number of GROUP BY expressions.
    PrimitiveObjectInspector exprCountOI = (PrimitiveObjectInspector) arguments[1];
    if (exprCountOI.getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.INT) {
        throw new UDFArgumentTypeException(1, "Second argument should be an int");
    }
    if (!(exprCountOI instanceof ConstantObjectInspector)) {
        throw new UDFArgumentTypeException(1, "Second argument should be a constant");
    }
    numExprs = PrimitiveObjectInspectorUtils.getInt(((ConstantObjectInspector) exprCountOI).getWritableConstantValue(), exprCountOI);
    // A legacy grouping id is a bitmask in a long, so 1..64 keys are representable.
    if (numExprs < 1 || numExprs > 64) {
        throw new UDFArgumentException("Number of GROUP BY expressions out of range: " + numExprs);
    }
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector)

Example 5 with UDFArgumentException

use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The following example shows the method evaluate from the class GenericUDFAdd10.

@Override
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object input = arguments[0].get();
    // SQL null in, null out.
    if (input == null) {
        return null;
    }
    switch(inputType) {
        case SHORT:
        case BYTE:
        case INT: {
            // Narrow integral types are converted to int before adding.
            IntWritable iw = (IntWritable) inputConverter.convert(input);
            resultInt.set(iw.get() + 10);
            return resultInt;
        }
        case LONG: {
            LongWritable lw = (LongWritable) inputConverter.convert(input);
            resultLong.set(lw.get() + 10);
            return resultLong;
        }
        case FLOAT:
        case STRING:
        case DOUBLE: {
            // Float and string inputs are widened/parsed to double by the converter.
            DoubleWritable dw = (DoubleWritable) inputConverter.convert(input);
            resultDouble.set(dw.get() + 10.0);
            return resultDouble;
        }
        case DECIMAL: {
            HiveDecimalObjectInspector decimalOI = (HiveDecimalObjectInspector) argumentOI;
            HiveDecimalWritable decimal = decimalOI.getPrimitiveWritableObject(input);
            if (decimal == null) {
                return null;
            }
            resultDecimal.set(decimal.getHiveDecimal().add(HiveDecimal.create("10")));
            return resultDecimal;
        }
        default:
            throw new UDFArgumentException("ADD10 only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
    }
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) LongWritable(org.apache.hadoop.io.LongWritable) IntWritable(org.apache.hadoop.io.IntWritable)

Aggregations

UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException)72 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)31 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)27 UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException)24 UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException)18 PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)11 ArrayList (java.util.ArrayList)9 Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category)7 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)7 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)6 ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector)6 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)6 Test (org.junit.Test)6 PrimitiveObjectInspectorConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter)5 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)5 StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector)4 HiveConf (org.apache.hadoop.hive.conf.HiveConf)3 DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2)3 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)3 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)3