Example 16 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The class ExprNodeGenericFuncDesc, method newInstance:

/**
 * Create an ExprNodeGenericFuncDesc based on the genericUDF and the
 * children parameters. If the function has an explicit name, the
 * newInstance method should be passed the function name in the funcText
 * argument.
 *
 * @throws UDFArgumentException
 */
public static ExprNodeGenericFuncDesc newInstance(GenericUDF genericUDF, String funcText, List<ExprNodeDesc> children) throws UDFArgumentException {
    ObjectInspector[] childrenOIs = new ObjectInspector[children.size()];
    for (int i = 0; i < childrenOIs.length; i++) {
        childrenOIs[i] = children.get(i).getWritableObjectInspector();
    }
    ObjectInspector oi = genericUDF.initializeAndFoldConstants(childrenOIs);
    String[] requiredJars = genericUDF.getRequiredJars();
    String[] requiredFiles = genericUDF.getRequiredFiles();
    SessionState ss = SessionState.get();
    if (requiredJars != null) {
        SessionState.ResourceType t = SessionState.find_resource_type("JAR");
        try {
            ss.add_resources(t, Arrays.asList(requiredJars));
        } catch (Exception e) {
            throw new UDFArgumentException(e);
        }
    }
    if (requiredFiles != null) {
        SessionState.ResourceType t = SessionState.find_resource_type("FILE");
        try {
            ss.add_resources(t, Arrays.asList(requiredFiles));
        } catch (Exception e) {
            throw new UDFArgumentException(e);
        }
    }
    return new ExprNodeGenericFuncDesc(oi, genericUDF, funcText, children);
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), SessionState (org.apache.hadoop.hive.ql.session.SessionState), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)
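
A minimal standalone sketch (not from the Hive source; the driver class name and the choice of GenericUDFConcat are illustrative) of what calling newInstance directly might look like, assuming hive-exec is on the classpath:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat;

public class NewInstanceSketch {
    public static void main(String[] args) throws UDFArgumentException {
        // Two constant string children, as if the query contained concat('foo', 'bar').
        List<ExprNodeDesc> children = Arrays.<ExprNodeDesc>asList(
                new ExprNodeConstantDesc("foo"), new ExprNodeConstantDesc("bar"));
        // funcText records the name the function was invoked under.
        ExprNodeGenericFuncDesc desc = ExprNodeGenericFuncDesc.newInstance(
                new GenericUDFConcat(), "concat", children);
        System.out.println(desc.getExprString());
    }
}

Because concat declares no required jars or files, the SessionState branches above are skipped and no active session is needed.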

Example 17 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The class GenericUDFFromUtcTimestamp, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("The function " + getName() + " requires two " + "argument, got " + arguments.length);
    }
    try {
        argumentOIs = new PrimitiveObjectInspector[2];
        argumentOIs[0] = (PrimitiveObjectInspector) arguments[0];
        argumentOIs[1] = (PrimitiveObjectInspector) arguments[1];
    } catch (ClassCastException e) {
        throw new UDFArgumentException("The function " + getName() + " takes only primitive types");
    }
    timestampConverter = new TimestampConverter(argumentOIs[0], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
    textConverter = new TextConverter(argumentOIs[1]);
    return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), TimestampConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter), TextConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TextConverter)
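
A short sketch (hypothetical driver, not from the source) of initializing this UDF by hand; passing any argument count other than two triggers the UDFArgumentLengthException path:

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class FromUtcTimestampSketch {
    public static void main(String[] args) throws UDFArgumentException {
        GenericUDFFromUtcTimestamp udf = new GenericUDFFromUtcTimestamp();
        // A timestamp argument plus the target time zone as a string.
        ObjectInspector[] ois = {
                PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
                PrimitiveObjectInspectorFactory.writableStringObjectInspector };
        // Prints timestamp, the java timestamp inspector declared in initialize.
        System.out.println(udf.initialize(ois).getTypeName());
        // udf.initialize(new ObjectInspector[] { ois[0] }) would throw
        // UDFArgumentLengthException, a subclass of UDFArgumentException.
    }
}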

Example 18 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The class GenericUDFJsonRead, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 2, 2);
    checkArgPrimitive(arguments, 0);
    checkArgPrimitive(arguments, 1);
    if (!ObjectInspectorUtils.isConstantObjectInspector(arguments[1])) {
        throw new UDFArgumentTypeException(1, getFuncName() + " argument 2 may only be a constant");
    }
    inputConverter = new TextConverter((PrimitiveObjectInspector) arguments[0]);
    String typeStr = getConstantStringValue(arguments, 1);
    try {
        final TypeInfo t = TypeInfoUtils.getTypeInfoFromTypeString(typeStr);
        final ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(t);
        jsonReader = new HiveJsonReader(oi);
        jsonReader.enable(Feature.PRIMITIVE_TO_WRITABLE);
    } catch (Exception e) {
        throw new UDFArgumentException(getFuncName() + ": Error parsing typestring: " + e.getMessage());
    }
    return jsonReader.getObjectInspector();
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), HiveJsonReader (org.apache.hadoop.hive.serde2.json.HiveJsonReader), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), TextConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TextConverter), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
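
A sketch (hypothetical driver; the struct<a:int> type string is just an example) showing why the second argument must be a constant inspector, built here with getPrimitiveWritableConstantObjectInspector:

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFJsonRead;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;

public class JsonReadSketch {
    public static void main(String[] args) throws UDFArgumentException {
        ObjectInspector[] ois = {
                // The JSON text itself may come from any primitive string inspector.
                PrimitiveObjectInspectorFactory.writableStringObjectInspector,
                // The target type must be a compile-time constant.
                PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
                        TypeInfoFactory.stringTypeInfo, new Text("struct<a:int>")) };
        ObjectInspector out = new GenericUDFJsonRead().initialize(ois);
        // Prints struct<a:int>, the inspector derived from the type string.
        System.out.println(out.getTypeName());
    }
}

Passing a plain (non-constant) string inspector as the second argument would take the UDFArgumentTypeException branch instead.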

Example 19 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The class GenericUDFDateAdd, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("date_add() requires 2 argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + arguments[0].getTypeName() + " was passed as the first argument");
    }
    if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but " + arguments[1].getTypeName() + " was passed as the second argument");
    }
    inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
    switch(inputType1) {
        case STRING:
        case VARCHAR:
        case CHAR:
            inputType1 = PrimitiveCategory.STRING;
            dateConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
            break;
        case TIMESTAMP:
            dateConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
            break;
        case DATE:
            dateConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableDateObjectInspector);
            break;
        default:
            throw new UDFArgumentException(" DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + inputType1);
    }
    inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
    switch(inputType2) {
        case BYTE:
            daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1], PrimitiveObjectInspectorFactory.writableByteObjectInspector);
            break;
        case SHORT:
            daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1], PrimitiveObjectInspectorFactory.writableShortObjectInspector);
            break;
        case INT:
            daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
            break;
        default:
            throw new UDFArgumentException(" DATE_ADD() only takes TINYINT/SMALLINT/INT types as second argument, got " + inputType2);
    }
    return outputOI;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), TimestampConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter)
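
A sketch (hypothetical driver) of a combination that satisfies both switches, a STRING date and an INT day count; a DOUBLE second argument, say, would fall through to the UDFArgumentException default:

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class DateAddInitSketch {
    public static void main(String[] args) throws UDFArgumentException {
        GenericUDFDateAdd udf = new GenericUDFDateAdd();
        ObjectInspector[] ois = {
                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                PrimitiveObjectInspectorFactory.javaIntObjectInspector };
        // Prints date: date_add always declares a writable date output.
        System.out.println(udf.initialize(ois).getTypeName());
    }
}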

Example 20 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

The class GenericUDFDateAdd, method evaluate:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (arguments[0].get() == null) {
        return null;
    }
    Object daysWritableObject = daysConverter.convert(arguments[1].get());
    if (daysWritableObject == null) {
        return null;
    }
    int toBeAdded;
    if (daysWritableObject instanceof ByteWritable) {
        toBeAdded = ((ByteWritable) daysWritableObject).get();
    } else if (daysWritableObject instanceof ShortWritable) {
        toBeAdded = ((ShortWritable) daysWritableObject).get();
    } else if (daysWritableObject instanceof IntWritable) {
        toBeAdded = ((IntWritable) daysWritableObject).get();
    } else {
        return null;
    }
    // Convert the first param into a DateWritableV2 value
    switch(inputType1) {
        case STRING:
            String dateString = dateConverter.convert(arguments[0].get()).toString();
            if (DateParser.parseDate(dateString, dateVal)) {
                output.set(dateVal);
            } else {
                return null;
            }
            break;
        case TIMESTAMP:
            Timestamp ts = ((TimestampWritableV2) dateConverter.convert(arguments[0].get())).getTimestamp();
            output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
            break;
        case DATE:
            DateWritableV2 dw = (DateWritableV2) dateConverter.convert(arguments[0].get());
            output.set(dw.getDays());
            break;
        default:
            throw new UDFArgumentException("DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
    }
    int newDays = output.getDays() + (signModifier * toBeAdded);
    output.set(newDays);
    return output;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), Timestamp (org.apache.hadoop.hive.common.type.Timestamp), TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2), IntWritable (org.apache.hadoop.io.IntWritable)
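
An end-to-end sketch (hypothetical driver) wiring initialize and evaluate together with DeferredJavaObject wrappers:

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class DateAddEvalSketch {
    public static void main(String[] args) throws HiveException {
        GenericUDFDateAdd udf = new GenericUDFDateAdd();
        udf.initialize(new ObjectInspector[] {
                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                PrimitiveObjectInspectorFactory.javaIntObjectInspector });
        // Equivalent of date_add('2024-01-31', 1).
        Object result = udf.evaluate(new DeferredObject[] {
                new DeferredJavaObject("2024-01-31"),
                new DeferredJavaObject(1) });
        // Prints 2024-02-01; the returned object is the UDF's reused DateWritableV2.
        System.out.println(result);
    }
}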

Aggregations

UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 72 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 31 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 27 usages
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException): 24 usages
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 18 usages
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 11 usages
ArrayList (java.util.ArrayList): 9 usages
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category): 7 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 7 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 6 usages
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 6 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 6 usages
Test (org.junit.Test): 6 usages
PrimitiveObjectInspectorConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter): 5 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 5 usages
StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector): 4 usages
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 3 usages
DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2): 3 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 3 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 3 usages