
Example 41 with UDFArgumentTypeException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.

The class GenericUDFOPDTIMinus, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentException(opName + " requires two arguments.");
    }
    PrimitiveObjectInspector resultOI = null;
    for (int i = 0; i < 2; i++) {
        Category category = arguments[i].getCategory();
        if (category != Category.PRIMITIVE) {
            throw new UDFArgumentTypeException(i, "The " + GenericUDFUtils.getOrdinal(i + 1) + " argument of " + opName + " is expected to be a " + Category.PRIMITIVE.toString().toLowerCase() + " type, but " + category.toString().toLowerCase() + " is found");
        }
    }
    inputOIs = new PrimitiveObjectInspector[] { (PrimitiveObjectInspector) arguments[0], (PrimitiveObjectInspector) arguments[1] };
    PrimitiveObjectInspector leftOI = inputOIs[0];
    PrimitiveObjectInspector rightOI = inputOIs[1];
    // Determine the operation type and the result ObjectInspector from the operand types.
    // For example, Timestamp - Date = IntervalDayTime (operands reversible).
    if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
        minusOpType = OperationType.INTERVALYM_MINUS_INTERVALYM;
        intervalArg1Idx = 0;
        intervalArg2Idx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.intervalYearMonthTypeInfo);
    } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
        minusOpType = OperationType.DATE_MINUS_INTERVALYM;
        dtArg1Idx = 0;
        intervalArg1Idx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.dateTypeInfo);
    } else if (checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
        minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALYM;
        dtArg1Idx = 0;
        intervalArg1Idx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.timestampTypeInfo);
    } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.INTERVAL_DAY_TIME)) {
        minusOpType = OperationType.INTERVALDT_MINUS_INTERVALDT;
        intervalArg1Idx = 0;
        intervalArg2Idx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.intervalDayTimeTypeInfo);
    } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_DAY_TIME) || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_DAY_TIME)) {
        minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALDT;
        dtArg1Idx = 0;
        intervalArg1Idx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.timestampTypeInfo);
        dt1Converter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
    } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.DATE) || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.TIMESTAMP) || checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP) || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.DATE)) {
        // Operands converted to timestamp, result as interval day-time
        minusOpType = OperationType.TIMESTAMP_MINUS_TIMESTAMP;
        dtArg1Idx = 0;
        dtArg2Idx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.intervalDayTimeTypeInfo);
        dt1Converter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
        dt2Converter = ObjectInspectorConverters.getConverter(rightOI, resultOI);
    } else {
        // Unsupported types - error
        List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2);
        argTypeInfos.add(leftOI.getTypeInfo());
        argTypeInfos.add(rightOI.getTypeInfo());
        throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null);
    }
    return resultOI;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), NoMatchingMethodException (org.apache.hadoop.hive.ql.exec.NoMatchingMethodException), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ArrayList (java.util.ArrayList), List (java.util.List), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
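
To see how the category check surfaces, here is a minimal sketch (assuming the Hive ql and serde2 jars are on the classpath; the harness class DTIMinusInitCheck and the chosen ObjectInspectors are illustrative, not part of Hive) that calls initialize with a non-primitive left operand and catches the resulting UDFArgumentTypeException:

import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPDTIMinus;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class DTIMinusInitCheck {
    public static void main(String[] args) throws Exception {
        GenericUDFOPDTIMinus udf = new GenericUDFOPDTIMinus();
        // A list ObjectInspector as the left operand fails the PRIMITIVE category check above.
        ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.writableIntObjectInspector);
        ObjectInspector tsOI = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
        try {
            udf.initialize(new ObjectInspector[] { listOI, tsOI });
        } catch (UDFArgumentTypeException e) {
            // The exception was constructed with the 0-based index of the offending argument.
            System.out.println(e.getMessage());
        }
    }
}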

Example 42 with UDFArgumentTypeException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.

The class GenericUDFIndex, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("The function INDEX accepts exactly 2 arguments.");
    }
    if (arguments[0] instanceof MapObjectInspector) {
        // index into a map
        mapOI = (MapObjectInspector) arguments[0];
        listOI = null;
    } else if (arguments[0] instanceof ListObjectInspector) {
        // index into a list
        listOI = (ListObjectInspector) arguments[0];
        mapOI = null;
    } else {
        throw new UDFArgumentTypeException(0, "\"" + Category.MAP.toString().toLowerCase() + "\" or \"" + Category.LIST.toString().toLowerCase() + "\" is expected at function INDEX, but \"" + arguments[0].getTypeName() + "\" is found");
    }
    // index has to be a primitive
    if (!(arguments[1] instanceof PrimitiveObjectInspector)) {
        throw new UDFArgumentTypeException(1, "Primitive Type is expected but \"" + arguments[1].getTypeName() + "\" is found");
    }
    PrimitiveObjectInspector inputOI = (PrimitiveObjectInspector) arguments[1];
    ObjectInspector returnOI;
    ObjectInspector indexOI;
    if (mapOI != null) {
        indexOI = ObjectInspectorConverters.getConvertedOI(inputOI, mapOI.getMapKeyObjectInspector());
        returnOI = mapOI.getMapValueObjectInspector();
    } else {
        indexOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        returnOI = listOI.getListElementObjectInspector();
    }
    converter = ObjectInspectorConverters.getConverter(inputOI, indexOI);
    return returnOI;
}
Also used: ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector), MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException)
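
A minimal sketch of triggering the same check from outside (Hive jars assumed on the classpath; the class name IndexInitCheck is hypothetical). Passing a primitive where INDEX expects a map or list raises UDFArgumentTypeException for argument 0:

import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIndex;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class IndexInitCheck {
    public static void main(String[] args) throws Exception {
        GenericUDFIndex udf = new GenericUDFIndex();
        // A string is neither a map nor a list, so the else branch above throws.
        ObjectInspector stringOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        ObjectInspector intOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        try {
            udf.initialize(new ObjectInspector[] { stringOI, intOI });
        } catch (UDFArgumentTypeException e) {
            System.out.println(e.getMessage());
        }
    }
}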

Example 43 with UDFArgumentTypeException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.

The class GenericUDFInternalInterval, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // read operation mode
    if (!(arguments[0] instanceof ConstantObjectInspector)) {
        throw new UDFArgumentTypeException(0, getFuncName() + ": may only accept constant as first argument");
    }
    Integer operationMode = getConstantIntValue(arguments, 0);
    if (operationMode == null) {
        throw new UDFArgumentTypeException(0, "must supply operationmode");
    }
    processor = getProcessorMap().get(operationMode);
    if (processor == null) {
        throw new UDFArgumentTypeException(0, getFuncName() + ": unsupported operationMode: " + operationMode);
    }
    // check value argument
    if (arguments[1].getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(1, "The second argument to " + getFuncName() + " must be primitive");
    }
    inputOI = (PrimitiveObjectInspector) arguments[1];
    PrimitiveCategory inputCategory = inputOI.getPrimitiveCategory();
    if (!isValidInputCategory(inputCategory)) {
        throw new UDFArgumentTypeException(1, "The second argument to " + getFuncName() + " must be from the string group or numeric group (except:float/double)");
    }
    if (arguments[1] instanceof ConstantObjectInspector) {
        // return value as constant in case arg is constant
        return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(processor.getTypeInfo(), processor.evaluate(getConstantStringValue(arguments, 1)));
    } else {
        return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(processor.getTypeInfo());
    }
}
Also used: UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)
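
The first argument here is an operation mode injected by the compiler, so it must be a ConstantObjectInspector. A minimal sketch (Hive jars assumed on the classpath; the class name InternalIntervalInitCheck is illustrative) that passes a non-constant inspector in slot 0 and observes the exception:

import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFInternalInterval;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class InternalIntervalInitCheck {
    public static void main(String[] args) throws Exception {
        GenericUDFInternalInterval udf = new GenericUDFInternalInterval();
        // A plain writable int OI is not a ConstantObjectInspector, so the first check throws.
        ObjectInspector plainIntOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        ObjectInspector stringOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        try {
            udf.initialize(new ObjectInspector[] { plainIntOI, stringOI });
        } catch (UDFArgumentTypeException e) {
            System.out.println(e.getMessage());
        }
    }
}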

Example 44 with UDFArgumentTypeException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.

The class GenericUDAFNTile, method getEvaluator:

@Override
public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
    if (parameters.length != 1) {
        throw new UDFArgumentTypeException(parameters.length - 1, "Exactly one argument is expected.");
    }
    ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[0]);
    boolean c = ObjectInspectorUtils.compareTypes(oi, PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    if (!c) {
        throw new UDFArgumentTypeException(0, "Number of tiles must be an int expression");
    }
    return new GenericUDAFNTileEvaluator();
}
Also used: PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException)
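
A minimal sketch of exercising this resolver check directly (Hive jars assumed on the classpath; the class name NTileResolverCheck is illustrative). A string parameter fails the int comparison and surfaces as UDFArgumentTypeException for argument 0:

import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFNTile;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class NTileResolverCheck {
    public static void main(String[] args) throws Exception {
        GenericUDAFNTile resolver = new GenericUDAFNTile();
        // ntile() takes exactly one int argument; a string parameter is rejected above.
        TypeInfo[] params = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };
        try {
            resolver.getEvaluator(params);
        } catch (UDFArgumentTypeException e) {
            System.out.println(e.getMessage());
        }
    }
}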

Example 45 with UDFArgumentTypeException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.

The class GenericUDF, method checkArgGroups:

protected void checkArgGroups(ObjectInspector[] arguments, int i, PrimitiveCategory[] inputTypes, PrimitiveGrouping... grps) throws UDFArgumentTypeException {
    PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
    for (PrimitiveGrouping grp : grps) {
        if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType) == grp) {
            inputTypes[i] = inputType;
            return;
        }
    }
    // build error message
    StringBuilder sb = new StringBuilder();
    sb.append(getFuncName());
    sb.append(" only takes ");
    sb.append(grps[0]);
    for (int j = 1; j < grps.length; j++) {
        sb.append(", ");
        sb.append(grps[j]);
    }
    sb.append(" types as ");
    sb.append(getArgOrder(i));
    sb.append(" argument, got ");
    sb.append(inputType);
    throw new UDFArgumentTypeException(i, sb.toString());
}
Also used: UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), PrimitiveGrouping (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping)
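
checkArgGroups is protected, so it is meant to be called from a GenericUDF subclass's own initialize. A hedged sketch of such a subclass (GenericUDFUpperLength is a hypothetical example, not a Hive function; it also relies on the GenericUDF helpers checkArgsSize and checkArgPrimitive):

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;

// Hypothetical UDF: only shows how a subclass would call checkArgGroups.
public class GenericUDFUpperLength extends GenericUDF {

    private final PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        checkArgsSize(arguments, 1, 1);
        checkArgPrimitive(arguments, 0);
        // Restrict the single argument to the string group; otherwise checkArgGroups
        // throws UDFArgumentTypeException with the message built in the method above.
        checkArgGroups(arguments, 0, inputTypes, PrimitiveGrouping.STRING_GROUP);
        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) {
        return null; // evaluation logic omitted in this sketch
    }

    @Override
    public String getDisplayString(String[] children) {
        return getStandardDisplayString("upper_length", children);
    }
}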

Aggregations

UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 57
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 29
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 28
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException): 21
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 16
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 15
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 12
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category): 8
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector): 5
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 5
ParseException (java.text.ParseException): 3
ArrayList (java.util.ArrayList): 3
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 3
MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector): 3
Converter (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter): 3
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 3
TimestampConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter): 3
VoidObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector): 3
Timestamp (java.sql.Timestamp): 2
Date (java.util.Date): 2