
Example 6 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

From the class Udf, method initialize. The method rejects an empty argument list with a UDFArgumentLengthException and a non-string first argument with a UDFArgumentException, then caches the HplSQL function definition when running inside HiveServer.

/**
 * Initialize UDF
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length == 0) {
        throw new UDFArgumentLengthException("At least one argument must be specified");
    }
    if (!(arguments[0] instanceof StringObjectInspector)) {
        throw new UDFArgumentException("First argument must be a string");
    }
    SessionState sessionState = SessionState.get();
    if (sessionState != null) {
        // We are still in HiveServer; load the source of the HplSQL function and store it.
        functionDefinition = loadSource(sessionState, functionName(arguments[0]));
    }
    queryOI = (StringObjectInspector) arguments[0];
    argumentsOI = arguments;
    return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), SessionState (org.apache.hadoop.hive.ql.session.SessionState), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector).
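The checks above are easy to exercise without a running cluster. Below is a minimal sketch, not from the Hive source: EchoUdf is a hypothetical GenericUDF reproducing just the argument checks, and the main method drives both failure paths directly (only the Hive exec jars are assumed on the classpath).

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;

public class UdfArgCheckSketch {
    // Hypothetical GenericUDF that mirrors the validation pattern above.
    static class EchoUdf extends GenericUDF {
        @Override
        public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
            if (arguments.length == 0) {
                throw new UDFArgumentLengthException("At least one argument must be specified");
            }
            if (!(arguments[0] instanceof StringObjectInspector)) {
                throw new UDFArgumentException("First argument must be a string");
            }
            return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        }

        @Override
        public Object evaluate(DeferredObject[] arguments) throws HiveException {
            return arguments[0].get(); // pass-through is enough for the sketch
        }

        @Override
        public String getDisplayString(String[] children) {
            return "echo_udf";
        }
    }

    public static void main(String[] args) {
        EchoUdf udf = new EchoUdf();
        try {
            // No arguments: throws UDFArgumentLengthException,
            // a subclass of UDFArgumentException.
            udf.initialize(new ObjectInspector[0]);
        } catch (UDFArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
        try {
            // First argument is an int, not a string: plain UDFArgumentException.
            udf.initialize(new ObjectInspector[] {
                    PrimitiveObjectInspectorFactory.javaIntObjectInspector });
        } catch (UDFArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}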

Example 7 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

From the class ProjectionPusher, method pushProjectionsAndFilters. The method collects the needed columns across all aliases of a split path, ORs the per-table-scan filter expressions into one table filter, and simply disables filtering if combining them throws a UDFArgumentException.

private void pushProjectionsAndFilters(final JobConf jobConf, final String splitPath, final String splitPathWithNoSchema) {
    if (mapWork == null) {
        return;
    } else if (mapWork.getPathToAliases() == null) {
        return;
    }
    final Set<String> aliases = new HashSet<String>();
    try {
        List<String> a = HiveFileFormatUtils.getFromPathRecursively(mapWork.getPathToAliases(), new Path(splitPath), null, false, true);
        if (a != null) {
            aliases.addAll(a);
        }
        if (a == null || a.isEmpty()) {
            // TODO: not having aliases for path usually means some bug. Should it give up?
            LOG.warn("Couldn't find aliases for " + splitPath);
        }
    } catch (IllegalArgumentException | IOException e) {
        throw new RuntimeException(e);
    }
    // Collect the needed columns from all the aliases and create ORed filter
    // expression for the table.
    boolean allColumnsNeeded = false;
    boolean noFilters = false;
    Set<Integer> neededColumnIDs = new HashSet<Integer>();
    // To support nested column pruning, we need to track the path from the top to the nested
    // fields
    Set<String> neededNestedColumnPaths = new HashSet<String>();
    List<ExprNodeGenericFuncDesc> filterExprs = new ArrayList<ExprNodeGenericFuncDesc>();
    RowSchema rowSchema = null;
    for (String alias : aliases) {
        final Operator<? extends Serializable> op = mapWork.getAliasToWork().get(alias);
        if (op instanceof TableScanOperator) {
            final TableScanOperator ts = (TableScanOperator) op;
            if (ts.getNeededColumnIDs() == null) {
                allColumnsNeeded = true;
            } else {
                neededColumnIDs.addAll(ts.getNeededColumnIDs());
                if (ts.getNeededNestedColumnPaths() != null) {
                    neededNestedColumnPaths.addAll(ts.getNeededNestedColumnPaths());
                }
            }
            rowSchema = ts.getSchema();
            ExprNodeGenericFuncDesc filterExpr = ts.getConf() == null ? null : ts.getConf().getFilterExpr();
            // No filters for the whole table if any TS lacks a filter expression
            noFilters = noFilters || filterExpr == null;
            filterExprs.add(filterExpr);
        }
    }
    ExprNodeGenericFuncDesc tableFilterExpr = null;
    if (!noFilters) {
        try {
            for (ExprNodeGenericFuncDesc filterExpr : filterExprs) {
                if (tableFilterExpr == null) {
                    tableFilterExpr = filterExpr;
                } else {
                    tableFilterExpr = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPOr(), Arrays.<ExprNodeDesc>asList(tableFilterExpr, filterExpr));
                }
            }
        } catch (UDFArgumentException ex) {
            LOG.debug("Turn off filtering due to " + ex);
            tableFilterExpr = null;
        }
    }
    // push down projections
    if (!allColumnsNeeded) {
        if (!neededColumnIDs.isEmpty()) {
            ColumnProjectionUtils.appendReadColumns(jobConf, new ArrayList<Integer>(neededColumnIDs));
            ColumnProjectionUtils.appendNestedColumnPaths(jobConf, new ArrayList<String>(neededNestedColumnPaths));
        }
    } else {
        ColumnProjectionUtils.setReadAllColumns(jobConf);
    }
    pushFilters(jobConf, rowSchema, tableFilterExpr);
}
Also used: Path (org.apache.hadoop.fs.Path), RowSchema (org.apache.hadoop.hive.ql.exec.RowSchema), TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator), ArrayList (java.util.ArrayList), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), IOException (java.io.IOException), UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), GenericUDFOPOr (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr), HashSet (java.util.HashSet).
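The interesting call here is ExprNodeGenericFuncDesc.newInstance, which initializes the UDF against its children's types and can therefore throw UDFArgumentException. A self-contained sketch of that call, using trivial boolean constants in place of real table filter expressions (an assumption for illustration):

import java.util.Arrays;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;

public class OrFilterSketch {
    public static void main(String[] args) throws UDFArgumentException {
        // Two trivial boolean predicates standing in for per-alias filters.
        ExprNodeDesc a = new ExprNodeConstantDesc(Boolean.TRUE);
        ExprNodeDesc b = new ExprNodeConstantDesc(Boolean.FALSE);
        // newInstance() runs GenericUDFOPOr's initialization against the
        // children's object inspectors; a type mismatch would surface here as
        // a UDFArgumentException, which is exactly what ProjectionPusher
        // catches in order to turn filtering off.
        ExprNodeGenericFuncDesc ored =
                ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPOr(), Arrays.asList(a, b));
        System.out.println(ored.getExprString());
    }
}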

Example 8 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

From the class TimestampCastRestrictorResolver, method getEvalMethod. When strict timestamp conversion is enabled, the resolver rejects casts whose source type belongs to the DATE_GROUP (DATE, TIMESTAMP) before delegating to the parent resolver.

@Override
public Method getEvalMethod(List<TypeInfo> argClasses) throws UDFArgumentException {
    if (strictTsConversion) {
        TypeInfo arg = argClasses.get(0);
        if (arg instanceof PrimitiveTypeInfo) {
            PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) arg;
            PrimitiveCategory category = primitiveTypeInfo.getPrimitiveCategory();
            PrimitiveGrouping group = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(category);
            if (group == PrimitiveGrouping.DATE_GROUP) {
                throw new UDFArgumentException("Casting DATE/TIMESTAMP types to NUMERIC is prohibited (" + ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION + ")");
            }
        }
    }
    return parentResolver.getEvalMethod(argClasses);
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), PrimitiveGrouping (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping).
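The gate in this resolver is PrimitiveObjectInspectorUtils.getPrimitiveGrouping, which maps a primitive category to its grouping. A minimal sketch of that classification (my own illustration, not Hive code) shows why DATE and TIMESTAMP are rejected while INT passes through:

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;

public class GroupingCheck {
    public static void main(String[] args) {
        for (PrimitiveCategory c : new PrimitiveCategory[] {
                PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INT }) {
            PrimitiveGrouping g = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(c);
            // DATE and TIMESTAMP map to DATE_GROUP and are rejected under
            // strict conversion; INT maps to NUMERIC_GROUP and is allowed.
            System.out.println(c + " -> " + g);
        }
    }
}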

Example 9 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

From the class GenericUDFOPDTIPlus, method initialize. The method verifies that exactly two primitive arguments are supplied, then dispatches on the pair of operand categories (intervals, dates, timestamps, and ints) to select the operation type and the result object inspector.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentException(opName + " requires two arguments.");
    }
    PrimitiveObjectInspector resultOI = null;
    for (int i = 0; i < 2; i++) {
        Category category = arguments[i].getCategory();
        if (category != Category.PRIMITIVE) {
            throw new UDFArgumentTypeException(i, "The " + GenericUDFUtils.getOrdinal(i + 1) + " argument of " + opName + " is expected to be a " + Category.PRIMITIVE.toString().toLowerCase() + " type, but " + category.toString().toLowerCase() + " is found");
        }
    }
    inputOIs = new PrimitiveObjectInspector[] { (PrimitiveObjectInspector) arguments[0], (PrimitiveObjectInspector) arguments[1] };
    PrimitiveObjectInspector leftOI = inputOIs[0];
    PrimitiveObjectInspector rightOI = inputOIs[1];
    // Determine the operation type and result object inspector from the operand categories
    if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
        plusOpType = OperationType.INTERVALYM_PLUS_INTERVALYM;
        intervalArg1Idx = 0;
        intervalArg2Idx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.intervalYearMonthTypeInfo);
    } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
        plusOpType = OperationType.INTERVALYM_PLUS_DATE;
        dtArgIdx = 0;
        intervalArg1Idx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.dateTypeInfo);
    } else if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.DATE)) {
        plusOpType = OperationType.INTERVALYM_PLUS_DATE;
        intervalArg1Idx = 0;
        dtArgIdx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.dateTypeInfo);
    } else if (checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
        plusOpType = OperationType.INTERVALYM_PLUS_TIMESTAMP;
        dtArgIdx = 0;
        intervalArg1Idx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.timestampTypeInfo);
    } else if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.TIMESTAMP)) {
        plusOpType = OperationType.INTERVALYM_PLUS_TIMESTAMP;
        intervalArg1Idx = 0;
        dtArgIdx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.timestampTypeInfo);
    } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.INTERVAL_DAY_TIME)) {
        plusOpType = OperationType.INTERVALDT_PLUS_INTERVALDT;
        intervalArg1Idx = 0;
        intervalArg2Idx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.intervalDayTimeTypeInfo);
    } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.DATE) || checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.TIMESTAMP)) {
        plusOpType = OperationType.INTERVALDT_PLUS_TIMESTAMP;
        intervalArg1Idx = 0;
        dtArgIdx = 1;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.timestampTypeInfo);
        dtConverter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
    } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_DAY_TIME) || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_DAY_TIME)) {
        plusOpType = OperationType.INTERVALDT_PLUS_TIMESTAMP;
        intervalArg1Idx = 1;
        dtArgIdx = 0;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.timestampTypeInfo);
        dtConverter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
    } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INT)) {
        // Date + Int = Date
        plusOpType = OperationType.DATE_PLUS_INT;
        intervalArg1Idx = 1;
        dtArgIdx = 0;
        resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.dateTypeInfo);
        dtConverter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
    } else {
        // Unsupported types - error
        List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2);
        argTypeInfos.add(leftOI.getTypeInfo());
        argTypeInfos.add(rightOI.getTypeInfo());
        throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null);
    }
    return resultOI;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), NoMatchingMethodException (org.apache.hadoop.hive.ql.exec.NoMatchingMethodException), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), ArrayList (java.util.ArrayList), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo).
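The branch dispatch above relies on a checkArgs helper that is not shown in the excerpt. A hedged reconstruction, assuming it simply compares the two operands' primitive categories:

// Assumed shape of the helper used above (a reconstruction, not the verbatim
// Hive source): true when the left and right operand categories match exactly.
private boolean checkArgs(PrimitiveCategory leftCat, PrimitiveCategory rightCat) {
    return inputOIs[0].getPrimitiveCategory() == leftCat
            && inputOIs[1].getPrimitiveCategory() == rightCat;
}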

Example 10 with UDFArgumentException

Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.

From the class GenericUDFLower, method initialize. LOWER accepts exactly one primitive argument; for CHAR and VARCHAR inputs the return type preserves the input length, while all other primitive types are converted to STRING.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException("LOWER requires 1 argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentException("LOWER only takes primitive types, got " + arguments[0].getTypeName());
    }
    argumentOI = (PrimitiveObjectInspector) arguments[0];
    stringConverter = new PrimitiveObjectInspectorConverter.StringConverter(argumentOI);
    PrimitiveCategory inputType = argumentOI.getPrimitiveCategory();
    ObjectInspector outputOI = null;
    BaseCharTypeInfo typeInfo;
    switch(inputType) {
        case CHAR:
            // return type should have same length as the input.
            returnType = inputType;
            typeInfo = TypeInfoFactory.getCharTypeInfo(GenericUDFUtils.StringHelper.getFixedStringSizeForType(argumentOI));
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
            break;
        case VARCHAR:
            // return type should have same length as the input.
            returnType = inputType;
            typeInfo = TypeInfoFactory.getVarcharTypeInfo(GenericUDFUtils.StringHelper.getFixedStringSizeForType(argumentOI));
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
            break;
        default:
            returnType = PrimitiveCategory.STRING;
            outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
            break;
    }
    returnHelper = new GenericUDFUtils.StringHelper(returnType);
    return outputOI;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), BaseCharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), PrimitiveObjectInspectorConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter), StringConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.StringConverter), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory).
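Because GenericUDFLower depends only on object inspectors, the length-preserving behavior is easy to check by driving the UDF directly. A minimal sketch (my own example, assuming the Hive exec jars on the classpath):

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLower;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;

public class LowerSketch {
    public static void main(String[] args) throws Exception {
        // STRING input: the output inspector is the writable string inspector.
        GenericUDFLower udf = new GenericUDFLower();
        ObjectInspector out = udf.initialize(new ObjectInspector[] {
                PrimitiveObjectInspectorFactory.writableStringObjectInspector });
        Object result = udf.evaluate(new GenericUDF.DeferredObject[] {
                new GenericUDF.DeferredJavaObject(new Text("HeLLo")) });
        System.out.println(out.getTypeName() + ": " + result); // string: hello

        // VARCHAR(10) input: the output inspector preserves the length.
        ObjectInspector vcOut = new GenericUDFLower().initialize(new ObjectInspector[] {
                PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
                        TypeInfoFactory.getVarcharTypeInfo(10)) });
        System.out.println(vcOut.getTypeName()); // varchar(10)
    }
}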

Aggregations

UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 72 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 31 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 27 usages
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException): 24 usages
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 18 usages
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 11 usages
ArrayList (java.util.ArrayList): 9 usages
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category): 7 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 7 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 6 usages
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 6 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 6 usages
Test (org.junit.Test): 6 usages
PrimitiveObjectInspectorConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter): 5 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 5 usages
StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector): 4 usages
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 3 usages
DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2): 3 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 3 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 3 usages