Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.
The class GenericUDFDateDiff, method checkArguments.
private Converter checkArguments(ObjectInspector[] arguments, int i) throws UDFArgumentException {
  if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + arguments[i].getTypeName() + " is passed. as first arguments");
  }
  PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
  Converter converter;
  switch (inputType) {
    case STRING:
    case VARCHAR:
    case CHAR:
      converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
      break;
    case TIMESTAMP:
      converter = new TimestampConverter((PrimitiveObjectInspector) arguments[i], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
      break;
    case DATE:
      converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i], PrimitiveObjectInspectorFactory.writableDateObjectInspector);
      break;
    default:
      throw new UDFArgumentException(" DATEDIFF() only takes STRING/TIMESTAMP/DATEWRITABLE types as " + (i + 1) + "-th argument, got " + inputType);
  }
  return converter;
}
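For context, the converter returned by checkArguments is typically wired up in the UDF's initialize method. The sketch below shows that calling pattern for a two-argument date function; the field names inputConverter1 and inputConverter2 and the use of UDFArgumentLengthException are illustrative, not a verbatim excerpt of GenericUDFDateDiff.
private transient Converter inputConverter1;
private transient Converter inputConverter2;

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  // DATEDIFF-style functions take exactly two date-like arguments.
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("The function requires 2 arguments, got " + arguments.length);
  }
  // Validate each argument and build a converter to a common writable type.
  inputConverter1 = checkArguments(arguments, 0);
  inputConverter2 = checkArguments(arguments, 1);
  // The result (number of days between the two dates) is returned as an int.
  return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}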
Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.
The class GenericUDF, method getDateValue.
protected Date getDateValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes, Converter[] converters) throws HiveException {
  Object obj;
  if ((obj = arguments[i].get()) == null) {
    return null;
  }
  Date date;
  switch (inputTypes[i]) {
    case STRING:
    case VARCHAR:
    case CHAR:
      String dateStr = converters[i].convert(obj).toString();
      try {
        date = DateUtils.getDateFormat().parse(dateStr);
      } catch (ParseException e) {
        throw new UDFArgumentException("Unparsable date: " + dateStr);
      }
      break;
    case TIMESTAMP:
    case DATE:
    case TIMESTAMPLOCALTZ:
      Object writableValue = converters[i].convert(obj);
      date = ((DateWritable) writableValue).get();
      break;
    default:
      throw new UDFArgumentTypeException(0, getFuncName() + " only takes STRING_GROUP and DATE_GROUP types, got " + inputTypes[i]);
  }
  return date;
}
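A typical caller of this helper is a date UDF's evaluate method. The following is a minimal sketch assuming a single date argument and fields inputTypes, converters, and an IntWritable output populated in initialize (java.util.Calendar is used for the example computation); it illustrates the pattern rather than any specific Hive UDF.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  // getDateValue returns null for SQL NULL input, which is propagated as-is.
  Date date = getDateValue(arguments, 0, inputTypes, converters);
  if (date == null) {
    return null;
  }
  // Example computation: extract the day of the month from the parsed date.
  Calendar calendar = Calendar.getInstance();
  calendar.setTime(date);
  output.set(calendar.get(Calendar.DAY_OF_MONTH));
  return output;
}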
Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.
The class ExprNodeConverter, method visitCall.
@Override
public ExprNodeDesc visitCall(RexCall call) {
  ExprNodeDesc gfDesc = null;
  if (!deep) {
    return null;
  }
  List<ExprNodeDesc> args = new LinkedList<ExprNodeDesc>();
  if (call.getKind() == SqlKind.EXTRACT) {
    // Extract on date: special handling, since the corresponding Hive function
    // does not take <time_unit> as an argument; the <time_unit> is implicit in
    // the function name, so translation proceeds correctly if we simply ignore
    // the <time_unit> operand.
    args.add(call.operands.get(1).accept(this));
  } else if (call.getKind() == SqlKind.FLOOR && call.operands.size() == 2) {
    // Floor on date: special handling, since the corresponding Hive function
    // does not take <time_unit> as an argument; the <time_unit> is implicit in
    // the function name, so translation proceeds correctly if we simply ignore
    // the <time_unit> operand.
    args.add(call.operands.get(0).accept(this));
  } else if (call.getKind() == SqlKind.IS_DISTINCT_FROM) {
    call = (RexCall) RexUtil.not(rexBuilder.makeCall(SqlStdOperatorTable.IS_NOT_DISTINCT_FROM, call.operands));
    for (RexNode operand : call.operands) {
      args.add(operand.accept(this));
    }
  } else {
    for (RexNode operand : call.operands) {
      args.add(operand.accept(this));
    }
  }
  // If the call is a redundant cast then bail out, e.g. CAST(true AS BOOLEAN).
  if (call.isA(SqlKind.CAST) && (call.operands.size() == 1) && SqlTypeUtil.equalSansNullability(dTFactory, call.getType(), call.operands.get(0).getType())) {
    return args.get(0);
  } else {
    GenericUDF hiveUdf = SqlFunctionConverter.getHiveUDF(call.getOperator(), call.getType(), args.size());
    if (hiveUdf == null) {
      throw new RuntimeException("Cannot find UDF for " + call.getType() + " " + call.getOperator() + "[" + call.getOperator().getKind() + "]/" + args.size());
    }
    try {
      gfDesc = ExprNodeGenericFuncDesc.newInstance(hiveUdf, args);
    } catch (UDFArgumentException e) {
      LOG.error("Failed to instantiate udf: ", e);
      throw new RuntimeException(e);
    }
  }
  // Try to fold if it is a constant expression
  if (foldExpr && RexUtil.isConstant(call)) {
    ExprNodeDesc constantExpr = ConstantPropagateProcFactory.foldExpr((ExprNodeGenericFuncDesc) gfDesc);
    if (constantExpr != null) {
      gfDesc = constantExpr;
    }
  }
  return gfDesc;
}
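The UDFArgumentException caught above comes from ExprNodeGenericFuncDesc.newInstance, which re-runs the UDF's initialize against the children's type information. The standalone sketch below, using GenericUDFOPPlus over two int constants and a hypothetical method name plusDescExample, shows that same call outside of the Calcite visitor; it is illustrative only.
static ExprNodeDesc plusDescExample() {
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 1));
  children.add(new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 2));
  try {
    // newInstance() initializes the UDF against the children's types and
    // throws UDFArgumentException if they do not fit.
    return ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPPlus(), children);
  } catch (UDFArgumentException e) {
    // Reached when the children's types are not acceptable to the UDF.
    throw new RuntimeException(e);
  }
}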
Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.
The class GenericUDFArrayContains, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  // Check if two arguments were passed
  if (arguments.length != ARG_COUNT) {
    throw new UDFArgumentException("The function " + FUNC_NAME + " accepts " + ARG_COUNT + " arguments.");
  }
  // Check if ARRAY_IDX argument is of category LIST
  if (!arguments[ARRAY_IDX].getCategory().equals(Category.LIST)) {
    throw new UDFArgumentTypeException(ARRAY_IDX, "\"" + org.apache.hadoop.hive.serde.serdeConstants.LIST_TYPE_NAME + "\" " + "expected at function ARRAY_CONTAINS, but " + "\"" + arguments[ARRAY_IDX].getTypeName() + "\" " + "is found");
  }
  arrayOI = (ListObjectInspector) arguments[ARRAY_IDX];
  arrayElementOI = arrayOI.getListElementObjectInspector();
  valueOI = arguments[VALUE_IDX];
  // Check if list element and value are of same type
  if (!ObjectInspectorUtils.compareTypes(arrayElementOI, valueOI)) {
    throw new UDFArgumentTypeException(VALUE_IDX, "\"" + arrayElementOI.getTypeName() + "\"" + " expected at function ARRAY_CONTAINS, but " + "\"" + valueOI.getTypeName() + "\"" + " is found");
  }
  // Check if the comparison is supported for this type
  if (!ObjectInspectorUtils.compareSupported(valueOI)) {
    throw new UDFArgumentException("The function " + FUNC_NAME + " does not support comparison for " + "\"" + valueOI.getTypeName() + "\"" + " types");
  }
  result = new BooleanWritable(false);
  return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
}
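The inspectors captured in initialize are then used by evaluate. Below is a sketch of a matching evaluate that follows the same pattern Hive uses for array_contains (scan the list and compare each element against the value with ObjectInspectorUtils.compare); treat it as illustrative rather than the exact source.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  result.set(false);
  Object array = arguments[ARRAY_IDX].get();
  Object value = arguments[VALUE_IDX].get();
  int arrayLength = arrayOI.getListLength(array);
  // A null value or a null/empty array can never produce a match.
  if (value == null || arrayLength <= 0) {
    return result;
  }
  // Compare each non-null element with the search value using the inspectors
  // validated in initialize().
  for (int i = 0; i < arrayLength; ++i) {
    Object listElement = arrayOI.getListElement(array, i);
    if (listElement != null && ObjectInspectorUtils.compare(value, valueOI, listElement, arrayElementOI) == 0) {
      result.set(true);
      break;
    }
  }
  return result;
}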
Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.
The class GenericUDF, method initializeAndFoldConstants.
/**
 * Initialize this GenericUDF. Additionally, if the arguments are constant
 * and the function is eligible to be folded, then the constant value
 * returned by this UDF will be computed and stored in the
 * ConstantObjectInspector returned. Otherwise, the function behaves exactly
 * like initialize().
 */
public ObjectInspector initializeAndFoldConstants(ObjectInspector[] arguments) throws UDFArgumentException {
  ObjectInspector oi = initialize(arguments);
  // Skip folding if the UDF depends on added files or jars, since those
  // resources may not be available at compile time.
  if (getRequiredFiles() != null || getRequiredJars() != null) {
    return oi;
  }
  boolean allConstant = true;
  for (int ii = 0; ii < arguments.length; ++ii) {
    if (!ObjectInspectorUtils.isConstantObjectInspector(arguments[ii])) {
      allConstant = false;
      break;
    }
  }
  if (allConstant && !ObjectInspectorUtils.isConstantObjectInspector(oi) && FunctionRegistry.isConsistentWithinQuery(this) && ObjectInspectorUtils.supportsConstantObjectInspector(oi)) {
    DeferredObject[] argumentValues = new DeferredJavaObject[arguments.length];
    for (int ii = 0; ii < arguments.length; ++ii) {
      argumentValues[ii] = new DeferredJavaObject(((ConstantObjectInspector) arguments[ii]).getWritableConstantValue());
    }
    try {
      Object constantValue = evaluate(argumentValues);
      oi = ObjectInspectorUtils.getConstantObjectInspector(oi, constantValue);
    } catch (HiveException e) {
      throw new UDFArgumentException(e);
    }
  }
  return oi;
}
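As a concrete illustration of the folding path, the sketch below initializes upper('hive') with a constant string argument and reads back the folded value. The method name foldUpperExample is hypothetical, and it assumes GenericUDFUpper and the serde2 constant ObjectInspector helpers behave outside the compiler as they do inside it, so treat the noted output as the expected result rather than a verified one.
static void foldUpperExample() throws UDFArgumentException {
  // Constant folding sketch: upper('hive') with a constant string argument.
  GenericUDF upper = new GenericUDFUpper();
  ObjectInspector[] args = new ObjectInspector[] {
    ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.writableStringObjectInspector, new Text("hive"))
  };
  ObjectInspector oi = upper.initializeAndFoldConstants(args);
  if (ObjectInspectorUtils.isConstantObjectInspector(oi)) {
    // The UDF was evaluated once at "compile time"; expected value: HIVE
    Object folded = ((ConstantObjectInspector) oi).getWritableConstantValue();
    System.out.println(folded);
  }
}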