use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.
the class ExprNodeGenericFuncDesc method newInstance.
/**
 * Create an ExprNodeGenericFuncDesc based on the genericUDFClass and the
 * children parameters. If the function has an explicit name, the
 * newInstance method should be passed the function name in the funcText
 * argument.
 *
 * @throws UDFArgumentException
 */
public static ExprNodeGenericFuncDesc newInstance(GenericUDF genericUDF, String funcText, List<ExprNodeDesc> children) throws UDFArgumentException {
  ObjectInspector[] childrenOIs = new ObjectInspector[children.size()];
  for (int i = 0; i < childrenOIs.length; i++) {
    childrenOIs[i] = children.get(i).getWritableObjectInspector();
  }
  // Initialize the UDF against the children's inspectors, folding constants where possible.
  ObjectInspector oi = genericUDF.initializeAndFoldConstants(childrenOIs);
  // Register any jars/files the UDF declares as required with the current session.
  String[] requiredJars = genericUDF.getRequiredJars();
  String[] requiredFiles = genericUDF.getRequiredFiles();
  SessionState ss = SessionState.get();
  if (requiredJars != null) {
    SessionState.ResourceType t = SessionState.find_resource_type("JAR");
    try {
      ss.add_resources(t, Arrays.asList(requiredJars));
    } catch (Exception e) {
      throw new UDFArgumentException(e);
    }
  }
  if (requiredFiles != null) {
    SessionState.ResourceType t = SessionState.find_resource_type("FILE");
    try {
      ss.add_resources(t, Arrays.asList(requiredFiles));
    } catch (Exception e) {
      throw new UDFArgumentException(e);
    }
  }
  return new ExprNodeGenericFuncDesc(oi, genericUDF, funcText, children);
}
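The snippet below is an illustrative caller, not part of the Hive sources: it builds a descriptor for upper(col) over a hypothetical string column col with table alias t, assuming hive-exec is on the classpath. GenericUDFUpper declares no required jars or files, so the SessionState branches above stay idle here.
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class NewInstanceSketch {
  // Hypothetical helper: the column and table alias names are illustrative only.
  public static ExprNodeGenericFuncDesc upperOf(String column) throws UDFArgumentException {
    ExprNodeDesc child = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, column, "t", false);
    List<ExprNodeDesc> children = Arrays.asList(child);
    // newInstance initializes the UDF against the child inspectors and
    // returns the fully typed function descriptor.
    return ExprNodeGenericFuncDesc.newInstance(new GenericUDFUpper(), "upper", children);
  }
}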
use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.
the class GenericUDFFromUtcTimestamp method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("The function " + getName() + " requires two arguments, got " + arguments.length);
  }
  try {
    argumentOIs = new PrimitiveObjectInspector[2];
    argumentOIs[0] = (PrimitiveObjectInspector) arguments[0];
    argumentOIs[1] = (PrimitiveObjectInspector) arguments[1];
  } catch (ClassCastException e) {
    throw new UDFArgumentException("The function " + getName() + " takes only primitive types");
  }
  timestampConverter = new TimestampConverter(argumentOIs[0], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
  textConverter = new TextConverter(argumentOIs[1]);
  return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
}
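A minimal driver, written for illustration rather than taken from the Hive tree: both inspectors are primitive, so the casts in initialize succeed, and evaluate shifts the given UTC timestamp into the named zone. The literal values are assumptions.
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class FromUtcTimestampSketch {
  public static Object run() throws HiveException {
    GenericUDFFromUtcTimestamp udf = new GenericUDFFromUtcTimestamp();
    ObjectInspector[] args = {
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector };
    udf.initialize(args); // two primitive arguments, so the length and cast checks pass
    DeferredObject[] row = {
        new DeferredJavaObject("2024-01-01 12:00:00"),
        new DeferredJavaObject("America/Los_Angeles") };
    return udf.evaluate(row); // the timestamp shifted from UTC into the named zone
  }
}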
use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.
the class GenericUDFJsonRead method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  checkArgsSize(arguments, 2, 2);
  checkArgPrimitive(arguments, 0);
  checkArgPrimitive(arguments, 1);
  if (!ObjectInspectorUtils.isConstantObjectInspector(arguments[1])) {
    throw new UDFArgumentTypeException(1, getFuncName() + " argument 2 may only be a constant");
  }
  inputConverter = new TextConverter((PrimitiveObjectInspector) arguments[0]);
  String typeStr = getConstantStringValue(arguments, 1);
  try {
    final TypeInfo t = TypeInfoUtils.getTypeInfoFromTypeString(typeStr);
    final ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(t);
    jsonReader = new HiveJsonReader(oi);
    jsonReader.enable(Feature.PRIMITIVE_TO_WRITABLE);
  } catch (Exception e) {
    throw new UDFArgumentException(getFuncName() + ": Error parsing typestring: " + e.getMessage());
  }
  return jsonReader.getObjectInspector();
}
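To satisfy the constant-argument check, the second inspector has to be a ConstantObjectInspector. A sketch, assuming the writable constant factory method on PrimitiveObjectInspectorFactory and an illustrative type string of array<int>:
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFJsonRead;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;

public class JsonReadSketch {
  public static ObjectInspector init() throws UDFArgumentException {
    GenericUDFJsonRead udf = new GenericUDFJsonRead();
    ObjectInspector jsonOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // A constant string inspector carrying the target type, so the
    // isConstantObjectInspector check above passes.
    ObjectInspector typeOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, new Text("array<int>"));
    return udf.initialize(new ObjectInspector[] { jsonOI, typeOI }); // a standard list-of-int inspector
  }
}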
use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.
the class GenericUDFDateAdd method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("date_add() requires 2 arguments, got " + arguments.length);
  }
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + arguments[0].getTypeName() + " is passed as first argument");
  }
  if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but " + arguments[1].getTypeName() + " is passed as second argument");
  }
  inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  switch (inputType1) {
    case STRING:
    case VARCHAR:
    case CHAR:
      inputType1 = PrimitiveCategory.STRING;
      dateConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
      break;
    case TIMESTAMP:
      dateConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
      break;
    case DATE:
      dateConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableDateObjectInspector);
      break;
    default:
      throw new UDFArgumentException("DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + inputType1);
  }
  inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
  switch (inputType2) {
    case BYTE:
      daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1], PrimitiveObjectInspectorFactory.writableByteObjectInspector);
      break;
    case SHORT:
      daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1], PrimitiveObjectInspectorFactory.writableShortObjectInspector);
      break;
    case INT:
      daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
      break;
    default:
      throw new UDFArgumentException("DATE_ADD() only takes TINYINT/SMALLINT/INT types as second argument, got " + inputType2);
  }
  return outputOI;
}
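As an illustrative check (an assumption, not Hive test code), initializing with a DATE first argument and an INT second argument takes the DATE and INT branches and returns the writable date inspector:
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class DateAddInitSketch {
  public static ObjectInspector init() throws UDFArgumentException {
    GenericUDFDateAdd udf = new GenericUDFDateAdd();
    ObjectInspector[] args = {
        PrimitiveObjectInspectorFactory.writableDateObjectInspector, // DATE branch
        PrimitiveObjectInspectorFactory.writableIntObjectInspector   // INT branch
    };
    return udf.initialize(args); // writableDateObjectInspector
  }
}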
use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache.
the class GenericUDFDateAdd method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0].get() == null) {
    return null;
  }
  Object daysWritableObject = daysConverter.convert(arguments[1].get());
  if (daysWritableObject == null) {
    return null;
  }
  int toBeAdded;
  if (daysWritableObject instanceof ByteWritable) {
    toBeAdded = ((ByteWritable) daysWritableObject).get();
  } else if (daysWritableObject instanceof ShortWritable) {
    toBeAdded = ((ShortWritable) daysWritableObject).get();
  } else if (daysWritableObject instanceof IntWritable) {
    toBeAdded = ((IntWritable) daysWritableObject).get();
  } else {
    return null;
  }
  // Convert the first param into a DateWritableV2 value
  switch (inputType1) {
    case STRING:
      String dateString = dateConverter.convert(arguments[0].get()).toString();
      if (DateParser.parseDate(dateString, dateVal)) {
        output.set(dateVal);
      } else {
        return null;
      }
      break;
    case TIMESTAMP:
      Timestamp ts = ((TimestampWritableV2) dateConverter.convert(arguments[0].get())).getTimestamp();
      output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
      break;
    case DATE:
      DateWritableV2 dw = (DateWritableV2) dateConverter.convert(arguments[0].get());
      output.set(dw.getDays());
      break;
    default:
      throw new UDFArgumentException("DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
  }
  // signModifier is +1 for date_add and -1 for the date_sub subclass.
  int newDays = output.getDays() + (signModifier * toBeAdded);
  output.set(newDays);
  return output;
}
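Putting the two methods together, an end-to-end sketch with assumed literal values: a STRING first argument goes through DateParser.parseDate, and the converted int is added to the resulting day count.
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class DateAddEvalSketch {
  public static Object run() throws HiveException {
    GenericUDFDateAdd udf = new GenericUDFDateAdd();
    udf.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, // STRING branch
        PrimitiveObjectInspectorFactory.javaIntObjectInspector });
    DeferredObject[] row = {
        new DeferredJavaObject("2024-01-01"),
        new DeferredJavaObject(10) };
    return udf.evaluate(row); // a DateWritableV2 holding 2024-01-11
  }
}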