Usage example of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project: the initialize method of the GenericUDFDatetimeLegacyHybridCalendar class.
/**
 * Validates the single input argument and sets up the conversion machinery.
 * Accepts only DATE or TIMESTAMP primitive inputs; the legacy formatter is
 * pinned to UTC and non-lenient so hybrid-calendar conversion is exact.
 *
 * @param arguments the argument object inspectors (exactly one is used)
 * @return the writable date or timestamp object inspector matching the input
 * @throws UDFArgumentException on a missing, non-primitive, or unsupported-type argument
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length < 1) {
    throw new UDFArgumentLengthException("The function datetime_legacy_hybrid_calendar requires at least one argument, got " + arguments.length);
  }
  try {
    inputOI = (PrimitiveObjectInspector) arguments[0];
    // Pick the legacy format pattern and result inspector by input category;
    // the shared formatter/converter setup is done once afterwards.
    final String pattern;
    switch (inputOI.getPrimitiveCategory()) {
      case DATE:
        pattern = "yyyy-MM-dd";
        resultOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
        break;
      case TIMESTAMP:
        pattern = "yyyy-MM-dd HH:mm:ss";
        resultOI = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
        break;
      default:
        throw new UDFArgumentException("datetime_legacy_hybrid_calendar only allows date or timestamp types");
    }
    formatter = new SimpleDateFormat(pattern);
    formatter.setLenient(false);
    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
    // In both supported branches the converter target equals resultOI.
    converter = ObjectInspectorConverters.getConverter(inputOI, resultOI);
  } catch (ClassCastException e) {
    // arguments[0] was not a PrimitiveObjectInspector.
    throw new UDFArgumentException("The function datetime_legacy_hybrid_calendar takes only primitive types");
  }
  return resultOI;
}
Usage example of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project: the initialize method of the GenericUDFDeserialize class.
/**
 * Validates that both arguments (value, compression format) are string-group
 * primitives and caches their object inspectors for evaluate().
 *
 * Fix: the original loop always reported argument index 0 in
 * UDFArgumentTypeException, even when the second argument was the invalid one;
 * the indexed loop now reports the actual failing position.
 *
 * @param arguments the argument object inspectors; exactly ARG_COUNT expected
 * @return a java String object inspector for the deserialized result
 * @throws UDFArgumentException     on a wrong argument count
 * @throws UDFArgumentTypeException when an argument is not a string/varchar primitive
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != ARG_COUNT) {
    throw new UDFArgumentException("The function " + FUNC_NAME + " accepts " + ARG_COUNT + " arguments.");
  }
  for (int i = 0; i < arguments.length; i++) {
    ObjectInspector arg = arguments[i];
    if (arg.getCategory() != ObjectInspector.Category.PRIMITIVE
        || PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP
            != PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
                ((PrimitiveObjectInspector) arg).getPrimitiveCategory())) {
      // Report the index of the argument that actually failed validation.
      throw new UDFArgumentTypeException(i, "The arguments to " + FUNC_NAME + " must be a string/varchar");
    }
  }
  stringOI = (PrimitiveObjectInspector) arguments[0];
  compressionFormat = (PrimitiveObjectInspector) arguments[1];
  return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
}
Usage example of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project: the evaluate method of the GenericUDFDate class.
/**
 * Evaluates TO_DATE() for a single argument, dispatching on the input
 * category resolved during initialize().
 *
 * @param arguments deferred argument values; only arguments[0] is consumed
 * @return the shared date writable output, or null for a null/unparseable input
 * @throws HiveException on a VOID non-null value or an unexpected input type
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  final Object value = arguments[0].get();
  if (value == null) {
    return null;
  }
  if (inputType == PrimitiveCategory.VOID) {
    throw new UDFArgumentException("TO_DATE() received non-null object of VOID type");
  } else if (inputType == PrimitiveCategory.STRING) {
    final String dateString = textConverter.convert(value).toString();
    // An unparseable date string yields SQL NULL rather than an error.
    if (!DateParser.parseDate(dateString, date)) {
      return null;
    }
    output.set(date);
  } else if (inputType == PrimitiveCategory.TIMESTAMP) {
    final Timestamp ts = ((TimestampWritableV2) timestampConverter.convert(value)).getTimestamp();
    output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
  } else if (inputType == PrimitiveCategory.TIMESTAMPLOCALTZ || inputType == PrimitiveCategory.DATE) {
    // Both categories share the date-writable converter set up in initialize().
    final DateWritableV2 converted = (DateWritableV2) dateWritableConverter.convert(value);
    output.set(converted);
  } else {
    throw new UDFArgumentException("TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
  }
  return output;
}
Usage example of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project: the initialize method of the GenericUDFDate class.
/**
 * Validates the single argument of to_date() and prepares the appropriate
 * converter for the input's primitive category.
 *
 * @param arguments the argument object inspectors (exactly one required)
 * @return the writable date object inspector (the UDF always produces a date)
 * @throws UDFArgumentException on a wrong count, non-primitive, or unsupported type
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentLengthException("to_date() requires 1 argument, got " + arguments.length);
  }
  if (arguments[0].getCategory() != Category.PRIMITIVE) {
    throw new UDFArgumentException("to_date() only accepts STRING/TIMESTAMP/DATEWRITABLE types, got " + arguments[0].getTypeName());
  }
  argumentOI = (PrimitiveObjectInspector) arguments[0];
  inputType = argumentOI.getPrimitiveCategory();
  if (inputType == PrimitiveCategory.CHAR
      || inputType == PrimitiveCategory.VARCHAR
      || inputType == PrimitiveCategory.STRING) {
    // All string-like inputs are normalized to STRING for evaluate()'s dispatch.
    inputType = PrimitiveCategory.STRING;
    textConverter = ObjectInspectorConverters.getConverter(argumentOI, PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  } else if (inputType == PrimitiveCategory.TIMESTAMP) {
    timestampConverter = new TimestampConverter(argumentOI, PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
  } else if (inputType == PrimitiveCategory.TIMESTAMPLOCALTZ || inputType == PrimitiveCategory.DATE) {
    dateWritableConverter = ObjectInspectorConverters.getConverter(argumentOI, PrimitiveObjectInspectorFactory.writableDateObjectInspector);
  } else if (inputType != PrimitiveCategory.VOID) {
    // VOID is tolerated here; evaluate() rejects non-null VOID values.
    throw new UDFArgumentException("TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
  }
  return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
}
Usage example of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project: the initForNonPrimitives method of the GenericUDFBaseCompare class.
/**
 * Configures comparison for two non-primitive operands: they must resolve to
 * exactly the same type, in which case SAME_TYPE comparison is used.
 *
 * @param arg0 object inspector of the left operand (must be non-primitive)
 * @param arg1 object inspector of the right operand (must be non-primitive)
 * @throws UDFArgumentException when the two operand types differ
 */
private void initForNonPrimitives(ObjectInspector arg0, ObjectInspector arg1) throws UDFArgumentException {
  assert arg0.getCategory() != Category.PRIMITIVE;
  assert arg1.getCategory() != Category.PRIMITIVE;
  assert arg0.getCategory() == arg1.getCategory();
  final TypeInfo leftType = TypeInfoUtils.getTypeInfoFromObjectInspector(arg0);
  final TypeInfo rightType = TypeInfoUtils.getTypeInfoFromObjectInspector(arg1);
  // Guard clause: mismatched non-primitive types cannot be compared.
  if (!leftType.equals(rightType)) {
    throw new UDFArgumentException("Type mismatch in " + opName + "(" + leftType + "," + rightType + ")");
  }
  compareType = CompareType.SAME_TYPE;
}
Aggregations