use of org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException in project hive by apache.
The initialize method of the GenericUDFDateAdd class.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("date_add() requires 2 argument, got " + arguments.length);
  }
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
        + arguments[0].getTypeName() + " is passed. as first arguments");
  }
  if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
        + arguments[1].getTypeName() + " is passed. as second arguments");
  }
  inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  switch (inputType1) {
  case STRING:
  case VARCHAR:
  case CHAR:
    inputType1 = PrimitiveCategory.STRING;
    dateConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[0],
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    break;
  case TIMESTAMP:
    dateConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0],
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
    break;
  case DATE:
    dateConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[0],
        PrimitiveObjectInspectorFactory.writableDateObjectInspector);
    break;
  default:
    throw new UDFArgumentException(
        " DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + inputType1);
  }
  inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
  switch (inputType2) {
  case BYTE:
    daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1],
        PrimitiveObjectInspectorFactory.writableByteObjectInspector);
    break;
  case SHORT:
    daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1],
        PrimitiveObjectInspectorFactory.writableShortObjectInspector);
    break;
  case INT:
    daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1],
        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    break;
  default:
    throw new UDFArgumentException(
        " DATE_ADD() only takes TINYINT/SMALLINT/INT types as second argument, got " + inputType2);
  }
  return outputOI;
}
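As a quick illustration of the checks above, here is a minimal standalone sketch (the class name DateAddInitializeSketch is invented for this example and is not part of Hive): calling initialize with a single inspector should trip the length check, while a string date plus an int day count should pass both switches and return a date inspector.

import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class DateAddInitializeSketch {
  public static void main(String[] args) throws Exception {
    GenericUDFDateAdd udf = new GenericUDFDateAdd();
    // One inspector instead of two: the arity check should throw before any type checks run.
    ObjectInspector[] tooFew = { PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    try {
      udf.initialize(tooFew);
    } catch (UDFArgumentLengthException e) {
      System.out.println("rejected: " + e.getMessage());
    }
    // A string date plus an int day count is accepted; the returned inspector describes a date.
    ObjectInspector[] ok = {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector };
    System.out.println("output type: " + udf.initialize(ok).getTypeName());
  }
}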
use of org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException in project hive by apache.
The initialize method of the GenericUDFDateDiff class.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("datediff() requires 2 argument, got " + arguments.length);
  }
  inputConverter1 = checkArguments(arguments, 0);
  inputConverter2 = checkArguments(arguments, 1);
  inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
  inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  return outputOI;
}
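The sketch below (DateDiffSketch is a made-up name, not Hive code) drives the whole lifecycle rather than just the length check: initialize with two string inspectors, then evaluate on a pair of Text dates. Assuming Hive's usual datediff semantics, the returned IntWritable should hold the difference in days, 6 in this case.

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateDiff;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class DateDiffSketch {
  public static void main(String[] args) throws Exception {
    GenericUDFDateDiff udf = new GenericUDFDateDiff();
    ObjectInspector stringOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    // Two string arguments satisfy checkArguments; the output inspector is a writable int.
    udf.initialize(new ObjectInspector[] { stringOI, stringOI });
    DeferredObject[] row = {
        new DeferredJavaObject(new Text("2015-07-23")),
        new DeferredJavaObject(new Text("2015-07-17")) };
    // The first date is six days after the second, so the expected result is 6.
    IntWritable diff = (IntWritable) udf.evaluate(row);
    System.out.println(diff);
  }
}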
use of org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException in project hive by apache.
The initialize method of the GenericUDFDecode class.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("Decode() requires exactly two arguments");
  }
  if (arguments[0].getCategory() != Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "The first argument to Decode() must be primitive");
  }
  PrimitiveCategory category = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
  if (category == PrimitiveCategory.BINARY) {
    bytesOI = (BinaryObjectInspector) arguments[0];
  } else if (category == PrimitiveCategory.VOID) {
    bytesOI = (VoidObjectInspector) arguments[0];
  } else {
    throw new UDFArgumentTypeException(0, "The first argument to Decode() must be binary");
  }
  if (arguments[1].getCategory() != Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(1, "The second argument to Decode() must be primitive");
  }
  charsetOI = (PrimitiveObjectInspector) arguments[1];
  if (PrimitiveGrouping.STRING_GROUP != PrimitiveObjectInspectorUtils.getPrimitiveGrouping(charsetOI.getPrimitiveCategory())) {
    throw new UDFArgumentTypeException(1, "The second argument to Decode() must be from string group");
  }
  // If the character set for decoding is constant, we can optimize that
  if (arguments[1] instanceof ConstantObjectInspector) {
    String charSetName = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue().toString();
    decoder = Charset.forName(charSetName).newDecoder().onMalformedInput(CodingErrorAction.REPORT)
        .onUnmappableCharacter(CodingErrorAction.REPORT);
  }
  return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
}
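A small hedged sketch of the type checks above (DecodeInitSketch is an invented name): a string first argument is primitive but neither binary nor void, so the second check should reject it, while a binary payload plus a string charset passes every check and yields a Java string inspector.

import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDecode;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class DecodeInitSketch {
  public static void main(String[] args) throws Exception {
    GenericUDFDecode udf = new GenericUDFDecode();
    ObjectInspector stringOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    ObjectInspector binaryOI = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    // A string first argument is primitive but not BINARY or VOID, so it falls into the else branch.
    try {
      udf.initialize(new ObjectInspector[] { stringOI, stringOI });
    } catch (UDFArgumentTypeException e) {
      System.out.println("rejected: " + e.getMessage());
    }
    // A binary payload plus a string charset is accepted; values are decoded to a Java String.
    System.out.println("output type: "
        + udf.initialize(new ObjectInspector[] { binaryOI, stringOI }).getTypeName());
  }
}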
use of org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException in project hive by apache.
The initialize method of the GenericUDFCharacterLength class.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentLengthException("CHARACTER_LENGTH requires 1 argument, got " + arguments.length);
  }
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    // Use arguments[0] here: argumentOI has not been assigned yet on this error path.
    throw new UDFArgumentException("CHARACTER_LENGTH only takes primitive types, got " + arguments[0].getTypeName());
  }
  argumentOI = (PrimitiveObjectInspector) arguments[0];
  stringConverter = new PrimitiveObjectInspectorConverter.StringConverter(argumentOI);
  PrimitiveObjectInspector.PrimitiveCategory inputType = argumentOI.getPrimitiveCategory();
  ObjectInspector outputOI = null;
  switch (inputType) {
  case CHAR:
  case VARCHAR:
  case STRING:
    isInputString = true;
    break;
  case BINARY:
    isInputString = false;
    break;
  default:
    throw new UDFArgumentException(
        " CHARACTER_LENGTH() only takes STRING/CHAR/VARCHAR/BINARY types as first argument, got " + inputType);
  }
  outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  return outputOI;
}
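A short sketch of the same pattern for CHARACTER_LENGTH (CharacterLengthSketch is an invented name): passing two inspectors should trip the length check with the message built above, while a single varchar argument initializes cleanly and yields an int inspector.

import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCharacterLength;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class CharacterLengthSketch {
  public static void main(String[] args) throws Exception {
    GenericUDFCharacterLength udf = new GenericUDFCharacterLength();
    ObjectInspector varcharOI = PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector;
    // Two inspectors where exactly one is expected: the arity check should fire first.
    try {
      udf.initialize(new ObjectInspector[] { varcharOI, varcharOI });
    } catch (UDFArgumentLengthException e) {
      System.out.println(e.getMessage());
    }
    // A single VARCHAR argument is accepted and the output inspector is a writable int.
    System.out.println("output type: " + udf.initialize(new ObjectInspector[] { varcharOI }).getTypeName());
  }
}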
use of org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException in project hive by apache.
The testLevenshteinWrongLength method of the TestGenericUDFLevenshtein class.
public void testLevenshteinWrongLength() throws HiveException {
  @SuppressWarnings("resource") GenericUDFLevenshtein udf = new GenericUDFLevenshtein();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector;
  ObjectInspector[] arguments = { valueOI0 };
  try {
    udf.initialize(arguments);
    assertTrue("levenshtein test. UDFArgumentLengthException is expected", false);
  } catch (UDFArgumentLengthException e) {
    assertEquals("levenshtein test", "levenshtein requires 2 argument(s), got 1", e.getMessage());
  }
}