use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.
the class GenericUDFTrunc method evaluateDate.
private Object evaluateDate(DeferredObject[] arguments) throws UDFArgumentLengthException, HiveException, UDFArgumentTypeException, UDFArgumentException {
if (arguments.length != 2) {
throw new UDFArgumentLengthException("trunc() requires 2 arguments, got " + arguments.length);
}
if (arguments[0].get() == null || arguments[1].get() == null) {
return null;
}
if (textConverter2 != null) {
fmtInput = textConverter2.convert(arguments[1].get()).toString();
}
Date date;
switch(inputType1) {
case STRING:
String dateString = textConverter1.convert(arguments[0].get()).toString();
try {
date = formatter.parse(dateString);
} catch (ParseException e) {
return null;
}
break;
case TIMESTAMP:
Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get())).getTimestamp();
date = ts;
break;
case DATE:
DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
date = dw.get();
break;
default:
throw new UDFArgumentTypeException(0, "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
}
if (evalDate(date) == null) {
return null;
}
Date newDate = calendar.getTime();
output.set(formatter.format(newDate));
return output;
}
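For context, evaluateDate normalizes the first argument to a java.util.Date and delegates the actual truncation to evalDate(), whose body is not part of this snippet. The following standalone sketch (plain JDK, illustrative names, not Hive's implementation) shows the trunc(date, fmt) semantics it implements: resetting a date to the first day of its month, quarter, or year.
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

// Illustrative sketch only: Hive's evalDate() is not shown above, so this
// reimplements its assumed behavior with plain JDK classes.
public class TruncDateSketch {
  private static final SimpleDateFormat FMT = new SimpleDateFormat("yyyy-MM-dd");

  // Truncate to the first day of the year ("YYYY"/"YY"/"YEAR"), quarter ("Q"),
  // or month ("MM"/"MON"/"MONTH"); unknown patterns yield null, matching the
  // null returns in evaluateDate above.
  static String truncDate(String dateString, String fmt) throws ParseException {
    Calendar calendar = Calendar.getInstance();
    calendar.setTime(FMT.parse(dateString));
    if ("YYYY".equalsIgnoreCase(fmt) || "YY".equalsIgnoreCase(fmt) || "YEAR".equalsIgnoreCase(fmt)) {
      calendar.set(Calendar.MONTH, Calendar.JANUARY);
    } else if ("Q".equalsIgnoreCase(fmt)) {
      calendar.set(Calendar.MONTH, (calendar.get(Calendar.MONTH) / 3) * 3);
    } else if (!"MM".equalsIgnoreCase(fmt) && !"MON".equalsIgnoreCase(fmt) && !"MONTH".equalsIgnoreCase(fmt)) {
      return null;
    }
    calendar.set(Calendar.DAY_OF_MONTH, 1);
    return FMT.format(calendar.getTime());
  }

  public static void main(String[] args) throws ParseException {
    System.out.println(truncDate("2017-03-15", "MM"));   // 2017-03-01
    System.out.println(truncDate("2017-03-15", "Q"));    // 2017-01-01
    System.out.println(truncDate("2017-03-15", "YEAR")); // 2017-01-01
  }
}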
use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.
the class GenericUDFTrunc method initializeDate.
private ObjectInspector initializeDate(ObjectInspector[] arguments) throws UDFArgumentLengthException, UDFArgumentTypeException {
if (arguments.length != 2) {
throw new UDFArgumentLengthException("trunc() requires 2 arguments, got " + arguments.length);
}
if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + arguments[0].getTypeName() + " is passed as the first argument");
}
if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but " + arguments[1].getTypeName() + " is passed as the second argument");
}
ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
switch(inputType1) {
case STRING:
case VARCHAR:
case CHAR:
case VOID:
inputType1 = PrimitiveCategory.STRING;
textConverter1 = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
break;
case TIMESTAMP:
timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
break;
case DATE:
dateWritableConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableDateObjectInspector);
break;
default:
throw new UDFArgumentTypeException(0, "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + inputType1);
}
inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.STRING_GROUP && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.VOID_GROUP) {
throw new UDFArgumentTypeException(1, "trunc() only takes STRING/CHAR/VARCHAR types as second argument, got " + inputType2);
}
inputType2 = PrimitiveCategory.STRING;
if (arguments[1] instanceof ConstantObjectInspector) {
Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
fmtInput = obj != null ? obj.toString() : null;
} else {
textConverter2 = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
return outputOI;
}
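The initialize-time pattern here is the general GenericUDF contract: check arity with UDFArgumentLengthException, reject wrong categories with UDFArgumentTypeException (reporting the offending argument's position), and cache converters so per-row evaluate() calls do no type inspection. A compact self-contained illustration of the same pattern, using a hypothetical shout() UDF that is not part of Hive:
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

// Hypothetical UDF for illustration; uppercases its single string argument.
public class GenericUDFShout extends GenericUDF {
  private transient ObjectInspectorConverters.Converter textConverter;

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
      throw new UDFArgumentLengthException("shout() requires 1 argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
      // Position 0 tells Hive which argument to flag in the error message.
      throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
          + arguments[0].getTypeName() + " is passed");
    }
    // Cache the converter once; evaluate() reuses it for every row.
    textConverter = ObjectInspectorConverters.getConverter(arguments[0],
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object value = arguments[0].get();
    if (value == null) {
      return null;
    }
    Text text = (Text) textConverter.convert(value);
    return new Text(text.toString().toUpperCase());
  }

  @Override
  public String getDisplayString(String[] children) {
    return "shout(" + children[0] + ")";
  }
}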
use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.
the class GenericUDFTrunc method evaluateNumber.
private Object evaluateNumber(DeferredObject[] arguments) throws HiveException, UDFArgumentTypeException {
if (arguments[0] == null) {
return null;
}
Object input = arguments[0].get();
if (input == null) {
return null;
}
if (arguments.length == 2 && arguments[1] != null && arguments[1].get() != null && !inputSacleConst) {
Object scaleObj = null;
switch(inputScaleOI.getPrimitiveCategory()) {
case BYTE:
scaleObj = byteConverter.convert(arguments[1].get());
scale = ((ByteWritable) scaleObj).get();
break;
case SHORT:
scaleObj = shortConverter.convert(arguments[1].get());
scale = ((ShortWritable) scaleObj).get();
break;
case INT:
scaleObj = intConverter.convert(arguments[1].get());
scale = ((IntWritable) scaleObj).get();
break;
case LONG:
scaleObj = longConverter.convert(arguments[1].get());
long l = ((LongWritable) scaleObj).get();
if (l < Integer.MIN_VALUE || l > Integer.MAX_VALUE) {
throw new UDFArgumentException(getFuncName().toUpperCase() + " scale argument out of allowed range");
}
scale = (int) l;
break;
default:
break;
}
}
switch(inputType1) {
case VOID:
return null;
case DECIMAL:
HiveDecimalWritable decimalWritable = (HiveDecimalWritable) inputOI.getPrimitiveWritableObject(input);
HiveDecimal dec = trunc(decimalWritable.getHiveDecimal(), scale);
if (dec == null) {
return null;
}
return new HiveDecimalWritable(dec);
case BYTE:
ByteWritable byteWritable = (ByteWritable) inputOI.getPrimitiveWritableObject(input);
if (scale >= 0) {
return byteWritable;
} else {
return new ByteWritable((byte) trunc(byteWritable.get(), scale));
}
case SHORT:
ShortWritable shortWritable = (ShortWritable) inputOI.getPrimitiveWritableObject(input);
if (scale >= 0) {
return shortWritable;
} else {
return new ShortWritable((short) trunc(shortWritable.get(), scale));
}
case INT:
IntWritable intWritable = (IntWritable) inputOI.getPrimitiveWritableObject(input);
if (scale >= 0) {
return intWritable;
} else {
return new IntWritable((int) trunc(intWritable.get(), scale));
}
case LONG:
LongWritable longWritable = (LongWritable) inputOI.getPrimitiveWritableObject(input);
if (scale >= 0) {
return longWritable;
} else {
return new LongWritable(trunc(longWritable.get(), scale));
}
case FLOAT:
float f = ((FloatWritable) inputOI.getPrimitiveWritableObject(input)).get();
return new FloatWritable((float) trunc(f, scale));
case DOUBLE:
return trunc(((DoubleWritable) inputOI.getPrimitiveWritableObject(input)), scale);
default:
throw new UDFArgumentTypeException(0, "Only numeric or string group data types are allowed for TRUNC function. Got " + inputType1.name());
}
}
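Every numeric branch funnels into a trunc(...) overload whose body is outside this snippet. A minimal sketch of the double variant, assuming Hive truncates toward zero at 10^-scale precision (which is also why the integer branches above only recompute when scale is negative):
// Assumed shape of the trunc(double, int) helper; not Hive's actual code.
public class NumericTruncSketch {
  static double trunc(double input, int scale) {
    // Scale digits into the integer part, chop toward zero, scale back.
    double factor = Math.pow(10, scale);
    return input >= 0 ? Math.floor(input * factor) / factor
        : Math.ceil(input * factor) / factor;
  }

  public static void main(String[] args) {
    System.out.println(trunc(123.456, 2)); // 123.45
    System.out.println(trunc(123.456, 0)); // 123.0
    System.out.println(trunc(1234.0, -2)); // 1200.0 (a negative scale zeroes
                                           // digits left of the decimal point)
  }
}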
use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.
the class GenericUDTFGetSplits method initialize.
@Override
public StructObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
LOG.debug("initializing GenericUDTFGetSplits");
if (SessionState.get() == null || SessionState.get().getConf() == null) {
throw new IllegalStateException("Cannot run get splits outside HS2");
}
LOG.debug("Initialized conf, jc and metastore connection");
if (arguments.length != 2) {
throw new UDFArgumentLengthException("The function GET_SPLITS accepts 2 arguments.");
} else if (!(arguments[0] instanceof StringObjectInspector)) {
LOG.error("Got " + arguments[0].getTypeName() + " instead of string.");
throw new UDFArgumentTypeException(0, "\"string\" is expected at function GET_SPLITS, but \"" + arguments[0].getTypeName() + "\" is found");
} else if (!(arguments[1] instanceof IntObjectInspector)) {
LOG.error("Got " + arguments[1].getTypeName() + " instead of int.");
throw new UDFArgumentTypeException(1, "\"int\" is expected at function GET_SPLITS, but \"" + arguments[1].getTypeName() + "\" is found");
}
stringOI = (StringObjectInspector) arguments[0];
intOI = (IntObjectInspector) arguments[1];
List<String> names = Arrays.asList("split");
List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector);
StructObjectInspector outputOI = ObjectInspectorFactory.getStandardStructObjectInspector(names, fieldOIs);
LOG.debug("done initializing GenericUDTFGetSplits");
return outputOI;
}
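The output shape of a UDTF is declared by pairing a list of column names with a parallel list of ObjectInspectors, as in the last few lines above. The same idiom in isolation, using a hypothetical two-column struct:
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Illustrative column names; any parallel name/inspector lists work the same way.
public class StructOIDemo {
  public static void main(String[] args) {
    List<String> names = Arrays.asList("name", "count");
    List<ObjectInspector> fieldOIs = Arrays.asList(
        (ObjectInspector) PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        (ObjectInspector) PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    StructObjectInspector outputOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(names, fieldOIs);
    // Prints struct<name:string,count:int>
    System.out.println(outputOI.getTypeName());
  }
}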
use of org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException in project hive by apache.
the class GenericUDFEncode method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
if (arguments.length != 2) {
throw new UDFArgumentLengthException("Encode() requires exactly two arguments");
}
if (arguments[0].getCategory() != Category.PRIMITIVE || PrimitiveGrouping.STRING_GROUP != PrimitiveObjectInspectorUtils.getPrimitiveGrouping(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory())) {
throw new UDFArgumentTypeException(0, "The first argument to Encode() must be a string/varchar");
}
stringOI = (PrimitiveObjectInspector) arguments[0];
if (arguments[1].getCategory() != Category.PRIMITIVE || PrimitiveGrouping.STRING_GROUP != PrimitiveObjectInspectorUtils.getPrimitiveGrouping(((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory())) {
throw new UDFArgumentTypeException(1, "The second argument to Encode() must be a string/varchar");
}
charsetOI = (PrimitiveObjectInspector) arguments[1];
// If the character set for encoding is constant, we can optimize that
if (charsetOI instanceof ConstantObjectInspector) {
String charSetName = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue().toString();
encoder = Charset.forName(charSetName).newEncoder().onMalformedInput(CodingErrorAction.REPORT).onUnmappableCharacter(CodingErrorAction.REPORT);
}
result = new BytesWritable();
return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
}
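The constant-charset branch builds the CharsetEncoder once at initialize time instead of once per row, and CodingErrorAction.REPORT makes unencodable input raise an exception rather than being silently substituted. A small standalone demonstration of that encoder configuration:
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CodingErrorAction;

// Demonstrates the encoder setup used above, outside of any UDF machinery.
public class EncodeDemo {
  public static void main(String[] args) throws CharacterCodingException {
    CharsetEncoder encoder = Charset.forName("US-ASCII").newEncoder()
        .onMalformedInput(CodingErrorAction.REPORT)
        .onUnmappableCharacter(CodingErrorAction.REPORT);
    ByteBuffer encoded = encoder.encode(CharBuffer.wrap("hello"));
    System.out.println(encoded.remaining()); // 5 bytes
    // encoder.encode(CharBuffer.wrap("héllo")) would throw
    // UnmappableCharacterException instead of substituting '?'.
  }
}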