Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDoubleObjectInspector in project hive by apache.
The initialize method of the GenericUDFAdd10 class.
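The method below assigns to three instance fields that are declared elsewhere in the class. A minimal sketch of plausible declarations, inferred from the types used in initialize (field names come from the snippet; modifiers and exact declarations in the Hive source may differ):

  // Assumed field declarations, inferred from how initialize uses them.
  // import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
  // import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
  // import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
  private transient PrimitiveObjectInspector argumentOI;
  private transient PrimitiveCategory inputType;
  private transient ObjectInspectorConverters.Converter inputConverter;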
  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
      throw new UDFArgumentLengthException("ADD10() requires 1 argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
      throw new UDFArgumentException("ADD10 only takes primitive types, got " + arguments[0].getTypeName());
    }
    argumentOI = (PrimitiveObjectInspector) arguments[0];
    inputType = argumentOI.getPrimitiveCategory();
    ObjectInspector outputOI = null;
    switch (inputType) {
      // Narrow integer types are widened to int: converter and output both use the writable int inspector.
      case SHORT:
      case BYTE:
      case INT:
        inputConverter = ObjectInspectorConverters.getConverter(arguments[0],
            PrimitiveObjectInspectorFactory.writableIntObjectInspector);
        outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        break;
      case LONG:
        inputConverter = ObjectInspectorConverters.getConverter(arguments[0],
            PrimitiveObjectInspectorFactory.writableLongObjectInspector);
        outputOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
        break;
      // FLOAT and STRING inputs are converted to double, so these cases share the writable double inspector.
      case FLOAT:
      case STRING:
      case DOUBLE:
        inputConverter = ObjectInspectorConverters.getConverter(arguments[0],
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
        outputOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        break;
      // DECIMAL preserves the input's precision and scale by deriving the output inspector from the argument's type info.
      case DECIMAL:
        outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
            ((PrimitiveObjectInspector) arguments[0]).getTypeInfo());
        inputConverter = ObjectInspectorConverters.getConverter(arguments[0], outputOI);
        break;
      default:
        throw new UDFArgumentException(
            "ADD10 only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
    }
    return outputOI;
  }
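For context, here is a minimal sketch of how a companion evaluate method could use the converter and the cached inputType, under the assumption that the UDF simply adds 10 to its argument in the type selected by initialize. This is illustrative, not the actual Hive implementation; DoubleWritable below refers to org.apache.hadoop.hive.serde2.io.DoubleWritable, the type produced by writableDoubleObjectInspector.

  // Illustrative sketch (assumption): convert the argument with inputConverter, then add 10
  // in the writable type that matches the output inspector chosen in initialize.
  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object value = arguments[0].get();
    if (value == null) {
      return null;
    }
    Object converted = inputConverter.convert(value);
    switch (inputType) {
      case SHORT:
      case BYTE:
      case INT:
        return new IntWritable(((IntWritable) converted).get() + 10);
      case LONG:
        return new LongWritable(((LongWritable) converted).get() + 10L);
      case FLOAT:
      case STRING:
      case DOUBLE:
        return new DoubleWritable(((DoubleWritable) converted).get() + 10.0d);
      case DECIMAL:
        HiveDecimalWritable dec = (HiveDecimalWritable) converted;
        return new HiveDecimalWritable(dec.getHiveDecimal().add(HiveDecimal.create(10L)));
      default:
        throw new HiveException("Unexpected input type " + inputType);
    }
  }

Note that production Hive UDFs typically reuse a single pre-allocated result writable instead of allocating a new one per row; the sketch allocates per call only for brevity.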