Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache:
the class GenericUDFUpper, method initialize.
/**
 * Initializes UPPER: validates that exactly one primitive argument was supplied
 * and derives the output inspector. CHAR and VARCHAR inputs keep their declared
 * length on the result type; every other primitive is returned as STRING.
 *
 * @param arguments the UDF argument inspectors; must contain a single primitive
 * @return the writable object inspector describing the upper-cased result
 * @throws UDFArgumentException if the argument count or category is wrong
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentLengthException("UPPER requires 1 argument, got " + arguments.length);
  }
  if (arguments[0].getCategory() != Category.PRIMITIVE) {
    throw new UDFArgumentException("UPPER only takes primitive types, got " + arguments[0].getTypeName());
  }
  argumentOI = (PrimitiveObjectInspector) arguments[0];
  stringConverter = new PrimitiveObjectInspectorConverter.StringConverter(argumentOI);
  final PrimitiveCategory category = argumentOI.getPrimitiveCategory();
  ObjectInspector resultOI;
  if (category == PrimitiveCategory.CHAR) {
    // The result keeps the same fixed length as the CHAR input.
    returnType = category;
    BaseCharTypeInfo charInfo =
        TypeInfoFactory.getCharTypeInfo(GenericUDFUtils.StringHelper.getFixedStringSizeForType(argumentOI));
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(charInfo);
  } else if (category == PrimitiveCategory.VARCHAR) {
    // The result keeps the same maximum length as the VARCHAR input.
    returnType = category;
    BaseCharTypeInfo varcharInfo =
        TypeInfoFactory.getVarcharTypeInfo(GenericUDFUtils.StringHelper.getFixedStringSizeForType(argumentOI));
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(varcharInfo);
  } else {
    // All other primitives are converted and emitted as plain STRING.
    returnType = PrimitiveCategory.STRING;
    resultOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  }
  returnHelper = new GenericUDFUtils.StringHelper(returnType);
  return resultOI;
}
Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache:
the class GenericUDFTrunc, method initialize.
/**
 * Initializes TRUNC. Two call shapes are supported:
 * <ul>
 *   <li>TRUNC(date/string, fmt): date-group or string-group first argument with a
 *       string-group format — handled by {@code initializeDate};</li>
 *   <li>TRUNC(number[, scale]): numeric first argument, optional numeric scale —
 *       handled by {@code initializeNumber}.</li>
 * </ul>
 *
 * @param arguments one or two argument inspectors
 * @return the output inspector chosen by the matching mode-specific initializer
 * @throws UDFArgumentException if the argument count or types match neither shape
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length == 2) {
    // Guard the casts below: without these checks a complex-typed argument
    // (array, map, struct) would surface as a ClassCastException instead of a
    // proper UDFArgumentException.
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(0, "TRUNC only takes primitive types, got " + arguments[0].getTypeName());
    }
    if (arguments[1].getCategory() != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(1, "TRUNC only takes primitive types, got " + arguments[1].getTypeName());
    }
    inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
    inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
    if ((PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType1) == PrimitiveGrouping.DATE_GROUP || PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType1) == PrimitiveGrouping.STRING_GROUP) && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) == PrimitiveGrouping.STRING_GROUP) {
      dateTypeArg = true;
      return initializeDate(arguments);
    } else if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType1) == PrimitiveGrouping.NUMERIC_GROUP && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) == PrimitiveGrouping.NUMERIC_GROUP) {
      dateTypeArg = false;
      return initializeNumber(arguments);
    }
    throw new UDFArgumentException("Got wrong argument types : first argument type : " + arguments[0].getTypeName() + ", second argument type : " + arguments[1].getTypeName());
  } else if (arguments.length == 1) {
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(0, "TRUNC only takes primitive types, got " + arguments[0].getTypeName());
    }
    inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
    if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType1) == PrimitiveGrouping.NUMERIC_GROUP) {
      dateTypeArg = false;
      return initializeNumber(arguments);
    } else {
      throw new UDFArgumentException("Only primitive type arguments are accepted, when arguments length is one, got " + arguments[0].getTypeName());
    }
  }
  // Use the length-specific exception subtype for consistency with initializeNumber.
  throw new UDFArgumentLengthException("TRUNC requires one or two argument, got " + arguments.length);
}
Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache:
the class GenericUDFTrunc, method initializeNumber.
/**
 * Validates the numeric form of TRUNC and selects the output inspector.
 * The first argument must be a numeric primitive; the optional second
 * argument is the truncation scale. A constant scale is parsed once here;
 * a non-constant scale gets an integer-width-specific converter for use at
 * evaluation time. The output keeps the input's own writable primitive type.
 *
 * @param arguments one or two argument inspectors (value, optional scale)
 * @return the writable inspector matching the input's primitive category
 * @throws UDFArgumentException if the count, categories, or scale are invalid
 */
private ObjectInspector initializeNumber(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length < 1 || arguments.length > 2) {
    throw new UDFArgumentLengthException("TRUNC requires one or two argument, got " + arguments.length);
  }
  if (arguments[0].getCategory() != Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "TRUNC input only takes primitive types, got " + arguments[0].getTypeName());
  }
  inputOI = (PrimitiveObjectInspector) arguments[0];
  if (arguments.length == 2) {
    if (arguments[1].getCategory() != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(1, "TRUNC second argument only takes primitive types, got " + arguments[1].getTypeName());
    }
    inputScaleOI = (PrimitiveObjectInspector) arguments[1];
    // NOTE(review): field name "inputSacleConst" is a pre-existing typo for
    // "inputScaleConst"; it is referenced outside this method, so it is kept.
    inputSacleConst = arguments[1] instanceof ConstantObjectInspector;
    if (inputSacleConst) {
      try {
        // Constant scale: parse once at initialization.
        Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
        fmtInput = obj != null ? obj.toString() : null;
        scale = Integer.parseInt(fmtInput);
      } catch (Exception e) {
        throw new UDFArgumentException("TRUNC input only takes integer values, got " + fmtInput);
      }
    } else {
      // Non-constant scale: install a converter matching the integer width so
      // the scale can be read per row at evaluation time.
      switch(inputScaleOI.getPrimitiveCategory()) {
      case BYTE:
        byteConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableByteObjectInspector);
        break;
      case SHORT:
        shortConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableShortObjectInspector);
        break;
      case INT:
        intConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
        break;
      case LONG:
        longConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableLongObjectInspector);
        break;
      default:
        throw new UDFArgumentTypeException(1, getFuncName().toUpperCase() + " second argument only takes integer values");
      }
    }
  }
  inputType1 = inputOI.getPrimitiveCategory();
  ObjectInspector outputOI;
  switch(inputType1) {
  // The DECIMAL and VOID..DOUBLE branches were duplicates of the same body;
  // they are merged here — truncation preserves the input's primitive type.
  case DECIMAL:
  case VOID:
  case BYTE:
  case SHORT:
  case INT:
  case LONG:
  case FLOAT:
  case DOUBLE:
    outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputType1);
    break;
  default:
    throw new UDFArgumentTypeException(0, "Only numeric or string group data types are allowed for TRUNC function. Got " + inputType1.name());
  }
  return outputOI;
}
Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache:
the class GenericUDTFJSONTuple, method initialize.
/**
 * Sets up json_tuple(): the first argument is the JSON string and each
 * remaining argument is a path expression; every argument must be a string.
 * One writable Text output column is produced per path expression.
 *
 * @param args argument inspectors (json string followed by path expressions)
 * @return the struct inspector describing the emitted columns
 * @throws UDFArgumentException if fewer than two arguments are given or any
 *         argument is not of string type
 */
@Override
public StructObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
  inputOIs = args;
  numCols = args.length - 1;
  jsonObjectCache = new HashCache<>();
  if (numCols < 1) {
    throw new UDFArgumentException("json_tuple() takes at least two arguments: " + "the json string and a path expression");
  }
  // Every argument — json string and paths alike — must be a primitive string.
  for (ObjectInspector oi : args) {
    boolean isString = oi.getCategory() == ObjectInspector.Category.PRIMITIVE
        && oi.getTypeName().equals(serdeConstants.STRING_TYPE_NAME);
    if (!isString) {
      throw new UDFArgumentException("json_tuple()'s arguments have to be string type");
    }
  }
  seenErrors = false;
  pathParsed = false;
  paths = new String[numCols];
  cols = new Text[numCols];
  retCols = new Text[numCols];
  nullCols = new Object[numCols];
  // Build the per-column buffers and the output inspector in a single pass.
  ArrayList<String> fieldNames = new ArrayList<String>(numCols);
  ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(numCols);
  for (int col = 0; col < numCols; ++col) {
    cols[col] = new Text();
    retCols[col] = cols[col];
    nullCols[col] = null;
    // Column names are placeholders; the UDTF AS clause renames them.
    fieldNames.add("c" + col);
    // Every returned column is a writable string (Text).
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  }
  return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
Use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in project hive by apache:
the class GenericUDTFInline, method initialize.
/**
 * Initializes inline(): the single argument must be an array of structs, and
 * each struct element of the array becomes one output row.
 *
 * @param ois argument inspectors; must contain exactly one LIST-of-STRUCT
 * @return the struct inspector describing each emitted row
 * @throws UDFArgumentException if the argument count or shape is wrong
 */
@Override
public StructObjectInspector initialize(ObjectInspector[] ois) throws UDFArgumentException {
  // There should be one argument that is an array of struct.
  if (ois.length != 1) {
    // Fixed user-facing typo: original message read "UDF tables only one argument".
    throw new UDFArgumentException("UDF takes only one argument");
  }
  if (ois[0].getCategory() != Category.LIST) {
    throw new UDFArgumentException("Top level object must be an array but " + "was " + ois[0].getTypeName());
  }
  li = (ListObjectInspector) ois[0];
  ObjectInspector sub = li.getListElementObjectInspector();
  if (sub.getCategory() != Category.STRUCT) {
    throw new UDFArgumentException("The sub element must be struct, but was " + sub.getTypeName());
  }
  return (StructObjectInspector) sub;
}
Aggregations