Usage of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project.
From the class GenericUDFAbs, method initialize:
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
if (arguments.length != 1) {
throw new UDFArgumentLengthException("ABS() requires 1 argument, got " + arguments.length);
}
if (arguments[0].getCategory() != Category.PRIMITIVE) {
throw new UDFArgumentException("ABS only takes primitive types, got " + arguments[0].getTypeName());
}
argumentOI = (PrimitiveObjectInspector) arguments[0];
inputType = argumentOI.getPrimitiveCategory();
ObjectInspector outputOI = null;
switch(inputType) {
case SHORT:
case BYTE:
case INT:
inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
break;
case LONG:
inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableLongObjectInspector);
outputOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
break;
case FLOAT:
case STRING:
case DOUBLE:
inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
outputOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
break;
case DECIMAL:
outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(((PrimitiveObjectInspector) arguments[0]).getTypeInfo());
inputConverter = ObjectInspectorConverters.getConverter(arguments[0], outputOI);
break;
default:
throw new UDFArgumentException("ABS only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
}
return outputOI;
}
Usage of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project.
From the class GenericUDFAbs, method evaluate:
/**
 * Computes the absolute value of the single argument, reusing the per-type
 * writable result object set up by initialize(). Returns null for null input,
 * and also when a STRING input fails numeric conversion.
 *
 * @param arguments exactly one deferred argument
 * @return a reused writable holding |value|, or null
 * @throws HiveException if the input type was never matched in initialize()
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  final Object input = arguments[0].get();
  if (input == null) {
    return null;
  }
  switch (inputType) {
    case SHORT:
    case BYTE:
    case INT: {
      IntWritable iw = (IntWritable) inputConverter.convert(input);
      resultInt.set(Math.abs(iw.get()));
      return resultInt;
    }
    case LONG: {
      LongWritable lw = (LongWritable) inputConverter.convert(input);
      resultLong.set(Math.abs(lw.get()));
      return resultLong;
    }
    case FLOAT:
    case STRING:
    case DOUBLE: {
      DoubleWritable dw = (DoubleWritable) inputConverter.convert(input);
      // A non-numeric STRING converts to null; propagate it.
      if (dw == null) {
        return null;
      }
      resultDouble.set(Math.abs(dw.get()));
      return resultDouble;
    }
    case DECIMAL: {
      HiveDecimalObjectInspector decimalOI = (HiveDecimalObjectInspector) argumentOI;
      HiveDecimalWritable dec = decimalOI.getPrimitiveWritableObject(input);
      if (dec == null) {
        return null;
      }
      // Copy into the reusable result before mutating in place.
      resultDecimal.set(dec);
      resultDecimal.mutateAbs();
      return resultDecimal;
    }
    default:
      throw new UDFArgumentException("ABS only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
  }
}
Usage of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project.
From the class GenericUDFBaseArithmetic, method initialize:
// Dispatches a binary arithmetic operator to either a date/time UDF or a
// numeric UDF, depending on the primitive categories of the two operands,
// then delegates initialization to the chosen implementation.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
if (arguments.length != 2) {
throw new UDFArgumentException(getClass().getSimpleName() + " requires two arguments.");
}
// Lookup values needed for numeric arithmetic UDFs
if (confLookupNeeded) {
CompatLevel compatLevel = HiveCompat.getCompatLevel(SessionState.get().getConf());
// ANSI SQL arithmetic applies for compat levels newer than Hive 0.12.
ansiSqlArithmetic = compatLevel.ordinal() > CompatLevel.HIVE_0_12.ordinal();
// Cleared so repeated initialize() calls skip the session lookup.
confLookupNeeded = false;
}
// Determine if we are dealing with a numeric or date arithmetic operation
boolean isDateTimeOp = false;
for (int idx = 0; idx < 2; ++idx) {
// NOTE(review): unconditional cast — a non-primitive operand would raise
// ClassCastException here rather than UDFArgumentException; presumably
// earlier validation guarantees primitives. Confirm against callers.
switch(((PrimitiveObjectInspector) arguments[idx]).getPrimitiveCategory()) {
case DATE:
case TIMESTAMP:
case INTERVAL_YEAR_MONTH:
case INTERVAL_DAY_TIME:
// Either operand being a date/time type makes this a date/time op.
// (break exits the switch only; the loop still checks both operands.)
isDateTimeOp = true;
break;
default:
break;
}
}
if (isDateTimeOp) {
arithmeticOperation = instantiateDTIUDF();
} else {
GenericUDFBaseNumeric numericUDF = instantiateNumericUDF();
// Set values needed for numeric arithmetic UDFs
numericUDF.setAnsiSqlArithmetic(ansiSqlArithmetic);
// confLookupNeeded is false by this point (cleared above on first call),
// so the child UDF will not repeat the conf lookup itself.
numericUDF.setConfLookupNeeded(confLookupNeeded);
arithmeticOperation = numericUDF;
}
// Delegate argument/type resolution to the concrete operation.
return arithmeticOperation.initialize(arguments);
}
Usage of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project.
From the class GenericUDFTimestamp, method initialize:
/**
 * Validates the single argument of CAST(... AS TIMESTAMP) and configures the
 * conversion flags from the active Hive configuration.
 *
 * Fix: the original called SessionState.get() four times and, when no session
 * existed, constructed two throwaway HiveConf instances (one per getBoolVar).
 * The configuration is now resolved exactly once.
 *
 * @param arguments exactly one primitive argument from an accepted type group
 * @return the writable timestamp output inspector
 * @throws UDFArgumentException on bad arity/type, or on a NUMERIC input when
 *         strict timestamp conversion is enabled
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  checkArgsSize(arguments, 1, 1);
  checkArgPrimitive(arguments, 0);
  checkArgGroups(arguments, 0, tsInputTypes, STRING_GROUP, DATE_GROUP, NUMERIC_GROUP, VOID_GROUP, BOOLEAN_GROUP);
  // Prefer the session's configuration; fall back to a default HiveConf
  // (e.g. when running outside a SessionState, as the original did).
  SessionState ss = SessionState.get();
  HiveConf conf = ss != null ? ss.getConf() : new HiveConf();
  strict = conf.getBoolVar(ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION);
  intToTimestampInSeconds = conf.getBoolVar(ConfVars.HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS);
  if (strict) {
    // Strict mode forbids numeric -> timestamp casts entirely.
    if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(tsInputTypes[0]) == PrimitiveGrouping.NUMERIC_GROUP) {
      throw new UDFArgumentException("Casting NUMERIC types to TIMESTAMP is prohibited (" + ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION + ")");
    }
  }
  obtainTimestampConverter(arguments, 0, tsInputTypes, tsConvertors);
  return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
}
Usage of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project.
From the class GenericUDFToDecimal, method initialize:
/**
 * Validates the argument of CAST(... AS DECIMAL(p,s)) and builds the
 * converter targeting the decimal type parameters held in {@code typeInfo}.
 *
 * Fix: the original caught ClassCastException as control flow to detect a
 * non-primitive argument; this now uses an explicit instanceof check (same
 * exception type and message for callers), matching the category-check style
 * used by other UDF initialize() methods.
 *
 * @param arguments at least one argument; only the first is used
 * @return the settable writable decimal output inspector
 * @throws UDFArgumentException if no argument is given or it is non-primitive
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length < 1) {
    throw new UDFArgumentLengthException("The function DECIMAL requires at least one argument, got " + arguments.length);
  }
  if (!(arguments[0] instanceof PrimitiveObjectInspector)) {
    throw new UDFArgumentException("The function DECIMAL takes only primitive types");
  }
  argumentOI = (PrimitiveObjectInspector) arguments[0];
  // Output inspector carries the precision/scale captured in typeInfo.
  SettableHiveDecimalObjectInspector outputOI =
      (SettableHiveDecimalObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
  bdConverter = new HiveDecimalConverter(argumentOI, outputOI);
  return outputOI;
}
Aggregations