Example use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project: the convertToDate method of the GenericUDFDateDiff class.
/**
 * Converts a deferred UDF argument to a {@link Date}, honoring the argument's
 * primitive category.
 *
 * <p>String-like inputs (STRING/VARCHAR/CHAR) are parsed with the shared
 * {@code formatter}; unparseable strings yield SQL NULL rather than an error.
 * TIMESTAMP and DATE inputs are unwrapped from their writable forms. Any other
 * category is rejected.
 *
 * @param inputType primitive category of the argument
 * @param converter converter prepared for this argument's object inspector
 * @param argument  the deferred argument to convert
 * @return the converted Date, or null when the argument (or its parse) is null
 * @throws HiveException if the input type is not STRING/TIMESTAMP/DATE-like
 */
private Date convertToDate(PrimitiveCategory inputType, Converter converter, DeferredObject argument) throws HiveException {
    assert (converter != null);
    assert (argument != null);
    if (argument.get() == null) {
        return null;
    }
    Date result = new Date(0);
    switch (inputType) {
        case STRING:
        case VARCHAR:
        case CHAR:
            String text = converter.convert(argument.get()).toString();
            try {
                result.setTime(formatter.parse(text).getTime());
            } catch (ParseException e) {
                // Malformed date string: return NULL instead of failing the query.
                return null;
            }
            break;
        case TIMESTAMP:
            Timestamp timestamp = ((TimestampWritable) converter.convert(argument.get())).getTimestamp();
            result.setTime(timestamp.getTime());
            break;
        case DATE:
            result = ((DateWritable) converter.convert(argument.get())).get();
            break;
        default:
            throw new UDFArgumentException("TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
    }
    return result;
}
Example use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project: the initialize method of the GenericUDFToDate class.
/**
 * Validates the argument list for CAST(... AS DATE) and prepares the converter.
 *
 * <p>Exactly one primitive argument belonging to the DATE, STRING, or VOID
 * primitive grouping is accepted; everything else is rejected with a
 * {@link UDFArgumentException}.
 *
 * @param arguments object inspectors for the call-site arguments
 * @return the writable-date object inspector describing this UDF's result
 * @throws UDFArgumentException if no argument is given, the argument is not a
 *         primitive type, or its primitive grouping is unsupported
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 1) {
        throw new UDFArgumentLengthException("The function CAST as DATE requires at least one argument, got " + arguments.length);
    }
    // Explicit instanceof check instead of catching ClassCastException:
    // exceptions should not be used for ordinary control flow.
    if (!(arguments[0] instanceof PrimitiveObjectInspector)) {
        throw new UDFArgumentException("The function CAST as DATE takes only primitive types");
    }
    argumentOI = (PrimitiveObjectInspector) arguments[0];
    PrimitiveCategory pc = argumentOI.getPrimitiveCategory();
    PrimitiveGrouping pg = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(pc);
    switch (pg) {
        case DATE_GROUP:
        case STRING_GROUP:
        case VOID_GROUP:
            break;
        default:
            throw new UDFArgumentException("CAST as DATE only allows date,string, or timestamp types");
    }
    dc = new DateConverter(argumentOI, PrimitiveObjectInspectorFactory.writableDateObjectInspector);
    return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
}
Example use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project: the evaluate method of the GenericUDFSQCountCheck class.
/**
 * Enforces subquery cardinality rules on the count produced by a subquery.
 *
 * <p>With one argument (scalar subquery) the count must be at most 1. With two
 * arguments (IN/NOT IN subquery with an aggregate) a null or zero count is
 * rejected as unsupported. On success the count is passed through unchanged.
 *
 * @param arguments deferred arguments; index 0 carries the row/count value
 * @return the count wrapped in the reusable {@code resultLong} writable
 * @throws HiveException if the cardinality constraint is violated
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Long val = getLongValue(arguments, 0, converters);
    switch (arguments.length) {
        case 1:
            // Scalar subquery: must produce at most one row.
            if (val > 1) {
                throw new UDFArgumentException(" Scalar subquery expression returns more than one row.");
            }
            break;
        case 2:
            // Merged the duplicated throws: a null value and a zero count both
            // raise the identical error. Short-circuit keeps the null check
            // first, so `val` is only unboxed when the raw value is non-null.
            // Reuses `val` rather than recomputing getLongValue a second time.
            Object valObject = arguments[0].get();
            if (valObject == null || val == 0) {
                throw new UDFArgumentException(" IN/NOT IN subquery with aggregate returning zero result. Currently this is not supported.");
            }
            break;
    }
    resultLong.set(val);
    return resultLong;
}
Example use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Flink project: the initialize method of the GenericUDFLegacyGroupingID class.
/**
 * Validates the two expected arguments — the new-style GROUPING__ID value and
 * the number of GROUP BY expressions — and records their inspectors.
 *
 * <p>The first argument must be a primitive LONG; the second must be a
 * constant primitive INT in the range [1, 64].
 *
 * @param arguments object inspectors for the call-site arguments
 * @return the writable-long object inspector describing this UDF's result
 * @throws UDFArgumentException if arity, types, constness, or range checks fail
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // we accept two arguments: the new GROUPING__ID and the number of GBY expressions
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("Expect 2 arguments but actually got " + arguments.length);
    }
    // Both arguments must be primitive; report the offending position.
    String[] ordinals = {"First", "Second"};
    for (int i = 0; i < 2; i++) {
        if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
            throw new UDFArgumentTypeException(i, ordinals[i] + " argument should be primitive type");
        }
    }
    groupingIdOI = (PrimitiveObjectInspector) arguments[0];
    if (groupingIdOI.getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.LONG) {
        throw new UDFArgumentTypeException(0, "First argument should be a long");
    }
    PrimitiveObjectInspector exprCountOI = (PrimitiveObjectInspector) arguments[1];
    if (exprCountOI.getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.INT) {
        throw new UDFArgumentTypeException(1, "Second argument should be an int");
    }
    if (!(exprCountOI instanceof ConstantObjectInspector)) {
        throw new UDFArgumentTypeException(1, "Second argument should be a constant");
    }
    numExprs = PrimitiveObjectInspectorUtils.getInt(((ConstantObjectInspector) exprCountOI).getWritableConstantValue(), exprCountOI);
    // A legacy grouping id is a 64-bit mask, so 1..64 expressions are representable.
    if (numExprs < 1 || numExprs > 64) {
        throw new UDFArgumentException("Number of GROUP BY expressions out of range: " + numExprs);
    }
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
}
Example use of org.apache.hadoop.hive.ql.exec.UDFArgumentException in the Apache Hive project: the evaluate method of the GenericUDFAdd10 class.
/**
 * Adds 10 to the single argument, choosing the arithmetic and result writable
 * by the input's primitive category.
 *
 * <p>SHORT/BYTE/INT produce an int result; LONG a long; FLOAT/STRING/DOUBLE a
 * double; DECIMAL a HiveDecimal. A null input (or a null decimal after
 * unwrapping) yields null.
 *
 * @param arguments deferred arguments; only index 0 is used
 * @return a reusable writable holding input + 10, or null for null input
 * @throws HiveException if the input type is not one of the supported categories
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object value = arguments[0].get();
    if (value == null) {
        // NULL in, NULL out.
        return null;
    }
    switch (inputType) {
        case SHORT:
        case BYTE:
        case INT: {
            IntWritable asInt = (IntWritable) inputConverter.convert(value);
            resultInt.set(10 + asInt.get());
            return resultInt;
        }
        case LONG: {
            LongWritable asLong = (LongWritable) inputConverter.convert(value);
            resultLong.set(10 + asLong.get());
            return resultLong;
        }
        case FLOAT:
        case STRING:
        case DOUBLE: {
            DoubleWritable asDouble = (DoubleWritable) inputConverter.convert(value);
            resultDouble.set(10.0 + asDouble.get());
            return resultDouble;
        }
        case DECIMAL: {
            HiveDecimalObjectInspector decimalOI = (HiveDecimalObjectInspector) argumentOI;
            HiveDecimalWritable decimal = decimalOI.getPrimitiveWritableObject(value);
            if (decimal == null) {
                return null;
            }
            resultDecimal.set(decimal.getHiveDecimal().add(HiveDecimal.create("10")));
            return resultDecimal;
        }
        default:
            throw new UDFArgumentException("ADD10 only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
    }
}
Aggregations