use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
the class GenericUDFOPDTIPlus method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentException(opName + " requires two arguments.");
  }
  PrimitiveObjectInspector resultOI = null;
  for (int i = 0; i < 2; i++) {
    Category category = arguments[i].getCategory();
    if (category != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(i, "The " + GenericUDFUtils.getOrdinal(i + 1)
          + " argument of " + opName + " is expected to be a "
          + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
          + category.toString().toLowerCase() + " is found");
    }
  }
  inputOIs = new PrimitiveObjectInspector[] {
      (PrimitiveObjectInspector) arguments[0],
      (PrimitiveObjectInspector) arguments[1] };
  PrimitiveObjectInspector leftOI = inputOIs[0];
  PrimitiveObjectInspector rightOI = inputOIs[1];
  // Supported operand combinations (operands reversible where both orders are checked below):
  //   IntervalYearMonth + IntervalYearMonth = IntervalYearMonth
  //   IntervalYearMonth + Date = Date,  IntervalYearMonth + Timestamp = Timestamp
  //   IntervalDayTime + IntervalDayTime = IntervalDayTime
  //   IntervalDayTime + Date/Timestamp = Timestamp
  if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    plusOpType = OperationType.INTERVALYM_PLUS_INTERVALYM;
    intervalArg1Idx = 0;
    intervalArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalYearMonthTypeInfo);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    plusOpType = OperationType.INTERVALYM_PLUS_DATE;
    dtArgIdx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.dateTypeInfo);
  } else if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.DATE)) {
    plusOpType = OperationType.INTERVALYM_PLUS_DATE;
    intervalArg1Idx = 0;
    dtArgIdx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.dateTypeInfo);
  } else if (checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    plusOpType = OperationType.INTERVALYM_PLUS_TIMESTAMP;
    dtArgIdx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
  } else if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.TIMESTAMP)) {
    plusOpType = OperationType.INTERVALYM_PLUS_TIMESTAMP;
    intervalArg1Idx = 0;
    dtArgIdx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
  } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.INTERVAL_DAY_TIME)) {
    plusOpType = OperationType.INTERVALDT_PLUS_INTERVALDT;
    intervalArg1Idx = 0;
    intervalArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalDayTimeTypeInfo);
  } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.DATE)
      || checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.TIMESTAMP)) {
    plusOpType = OperationType.INTERVALDT_PLUS_TIMESTAMP;
    intervalArg1Idx = 0;
    dtArgIdx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
    // Convert the date/timestamp operand (the right operand in this branch) to the timestamp result type.
    dtConverter = ObjectInspectorConverters.getConverter(rightOI, resultOI);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_DAY_TIME)
      || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_DAY_TIME)) {
    plusOpType = OperationType.INTERVALDT_PLUS_TIMESTAMP;
    intervalArg1Idx = 1;
    dtArgIdx = 0;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
    dtConverter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
  } else {
    // Unsupported types - error
    List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2);
    argTypeInfos.add(leftOI.getTypeInfo());
    argTypeInfos.add(rightOI.getTypeInfo());
    throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null);
  }
  return resultOI;
}
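Every branch above obtains its result inspector from the same factory call, keyed by a TypeInfoFactory constant. Below is a minimal standalone sketch (the class and variable names are illustrative, not from the Hive source) of the three result inspectors the plus operator can hand back:

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ResultInspectorSketch {
  public static void main(String[] args) {
    // interval_year_month + interval_year_month -> interval_year_month
    PrimitiveObjectInspector ymOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableObjectInspector(TypeInfoFactory.intervalYearMonthTypeInfo);
    // interval_year_month + date -> date
    PrimitiveObjectInspector dateOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableObjectInspector(TypeInfoFactory.dateTypeInfo);
    // remaining branches (timestamp operand, or interval_day_time with date) -> timestamp
    PrimitiveObjectInspector tsOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableObjectInspector(TypeInfoFactory.timestampTypeInfo);
    System.out.println(ymOI.getTypeName());   // interval_year_month
    System.out.println(dateOI.getTypeName()); // date
    System.out.println(tsOI.getTypeName());   // timestamp
  }
}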
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
the class GenericUDFToChar method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentException("CHAR cast requires a value argument");
  }
  try {
    argumentOI = (PrimitiveObjectInspector) arguments[0];
  } catch (ClassCastException e) {
    throw new UDFArgumentException("The function CHAR takes only primitive types");
  }
  // Check if this UDF has been provided with type params for the output char type
  SettableHiveCharObjectInspector outputOI;
  outputOI = (SettableHiveCharObjectInspector) PrimitiveObjectInspectorFactory
      .getPrimitiveWritableObjectInspector(typeInfo);
  converter = new HiveCharConverter(argumentOI, outputOI);
  return outputOI;
}
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
the class GenericUDFToVarchar method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentException("VARCHAR cast requires a value argument");
  }
  try {
    argumentOI = (PrimitiveObjectInspector) arguments[0];
  } catch (ClassCastException e) {
    throw new UDFArgumentException("The function VARCHAR takes only primitive types");
  }
  // Check if this UDF has been provided with type params for the output varchar type
  SettableHiveVarcharObjectInspector outputOI;
  outputOI = (SettableHiveVarcharObjectInspector) PrimitiveObjectInspectorFactory
      .getPrimitiveWritableObjectInspector(typeInfo);
  converter = new HiveVarcharConverter(argumentOI, outputOI);
  return outputOI;
}
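In both casts above, initialize() reads a typeInfo field that is never set inside the snippet itself; GenericUDFToChar and GenericUDFToVarchar implement the SettableUDF interface, so the qualified char(n)/varchar(n) type is expected to be pushed in before initialization. A minimal sketch of that handshake for the varchar case (the class name and the varchar(10) length are illustrative assumptions):

import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToVarchar;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ToVarcharInitSketch {
  public static void main(String[] args) throws Exception {
    GenericUDFToVarchar udf = new GenericUDFToVarchar();
    // Push the qualified target type in first; initialize() builds its output
    // inspector from this typeInfo.
    udf.setTypeInfo(TypeInfoFactory.getVarcharTypeInfo(10));
    ObjectInspector[] inputs = { PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(inputs);
    System.out.println(outputOI.getTypeName()); // expected: varchar(10)
  }
}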
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
the class AbstractTestGenericUDFOPNumeric method verifyReturnType.
protected void verifyReturnType(GenericUDF udf, String typeStr1, String typeStr2,
    String expectedTypeStr) throws HiveException {
  // Look up type infos for our input types and expected return type
  PrimitiveTypeInfo type1 = TypeInfoFactory.getPrimitiveTypeInfo(typeStr1);
  PrimitiveTypeInfo type2 = TypeInfoFactory.getPrimitiveTypeInfo(typeStr2);
  PrimitiveTypeInfo expectedType = TypeInfoFactory.getPrimitiveTypeInfo(expectedTypeStr);
  // Initialize the UDF; the ObjectInspector it returns gives the UDF's return type.
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type1),
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type2) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals("Return type for " + udf.getDisplayString(new String[] { typeStr1, typeStr2 }),
      expectedType, oi.getTypeInfo());
}
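A hypothetical call into this helper from a concrete test subclass could look like the following (the operator and type strings are illustrative; the expected results follow Hive's usual numeric promotion, where mixing int and double widens to double):

// Hypothetical usage from a subclass of AbstractTestGenericUDFOPNumeric:
verifyReturnType(new GenericUDFOPPlus(), "double", "double", "double");
verifyReturnType(new GenericUDFOPPlus(), "int", "double", "double");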
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
the class TestGenericUDFAbs method testHiveDecimal.
public void testHiveDecimal() throws HiveException {
  GenericUDFAbs udf = new GenericUDFAbs();
  int prec = 12;
  int scale = 9;
  ObjectInspector valueOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
      TypeInfoFactory.getDecimalTypeInfo(prec, scale));
  ObjectInspector[] arguments = { valueOI };
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(arguments);
  // Make sure the result precision/scale matches the input precision/scale
  assertEquals("result precision for abs()", prec, outputOI.precision());
  assertEquals("result scale for abs()", scale, outputOI.scale());
  DeferredObject valueObj = new DeferredJavaObject(
      new HiveDecimalWritable(HiveDecimal.create("107.123456789")));
  DeferredObject[] args = { valueObj };
  HiveDecimalWritable output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs() test for HiveDecimal failed ", 107.123456789, output.getHiveDecimal().doubleValue());
  valueObj = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("-107.123456789")));
  args[0] = valueObj;
  output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs() test for HiveDecimal failed ", 107.123456789, output.getHiveDecimal().doubleValue());
  // null input yields null output
  args[0] = new DeferredJavaObject(null);
  output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs(null)", null, output);
  // decimal(12,9) allows only 3 integer digits, so 1000.123456 overflows the declared
  // precision and the enforced result is null
  args[0] = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("-1000.123456")));
  output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs() of too large decimal value", null, output);
}
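The precision()/scale() assertions above work because the inspector returned by getPrimitiveWritableObjectInspector carries the qualified decimal type it was built from. A standalone sketch isolating just that behavior (illustrative, not part of the Hive test suite):

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DecimalInspectorSketch {
  public static void main(String[] args) {
    PrimitiveObjectInspector oi = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(12, 9));
    System.out.println(oi.getTypeName()); // expected: decimal(12,9)
    System.out.println(oi.precision());   // expected: 12
    System.out.println(oi.scale());       // expected: 9
  }
}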