Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
The class GenericUDFSortArrayByField, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
  returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver(true);
  /** This UDF requires a minimum of 2 arguments: array name, field name */
  if (arguments.length < 2) {
    throw new UDFArgumentLengthException("SORT_ARRAY_BY requires minimum 2 arguments, got " + arguments.length);
  }
  /** The first argument must be an array */
  switch (arguments[0].getCategory()) {
    case LIST:
      listObjectInspector = (ListObjectInspector) arguments[0];
      break;
    default:
      throw new UDFArgumentTypeException(0, "Argument 1 of function SORT_ARRAY_BY must be " + serdeConstants.LIST_TYPE_NAME
          + ", but " + arguments[0].getTypeName() + " was found.");
  }
  /** Elements inside the first argument (array) must be tuple(s) */
  switch (listObjectInspector.getListElementObjectInspector().getCategory()) {
    case STRUCT:
      structObjectInspector = (StructObjectInspector) listObjectInspector.getListElementObjectInspector();
      break;
    default:
      throw new UDFArgumentTypeException(0, "Element[s] of first argument array in function SORT_ARRAY_BY must be "
          + serdeConstants.STRUCT_TYPE_NAME + ", but " + listObjectInspector.getTypeName() + " was found.");
  }
  /** All sort-field arguments and the sort-order argument must be of string type */
  converters = new Converter[arguments.length];
  inputTypes = new PrimitiveCategory[arguments.length];
  fields = new StructField[arguments.length - 1];
  noOfInputFields = arguments.length - 1;
  for (int i = 1; i < arguments.length; i++) {
    checkArgPrimitive(arguments, i);
    checkArgGroups(arguments, i, inputTypes, PrimitiveGrouping.STRING_GROUP);
    if (arguments[i] instanceof ConstantObjectInspector) {
      String fieldName = getConstantStringValue(arguments, i);
      /** Check whether a sorting order (ASC, DESC) has been specified in the last argument */
      if (i != 1 && (i == arguments.length - 1)
          && (fieldName.trim().toUpperCase().equals(SORT_ORDER_TYPE.ASC.name())
              || fieldName.trim().toUpperCase().equals(SORT_ORDER_TYPE.DESC.name()))) {
        sortOrder = SORT_ORDER_TYPE.valueOf(fieldName.trim().toUpperCase());
        noOfInputFields -= 1;
        continue;
      }
      fields[i - 1] = structObjectInspector.getStructFieldRef(fieldName);
    }
    obtainStringConverter(arguments, i, inputTypes, converters);
  }
  ObjectInspector returnOI = returnOIResolver.get(structObjectInspector);
  converters[0] = ObjectInspectorConverters.getConverter(structObjectInspector, returnOI);
  return ObjectInspectorFactory.getStandardListObjectInspector(structObjectInspector);
}
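For context, a minimal sketch of how this initialize method could be driven directly, modeled on the test pattern at the bottom of this page. The struct layout (a single int field named "price") and the choice of a constant string inspector for the field name are illustrative assumptions, not taken from the Hive test suite; the usual serde2 and java.util imports are assumed.

// Sketch only: an array<struct<price:int>> inspector plus a constant field-name argument.
ObjectInspector elementOI = ObjectInspectorFactory.getStandardStructObjectInspector(
    Arrays.asList("price"),
    Arrays.asList((ObjectInspector) PrimitiveObjectInspectorFactory.writableIntObjectInspector));
ObjectInspector arrayOI = ObjectInspectorFactory.getStandardListObjectInspector(elementOI);
ObjectInspector fieldNameOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
    TypeInfoFactory.stringTypeInfo, new Text("price"));
GenericUDFSortArrayByField udf = new GenericUDFSortArrayByField();
// Because fieldNameOI is a ConstantObjectInspector, initialize resolves the
// struct field reference up front instead of deferring it to evaluate.
ObjectInspector resultOI = udf.initialize(new ObjectInspector[] { arrayOI, fieldNameOI });
// resultOI is a standard list inspector over the same struct element type.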
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
The class GenericUDFSplit, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("The function SPLIT(s, regexp) takes exactly 2 arguments.");
  }
  converters = new ObjectInspectorConverters.Converter[arguments.length];
  for (int i = 0; i < arguments.length; i++) {
    converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  }
  ObjectInspector rightArg = arguments[1];
  if (rightArg instanceof ConstantObjectInspector) {
    constPattern = Pattern.compile(((ConstantObjectInspector) rightArg).getWritableConstantValue().toString());
  }
  return ObjectInspectorFactory.getStandardListObjectInspector(
      PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
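A hedged usage sketch for the snippet above, following the common GenericUDF test pattern; the sample input "a,b,c" and the "," delimiter are assumptions.

GenericUDFSplit udf = new GenericUDFSplit();
ObjectInspector stringOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
// Neither argument here is a ConstantObjectInspector, so constPattern stays
// null and the pattern is handled per call inside evaluate instead.
ObjectInspector resultOI = udf.initialize(new ObjectInspector[] { stringOI, stringOI });
Object result = udf.evaluate(new GenericUDF.DeferredObject[] {
    new GenericUDF.DeferredJavaObject("a,b,c"),
    new GenericUDF.DeferredJavaObject(",") });
// result is a list of Text values: [a, b, c]

The ConstantObjectInspector branch is purely an optimization: when the planner can prove the pattern argument is a literal, the regex is compiled once at initialize time rather than for every row.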
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
The class MaskTransformer, method getCharArg.
int getCharArg(ObjectInspector[] arguments, int index, int defaultValue) {
  int ret = defaultValue;
  ObjectInspector arg = (arguments != null && arguments.length > index) ? arguments[index] : null;
  if (arg != null) {
    if (arg instanceof WritableConstantIntObjectInspector) {
      IntWritable value = ((WritableConstantIntObjectInspector) arg).getWritableConstantValue();
      if (value != null) {
        ret = value.get();
      }
    } else if (arg instanceof WritableConstantLongObjectInspector) {
      LongWritable value = ((WritableConstantLongObjectInspector) arg).getWritableConstantValue();
      if (value != null) {
        ret = (int) value.get();
      }
    } else if (arg instanceof WritableConstantShortObjectInspector) {
      ShortWritable value = ((WritableConstantShortObjectInspector) arg).getWritableConstantValue();
      if (value != null) {
        ret = value.get();
      }
    } else if (arg instanceof ConstantObjectInspector) {
      Object value = ((ConstantObjectInspector) arg).getWritableConstantValue();
      if (value != null) {
        String strValue = value.toString();
        if (strValue != null && strValue.length() > 0) {
          ret = strValue.charAt(0);
        }
      }
    }
  }
  return ret;
}
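For illustration, a sketch of the constant inspectors this helper accepts; the getPrimitiveWritableConstantObjectInspector factory calls mirror the test at the bottom of this page, and the literal values are assumptions.

// Each of these is the Writable constant inspector matched by one branch above.
ObjectInspector intOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
    TypeInfoFactory.intTypeInfo, new IntWritable(42));
ObjectInspector longOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
    TypeInfoFactory.longTypeInfo, new LongWritable(42L));
ObjectInspector shortOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
    TypeInfoFactory.shortTypeInfo, new ShortWritable((short) 42));
ObjectInspector charOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
    TypeInfoFactory.stringTypeInfo, new Text("*"));
// getCharArg resolves the first three to 42, and the last to '*' (also 42,
// coincidentally: '*' is 0x2A). A non-constant inspector, or a null/empty
// constant value, falls through to defaultValue.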
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
The class GenericUDFMonthsBetween, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  checkArgsSize(arguments, 2, 3);
  checkArgPrimitive(arguments, 0);
  checkArgPrimitive(arguments, 1);
  if (arguments.length == 3) {
    if (arguments[2] instanceof ConstantObjectInspector) {
      isRoundOffNeeded = getConstantBooleanValue(arguments, 2);
    }
  }
  // the function should support both short date and full timestamp format;
  // the time part of the timestamp should not be skipped
  checkArgGroups(arguments, 0, tsInputTypes, STRING_GROUP, DATE_GROUP);
  checkArgGroups(arguments, 1, tsInputTypes, STRING_GROUP, DATE_GROUP);
  checkArgGroups(arguments, 0, dtInputTypes, STRING_GROUP, DATE_GROUP);
  checkArgGroups(arguments, 1, dtInputTypes, STRING_GROUP, DATE_GROUP);
  obtainTimestampConverter(arguments, 0, tsInputTypes, tsConverters);
  obtainTimestampConverter(arguments, 1, tsInputTypes, tsConverters);
  obtainDateConverter(arguments, 0, dtInputTypes, dtConverters);
  obtainDateConverter(arguments, 1, dtInputTypes, dtConverters);
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
  return outputOI;
}
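A hedged driver for the method above. The date literals follow the widely used months_between documentation example, and the constant boolean third argument shows how isRoundOffNeeded is picked up at initialize time; treat the exact output value as approximate.

GenericUDFMonthsBetween udf = new GenericUDFMonthsBetween();
ObjectInspector strOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
// The third argument is a ConstantObjectInspector, so initialize reads it
// via getConstantBooleanValue and disables rounding for every row.
ObjectInspector roundOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
    TypeInfoFactory.booleanTypeInfo, new BooleanWritable(false));
udf.initialize(new ObjectInspector[] { strOI, strOI, roundOI });
Object res = udf.evaluate(new GenericUDF.DeferredObject[] {
    new GenericUDF.DeferredJavaObject("1997-02-28 10:30:00"),
    new GenericUDF.DeferredJavaObject("1996-10-30"),
    new GenericUDF.DeferredJavaObject(new BooleanWritable(false)) });
// res is a DoubleWritable holding the unrounded month fraction
// (roughly 3.9495967 for these inputs).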
Use of org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector in project hive by apache.
The class TestGenericUDFInternalInterval, method testDayIntervalConstant.
@Test
public void testDayIntervalConstant() throws Exception {
  try (GenericUDFInternalInterval udf = new GenericUDFInternalInterval()) {
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
            TypeInfoFactory.intTypeInfo, new IntWritable(HiveParser.TOK_INTERVAL_DAY_LITERAL)),
        PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
            TypeInfoFactory.intTypeInfo, new IntWritable(3)) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.intervalDayTimeTypeInfo, oi.getTypeInfo());
    ConstantObjectInspector coi = (ConstantObjectInspector) oi;
    HiveIntervalDayTimeWritable res = (HiveIntervalDayTimeWritable) coi.getWritableConstantValue();
    Assert.assertEquals(3, res.getHiveIntervalDayTime().getDays());
  }
}
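Note that the test never calls evaluate: because both arguments are constant int inspectors, initialize folds the interval at compile time, and the returned inspector is itself a ConstantObjectInspector whose getWritableConstantValue already carries the 3-day interval.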