Usage of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBooleanObjectInspector in the Apache Hive project:
the initialize method of the class GenericUDFBaseCompare.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  // Comparison operators are strictly binary.
  if (arguments.length != 2) {
    throw new UDFArgumentException(opName + " requires two arguments.");
  }
  argumentOIs = arguments;

  final Category leftCategory = arguments[0].getCategory();
  final Category rightCategory = arguments[1].getCategory();
  // Both operands must fall in the same type category before any further checks.
  if (leftCategory != rightCategory) {
    throw new UDFArgumentException("Type mismatch in " + opName + "(" + leftCategory + "," + rightCategory + ")");
  }
  if (!supportsCategory(leftCategory)) {
    throw new UDFArgumentException(opName + " does not support " + leftCategory + " types");
  }

  // Dispatch to the category-specific initializer.
  if (leftCategory == Category.PRIMITIVE) {
    initForPrimitives(arguments[0], arguments[1]);
  } else if (leftCategory == Category.MAP
      || leftCategory == Category.STRUCT
      || leftCategory == Category.LIST) {
    initForNonPrimitives(arguments[0], arguments[1]);
  } else {
    // supportsCategory accepted a category we have no initializer for — a programming error.
    throw new AssertionError("Missing init method for " + leftCategory + " types");
  }

  // A comparison always evaluates to a writable boolean.
  return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
}
Usage of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBooleanObjectInspector in the Apache Hive project:
the initialize method of the class GenericUDFLikeAny.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  // LIKE ANY takes the expression under test plus at least one pattern argument.
  if (arguments.length < 2) {
    throw new UDFArgumentLengthException("The like any operator requires at least one pattern for matching, got " + (arguments.length - 1));
  }

  final int argCount = arguments.length;
  inputTypes = new PrimitiveCategory[argCount];
  converters = new Converter[argCount];

  // Only string-group and void (null) arguments are accepted.
  for (int i = 0; i < argCount; i++) {
    checkArgPrimitive(arguments, i);
    checkArgGroups(arguments, i, inputTypes, PrimitiveGrouping.STRING_GROUP, PrimitiveGrouping.VOID_GROUP);
    final PrimitiveCategory category = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();

    // Index 0 is the tested expression itself; only the patterns drive the flags.
    if (i != 0) {
      if (arguments[i] instanceof ConstantObjectInspector) {
        final Object constant = ((ConstantObjectInspector) arguments[i]).getWritableConstantValue();
        // Remember that at least one pattern is a constant NULL.
        if (constant == null && !isConstantNullPatternContain) {
          isConstantNullPatternContain = true;
        }
      } else if (isAllPatternsConstant) {
        // A non-constant pattern means the constant-only fast path cannot apply.
        isAllPatternsConstant = false;
      }
    }

    converters[i] = ObjectInspectorConverters.getConverter(arguments[i], getOutputOI(category));
    inputTypes[i] = category;
  }

  return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
}
Usage of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBooleanObjectInspector in the Apache parquet-mr project:
the getObjectInspector method of the class ArrayWritableObjectInspector.
private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
  // Complex categories are resolved first; each recurses into this factory
  // to build inspectors for its element/key/value types.
  if (typeInfo.getCategory().equals(Category.STRUCT)) {
    return new ArrayWritableObjectInspector((StructTypeInfo) typeInfo);
  }
  if (typeInfo.getCategory().equals(Category.LIST)) {
    final TypeInfo elementTypeInfo = ((ListTypeInfo) typeInfo).getListElementTypeInfo();
    return new ParquetHiveArrayInspector(getObjectInspector(elementTypeInfo));
  }
  if (typeInfo.getCategory().equals(Category.MAP)) {
    final TypeInfo keyTypeInfo = ((MapTypeInfo) typeInfo).getMapKeyTypeInfo();
    final TypeInfo valueTypeInfo = ((MapTypeInfo) typeInfo).getMapValueTypeInfo();
    // NOTE(review): string/byte/short keys get the "deep" map inspector —
    // presumably those key types need value-based lookup; confirm against
    // DeepParquetHiveMapInspector before changing this dispatch.
    if (keyTypeInfo.equals(TypeInfoFactory.stringTypeInfo)
        || keyTypeInfo.equals(TypeInfoFactory.byteTypeInfo)
        || keyTypeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
      return new DeepParquetHiveMapInspector(getObjectInspector(keyTypeInfo), getObjectInspector(valueTypeInfo));
    }
    return new StandardParquetHiveMapInspector(getObjectInspector(keyTypeInfo), getObjectInspector(valueTypeInfo));
  }

  // Primitive leaf types map directly onto writable or parquet-specific inspectors.
  if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
    return ParquetPrimitiveInspectorFactory.parquetStringInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
    return ParquetPrimitiveInspectorFactory.parquetByteInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
    return ParquetPrimitiveInspectorFactory.parquetShortInspector;
  }
  if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
    throw new UnsupportedOperationException("timestamp not implemented yet");
  }
  throw new IllegalArgumentException("Unknown field info: " + typeInfo);
}
Aggregations