Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
The class GenericUDFRound, method initialize:
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length < 1 || arguments.length > 2) {
    throw new UDFArgumentLengthException("ROUND requires one or two arguments, got " + arguments.length);
  }
  if (arguments[0].getCategory() != Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "ROUND input only takes primitive types, got " + arguments[0].getTypeName());
  }
  inputOI = (PrimitiveObjectInspector) arguments[0];
  if (arguments.length == 2) {
    if (arguments[1].getCategory() != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(1, "ROUND second argument only takes primitive types, got " + arguments[1].getTypeName());
    }
    scaleOI = (PrimitiveObjectInspector) arguments[1];
    switch (scaleOI.getPrimitiveCategory()) {
    case VOID:
      break;
    case BYTE:
      if (scaleOI instanceof WritableConstantByteObjectInspector) {
        scale = ((WritableConstantByteObjectInspector) scaleOI).getWritableConstantValue().get();
      } else {
        constantScale = false;
      }
      break;
    case SHORT:
      if (scaleOI instanceof WritableConstantShortObjectInspector) {
        scale = ((WritableConstantShortObjectInspector) scaleOI).getWritableConstantValue().get();
      } else {
        constantScale = false;
      }
      break;
    case INT:
      if (scaleOI instanceof WritableConstantIntObjectInspector) {
        scale = ((WritableConstantIntObjectInspector) scaleOI).getWritableConstantValue().get();
      } else {
        constantScale = false;
      }
      break;
    case LONG:
      if (scaleOI instanceof WritableConstantLongObjectInspector) {
        long l = ((WritableConstantLongObjectInspector) scaleOI).getWritableConstantValue().get();
        if (l < Integer.MIN_VALUE || l > Integer.MAX_VALUE) {
          throw new UDFArgumentException(getFuncName().toUpperCase() + " scale argument out of allowed range");
        }
        scale = (int) l;
      } else {
        constantScale = false;
      }
      break;
    default:
      throw new UDFArgumentTypeException(1, getFuncName().toUpperCase() + " second argument only takes numeric type");
    }
  }
  inputType = inputOI.getPrimitiveCategory();
  ObjectInspector outputOI = null;
  switch (inputType) {
  case DECIMAL:
    DecimalTypeInfo inputTypeInfo = (DecimalTypeInfo) inputOI.getTypeInfo();
    DecimalTypeInfo typeInfo = getOutputTypeInfo(inputTypeInfo, scale);
    outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
    if (!constantScale) {
      throw new UDFArgumentTypeException(1, getFuncName().toUpperCase() + " scale argument for decimal must be constant");
    }
    break;
  case VOID:
  case BYTE:
  case SHORT:
  case INT:
  case LONG:
  case FLOAT:
  case DOUBLE:
    outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputType);
    break;
  case STRING:
  case VARCHAR:
  case CHAR:
    outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.DOUBLE);
    converterFromString = ObjectInspectorConverters.getConverter(inputOI, outputOI);
    break;
  default:
    throw new UDFArgumentTypeException(0, "Only numeric or string group data types are allowed for ROUND function. Got " + inputType.name());
  }
  return outputOI;
}
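Two overloads of the factory appear in this snippet: for DECIMAL the writable object inspector is built from a full DecimalTypeInfo so that precision and scale survive, while for the other numeric categories the bare PrimitiveCategory is enough. A minimal sketch of both calls (the precision/scale values below are illustrative, not taken from the snippet):

// Hedged sketch: precision/scale chosen arbitrarily for illustration.
DecimalTypeInfo decInfo = TypeInfoFactory.getDecimalTypeInfo(10, 2);
ObjectInspector decimalOI =
    PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decInfo);  // writable decimal(10,2) OI
ObjectInspector doubleOI =
    PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.DOUBLE);  // plain double OI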
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
The class GenericUDFToTimestampLocalTZ, method initialize:
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length < 1) {
    throw new UDFArgumentLengthException("The function CAST as TIMESTAMP WITH LOCAL TIME ZONE requires at least one argument, got " + arguments.length);
  }
  try {
    argumentOI = (PrimitiveObjectInspector) arguments[0];
    switch (argumentOI.getPrimitiveCategory()) {
    case CHAR:
    case VARCHAR:
    case STRING:
    case DATE:
    case TIMESTAMP:
    case TIMESTAMPLOCALTZ:
      break;
    default:
      throw new UDFArgumentException("CAST as TIMESTAMP WITH LOCAL TIME ZONE only allows string/date/timestamp/timestamp with time zone types");
    }
  } catch (ClassCastException e) {
    throw new UDFArgumentException("The function CAST as TIMESTAMP WITH LOCAL TIME ZONE takes only primitive types");
  }
  SettableTimestampLocalTZObjectInspector outputOI = (SettableTimestampLocalTZObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
  converter = new TimestampLocalTZConverter(argumentOI, outputOI);
  return outputOI;
}
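The typeInfo passed to the factory here is a field of the UDF, populated before initialize runs with the target type of the CAST. As a hedged sketch of how such a type info is built (the ZoneId and the construction path are assumptions for illustration, not taken from the snippet):

// Assumption: the UDF's typeInfo field carries the session time zone,
// roughly along these lines.
TimestampLocalTZTypeInfo tzTypeInfo =
    TypeInfoFactory.getTimestampTZTypeInfo(ZoneId.of("UTC"));
ObjectInspector tzOI =
    PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(tzTypeInfo);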
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
The class PartExprEvalUtils, method evalExprWithPart:
/**
 * Evaluate an expression against a partition's columns.
 *
 * @param expr the expression to evaluate
 * @param p the partition supplying the partition-column values
 * @param vcs virtual columns to expose to the expression, may be null
 * @param rowObjectInspector object inspector for the non-partition row
 * @return value returned by the expression
 * @throws HiveException
 */
public static synchronized Object evalExprWithPart(ExprNodeDesc expr, Partition p, List<VirtualColumn> vcs, StructObjectInspector rowObjectInspector) throws HiveException {
  LinkedHashMap<String, String> partSpec = p.getSpec();
  Properties partProps = p.getSchema();
  String pcolTypes = partProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
  String[] partKeyTypes = pcolTypes.trim().split(":");
  if (partSpec.size() != partKeyTypes.length) {
    throw new HiveException("Internal error : Partition Spec size, " + partSpec.size() + " doesn't match partition key definition size, " + partKeyTypes.length);
  }
  boolean hasVC = vcs != null && !vcs.isEmpty();
  Object[] rowWithPart = new Object[hasVC ? 3 : 2];
  // Create the row object
  ArrayList<String> partNames = new ArrayList<String>();
  ArrayList<Object> partValues = new ArrayList<Object>();
  ArrayList<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>();
  int i = 0;
  for (Map.Entry<String, String> entry : partSpec.entrySet()) {
    partNames.add(entry.getKey());
    ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(partKeyTypes[i++]));
    partValues.add(ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi).convert(entry.getValue()));
    partObjectInspectors.add(oi);
  }
  StructObjectInspector partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors);
  rowWithPart[1] = partValues;
  ArrayList<StructObjectInspector> ois = new ArrayList<StructObjectInspector>(2);
  ois.add(rowObjectInspector);
  ois.add(partObjectInspector);
  if (hasVC) {
    ois.add(VirtualColumn.getVCSObjectInspector(vcs));
  }
  StructObjectInspector rowWithPartObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(ois);
  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(expr);
  ObjectInspector evaluateResultOI = evaluator.initialize(rowWithPartObjectInspector);
  Object evaluateResultO = evaluator.evaluate(rowWithPart);
  return ((PrimitiveObjectInspector) evaluateResultOI).getPrimitiveJavaObject(evaluateResultO);
}
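The core pattern here is that partition values always arrive from the metastore as strings; each is converted into the writable form of its declared column type before the expression runs. In isolation (the column name and value below are made up):

// Hypothetical partition column "year" declared as int, value "2024".
ObjectInspector yearOI = PrimitiveObjectInspectorFactory
    .getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo("int"));
Object yearWritable = ObjectInspectorConverters
    .getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, yearOI)
    .convert("2024");  // an IntWritable holding 2024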
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
The class TypeInfoUtils, method getExtendedTypeInfoFromJavaType:
/**
 * Return the extended TypeInfo from a Java type. By extended TypeInfo, we
 * allow unknownType for java.lang.Object.
 *
 * @param t the Java type
 * @param m the method, only used for generating error messages
 */
private static TypeInfo getExtendedTypeInfoFromJavaType(Type t, Method m) {
  if (t == Object.class) {
    return TypeInfoFactory.unknownTypeInfo;
  }
  if (t instanceof ParameterizedType) {
    ParameterizedType pt = (ParameterizedType) t;
    // List?
    if (List.class == (Class<?>) pt.getRawType() || ArrayList.class == (Class<?>) pt.getRawType()) {
      return TypeInfoFactory.getListTypeInfo(getExtendedTypeInfoFromJavaType(pt.getActualTypeArguments()[0], m));
    }
    // Map?
    if (Map.class == (Class<?>) pt.getRawType() || HashMap.class == (Class<?>) pt.getRawType()) {
      return TypeInfoFactory.getMapTypeInfo(getExtendedTypeInfoFromJavaType(pt.getActualTypeArguments()[0], m), getExtendedTypeInfoFromJavaType(pt.getActualTypeArguments()[1], m));
    }
    // Otherwise convert t to its raw type so we fall into the class handling below.
    t = pt.getRawType();
  }
  // Must be a class.
  if (!(t instanceof Class)) {
    throw new RuntimeException("Hive does not understand type " + t + " from " + m);
  }
  Class<?> c = (Class<?>) t;
  // Java primitive type?
  if (PrimitiveObjectInspectorUtils.isPrimitiveJavaType(c)) {
    return TypeInfoUtils.getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveJavaType(c).primitiveCategory));
  }
  // Java primitive wrapper class?
  if (PrimitiveObjectInspectorUtils.isPrimitiveJavaClass(c)) {
    return TypeInfoUtils.getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveJavaClass(c).primitiveCategory));
  }
  // Primitive Writable class?
  if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(c)) {
    return TypeInfoUtils.getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveWritableClass(c).primitiveCategory));
  }
  // Otherwise it must be a struct.
  Field[] fields = ObjectInspectorUtils.getDeclaredNonStaticFields(c);
  ArrayList<String> fieldNames = new ArrayList<String>(fields.length);
  ArrayList<TypeInfo> fieldTypeInfos = new ArrayList<TypeInfo>(fields.length);
  for (Field field : fields) {
    fieldNames.add(field.getName());
    fieldTypeInfos.add(getExtendedTypeInfoFromJavaType(field.getGenericType(), m));
  }
  return TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos);
}
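The three primitive branches differ only in how the Java class is mapped to a PrimitiveCategory; all of them end at the same TypeInfo. A hedged illustration using the same calls as above, with standard Hadoop/Java types (the variable names are mine):

// All three lookups resolve to the same "int" TypeInfo.
TypeInfo fromWritable = TypeInfoUtils.getTypeInfoFromObjectInspector(
    PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveWritableClass(IntWritable.class).primitiveCategory));
TypeInfo fromWrapper = TypeInfoUtils.getTypeInfoFromObjectInspector(
    PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
        PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveJavaClass(Integer.class).primitiveCategory));
TypeInfo fromPrimitive = TypeInfoUtils.getTypeInfoFromObjectInspector(
    PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
        PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveJavaType(int.class).primitiveCategory));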
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
The class SerdeRandomRowSource, method chooseSchema:
private void chooseSchema(SupportedTypes supportedTypes, int maxComplexDepth) {
  HashSet<Integer> hashSet = null;
  final boolean allTypes;
  final boolean onlyOne = (r.nextInt(100) == 7);
  if (onlyOne) {
    columnCount = 1;
    allTypes = false;
  } else {
    allTypes = r.nextBoolean();
    if (allTypes) {
      switch (supportedTypes) {
      case ALL:
        columnCount = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length;
        break;
      case ALL_EXCEPT_MAP:
        columnCount = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length - 1;
        break;
      case PRIMITIVE:
        columnCount = possibleHivePrimitiveTypeNames.length;
        break;
      }
      hashSet = new HashSet<Integer>();
    } else {
      columnCount = 1 + r.nextInt(20);
    }
  }
  typeNames = new ArrayList<String>(columnCount);
  categories = new Category[columnCount];
  typeInfos = new TypeInfo[columnCount];
  objectInspectorList = new ArrayList<ObjectInspector>(columnCount);
  primitiveCategories = new PrimitiveCategory[columnCount];
  primitiveTypeInfos = new PrimitiveTypeInfo[columnCount];
  primitiveObjectInspectorList = new ArrayList<ObjectInspector>(columnCount);
  final List<String> columnNames = new ArrayList<String>(columnCount);
  for (int c = 0; c < columnCount; c++) {
    columnNames.add(String.format("col%d", c));
    String typeName;
    if (onlyOne) {
      typeName = getRandomTypeName(supportedTypes);
    } else {
      int typeNum;
      if (allTypes) {
        int maxTypeNum = 0;
        switch (supportedTypes) {
        case PRIMITIVE:
          maxTypeNum = possibleHivePrimitiveTypeNames.length;
          break;
        case ALL_EXCEPT_MAP:
          maxTypeNum = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length - 1;
          break;
        case ALL:
          maxTypeNum = possibleHivePrimitiveTypeNames.length + possibleHiveComplexTypeNames.length;
          break;
        }
        // Draw type numbers without replacement so every type appears exactly once.
        while (true) {
          typeNum = r.nextInt(maxTypeNum);
          final Integer typeNumInteger = Integer.valueOf(typeNum);
          if (!hashSet.contains(typeNumInteger)) {
            hashSet.add(typeNumInteger);
            break;
          }
        }
      } else {
        if (supportedTypes == SupportedTypes.PRIMITIVE || r.nextInt(10) != 0) {
          typeNum = r.nextInt(possibleHivePrimitiveTypeNames.length);
        } else {
          typeNum = possibleHivePrimitiveTypeNames.length + r.nextInt(possibleHiveComplexTypeNames.length);
          if (supportedTypes == SupportedTypes.ALL_EXCEPT_MAP) {
            typeNum--;
          }
        }
      }
      if (typeNum < possibleHivePrimitiveTypeNames.length) {
        typeName = possibleHivePrimitiveTypeNames[typeNum];
      } else {
        typeName = possibleHiveComplexTypeNames[typeNum - possibleHivePrimitiveTypeNames.length];
      }
    }
    final String decoratedTypeName = getDecoratedTypeName(typeName, supportedTypes, 0, maxComplexDepth);
    final TypeInfo typeInfo;
    try {
      typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(decoratedTypeName);
    } catch (Exception e) {
      throw new RuntimeException("Cannot convert type name " + decoratedTypeName + " to a type", e);
    }
    typeInfos[c] = typeInfo;
    final Category category = typeInfo.getCategory();
    categories[c] = category;
    ObjectInspector objectInspector = getObjectInspector(typeInfo);
    switch (category) {
    case PRIMITIVE:
      {
        final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
        final PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
        objectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveTypeInfo);
        primitiveTypeInfos[c] = primitiveTypeInfo;
        primitiveCategories[c] = primitiveCategory;
        primitiveObjectInspectorList.add(objectInspector);
      }
      break;
    case LIST:
    case MAP:
    case STRUCT:
    case UNION:
      primitiveObjectInspectorList.add(null);
      break;
    default:
      throw new RuntimeException("Unexpected category " + category);
    }
    objectInspectorList.add(objectInspector);
    typeNames.add(decoratedTypeName);
  }
  rowStructObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, objectInspectorList);
  alphabets = new String[columnCount];
}
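For primitive columns this test utility replaces the generic object inspector with the writable one derived from the column's PrimitiveTypeInfo, then assembles all column inspectors into a standard struct. A stripped-down, single-column version of that flow (the column name and type below are arbitrary choices, not from the snippet):

// Hypothetical one-column schema, mirroring the PRIMITIVE branch above.
PrimitiveTypeInfo varcharInfo =
    (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("varchar(10)");
ObjectInspector colOI =
    PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(varcharInfo);
StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
    Arrays.asList("col0"), Arrays.asList(colOI));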