Use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in project hive by apache.
The class GenericUDF, method obtainIntConverter.
protected void obtainIntConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
  PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
  PrimitiveCategory inputType = inOi.getPrimitiveCategory();
  switch (inputType) {
    case BYTE:
    case SHORT:
    case INT:
    case VOID:
      // VOID covers NULL literals; all accepted categories fall through to the converter below.
      break;
    default:
      throw new UDFArgumentTypeException(i, getFuncName()
          + " only takes INT/SHORT/BYTE types as " + getArgOrder(i)
          + " argument, got " + inputType);
  }
  Converter converter = ObjectInspectorConverters.getConverter(arguments[i],
      PrimitiveObjectInspectorFactory.writableIntObjectInspector);
  converters[i] = converter;
  inputTypes[i] = inputType;
}
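For context, a minimal sketch of how a GenericUDF subclass would typically call this helper from initialize(). The class GenericUDFIntExample and its body are illustrative, not part of Hive, though the helpers it calls (checkArgsSize, checkArgPrimitive, getIntValue, getStandardDisplayString) are real GenericUDF methods.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;

// Hypothetical identity UDF over int-like arguments (illustrative, not in Hive).
public class GenericUDFIntExample extends GenericUDF {
  private final transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];
  private final transient Converter[] converters = new Converter[1];

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 1, 1);
    checkArgPrimitive(arguments, 0);
    // Rejects anything but BYTE/SHORT/INT/VOID and caches a writable-int converter.
    obtainIntConverter(arguments, 0, inputTypes, converters);
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // getIntValue is the companion helper that applies converters[0].
    Integer value = getIntValue(arguments, 0, converters);
    return value == null ? null : new IntWritable(value);
  }

  @Override
  public String getDisplayString(String[] children) {
    return getStandardDisplayString("int_example", children);
  }
}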
Use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in project hive by apache.
The class GenericUDF, method obtainStringConverter.
protected void obtainStringConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
  PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
  PrimitiveCategory inputType = inOi.getPrimitiveCategory();
  // No category check here: every primitive type can be converted to a string.
  Converter converter = ObjectInspectorConverters.getConverter(arguments[i],
      PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  converters[i] = converter;
  inputTypes[i] = inputType;
}
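A hedged sketch of the matching call sites inside a hypothetical subclass, with the same inputTypes/converters fields as in the previous example:

// In initialize(): cache a converter that normalizes the argument to a string.
obtainStringConverter(arguments, 0, inputTypes, converters);

// In evaluate(): getStringValue is the companion GenericUDF helper that applies it.
String text = getStringValue(arguments, 0, converters);
if (text == null) {
  return null;
}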
Use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in project hive by apache.
The class GenericUDFDateDiff, method checkArguments.
private Converter checkArguments(ObjectInspector[] arguments, int i) throws UDFArgumentException {
  if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(i, "Only primitive type arguments are accepted but "
        + arguments[i].getTypeName() + " was passed as the " + (i + 1) + "-th argument");
  }
  PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
  Converter converter;
  switch (inputType) {
    case STRING:
    case VARCHAR:
    case CHAR:
      // String-like inputs are normalized to strings and parsed as dates at evaluation time.
      converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i],
          PrimitiveObjectInspectorFactory.writableStringObjectInspector);
      break;
    case TIMESTAMP:
      converter = new TimestampConverter((PrimitiveObjectInspector) arguments[i],
          PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
      break;
    case DATE:
      converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i],
          PrimitiveObjectInspectorFactory.writableDateObjectInspector);
      break;
    default:
      throw new UDFArgumentException("DATEDIFF() only takes STRING/CHAR/VARCHAR/TIMESTAMP/DATE types as "
          + (i + 1) + "-th argument, got " + inputType);
  }
  return converter;
}
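In GenericUDFDateDiff this helper is called once per argument from initialize. A condensed sketch of that wiring follows; it paraphrases the surrounding class, so treat field names such as inputConverter1 as assumptions rather than verbatim Hive source.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException(
        "datediff() requires 2 arguments, got " + arguments.length);
  }
  // One converter per argument; each input is normalized to string, timestamp, or date.
  inputConverter1 = checkArguments(arguments, 0);
  inputConverter2 = checkArguments(arguments, 1);
  // datediff returns the whole-day difference between its two arguments as an int.
  return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}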
Use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in project hive by apache.
The class VectorRandomRowSource, method chooseSchema.
private void chooseSchema() {
  HashSet<Integer> hashSet = null;
  boolean allTypes;
  boolean onlyOne = (r.nextInt(100) == 7);
  if (onlyOne) {
    columnCount = 1;
    allTypes = false;
  } else {
    allTypes = r.nextBoolean();
    if (allTypes) {
      // One column of each possible type; track which type indices have been used.
      columnCount = possibleHiveTypeNames.length;
      hashSet = new HashSet<Integer>();
    } else {
      columnCount = 1 + r.nextInt(20);
    }
  }
  typeNames = new ArrayList<String>(columnCount);
  primitiveCategories = new PrimitiveCategory[columnCount];
  primitiveTypeInfos = new PrimitiveTypeInfo[columnCount];
  primitiveObjectInspectorList = new ArrayList<ObjectInspector>(columnCount);
  List<String> columnNames = new ArrayList<String>(columnCount);
  for (int c = 0; c < columnCount; c++) {
    columnNames.add(String.format("col%d", c));
    String typeName;
    if (onlyOne) {
      typeName = possibleHiveTypeNames[r.nextInt(possibleHiveTypeNames.length)];
    } else {
      int typeNum;
      if (allTypes) {
        // Rejection-sample until an unused type index is found; Set.add reports
        // whether the element was new, so no separate contains() check is needed.
        do {
          typeNum = r.nextInt(possibleHiveTypeNames.length);
        } while (!hashSet.add(typeNum));
      } else {
        typeNum = r.nextInt(possibleHiveTypeNames.length);
      }
      typeName = possibleHiveTypeNames[typeNum];
    }
    // Parameterized types need a concrete length/precision before parsing.
    if (typeName.equals("char")) {
      int maxLength = 1 + r.nextInt(100);
      typeName = String.format("char(%d)", maxLength);
    } else if (typeName.equals("varchar")) {
      int maxLength = 1 + r.nextInt(100);
      typeName = String.format("varchar(%d)", maxLength);
    } else if (typeName.equals("decimal")) {
      typeName = String.format("decimal(%d,%d)",
          HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
    }
    PrimitiveTypeInfo primitiveTypeInfo =
        (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    primitiveTypeInfos[c] = primitiveTypeInfo;
    PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
    primitiveCategories[c] = primitiveCategory;
    primitiveObjectInspectorList.add(
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveTypeInfo));
    typeNames.add(typeName);
  }
  rowStructObjectInspector =
      ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, primitiveObjectInspectorList);
  alphabets = new String[columnCount];
}
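The core of the loop above is the type-string round trip: a Hive type name such as "decimal(38,18)" is parsed into a PrimitiveTypeInfo, which in turn yields a writable object inspector. A standalone illustration of that round trip (the demo class name is ours, not Hive's):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeStringRoundTripDemo {
  public static void main(String[] args) {
    for (String typeName : new String[] { "int", "char(10)", "varchar(25)", "decimal(38,18)" }) {
      // Parse the type name, then look up the matching writable object inspector.
      PrimitiveTypeInfo typeInfo =
          (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(typeName);
      ObjectInspector oi =
          PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
      // Prints e.g. "char(10) -> CHAR (WritableHiveCharObjectInspector)".
      System.out.println(typeName + " -> " + typeInfo.getPrimitiveCategory()
          + " (" + oi.getClass().getSimpleName() + ")");
    }
  }
}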
Use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in project hive by apache.
The class VectorUDFArgDesc, method prepareConstant.
/**
 * Prepares the constant for use when the function is called. To be used
 * during initialization.
 */
public void prepareConstant() {
  final Object writableValue;
  if (constExpr != null) {
    PrimitiveCategory pc = ((PrimitiveTypeInfo) constExpr.getTypeInfo()).getPrimitiveCategory();
    // Convert from the Java representation to the corresponding Writable.
    writableValue = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pc)
        .getPrimitiveWritableObject(constExpr.getValue());
  } else {
    // A null constant stays null; DeferredJavaObject simply wraps it.
    writableValue = null;
  }
  constObjVal = new GenericUDF.DeferredJavaObject(writableValue);
}
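To make the Java-to-Writable step concrete, the snippet below (standalone, not Hive code) shows what the factory call produces for an INT constant: a Java Integer comes back as an org.apache.hadoop.io.IntWritable.

// Standalone illustration of the conversion performed in prepareConstant().
PrimitiveCategory pc = PrimitiveCategory.INT;
Object writable = PrimitiveObjectInspectorFactory
    .getPrimitiveJavaObjectInspector(pc)
    .getPrimitiveWritableObject(Integer.valueOf(42));
// writable is an IntWritable holding 42, ready to wrap in a DeferredJavaObject.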