use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
the class GenericUDFOPDTIMinus method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentException(opName + " requires two arguments.");
  }
  PrimitiveObjectInspector resultOI = null;
  for (int i = 0; i < 2; i++) {
    Category category = arguments[i].getCategory();
    if (category != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(i, "The " + GenericUDFUtils.getOrdinal(i + 1)
          + " argument of " + opName + " is expected to be a "
          + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
          + category.toString().toLowerCase() + " is found");
    }
  }
  inputOIs = new PrimitiveObjectInspector[] {
      (PrimitiveObjectInspector) arguments[0],
      (PrimitiveObjectInspector) arguments[1] };
  PrimitiveObjectInspector leftOI = inputOIs[0];
  PrimitiveObjectInspector rightOI = inputOIs[1];
  // Determine the operation and result type from the operand types, e.g.
  // IntervalYearMonth - IntervalYearMonth = IntervalYearMonth,
  // Date - IntervalYearMonth = Date, Timestamp - Timestamp = IntervalDayTime.
  if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    minusOpType = OperationType.INTERVALYM_MINUS_INTERVALYM;
    intervalArg1Idx = 0;
    intervalArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalYearMonthTypeInfo);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    minusOpType = OperationType.DATE_MINUS_INTERVALYM;
    dtArg1Idx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.dateTypeInfo);
  } else if (checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALYM;
    dtArg1Idx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
  } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.INTERVAL_DAY_TIME)) {
    minusOpType = OperationType.INTERVALDT_MINUS_INTERVALDT;
    intervalArg1Idx = 0;
    intervalArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalDayTimeTypeInfo);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_DAY_TIME)
      || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_DAY_TIME)) {
    minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALDT;
    dtArg1Idx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
    dt1Converter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.DATE)
      || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.TIMESTAMP)
      || checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP)
      || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.DATE)) {
    // Operands converted to timestamp, result as interval day-time
    minusOpType = OperationType.TIMESTAMP_MINUS_TIMESTAMP;
    dtArg1Idx = 0;
    dtArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalDayTimeTypeInfo);
    dt1Converter = ObjectInspectorConverters.getConverter(leftOI,
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
    dt2Converter = ObjectInspectorConverters.getConverter(rightOI,
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
  } else {
    // Unsupported types - error
    List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2);
    argTypeInfos.add(leftOI.getTypeInfo());
    argTypeInfos.add(rightOI.getTypeInfo());
    throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null);
  }
  return resultOI;
}
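A minimal sketch of exercising this overload resolution directly, assuming hive-exec is on the classpath; the wrapper class and its name are illustrative, not part of the Hive sources:

import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPDTIMinus;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class DTIMinusInitializeSketch {
  public static void main(String[] args) throws Exception {
    // timestamp - timestamp should select TIMESTAMP_MINUS_TIMESTAMP and
    // produce an interval day-time result inspector.
    GenericUDFOPDTIMinus udf = new GenericUDFOPDTIMinus();
    ObjectInspector result = udf.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector });
    System.out.println(result.getTypeName()); // expected: interval_day_time
  }
}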
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
the class GenericUDFInternalInterval method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  // read operation mode
  if (!(arguments[0] instanceof ConstantObjectInspector)) {
    throw new UDFArgumentTypeException(0, getFuncName() + ": may only accept constant as first argument");
  }
  Integer operationMode = getConstantIntValue(arguments, 0);
  if (operationMode == null) {
    throw new UDFArgumentTypeException(0, "must supply operation mode");
  }
  processor = getProcessorMap().get(operationMode);
  if (processor == null) {
    throw new UDFArgumentTypeException(0, getFuncName() + ": unsupported operationMode: " + operationMode);
  }
  // check value argument
  if (arguments[1].getCategory() != Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(1, "The second argument to " + getFuncName() + " must be primitive");
  }
  inputOI = (PrimitiveObjectInspector) arguments[1];
  PrimitiveCategory inputCategory = inputOI.getPrimitiveCategory();
  if (!isValidInputCategory(inputCategory)) {
    throw new UDFArgumentTypeException(1, "The second argument to " + getFuncName()
        + " must be from the string group or numeric group (except: float/double)");
  }
  if (arguments[1] instanceof ConstantObjectInspector) {
    // return value as constant in case arg is constant
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        processor.getTypeInfo(), processor.evaluate(getConstantStringValue(arguments, 1)));
  } else {
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(processor.getTypeInfo());
  }
}
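For context, a hedged sketch of the constant-vs-non-constant distinction above. The operation modes are keyed by HiveParser interval token types; the specific token used here is an assumption for illustration, as is the wrapper class:

import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFInternalInterval;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.IntWritable;

public class InternalIntervalInitializeSketch {
  public static void main(String[] args) throws Exception {
    // First argument: the operation mode, which must be a constant int
    // (assumed here to be the day-literal token; see getProcessorMap()).
    ObjectInspector modeOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableConstantObjectInspector(
            TypeInfoFactory.intTypeInfo, new IntWritable(HiveParser.TOK_INTERVAL_DAY_LITERAL));
    // Second argument: a non-constant string column, so initialize() takes
    // the else branch and returns a plain writable inspector.
    ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    ObjectInspector out = new GenericUDFInternalInterval()
        .initialize(new ObjectInspector[] { modeOI, valueOI });
    System.out.println(out.getTypeName());
  }
}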
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
the class BaseMaskUDF method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  LOG.debug("==> BaseMaskUDF.initialize()");
  // first argument is the column to be transformed
  checkArgPrimitive(arguments, 0);
  PrimitiveObjectInspector columnType = ((PrimitiveObjectInspector) arguments[0]);
  // remaining arguments configure the transformer
  transformer.init(arguments, 1);
  transformerAdapter = AbstractTransformerAdapter.getTransformerAdapter(columnType, transformer);
  // the result keeps the primitive category of the masked input column
  ObjectInspector ret = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
      columnType.getPrimitiveCategory());
  LOG.debug("<== BaseMaskUDF.initialize()");
  return ret;
}
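The point of the factory call above is that the returned inspector mirrors the input column's primitive category, so masked output keeps the column's type. A small sketch of that lookup in isolation; the wrapper class is illustrative:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class CategoryLookupSketch {
  public static void main(String[] args) {
    // For non-parameterized types the factory hands back its cached
    // inspectors, so a string column maps to the writable string inspector.
    PrimitiveObjectInspector column = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    ObjectInspector ret = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableObjectInspector(column.getPrimitiveCategory());
    System.out.println(ret.getTypeName()); // expected: string
  }
}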
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
the class GenericUDFFloorCeilBase method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentException(opName + " requires one argument.");
  }
  Category category = arguments[0].getCategory();
  if (category != Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "The " + GenericUDFUtils.getOrdinal(1)
        + " argument of " + opName + " is expected to be a "
        + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
        + category.toString().toLowerCase() + " is found");
  }
  inputOI = (PrimitiveObjectInspector) arguments[0];
  if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo())) {
    throw new UDFArgumentTypeException(0, "The " + GenericUDFUtils.getOrdinal(1)
        + " argument of " + opName + " is expected to be a numeric type, but "
        + inputOI.getTypeName() + " is found");
  }
  PrimitiveTypeInfo resultTypeInfo = null;
  PrimitiveTypeInfo inputTypeInfo = inputOI.getTypeInfo();
  if (inputTypeInfo instanceof DecimalTypeInfo) {
    // Decimal input: the result drops the scale and may gain one integer digit.
    DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) inputTypeInfo;
    resultTypeInfo = TypeInfoFactory.getDecimalTypeInfo(
        decTypeInfo.precision() - decTypeInfo.scale() + 1, 0);
    ObjectInspector decimalOI =
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decTypeInfo);
    converter = ObjectInspectorConverters.getConverter(inputOI, decimalOI);
  } else {
    // Other numeric inputs are converted to double and the result is a long.
    resultTypeInfo = TypeInfoFactory.longTypeInfo;
    ObjectInspector doubleObjectInspector =
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    converter = ObjectInspectorConverters.getConverter(inputOI, doubleObjectInspector);
  }
  resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(resultTypeInfo);
  return resultOI;
}
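The decimal branch computes the result precision as precision - scale + 1 with scale 0: floor/ceil of decimal(p,s) has at most p - s integer digits, plus one for a possible carry. A short sketch of that arithmetic, assuming hive-exec on the classpath; the wrapper class is illustrative:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class FloorCeilResultTypeSketch {
  public static void main(String[] args) {
    // floor(decimal(7,2)) fits in decimal(6,0): 5 integer digits plus a carry digit.
    DecimalTypeInfo input = TypeInfoFactory.getDecimalTypeInfo(7, 2);
    DecimalTypeInfo result =
        TypeInfoFactory.getDecimalTypeInfo(input.precision() - input.scale() + 1, 0);
    ObjectInspector oi =
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(result);
    System.out.println(oi.getTypeName()); // expected: decimal(6,0)
  }
}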
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.
the class VectorRandomRowSource method chooseSchema.
private void chooseSchema() {
  HashSet<Integer> hashSet = null;
  boolean allTypes;
  boolean onlyOne = (r.nextInt(100) == 7);
  if (onlyOne) {
    columnCount = 1;
    allTypes = false;
  } else {
    allTypes = r.nextBoolean();
    if (allTypes) {
      // One of each type.
      columnCount = possibleHiveTypeNames.length;
      hashSet = new HashSet<Integer>();
    } else {
      columnCount = 1 + r.nextInt(20);
    }
  }
  typeNames = new ArrayList<String>(columnCount);
  primitiveCategories = new PrimitiveCategory[columnCount];
  primitiveTypeInfos = new PrimitiveTypeInfo[columnCount];
  primitiveObjectInspectorList = new ArrayList<ObjectInspector>(columnCount);
  List<String> columnNames = new ArrayList<String>(columnCount);
  for (int c = 0; c < columnCount; c++) {
    columnNames.add(String.format("col%d", c));
    String typeName;
    if (onlyOne) {
      typeName = possibleHiveTypeNames[r.nextInt(possibleHiveTypeNames.length)];
    } else {
      int typeNum;
      if (allTypes) {
        // Draw types without replacement so each type appears exactly once.
        while (true) {
          typeNum = r.nextInt(possibleHiveTypeNames.length);
          Integer typeNumInteger = Integer.valueOf(typeNum);
          if (!hashSet.contains(typeNumInteger)) {
            hashSet.add(typeNumInteger);
            break;
          }
        }
      } else {
        typeNum = r.nextInt(possibleHiveTypeNames.length);
      }
      typeName = possibleHiveTypeNames[typeNum];
    }
    if (typeName.equals("char")) {
      int maxLength = 1 + r.nextInt(100);
      typeName = String.format("char(%d)", maxLength);
    } else if (typeName.equals("varchar")) {
      int maxLength = 1 + r.nextInt(100);
      typeName = String.format("varchar(%d)", maxLength);
    } else if (typeName.equals("decimal")) {
      typeName = String.format("decimal(%d,%d)",
          HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
    }
    PrimitiveTypeInfo primitiveTypeInfo =
        (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    primitiveTypeInfos[c] = primitiveTypeInfo;
    PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
    primitiveCategories[c] = primitiveCategory;
    primitiveObjectInspectorList.add(
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveTypeInfo));
    typeNames.add(typeName);
  }
  rowStructObjectInspector =
      ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, primitiveObjectInspectorList);
  alphabets = new String[columnCount];
}
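Note that the loop resolves each generated type name to a full PrimitiveTypeInfo before asking the factory for an inspector, which matters for parameterized types like char, varchar, and decimal. A minimal sketch of that resolution; the wrapper class is illustrative:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeStringLookupSketch {
  public static void main(String[] args) {
    // "varchar(10)" carries its max length in the type info, so the factory
    // returns a length-aware writable varchar inspector rather than a
    // category-level one.
    PrimitiveTypeInfo varchar10 =
        (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("varchar(10)");
    ObjectInspector oi =
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(varchar10);
    System.out.println(oi.getTypeName()); // expected: varchar(10)
  }
}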