Usage example of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo from the Apache Hive project:
class GenericUDFBaseCompare, method initForPrimitives.
/**
 * Selects the comparison strategy for two primitive-typed arguments.
 *
 * <p>When both sides share a well-known type (string, int, long, byte, boolean)
 * a specialized compare path is chosen and the matching ObjectInspector fields
 * are cached. Otherwise, if the primitive categories already match, no
 * conversion is needed (SAME_TYPE); if they differ, both sides are converted
 * to a common comparison type (NEED_CONVERT).
 *
 * @param arg0 inspector for the left operand; must be PRIMITIVE (asserted)
 * @param arg1 inspector for the right operand; must be PRIMITIVE (asserted)
 * @throws UDFArgumentException if a required conversion is not allowed
 */
private void initForPrimitives(ObjectInspector arg0, ObjectInspector arg1) throws UDFArgumentException {
assert arg0.getCategory() == Category.PRIMITIVE;
assert arg1.getCategory() == Category.PRIMITIVE;
final TypeInfo type0 = TypeInfoUtils.getTypeInfoFromObjectInspector(arg0);
final TypeInfo type1 = TypeInfoUtils.getTypeInfoFromObjectInspector(arg1);
if (type0.equals(TypeInfoFactory.stringTypeInfo) && type1.equals(TypeInfoFactory.stringTypeInfo)) {
soi0 = (StringObjectInspector) arg0;
soi1 = (StringObjectInspector) arg1;
// If either side hands out writables, compare as Text to avoid
// materializing java Strings; otherwise compare java Strings directly.
if (soi0.preferWritable() || soi1.preferWritable()) {
compareType = CompareType.COMPARE_TEXT;
} else {
compareType = CompareType.COMPARE_STRING;
}
} else if (type0.equals(TypeInfoFactory.intTypeInfo) && type1.equals(TypeInfoFactory.intTypeInfo)) {
compareType = CompareType.COMPARE_INT;
ioi0 = (IntObjectInspector) arg0;
ioi1 = (IntObjectInspector) arg1;
} else if (type0.equals(TypeInfoFactory.longTypeInfo) && type1.equals(TypeInfoFactory.longTypeInfo)) {
compareType = CompareType.COMPARE_LONG;
loi0 = (LongObjectInspector) arg0;
loi1 = (LongObjectInspector) arg1;
} else if (type0.equals(TypeInfoFactory.byteTypeInfo) && type1.equals(TypeInfoFactory.byteTypeInfo)) {
compareType = CompareType.COMPARE_BYTE;
byoi0 = (ByteObjectInspector) arg0;
byoi1 = (ByteObjectInspector) arg1;
} else if (type0.equals(TypeInfoFactory.booleanTypeInfo) && type1.equals(TypeInfoFactory.booleanTypeInfo)) {
compareType = CompareType.COMPARE_BOOL;
boi0 = (BooleanObjectInspector) arg0;
boi1 = (BooleanObjectInspector) arg1;
} else {
if (type0 == type1 || TypeInfoUtils.doPrimitiveCategoriesMatch(type0, type1)) {
compareType = CompareType.SAME_TYPE;
} else {
compareType = CompareType.NEED_CONVERT;
// FIX: the local was previously also named "compareType", shadowing the
// CompareType field assigned just above. Renamed to avoid the shadowing
// hazard; behavior is unchanged.
TypeInfo commonTypeInfo = FunctionRegistry.getCommonClassForComparison(type0, type1);
// For now, we always convert to double if we can't find a common type
compareOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
    (commonTypeInfo == null) ? TypeInfoFactory.doubleTypeInfo : commonTypeInfo);
converter0 = ObjectInspectorConverters.getConverter(arg0, compareOI);
converter1 = ObjectInspectorConverters.getConverter(arg1, compareOI);
checkConversionAllowed(arg0, compareOI);
checkConversionAllowed(arg1, compareOI);
}
}
}
Usage example of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo from the Apache Hive project:
class VectorUDFArgDesc, method prepareConstant.
/**
 * Prepares the constant argument for use when the function is called.
 * Intended to be invoked once during initialization.
 *
 * <p>Converts the Java-typed constant held by {@code constExpr} into its
 * Writable representation and wraps it in a {@link GenericUDF.DeferredJavaObject}
 * stored in {@code constObjVal}. A null {@code constExpr} yields a deferred null.
 *
 * @throws RuntimeException if the constant's category is neither PRIMITIVE nor STRUCT
 */
public void prepareConstant() {
Object writable = null;
if (constExpr != null) {
Object javaValue = constExpr.getValue();
TypeInfo typeInfo = constExpr.getTypeInfo();
ObjectInspector writableOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
Category category = typeInfo.getCategory();
if (category == Category.PRIMITIVE) {
// Translate the Java-object constant into the corresponding Writable.
PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
AbstractPrimitiveJavaObjectInspector javaOI =
    PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(primitiveCategory);
writable = javaOI.getPrimitiveWritableObject(javaValue);
} else if (category == Category.STRUCT) {
// Struct constants may arrive as Object[]; normalize to a List first.
if (javaValue.getClass().isArray()) {
javaValue = java.util.Arrays.asList((Object[]) javaValue);
}
StructObjectInspector structOI = (StructObjectInspector) writableOI;
List<? extends StructField> structFields = structOI.getAllStructFieldRefs();
List<String> names = new ArrayList<String>(structFields.size());
List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(structFields.size());
for (StructField field : structFields) {
names.add(field.getFieldName());
fieldOIs.add(ObjectInspectorUtils.getStandardObjectInspector(
    field.getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE));
}
StandardConstantStructObjectInspector constantStructOI =
    ObjectInspectorFactory.getStandardConstantStructObjectInspector(
        names, fieldOIs, (List<?>) javaValue);
writable = constantStructOI.getWritableConstantValue();
} else {
throw new RuntimeException("Unexpected category " + category);
}
}
constObjVal = new GenericUDF.DeferredJavaObject(writable);
}
Usage example of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo from the Apache Hive project:
class GenericUDFWidthBucket, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
this.objectInspectors = arguments;
// width_bucket(expr, min_value, max_value, num_buckets): exactly 4 arguments.
checkArgsSize(arguments, 4, 4);
// All four arguments must be primitive...
for (int i = 0; i < 4; i++) {
checkArgPrimitive(arguments, i);
}
// ...and each must be numeric (or void, for NULL literals).
PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[4];
for (int i = 0; i < 4; i++) {
checkArgGroups(arguments, i, inputTypes, NUMERIC_GROUP, VOID_GROUP);
}
// Find a single comparison type shared by expr, min and max, then build
// converters so all three can be compared in that common representation.
final TypeInfo exprType = TypeInfoUtils.getTypeInfoFromObjectInspector(this.objectInspectors[0]);
final TypeInfo minType = TypeInfoUtils.getTypeInfoFromObjectInspector(this.objectInspectors[1]);
final TypeInfo maxType = TypeInfoUtils.getTypeInfoFromObjectInspector(this.objectInspectors[2]);
final TypeInfo commonType = FunctionRegistry.getCommonClassForComparison(
    exprType, FunctionRegistry.getCommonClassForComparison(minType, maxType));
this.commonExprMinMaxOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(commonType);
this.epxrConverterOI = ObjectInspectorConverters.getConverter(this.objectInspectors[0], this.commonExprMinMaxOI);
this.minValueConverterOI = ObjectInspectorConverters.getConverter(this.objectInspectors[1], this.commonExprMinMaxOI);
this.maxValueConverterOI = ObjectInspectorConverters.getConverter(this.objectInspectors[2], this.commonExprMinMaxOI);
// The bucket number result is always a writable int.
return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}
Usage example of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo from the Apache Hive project:
class TestVectorStringUnary, method extractResultObjects.
/**
 * Copies every row of {@code batch} into {@code resultObjects}, starting at
 * {@code rowIndex}, as standard Writable objects of {@code targetTypeInfo}.
 * A failure while extracting a row fails the enclosing JUnit test.
 */
private void extractResultObjects(VectorizedRowBatch batch, int rowIndex, VectorExtractRow resultVectorExtractRow, Object[] scrqtchRow, TypeInfo targetTypeInfo, Object[] resultObjects) {
ObjectInspector targetOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(targetTypeInfo);
for (int logical = 0; logical < batch.size; logical++) {
// Honor the batch's selection vector when it is in use.
final int physical = batch.selectedInUse ? batch.selected[logical] : logical;
try {
resultVectorExtractRow.extractRow(batch, physical, scrqtchRow);
} catch (Exception e) {
Assert.fail(e.toString());
}
resultObjects[rowIndex++] = ObjectInspectorUtils.copyToStandardObject(
    scrqtchRow[0], targetOI, ObjectInspectorCopyOption.WRITABLE);
}
}
Usage example of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo from the Apache Hive project:
class TestVectorStringUnary, method doRowIfTest.
/**
 * Evaluates {@code genericUdf} row-by-row (non-vectorized path) over
 * {@code randomRows} and stores each result, copied to a standard Writable
 * of {@code targetTypeInfo}, into {@code resultObjects} at the same index.
 */
private void doRowIfTest(TypeInfo typeInfo, TypeInfo targetTypeInfo, List<String> columns, List<ExprNodeDesc> children, Object[][] randomRows, ObjectInspector rowInspector, GenericUDF genericUdf, Object[] resultObjects) throws Exception {
ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(typeInfo, genericUdf, children);
HiveConf hiveConf = new HiveConf();
ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
evaluator.initialize(rowInspector);
ObjectInspector targetOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(targetTypeInfo);
int out = 0;
for (Object[] row : randomRows) {
// Copy to a standard Writable so results compare cleanly against the vectorized path.
resultObjects[out++] = ObjectInspectorUtils.copyToStandardObject(
    evaluator.evaluate(row), targetOI, ObjectInspectorCopyOption.WRITABLE);
}
}
Aggregations