Use of org.apache.hadoop.hive.ql.io.HiveKey in project presto by prestodb.

The getHiveBucket method of the class TestHiveBucketing hashes a list of bound column values with Hive's GenericUDFHash, carries the resulting hash in a HiveKey, and asks Hive's DefaultHivePartitioner which of the bucketCount buckets that key falls into.
public static int getHiveBucket(List<Entry<ObjectInspector, Object>> columnBindings, int bucketCount)
        throws HiveException
{
    GenericUDFHash udf = new GenericUDFHash();
    ObjectInspector[] objectInspectors = new ObjectInspector[columnBindings.size()];
    GenericUDF.DeferredObject[] deferredObjects = new GenericUDF.DeferredObject[columnBindings.size()];

    int i = 0;
    for (Entry<ObjectInspector, Object> entry : columnBindings) {
        objectInspectors[i] = entry.getKey();
        if (entry.getValue() != null && entry.getKey() instanceof JavaHiveVarcharObjectInspector) {
            // Varchar values arrive as plain Strings; wrap them in HiveVarchar so the
            // UDF hashes them with the column's declared maximum length.
            JavaHiveVarcharObjectInspector varcharObjectInspector = (JavaHiveVarcharObjectInspector) entry.getKey();
            deferredObjects[i] = new GenericUDF.DeferredJavaObject(new HiveVarchar((String) entry.getValue(), varcharObjectInspector.getMaxLength()));
        }
        else {
            deferredObjects[i] = new GenericUDF.DeferredJavaObject(entry.getValue());
        }
        i++;
    }

    // GenericUDFHash reproduces Hive's bucketing hash; its initialize() returns an
    // IntObjectInspector used to read the hash value out of the evaluation result.
    ObjectInspector udfInspector = udf.initialize(objectInspectors);
    IntObjectInspector inspector = (IntObjectInspector) udfInspector;
    Object result = udf.evaluate(deferredObjects);

    // Carry the hash in a HiveKey and let DefaultHivePartitioner map it to a bucket.
    HiveKey hiveKey = new HiveKey();
    hiveKey.setHashCode(inspector.get(result));
    return new DefaultHivePartitioner<>().getBucket(hiveKey, null, bucketCount);
}
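For context, a minimal sketch of how this helper might be invoked: it binds a string column and an int column to matching Hive object inspectors and asks for the bucket among 32. The class name GetHiveBucketExample, the sample values, and the bucket count are illustrative assumptions rather than part of the Presto source; the example also assumes it lives alongside TestHiveBucketing (or imports it) so the static method is visible.

    import java.util.AbstractMap.SimpleEntry;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Map.Entry;

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class GetHiveBucketExample {
        public static void main(String[] args) throws Exception {
            // Pair each raw Java value with an ObjectInspector describing its Hive type.
            // Inspectors come from Hive's PrimitiveObjectInspectorFactory.
            List<Entry<ObjectInspector, Object>> bindings = Arrays.<Entry<ObjectInspector, Object>>asList(
                    new SimpleEntry<ObjectInspector, Object>(PrimitiveObjectInspectorFactory.javaStringObjectInspector, "order-42"),
                    new SimpleEntry<ObjectInspector, Object>(PrimitiveObjectInspectorFactory.javaIntObjectInspector, 7));

            // Hash the bound values the way Hive would and map the result onto 32 buckets.
            // Assumes TestHiveBucketing is on the test classpath.
            int bucket = TestHiveBucketing.getHiveBucket(bindings, 32);
            System.out.println("bucket = " + bucket);
        }
    }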