Usage of org.apache.hadoop.io.BytesWritable in the Apache Hive project.
Class GenericUDFParamUtils, method getBinaryValue.
/**
 * Evaluates argument {@code i}, runs it through the matching converter, and
 * returns the converted value as a {@link BytesWritable}.
 *
 * @param arguments  the deferred UDF arguments
 * @param i          index of the argument to evaluate
 * @param converters per-argument converters; entry {@code i} must produce a BytesWritable
 * @return the converted binary value, or {@code null} if the argument evaluates to null
 * @throws HiveException if evaluating the deferred argument fails
 */
public static BytesWritable getBinaryValue(DeferredObject[] arguments, int i, Converter[] converters) throws HiveException {
    Object raw = arguments[i].get();
    if (raw == null) {
        return null;
    }
    // The converter is expected to yield a writable binary value.
    return (BytesWritable) converters[i].convert(raw);
}
Usage of org.apache.hadoop.io.BytesWritable in the Apache Hive project.
Class TestHiveKVResultCache, method scanResultList.
/**
 * Drives a MyHiveFunctionResultList over a synthetic input iterator and drains
 * it, optionally collecting every emitted (HiveKey, BytesWritable) pair.
 *
 * @param rows      total number of rows the result list should produce
 * @param threshold spill threshold; 0 means "no spilling" (single input record)
 * @param separate  percentage of rows generated separately from the input records
 * @param output    if non-null, receives every tuple read from the result list
 * @param prefix1   key/value prefix for the first record group
 * @param prefix2   key/value prefix for the second record group
 * @return wall-clock milliseconds spent draining the result list
 */
@SuppressWarnings("unchecked") // resultList.next() is known to yield Tuple2<HiveKey, BytesWritable>
private static long scanResultList(long rows, int threshold, int separate,
        List<Tuple2<HiveKey, BytesWritable>> output, String prefix1, String prefix2) {
    // Number of records the dummy input iterator should report; with no
    // threshold a single input record is enough to drive generation.
    final long iteratorCount = threshold == 0 ? 1 : rows * (100 - separate) / 100 / threshold;
    // Fixed raw type: the anonymous iterator is now parameterized instead of raw.
    MyHiveFunctionResultList resultList = new MyHiveFunctionResultList(new Iterator<Object>() {

        // Input record iterator, not used beyond pacing hasNext()/next()
        private int i = 0;

        @Override
        public boolean hasNext() {
            return i++ < iteratorCount;
        }

        @Override
        public Object next() {
            return Integer.valueOf(i);
        }

        @Override
        public void remove() {
            // intentionally a no-op: removal is meaningless for this synthetic source
        }
    });
    resultList.init(rows, threshold, separate, prefix1, prefix2);
    long startTime = System.currentTimeMillis();
    while (resultList.hasNext()) {
        Object item = resultList.next();
        if (output != null) {
            output.add((Tuple2<HiveKey, BytesWritable>) item);
        }
    }
    long endTime = System.currentTimeMillis();
    return endTime - startTime;
}
Usage of org.apache.hadoop.io.BytesWritable in the Apache Hive project.
Class TestHiveKVResultCache, method testSimple.
@Test
public void testSimple() throws Exception {
    // Build a fresh KV result cache, insert a single (k, v) pair,
    // and verify that exactly that pair comes back out.
    HiveKVResultCache cache = new HiveKVResultCache();
    byte[] keyBytes = "key".getBytes();
    HiveKey key = new HiveKey(keyBytes, "key".hashCode());
    BytesWritable value = new BytesWritable("value".getBytes());

    cache.add(key, value);
    assertTrue("KV result cache should have at least one element", cache.hasNext());

    Tuple2<HiveKey, BytesWritable> row = cache.next();
    assertTrue("Incorrect key", key.equals(row._1()));
    assertTrue("Incorrect value", value.equals(row._2()));

    // After the single pair is consumed, the cache must be exhausted.
    assertTrue("Cache shouldn't have more records", !cache.hasNext());
}
Usage of org.apache.hadoop.io.BytesWritable in the Apache Hive project.
Class GenericUDFEncode, method initialize.
/**
 * Validates Encode()'s two string-group arguments, caches their object
 * inspectors, pre-builds a strict CharsetEncoder when the charset argument is
 * a constant, and returns the writable binary output inspector.
 *
 * @param arguments exactly two primitive string/varchar inspectors
 * @return the writable binary object inspector for the encoded result
 * @throws UDFArgumentException if the argument count or types are wrong
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("Encode() requires exactly two arguments");
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE
            || PrimitiveGrouping.STRING_GROUP != PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
                ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory())) {
        throw new UDFArgumentTypeException(0, "The first argument to Encode() must be a string/varchar");
    }
    stringOI = (PrimitiveObjectInspector) arguments[0];
    if (arguments[1].getCategory() != Category.PRIMITIVE
            || PrimitiveGrouping.STRING_GROUP != PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
                ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory())) {
        throw new UDFArgumentTypeException(1, "The second argument to Encode() must be a string/varchar");
    }
    charsetOI = (PrimitiveObjectInspector) arguments[1];
    // If the character set for encoding is constant, we can optimize that:
    // resolve the encoder once here instead of on every evaluate() call.
    // Fixed: cast the already-checked charsetOI (was arguments[1]) so the
    // instanceof check and the cast refer to the same expression.
    if (charsetOI instanceof ConstantObjectInspector) {
        String charSetName = ((ConstantObjectInspector) charsetOI).getWritableConstantValue().toString();
        // REPORT (rather than replace/ignore) so bad input surfaces as an error.
        encoder = Charset.forName(charSetName).newEncoder()
            .onMalformedInput(CodingErrorAction.REPORT)
            .onUnmappableCharacter(CodingErrorAction.REPORT);
    }
    result = new BytesWritable();
    // Fixed: dropped the redundant (ObjectInspector) cast — the factory field
    // is already an ObjectInspector subtype.
    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
}
Usage of org.apache.hadoop.io.BytesWritable in the Apache Hive project.
Class TestUDFSha1, method runAndVerifyBin.
/**
 * Runs sha1() over a binary input and checks the hex digest.
 *
 * @param binV      raw input bytes, or null to exercise the null path
 * @param expResult expected digest string, or null when the input is null
 * @param udf       the UDFSha1 instance under test
 * @throws HiveException if evaluation fails
 */
private void runAndVerifyBin(byte[] binV, String expResult, UDFSha1 udf) throws HiveException {
    BytesWritable input;
    if (binV == null) {
        input = null;
    } else {
        input = new BytesWritable(binV);
    }
    Text output = (Text) udf.evaluate(input);
    String actual = (output == null) ? null : output.toString();
    assertEquals("sha1() test ", expResult, actual);
}
Aggregations