Search in sources :

Example 51 with BytesWritable

use of org.apache.hadoop.io.BytesWritable in project hive by apache.

From the class GenericUDFParamUtils, the method getBinaryValue.

/**
 * Converts the i-th deferred UDF argument to a {@link BytesWritable} using the
 * converter at the same index.
 *
 * @param arguments  deferred UDF arguments
 * @param i          index of the argument to convert
 * @param converters converters aligned with {@code arguments} by index
 * @return the converted binary value, or {@code null} when the argument
 *         evaluates to {@code null}
 * @throws HiveException if evaluating the deferred argument fails
 */
public static BytesWritable getBinaryValue(DeferredObject[] arguments, int i, Converter[] converters) throws HiveException {
    final Object raw = arguments[i].get();
    if (raw == null) {
        return null;
    }
    return (BytesWritable) converters[i].convert(raw);
}
Also used : DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) BytesWritable(org.apache.hadoop.io.BytesWritable)

Example 52 with BytesWritable

use of org.apache.hadoop.io.BytesWritable in project hive by apache.

From the class TestHiveKVResultCache, the method scanResultList.

/**
 * Drains a {@code MyHiveFunctionResultList} built over a synthetic input
 * iterator and measures how long the full scan takes.
 *
 * @param rows      total number of rows the result list should produce
 * @param threshold cache threshold; 0 means a single input record drives the scan
 * @param separate  percentage used to derive the synthetic input record count
 * @param output    if non-null, every scanned tuple is appended here
 * @param prefix1   key prefix forwarded to {@code resultList.init}
 * @param prefix2   second prefix forwarded to {@code resultList.init}
 * @return elapsed wall-clock time of the scan, in milliseconds
 */
private static long scanResultList(long rows, int threshold, int separate, List<Tuple2<HiveKey, BytesWritable>> output, String prefix1, String prefix2) {
    // Number of synthetic input records to feed the result list.
    final long inputRecords = threshold == 0 ? 1 : rows * (100 - separate) / 100 / threshold;
    MyHiveFunctionResultList resultList = new MyHiveFunctionResultList(new Iterator() {

        // Input record iterator, not used beyond driving the loop count.
        // NOTE(review): hasNext() advances the counter as a side effect; this
        // mirrors the original driver behavior and must not be "fixed" here.
        private int count = 0;

        @Override
        public boolean hasNext() {
            return count++ < inputRecords;
        }

        @Override
        public Object next() {
            return Integer.valueOf(count);
        }

        @Override
        public void remove() {
            // no-op: removal is never exercised by the scan
        }
    });
    resultList.init(rows, threshold, separate, prefix1, prefix2);
    final long started = System.currentTimeMillis();
    while (resultList.hasNext()) {
        Object row = resultList.next();
        if (output != null) {
            output.add((Tuple2<HiveKey, BytesWritable>) row);
        }
    }
    return System.currentTimeMillis() - started;
}
Also used : HiveKey(org.apache.hadoop.hive.ql.io.HiveKey) Iterator(java.util.Iterator) BytesWritable(org.apache.hadoop.io.BytesWritable)

Example 53 with BytesWritable

use of org.apache.hadoop.io.BytesWritable in project hive by apache.

From the class TestHiveKVResultCache, the method testSimple.

/**
 * Verifies that a single (key, value) pair added to a fresh
 * {@code HiveKVResultCache} can be read back exactly once, unchanged.
 */
@Test
public void testSimple() throws Exception {
    // Create KV result cache object, add one (k,v) pair and retrieve them.
    HiveKVResultCache cache = new HiveKVResultCache();
    HiveKey key = new HiveKey("key".getBytes(), "key".hashCode());
    BytesWritable value = new BytesWritable("value".getBytes());
    cache.add(key, value);
    assertTrue("KV result cache should have at least one element", cache.hasNext());
    // The cache must hand back the same pair it was given...
    Tuple2<HiveKey, BytesWritable> row = cache.next();
    assertTrue("Incorrect key", key.equals(row._1()));
    assertTrue("Incorrect value", value.equals(row._2()));
    // ...and nothing more.
    assertTrue("Cache shouldn't have more records", !cache.hasNext());
}
Also used : HiveKey(org.apache.hadoop.hive.ql.io.HiveKey) BytesWritable(org.apache.hadoop.io.BytesWritable) Test(org.junit.Test)

Example 54 with BytesWritable

use of org.apache.hadoop.io.BytesWritable in project hive by apache.

From the class GenericUDFEncode, the method initialize.

@Override
/**
 * Validates the two arguments of encode(str, charset): both must be primitive
 * string-group types. When the charset argument is a constant, the
 * {@code CharsetEncoder} is built once here (strict REPORT mode for malformed
 * and unmappable input) instead of per row.
 *
 * @param arguments exactly two object inspectors: the value and the charset
 * @return a writable binary object inspector for the encoded result
 * @throws UDFArgumentException if the argument count or types are wrong
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("Encode() requires exactly two arguments");
    }
    // First argument: the value to encode.
    boolean valueIsStringGroup = arguments[0].getCategory() == Category.PRIMITIVE && PrimitiveGrouping.STRING_GROUP == PrimitiveObjectInspectorUtils.getPrimitiveGrouping(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
    if (!valueIsStringGroup) {
        throw new UDFArgumentTypeException(0, "The first argument to Encode() must be a string/varchar");
    }
    stringOI = (PrimitiveObjectInspector) arguments[0];
    // Second argument: the target character set name.
    boolean charsetIsStringGroup = arguments[1].getCategory() == Category.PRIMITIVE && PrimitiveGrouping.STRING_GROUP == PrimitiveObjectInspectorUtils.getPrimitiveGrouping(((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory());
    if (!charsetIsStringGroup) {
        throw new UDFArgumentTypeException(1, "The second argument to Encode() must be a string/varchar");
    }
    charsetOI = (PrimitiveObjectInspector) arguments[1];
    // If the character set for encoding is constant, we can optimize that
    if (charsetOI instanceof ConstantObjectInspector) {
        Object constantCharset = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
        encoder = Charset.forName(constantCharset.toString()).newEncoder().onMalformedInput(CodingErrorAction.REPORT).onUnmappableCharacter(CodingErrorAction.REPORT);
    }
    result = new BytesWritable();
    return (ObjectInspector) PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) BytesWritable(org.apache.hadoop.io.BytesWritable) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector)

Example 55 with BytesWritable

use of org.apache.hadoop.io.BytesWritable in project hive by apache.

From the class TestUDFSha1, the method runAndVerifyBin.

/**
 * Evaluates the SHA-1 UDF on a binary input (or null) and asserts the
 * expected digest string.
 *
 * @param binV      raw bytes to hash, or null to exercise the null path
 * @param expResult expected digest text, or null when the input is null
 * @param udf       the UDF instance under test
 * @throws HiveException if evaluation fails
 */
private void runAndVerifyBin(byte[] binV, String expResult, UDFSha1 udf) throws HiveException {
    BytesWritable input = (binV == null) ? null : new BytesWritable(binV);
    Text actual = (Text) udf.evaluate(input);
    assertEquals("sha1() test ", expResult, actual == null ? null : actual.toString());
}
Also used : BytesWritable(org.apache.hadoop.io.BytesWritable) Text(org.apache.hadoop.io.Text)

Aggregations

BytesWritable (org.apache.hadoop.io.BytesWritable)275 Text (org.apache.hadoop.io.Text)73 LongWritable (org.apache.hadoop.io.LongWritable)59 Test (org.junit.Test)53 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)46 IntWritable (org.apache.hadoop.io.IntWritable)44 ArrayList (java.util.ArrayList)39 Path (org.apache.hadoop.fs.Path)38 IOException (java.io.IOException)36 Configuration (org.apache.hadoop.conf.Configuration)33 FloatWritable (org.apache.hadoop.io.FloatWritable)33 Writable (org.apache.hadoop.io.Writable)32 BooleanWritable (org.apache.hadoop.io.BooleanWritable)31 List (java.util.List)30 SequenceFile (org.apache.hadoop.io.SequenceFile)27 Random (java.util.Random)24 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)24 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)23 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)22 FileSystem (org.apache.hadoop.fs.FileSystem)21