Search in sources :

Example 76 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

The class TestGenericUDFOPMultiply defines the method testByteTimesShort.

/**
 * Verifies that multiplying a BYTE by a SHORT promotes the result type to
 * SHORT and computes the expected product: (byte) 4 * (short) 6 == 24.
 *
 * @throws HiveException if UDF initialization or evaluation fails
 */
@Test
public void testByteTimesShort() throws HiveException {
    GenericUDFOPMultiply udf = new GenericUDFOPMultiply();
    ByteWritable left = new ByteWritable((byte) 4);
    ShortWritable right = new ShortWritable((short) 6);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableByteObjectInspector, PrimitiveObjectInspectorFactory.writableShortObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    // JUnit's assertEquals takes (expected, actual); the original call had the
    // arguments reversed, which produces a misleading failure message.
    Assert.assertEquals(TypeInfoFactory.shortTypeInfo, oi.getTypeInfo());
    ShortWritable res = (ShortWritable) udf.evaluate(args);
    Assert.assertEquals(24, res.get());
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Test(org.junit.Test)

Example 77 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

The class TestOrcFile defines the method testSeek.

/**
 * Round-trip seek test: writes 32768 rows of random (but seeded, hence
 * reproducible) data to an ORC file, then reads them back in reverse order
 * via seekToRow(), regenerating each expected row from the same arrays and
 * comparing field by field. Finally re-opens the reader restricted to a
 * byte range covering stripes 2-3 and a two-column projection, and checks
 * that seeking to the last row of stripe 2 still yields correct values.
 */
@Test
public void testSeek() throws Exception {
    ObjectInspector inspector;
    // Serialize inspector creation across tests in this class; presumably the
    // reflection-based factory is not safe for concurrent use — kept as in the
    // rest of the suite.
    synchronized (TestOrcFile.class) {
        inspector = ObjectInspectorFactory.getReflectionObjectInspector(BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    // Small stripes and a 1000-row index stride so the file has multiple
    // stripes and row groups for the seek logic to cross.
    Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf).inspector(inspector).stripeSize(200000).bufferSize(65536).rowIndexStride(1000));
    // Fixed seed: the expected rows are regenerated below from the same data.
    Random rand = new Random(42);
    final int COUNT = 32768;
    long[] intValues = new long[COUNT];
    double[] doubleValues = new double[COUNT];
    String[] stringValues = new String[COUNT];
    BytesWritable[] byteValues = new BytesWritable[COUNT];
    String[] words = new String[128];
    for (int i = 0; i < words.length; ++i) {
        words[i] = Integer.toHexString(rand.nextInt());
    }
    // Longs and strings are written in duplicated pairs (each value appears
    // twice in a row), giving the encoders some repetition to work with.
    for (int i = 0; i < COUNT / 2; ++i) {
        intValues[2 * i] = rand.nextLong();
        intValues[2 * i + 1] = intValues[2 * i];
        stringValues[2 * i] = words[rand.nextInt(words.length)];
        stringValues[2 * i + 1] = stringValues[2 * i];
    }
    for (int i = 0; i < COUNT; ++i) {
        doubleValues[i] = rand.nextDouble();
        byte[] buf = new byte[20];
        rand.nextBytes(buf);
        byteValues[i] = new BytesWritable(buf);
    }
    for (int i = 0; i < COUNT; ++i) {
        writer.addRow(createRandomRow(intValues, doubleValues, stringValues, byteValues, words, i));
    }
    writer.close();
    Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
    assertEquals(COUNT, reader.getNumberOfRows());
    RecordReader rows = reader.rows();
    OrcStruct row = null;
    // Read the file backwards, one seek per row.
    for (int i = COUNT - 1; i >= 0; --i) {
        // we load the previous buffer of rows
        // NOTE(review): since i < COUNT, `i % COUNT == COUNT - 1` is only true
        // for i == COUNT - 1, so this extra seek happens exactly once — confirm
        // whether a per-buffer reload was intended here.
        if (i % COUNT == COUNT - 1) {
            rows.seekToRow(i - (COUNT - 1));
        }
        rows.seekToRow(i);
        row = (OrcStruct) rows.next(row);
        BigRow expected = createRandomRow(intValues, doubleValues, stringValues, byteValues, words, i);
        // assertEquals(expected, row);
        assertEquals(expected.boolean1.booleanValue(), ((BooleanWritable) row.getFieldValue(0)).get());
        assertEquals(expected.byte1.byteValue(), ((ByteWritable) row.getFieldValue(1)).get());
        assertEquals(expected.short1.shortValue(), ((ShortWritable) row.getFieldValue(2)).get());
        assertEquals(expected.int1.intValue(), ((IntWritable) row.getFieldValue(3)).get());
        assertEquals(expected.long1.longValue(), ((LongWritable) row.getFieldValue(4)).get());
        assertEquals(expected.float1, ((FloatWritable) row.getFieldValue(5)).get(), 0.0001);
        assertEquals(expected.double1, ((DoubleWritable) row.getFieldValue(6)).get(), 0.0001);
        assertEquals(expected.bytes1, row.getFieldValue(7));
        assertEquals(expected.string1, row.getFieldValue(8));
        List<InnerStruct> expectedList = expected.middle.list;
        List<OrcStruct> actualList = (List<OrcStruct>) ((OrcStruct) row.getFieldValue(9)).getFieldValue(0);
        compareList(expectedList, actualList);
        compareList(expected.list, (List<OrcStruct>) row.getFieldValue(10));
    }
    rows.close();
    // Walk the first five stripes to find the byte offsets of stripes 2 and 4
    // and the global index of the last row in stripe 2.
    Iterator<StripeInformation> stripeIterator = reader.getStripes().iterator();
    long offsetOfStripe2 = 0;
    long offsetOfStripe4 = 0;
    long lastRowOfStripe2 = 0;
    for (int i = 0; i < 5; ++i) {
        StripeInformation stripe = stripeIterator.next();
        if (i < 2) {
            lastRowOfStripe2 += stripe.getNumberOfRows();
        } else if (i == 2) {
            offsetOfStripe2 = stripe.getOffset();
            lastRowOfStripe2 += stripe.getNumberOfRows() - 1;
        } else if (i == 4) {
            offsetOfStripe4 = stripe.getOffset();
        }
    }
    // Project only two columns for the ranged read below.
    boolean[] columns = new boolean[reader.getStatistics().length];
    // long column
    columns[5] = true;
    // text column
    columns[9] = true;
    // Restrict the reader to the byte range [stripe 2, stripe 4) and verify
    // that seeking within that range returns the right projected values.
    rows = reader.rowsOptions(new Reader.Options().range(offsetOfStripe2, offsetOfStripe4 - offsetOfStripe2).include(columns));
    rows.seekToRow(lastRowOfStripe2);
    for (int i = 0; i < 2; ++i) {
        row = (OrcStruct) rows.next(row);
        BigRow expected = createRandomRow(intValues, doubleValues, stringValues, byteValues, words, (int) (lastRowOfStripe2 + i));
        assertEquals(expected.long1.longValue(), ((LongWritable) row.getFieldValue(4)).get());
        assertEquals(expected.string1, row.getFieldValue(8));
    }
    rows.close();
}
Also used : Random(java.util.Random) List(java.util.List) ArrayList(java.util.ArrayList) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) BytesWritable(org.apache.hadoop.io.BytesWritable) StripeInformation(org.apache.orc.StripeInformation) Test(org.junit.Test)

Example 78 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

The class TestOrcFile defines the method testZeroCopySeek.

/**
 * Same round-trip seek scenario as testSeek, but with the zero-copy record
 * reader enabled: writes 32768 seeded random rows, reads them back in
 * reverse via seekToRow() comparing every field, then re-reads a stripe
 * byte range with a two-column projection.
 */
@Test
public void testZeroCopySeek() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcFile.class) {
        inspector = ObjectInspectorFactory.getReflectionObjectInspector(BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf).inspector(inspector).stripeSize(200000).bufferSize(65536).rowIndexStride(1000));
    // Fixed seed: the expected rows are regenerated below from the same data.
    Random rand = new Random(42);
    final int COUNT = 32768;
    long[] intValues = new long[COUNT];
    double[] doubleValues = new double[COUNT];
    String[] stringValues = new String[COUNT];
    BytesWritable[] byteValues = new BytesWritable[COUNT];
    String[] words = new String[128];
    for (int i = 0; i < words.length; ++i) {
        words[i] = Integer.toHexString(rand.nextInt());
    }
    for (int i = 0; i < COUNT / 2; ++i) {
        intValues[2 * i] = rand.nextLong();
        intValues[2 * i + 1] = intValues[2 * i];
        stringValues[2 * i] = words[rand.nextInt(words.length)];
        stringValues[2 * i + 1] = stringValues[2 * i];
    }
    for (int i = 0; i < COUNT; ++i) {
        doubleValues[i] = rand.nextDouble();
        byte[] buf = new byte[20];
        rand.nextBytes(buf);
        byteValues[i] = new BytesWritable(buf);
    }
    for (int i = 0; i < COUNT; ++i) {
        writer.addRow(createRandomRow(intValues, doubleValues, stringValues, byteValues, words, i));
    }
    writer.close();
    writer = null;
    /*
     * Enable the zero-copy record reader. BUG FIX: the original created the
     * Reader from the outer `conf` first and only afterwards built a local
     * Configuration with USE_ZEROCOPY set — that local conf was never used,
     * so zero copy was never actually enabled. The option must be set on the
     * configuration the reader is created with.
     */
    Configuration zeroCopyConf = new Configuration(conf);
    zeroCopyConf.setBoolean(OrcConf.USE_ZEROCOPY.getHiveConfName(), true);
    Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(zeroCopyConf).filesystem(fs));
    assertEquals(COUNT, reader.getNumberOfRows());
    RecordReader rows = reader.rows();
    /* all tests are identical to the other seek() tests */
    OrcStruct row = null;
    for (int i = COUNT - 1; i >= 0; --i) {
        // we load the previous buffer of rows
        if (i % COUNT == COUNT - 1) {
            rows.seekToRow(i - (COUNT - 1));
        }
        rows.seekToRow(i);
        row = (OrcStruct) rows.next(row);
        BigRow expected = createRandomRow(intValues, doubleValues, stringValues, byteValues, words, i);
        assertEquals(expected.boolean1.booleanValue(), ((BooleanWritable) row.getFieldValue(0)).get());
        assertEquals(expected.byte1.byteValue(), ((ByteWritable) row.getFieldValue(1)).get());
        assertEquals(expected.short1.shortValue(), ((ShortWritable) row.getFieldValue(2)).get());
        assertEquals(expected.int1.intValue(), ((IntWritable) row.getFieldValue(3)).get());
        assertEquals(expected.long1.longValue(), ((LongWritable) row.getFieldValue(4)).get());
        assertEquals(expected.float1.floatValue(), ((FloatWritable) row.getFieldValue(5)).get(), 0.0001);
        assertEquals(expected.double1.doubleValue(), ((DoubleWritable) row.getFieldValue(6)).get(), 0.0001);
        assertEquals(expected.bytes1, row.getFieldValue(7));
        assertEquals(expected.string1, row.getFieldValue(8));
        List<InnerStruct> expectedList = expected.middle.list;
        List<OrcStruct> actualList = (List) ((OrcStruct) row.getFieldValue(9)).getFieldValue(0);
        compareList(expectedList, actualList);
        compareList(expected.list, (List) row.getFieldValue(10));
    }
    rows.close();
    // Locate the byte offsets of stripes 2 and 4 and the index of the last
    // row in stripe 2 for the ranged read below.
    Iterator<StripeInformation> stripeIterator = reader.getStripes().iterator();
    long offsetOfStripe2 = 0;
    long offsetOfStripe4 = 0;
    long lastRowOfStripe2 = 0;
    for (int i = 0; i < 5; ++i) {
        StripeInformation stripe = stripeIterator.next();
        if (i < 2) {
            lastRowOfStripe2 += stripe.getNumberOfRows();
        } else if (i == 2) {
            offsetOfStripe2 = stripe.getOffset();
            lastRowOfStripe2 += stripe.getNumberOfRows() - 1;
        } else if (i == 4) {
            offsetOfStripe4 = stripe.getOffset();
        }
    }
    boolean[] columns = new boolean[reader.getStatistics().length];
    // long column
    columns[5] = true;
    // text column
    columns[9] = true;
    /* use zero copy record reader */
    rows = reader.rowsOptions(new Reader.Options().range(offsetOfStripe2, offsetOfStripe4 - offsetOfStripe2).include(columns));
    rows.seekToRow(lastRowOfStripe2);
    for (int i = 0; i < 2; ++i) {
        row = (OrcStruct) rows.next(row);
        BigRow expected = createRandomRow(intValues, doubleValues, stringValues, byteValues, words, (int) (lastRowOfStripe2 + i));
        assertEquals(expected.long1.longValue(), ((LongWritable) row.getFieldValue(4)).get());
        assertEquals(expected.string1, row.getFieldValue(8));
    }
    rows.close();
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) Random(java.util.Random) List(java.util.List) ArrayList(java.util.ArrayList) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) BytesWritable(org.apache.hadoop.io.BytesWritable) StripeInformation(org.apache.orc.StripeInformation) Test(org.junit.Test)

Example 79 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

The class TestGenericUDFFloor defines the method testShort.

/**
 * Checks that floor() on a SHORT argument resolves its return type to BIGINT
 * and passes the integral value through unchanged: floor((short) 74) == 74L.
 *
 * @throws HiveException if UDF initialization or evaluation fails
 */
@Test
public void testShort() throws HiveException {
    GenericUDFFloor floorUdf = new GenericUDFFloor();
    ShortWritable shortArg = new ShortWritable((short) 74);
    ObjectInspector[] argOIs = new ObjectInspector[] { PrimitiveObjectInspectorFactory.writableShortObjectInspector };
    // Initialization determines the output type before evaluation.
    PrimitiveObjectInspector returnOI = (PrimitiveObjectInspector) floorUdf.initialize(argOIs);
    Assert.assertEquals(TypeInfoFactory.longTypeInfo, returnOI.getTypeInfo());
    DeferredObject[] evalArgs = new DeferredObject[] { new DeferredJavaObject(shortArg) };
    LongWritable result = (LongWritable) floorUdf.evaluate(evalArgs);
    Assert.assertEquals(74L, result.get());
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) LongWritable(org.apache.hadoop.io.LongWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Test(org.junit.Test)

Example 80 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

The class KafkaJsonSerDe defines the method parseAsPrimitive.

/**
 * Converts a scalar JSON node into the Hadoop/Hive writable that matches the
 * requested Hive primitive type.
 *
 * @param value the JSON scalar to convert
 * @param typeInfo the Hive primitive type the value must be coerced to
 * @return a writable whose concrete class corresponds to {@code typeInfo}
 * @throws SerDeException if the primitive category is not supported
 */
private Object parseAsPrimitive(JsonNode value, TypeInfo typeInfo) throws SerDeException {
    switch(TypeInfoFactory.getPrimitiveTypeInfo(typeInfo.getTypeName()).getPrimitiveCategory()) {
        case TIMESTAMP:
            TimestampWritable timestampWritable = new TimestampWritable();
            timestampWritable.setTime(TS_PARSER.get().parseMillis(value.textValue()));
            return timestampWritable;
        case TIMESTAMPLOCALTZ:
            final long numberOfMillis = TS_PARSER.get().parseMillis(value.textValue());
            return new TimestampLocalTZWritable(new TimestampTZ(ZonedDateTime.ofInstant(Instant.ofEpochMilli(numberOfMillis), ((TimestampLocalTZTypeInfo) typeInfo).timeZone())));
        case BYTE:
            return new ByteWritable((byte) value.intValue());
        case SHORT:
            return new ShortWritable(value.shortValue());
        case INT:
            return new IntWritable(value.intValue());
        case LONG:
            return new LongWritable(value.longValue());
        case FLOAT:
            return new FloatWritable(value.floatValue());
        case DOUBLE:
            return new DoubleWritable(value.doubleValue());
        case DECIMAL:
            return new HiveDecimalWritable(HiveDecimal.create(value.decimalValue()));
        case CHAR:
            return new HiveCharWritable(new HiveChar(value.textValue(), ((CharTypeInfo) typeInfo).getLength()));
        case VARCHAR:
            // BUG FIX: a VARCHAR column's typeInfo is a VarcharTypeInfo, which is a
            // sibling of CharTypeInfo (both extend BaseCharTypeInfo), so the original
            // (CharTypeInfo) cast threw ClassCastException at runtime. Cast to the
            // common base class, which declares getLength().
            return new HiveVarcharWritable(new HiveVarchar(value.textValue(), ((BaseCharTypeInfo) typeInfo).getLength()));
        case STRING:
            return new Text(value.textValue());
        case BOOLEAN:
            // Accept both native JSON booleans and textual "true"/"false".
            return new BooleanWritable(value.isBoolean() ? value.booleanValue() : Boolean.valueOf(value.textValue()));
        default:
            throw new SerDeException("Unknown type: " + typeInfo.getTypeName());
    }
}
Also used : HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) Text(org.apache.hadoop.io.Text) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) TimestampTZ(org.apache.hadoop.hive.common.type.TimestampTZ) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) TimestampLocalTZWritable(org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Aggregations

ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)94 IntWritable (org.apache.hadoop.io.IntWritable)62 LongWritable (org.apache.hadoop.io.LongWritable)54 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)53 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)50 Test (org.junit.Test)48 Text (org.apache.hadoop.io.Text)43 FloatWritable (org.apache.hadoop.io.FloatWritable)41 BooleanWritable (org.apache.hadoop.io.BooleanWritable)38 BytesWritable (org.apache.hadoop.io.BytesWritable)30 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)29 ArrayList (java.util.ArrayList)25 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)23 HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable)18 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)17 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)17 HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)17 HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable)17 Writable (org.apache.hadoop.io.Writable)17 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)15