Example 86 with ByteWritable

use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.

the class TeradataBinarySerde method deserializeField.

private Object deserializeField(TeradataBinaryDataInputStream in, TypeInfo type, Object reuse, boolean isNull) throws IOException, ParseException, SerDeException {
    // The field's bytes must always be consumed from the stream, even when isNull is set,
    // so that the read position stays aligned for the subsequent fields.
    switch(type.getCategory()) {
        case PRIMITIVE:
            PrimitiveTypeInfo ptype = (PrimitiveTypeInfo) type;
            switch(ptype.getPrimitiveCategory()) {
                case VARCHAR: // Teradata Type: VARCHAR
                    String st = in.readVarchar();
                    if (isNull) {
                        return null;
                    } else {
                        HiveVarcharWritable r = reuse == null ? new HiveVarcharWritable() : (HiveVarcharWritable) reuse;
                        r.set(st, ((VarcharTypeInfo) type).getLength());
                        return r;
                    }
                case INT: // Teradata Type: INT
                    int i = in.readInt();
                    if (isNull) {
                        return null;
                    } else {
                        IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
                        r.set(i);
                        return r;
                    }
                case TIMESTAMP: // Teradata Type: TIMESTAMP
                    Timestamp ts = in.readTimestamp(getTimeStampByteNum(timestampPrecision));
                    if (isNull) {
                        return null;
                    } else {
                        TimestampWritableV2 r = reuse == null ? new TimestampWritableV2() : (TimestampWritableV2) reuse;
                        r.set(ts);
                        return r;
                    }
                case DOUBLE: // Teradata Type: FLOAT
                    double d = in.readDouble();
                    if (isNull) {
                        return null;
                    } else {
                        DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
                        r.set(d);
                        return r;
                    }
                case DATE: // Teradata Type: DATE
                    Date dt = in.readDate();
                    if (isNull) {
                        return null;
                    } else {
                        DateWritableV2 r = reuse == null ? new DateWritableV2() : (DateWritableV2) reuse;
                        r.set(dt);
                        return r;
                    }
                case BYTE: // Teradata Type: BYTEINT
                    byte bt = in.readByte();
                    if (isNull) {
                        return null;
                    } else {
                        ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
                        r.set(bt);
                        return r;
                    }
                case LONG: // Teradata Type: BIGINT
                    long l = in.readLong();
                    if (isNull) {
                        return null;
                    } else {
                        LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
                        r.set(l);
                        return r;
                    }
                case CHAR: // Teradata Type: CHAR
                    CharTypeInfo ctype = (CharTypeInfo) type;
                    int length = ctype.getLength();
                    String c = in.readChar(length * getCharByteNum(charCharset));
                    if (isNull) {
                        return null;
                    } else {
                        HiveCharWritable r = reuse == null ? new HiveCharWritable() : (HiveCharWritable) reuse;
                        r.set(c, length);
                        return r;
                    }
                case DECIMAL: // Teradata Type: DECIMAL
                    DecimalTypeInfo dtype = (DecimalTypeInfo) type;
                    int precision = dtype.precision();
                    int scale = dtype.scale();
                    HiveDecimal hd = in.readDecimal(scale, getDecimalByteNum(precision));
                    if (isNull) {
                        return null;
                    } else {
                        HiveDecimalWritable r = (reuse == null ? new HiveDecimalWritable() : (HiveDecimalWritable) reuse);
                        r.set(hd);
                        return r;
                    }
                case SHORT: // Teradata Type: SMALLINT
                    short s = in.readShort();
                    if (isNull) {
                        return null;
                    } else {
                        ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
                        r.set(s);
                        return r;
                    }
                case BINARY: // Teradata Type: VARBYTE
                    byte[] content = in.readVarbyte();
                    if (isNull) {
                        return null;
                    } else {
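                        // Note: unlike the other branches, this one always allocates a fresh
                        // BytesWritable; the reuse object is not recycled here.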
                        BytesWritable r = new BytesWritable();
                        r.set(content, 0, content.length);
                        return r;
                    }
                default:
                    throw new SerDeException("Unrecognized type: " + ptype.getPrimitiveCategory());
            }
        // Currently, deserialization of complex types is not supported
        case LIST:
        case MAP:
        case STRUCT:
        default:
            throw new SerDeException("Unsupported category: " + type.getCategory());
    }
}
Also used : DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) Date(org.apache.hadoop.hive.common.type.Date) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)
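
The reuse argument above follows Hadoop's standard Writable-recycling idiom: the caller hands back the object returned for the previous row so each field can be overwritten in place instead of reallocated. A minimal, self-contained sketch of that idiom (the class and method names here are illustrative, not from the Hive codebase):

import org.apache.hadoop.io.IntWritable;

public class ReuseSketch {
    // Populate an IntWritable, recycling `reuse` when the caller supplies one.
    static IntWritable toWritable(int value, Object reuse) {
        IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
        r.set(value);
        return r;
    }

    public static void main(String[] args) {
        Object previous = null;
        for (int v : new int[] { 1, 2, 3 }) {
            previous = toWritable(v, previous); // the same instance is recycled after the first call
            System.out.println(previous);
        }
    }
}

Note also that deserializeField reads every field from the stream before checking isNull: the Teradata binary row layout is positional, so the bytes must be consumed to keep the stream aligned even when the value is discarded.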

Example 87 with ByteWritable

use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.

the class TestLazySimpleSerDe method testLazySimpleSerDeLastColumnTakesRest.

/**
 * Test the LazySimpleSerDe class with LastColumnTakesRest option.
 */
@Test
public void testLazySimpleSerDeLastColumnTakesRest() throws Throwable {
    try {
        // Create the SerDe
        LazySimpleSerDe serDe = new LazySimpleSerDe();
        Configuration conf = new Configuration();
        Properties tbl = createProperties();
        tbl.setProperty(serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST, "true");
        serDe.initialize(conf, tbl, null);
        // Data
        Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\ta\tb\t");
        String s = "123\t456\t789\t1000\t5.3\thive and hadoop\t1\ta\tb\t";
        Object[] expectedFieldsData = { new ByteWritable((byte) 123), new ShortWritable((short) 456), new IntWritable(789), new LongWritable(1000), new DoubleWritable(5.3), new Text("hive and hadoop"), new IntWritable(1), new Text("a\tb\t") };
        // Test
        deserializeAndSerialize(serDe, t, s, expectedFieldsData);
    } catch (Throwable e) {
        e.printStackTrace();
        throw e;
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) Text(org.apache.hadoop.io.Text) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) Properties(java.util.Properties) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)
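
The trailing column demonstrates the LastColumnTakesRest behavior: once the first eight columns are split off, the ninth keeps the remaining text verbatim, separators included, which is why the expected value is "a\tb\t" rather than "a". A rough standalone analogue using String.split with a limit (an illustration of the splitting behavior, not how LazySimpleSerDe is implemented):

public class LastColumnSketch {
    public static void main(String[] args) {
        String row = "123\t456\ta\tb\t";
        // A positive limit caps the number of splits, so the final element
        // retains any embedded separators, analogous to LastColumnTakesRest.
        String[] fields = row.split("\t", 3);
        System.out.println(fields[2]); // prints "a<TAB>b<TAB>", tabs preserved
    }
}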

Example 88 with ByteWritable

use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.

the class TestLazySimpleSerDe method testLazySimpleSerDe.

/**
 * Test the LazySimpleSerDe class.
 */
@Test
public void testLazySimpleSerDe() throws Throwable {
    try {
        // Create the SerDe
        LazySimpleSerDe serDe = new LazySimpleSerDe();
        Configuration conf = new Configuration();
        Properties tbl = new Properties();
        tbl.setProperty(serdeConstants.SERIALIZATION_FORMAT, "9");
        tbl.setProperty("columns", "abyte,ashort,aint,along,adouble,astring,anullint,anullstring,aba");
        tbl.setProperty("columns.types", "tinyint:smallint:int:bigint:double:string:int:string:binary");
        tbl.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
        serDe.initialize(conf, tbl, null);
        // Data
        Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\tNULL\t");
        t.append(new byte[] { (byte) Integer.parseInt("10111111", 2) }, 0, 1);
        StringBuilder sb = new StringBuilder("123\t456\t789\t1000\t5.3\thive and hadoop\t1\tNULL\t");
        String s = sb.append(Base64.getEncoder().withoutPadding().encodeToString(new byte[] { (byte) Integer.parseInt("10111111", 2) })).toString();
        Object[] expectedFieldsData = { new ByteWritable((byte) 123), new ShortWritable((short) 456), new IntWritable(789), new LongWritable(1000), new DoubleWritable(5.3), new Text("hive and hadoop"), new IntWritable(1), null, new BytesWritable(new byte[] { (byte) Integer.parseInt("10111111", 2) }) };
        // Test
        deserializeAndSerialize(serDe, t, s, expectedFieldsData);
    } catch (Throwable e) {
        e.printStackTrace();
        throw e;
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) Text(org.apache.hadoop.io.Text) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) Properties(java.util.Properties) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)
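
The binary column round-trip relies on LazySimpleSerDe rendering BINARY values as unpadded Base64 text, which is why the test assembles the expected string with Base64.getEncoder().withoutPadding(). The encoding step in isolation:

import java.util.Base64;

public class BinaryColumnSketch {
    public static void main(String[] args) {
        byte[] raw = { (byte) Integer.parseInt("10111111", 2) }; // 0xBF
        String encoded = Base64.getEncoder().withoutPadding().encodeToString(raw);
        System.out.println(encoded); // prints "vw"
    }
}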

Example 89 with ByteWritable

use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.

the class VerifyFast method serializeWrite.

public static void serializeWrite(SerializeWrite serializeWrite, TypeInfo typeInfo, Object object) throws IOException {
    if (object == null) {
        serializeWrite.writeNull();
        return;
    }
    switch(typeInfo.getCategory()) {
        case PRIMITIVE:
            {
                PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
                switch(primitiveTypeInfo.getPrimitiveCategory()) {
                    case BOOLEAN:
                        {
                            boolean value = ((BooleanWritable) object).get();
                            serializeWrite.writeBoolean(value);
                        }
                        break;
                    case BYTE:
                        {
                            byte value = ((ByteWritable) object).get();
                            serializeWrite.writeByte(value);
                        }
                        break;
                    case SHORT:
                        {
                            short value = ((ShortWritable) object).get();
                            serializeWrite.writeShort(value);
                        }
                        break;
                    case INT:
                        {
                            int value = ((IntWritable) object).get();
                            serializeWrite.writeInt(value);
                        }
                        break;
                    case LONG:
                        {
                            long value = ((LongWritable) object).get();
                            serializeWrite.writeLong(value);
                        }
                        break;
                    case FLOAT:
                        {
                            float value = ((FloatWritable) object).get();
                            serializeWrite.writeFloat(value);
                        }
                        break;
                    case DOUBLE:
                        {
                            double value = ((DoubleWritable) object).get();
                            serializeWrite.writeDouble(value);
                        }
                        break;
                    case STRING:
                        {
                            Text value = (Text) object;
                            byte[] stringBytes = value.getBytes();
                            int stringLength = stringBytes.length;
                            serializeWrite.writeString(stringBytes, 0, stringLength);
                        }
                        break;
                    case CHAR:
                        {
                            HiveChar value = ((HiveCharWritable) object).getHiveChar();
                            serializeWrite.writeHiveChar(value);
                        }
                        break;
                    case VARCHAR:
                        {
                            HiveVarchar value = ((HiveVarcharWritable) object).getHiveVarchar();
                            serializeWrite.writeHiveVarchar(value);
                        }
                        break;
                    case DECIMAL:
                        {
                            HiveDecimal value = ((HiveDecimalWritable) object).getHiveDecimal();
                            DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                            serializeWrite.writeHiveDecimal(value, decTypeInfo.scale());
                        }
                        break;
                    case DATE:
                        {
                            Date value = ((DateWritableV2) object).get();
                            serializeWrite.writeDate(value);
                        }
                        break;
                    case TIMESTAMP:
                        {
                            Timestamp value = ((TimestampWritableV2) object).getTimestamp();
                            serializeWrite.writeTimestamp(value);
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        {
                            HiveIntervalYearMonth value = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth();
                            serializeWrite.writeHiveIntervalYearMonth(value);
                        }
                        break;
                    case INTERVAL_DAY_TIME:
                        {
                            HiveIntervalDayTime value = ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime();
                            serializeWrite.writeHiveIntervalDayTime(value);
                        }
                        break;
                    case BINARY:
                        {
                            BytesWritable byteWritable = (BytesWritable) object;
                            byte[] binaryBytes = byteWritable.getBytes();
                            int length = byteWritable.getLength();
                            serializeWrite.writeBinary(binaryBytes, 0, length);
                        }
                        break;
                    default:
                        throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory().name());
                }
            }
            break;
        case LIST:
            {
                ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
                TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
                ArrayList<Object> elements = (ArrayList<Object>) object;
                serializeWrite.beginList(elements);
                boolean isFirst = true;
                for (Object elementObject : elements) {
                    if (isFirst) {
                        isFirst = false;
                    } else {
                        serializeWrite.separateList();
                    }
                    if (elementObject == null) {
                        serializeWrite.writeNull();
                    } else {
                        serializeWrite(serializeWrite, elementTypeInfo, elementObject);
                    }
                }
                serializeWrite.finishList();
            }
            break;
        case MAP:
            {
                MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
                TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
                TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
                HashMap<Object, Object> hashMap = (HashMap<Object, Object>) object;
                serializeWrite.beginMap(hashMap);
                boolean isFirst = true;
                for (Entry<Object, Object> entry : hashMap.entrySet()) {
                    if (isFirst) {
                        isFirst = false;
                    } else {
                        serializeWrite.separateKeyValuePair();
                    }
                    if (entry.getKey() == null) {
                        serializeWrite.writeNull();
                    } else {
                        serializeWrite(serializeWrite, keyTypeInfo, entry.getKey());
                    }
                    serializeWrite.separateKey();
                    if (entry.getValue() == null) {
                        serializeWrite.writeNull();
                    } else {
                        serializeWrite(serializeWrite, valueTypeInfo, entry.getValue());
                    }
                }
                serializeWrite.finishMap();
            }
            break;
        case STRUCT:
            {
                StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
                List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
                List<Object> fieldValues = (List<Object>) object;
                final int size = fieldValues.size();
                serializeWrite.beginStruct(fieldValues);
                boolean isFirst = true;
                for (int i = 0; i < size; i++) {
                    if (isFirst) {
                        isFirst = false;
                    } else {
                        serializeWrite.separateStruct();
                    }
                    serializeWrite(serializeWrite, fieldTypeInfos.get(i), fieldValues.get(i));
                }
                serializeWrite.finishStruct();
            }
            break;
        case UNION:
            {
                UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
                List<TypeInfo> fieldTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
                final int size = fieldTypeInfos.size();
                StandardUnion standardUnion = (StandardUnion) object;
                byte tag = standardUnion.getTag();
                serializeWrite.beginUnion(tag);
                serializeWrite(serializeWrite, fieldTypeInfos.get(tag), standardUnion.getObject());
                serializeWrite.finishUnion();
            }
            break;
        default:
            throw new Error("Unknown category " + typeInfo.getCategory().name());
    }
}
Also used : HashMap(java.util.HashMap) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) ArrayList(java.util.ArrayList) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) Entry(java.util.Map.Entry) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) ArrayList(java.util.ArrayList) List(java.util.List) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) Text(org.apache.hadoop.io.Text) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) Date(org.apache.hadoop.hive.common.type.Date) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) StandardUnion(org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)
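
serializeWrite is a type-directed dispatcher: primitive categories write a single value, while LIST, MAP, STRUCT, and UNION recurse per element with separator calls between entries. A stripped-down, hypothetical analogue of that recursion (plain strings instead of Hive's SerializeWrite API):

import java.util.Arrays;
import java.util.List;

public class DispatchSketch {
    // Primitives are emitted directly; lists recurse, separating elements,
    // mirroring the shape of VerifyFast.serializeWrite above.
    static void write(StringBuilder out, Object value) {
        if (value == null) {
            out.append("\\N");
        } else if (value instanceof List) {
            boolean first = true;
            for (Object element : (List<?>) value) {
                if (!first) {
                    out.append(','); // analogous to serializeWrite.separateList()
                }
                first = false;
                write(out, element);
            }
        } else {
            out.append(value);
        }
    }

    public static void main(String[] args) {
        StringBuilder out = new StringBuilder();
        write(out, Arrays.asList(1, null, Arrays.asList(2, 3)));
        System.out.println(out); // prints "1,\N,2,3"
    }
}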

Example 90 with ByteWritable

use of org.apache.hadoop.hive.serde2.io.ByteWritable in project presto by prestodb.

the class TestObjectEncoders method testPrimitiveObjectEncoders.

@Test
public void testPrimitiveObjectEncoders() {
    ObjectInspector inspector;
    ObjectEncoder encoder;
    inspector = writableLongObjectInspector;
    encoder = createEncoder(BIGINT, inspector);
    assertTrue(encoder.encode(new LongWritable(123456L)) instanceof Long);
    inspector = writableIntObjectInspector;
    encoder = createEncoder(INTEGER, inspector);
    assertTrue(encoder.encode(new IntWritable(12345)) instanceof Long);
    inspector = writableShortObjectInspector;
    encoder = createEncoder(SMALLINT, inspector);
    assertTrue(encoder.encode(new ShortWritable((short) 1234)) instanceof Long);
    inspector = writableByteObjectInspector;
    encoder = createEncoder(TINYINT, inspector);
    assertTrue(encoder.encode(new ByteWritable((byte) 123)) instanceof Long);
    inspector = writableBooleanObjectInspector;
    encoder = createEncoder(BOOLEAN, inspector);
    assertTrue(encoder.encode(new BooleanWritable(true)) instanceof Boolean);
    inspector = writableDoubleObjectInspector;
    encoder = createEncoder(DOUBLE, inspector);
    assertTrue(encoder.encode(new DoubleWritable(0.1)) instanceof Double);
    inspector = writableDateObjectInspector;
    encoder = createEncoder(DATE, inspector);
    assertTrue(encoder.encode(new DateWritable(DateTimeUtils.createDate(18380L))) instanceof Long);
    inspector = writableHiveDecimalObjectInspector;
    encoder = createEncoder(createDecimalType(11, 10), inspector);
    assertTrue(encoder.encode(new HiveDecimalWritable("1.2345678910")) instanceof Long);
    encoder = createEncoder(createDecimalType(34, 33), inspector);
    assertTrue(encoder.encode(new HiveDecimalWritable("1.281734081274028174012432412423134")) instanceof Slice);
}
Also used : PrimitiveObjectInspectorFactory.writableByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableByteObjectInspector) PrimitiveObjectInspectorFactory.writableDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveObjectInspectorFactory.writableShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableShortObjectInspector) PrimitiveObjectInspectorFactory.writableStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector) PrimitiveObjectInspectorFactory.writableBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBooleanObjectInspector) PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector) PrimitiveObjectInspectorFactory.writableBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBinaryObjectInspector) PrimitiveObjectInspectorFactory.writableIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableIntObjectInspector) PrimitiveObjectInspectorFactory.writableDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDoubleObjectInspector) PrimitiveObjectInspectorFactory.writableLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DoubleWritable(org.apache.hadoop.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) Slice(io.airlift.slice.Slice) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.testng.annotations.Test)
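
The two decimal assertions differ because Presto picks the carrier type by precision: decimals with precision up to 18 fit in a signed 64-bit long (a "short" decimal), while wider ones are carried as a 128-bit Slice. A quick check of the unscaled widths involved, using plain JDK arithmetic independent of Presto:

import java.math.BigDecimal;

public class DecimalWidthSketch {
    public static void main(String[] args) {
        // precision 11: the unscaled value fits comfortably in a long
        BigDecimal shortDecimal = new BigDecimal("1.2345678910");
        System.out.println(shortDecimal.unscaledValue().bitLength()); // prints 34

        // precision 34: the unscaled value needs more than 63 bits, hence a Slice
        BigDecimal longDecimal = new BigDecimal("1.281734081274028174012432412423134");
        System.out.println(longDecimal.unscaledValue().bitLength() > 63); // prints "true"
    }
}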

Aggregations

ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 81 usages
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 57 usages
IntWritable (org.apache.hadoop.io.IntWritable): 57 usages
LongWritable (org.apache.hadoop.io.LongWritable): 53 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 48 usages
Text (org.apache.hadoop.io.Text): 48 usages
Test (org.junit.Test): 44 usages
FloatWritable (org.apache.hadoop.io.FloatWritable): 41 usages
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 39 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 31 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 29 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 27 usages
ArrayList (java.util.ArrayList): 22 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 22 usages
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 21 usages
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 21 usages
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable): 18 usages
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 17 usages
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 17 usages
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 17 usages