Example 31 with DateObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector in project hive by apache.

Class LazyBinarySerDe, method serialize.

/**
 * A recursive function that serializes an object to a byte buffer based on its
 * object inspector.
 *
 * @param byteStream
 *          the byte stream storing the serialization data
 * @param obj
 *          the object to serialize
 * @param objInspector
 *          the object inspector
 * @param skipLengthPrefix a boolean indicating whether a length prefix is
 *          needed for list/map/struct
 * @param warnedOnceNullMapKey a boolean indicating whether a warning
 *          has been issued once already when encountering null map keys
 */
public static void serialize(RandomAccessOutput byteStream, Object obj, ObjectInspector objInspector, boolean skipLengthPrefix, BooleanRef warnedOnceNullMapKey) throws SerDeException {
    // do nothing for null object
    if (null == obj) {
        return;
    }
    switch(objInspector.getCategory()) {
        case PRIMITIVE:
            {
                PrimitiveObjectInspector poi = (PrimitiveObjectInspector) objInspector;
                switch(poi.getPrimitiveCategory()) {
                    case VOID:
                        {
                            return;
                        }
                    case BOOLEAN:
                        {
                            boolean v = ((BooleanObjectInspector) poi).get(obj);
                            byteStream.write((byte) (v ? 1 : 0));
                            return;
                        }
                    case BYTE:
                        {
                            ByteObjectInspector boi = (ByteObjectInspector) poi;
                            byte v = boi.get(obj);
                            byteStream.write(v);
                            return;
                        }
                    case SHORT:
                        {
                            ShortObjectInspector spoi = (ShortObjectInspector) poi;
                            short v = spoi.get(obj);
                            byteStream.write((byte) (v >> 8));
                            byteStream.write((byte) (v));
                            return;
                        }
                    case INT:
                        {
                            IntObjectInspector ioi = (IntObjectInspector) poi;
                            int v = ioi.get(obj);
                            LazyBinaryUtils.writeVInt(byteStream, v);
                            return;
                        }
                    case LONG:
                        {
                            LongObjectInspector loi = (LongObjectInspector) poi;
                            long v = loi.get(obj);
                            LazyBinaryUtils.writeVLong(byteStream, v);
                            return;
                        }
                    case FLOAT:
                        {
                            FloatObjectInspector foi = (FloatObjectInspector) poi;
                            int v = Float.floatToIntBits(foi.get(obj));
                            byteStream.write((byte) (v >> 24));
                            byteStream.write((byte) (v >> 16));
                            byteStream.write((byte) (v >> 8));
                            byteStream.write((byte) (v));
                            return;
                        }
                    case DOUBLE:
                        {
                            DoubleObjectInspector doi = (DoubleObjectInspector) poi;
                            LazyBinaryUtils.writeDouble(byteStream, doi.get(obj));
                            return;
                        }
                    case STRING:
                        {
                            StringObjectInspector soi = (StringObjectInspector) poi;
                            Text t = soi.getPrimitiveWritableObject(obj);
                            serializeText(byteStream, t, skipLengthPrefix);
                            return;
                        }
                    case CHAR:
                        {
                            HiveCharObjectInspector hcoi = (HiveCharObjectInspector) poi;
                            Text t = hcoi.getPrimitiveWritableObject(obj).getTextValue();
                            serializeText(byteStream, t, skipLengthPrefix);
                            return;
                        }
                    case VARCHAR:
                        {
                            HiveVarcharObjectInspector hcoi = (HiveVarcharObjectInspector) poi;
                            Text t = hcoi.getPrimitiveWritableObject(obj).getTextValue();
                            serializeText(byteStream, t, skipLengthPrefix);
                            return;
                        }
                    case BINARY:
                        {
                            BinaryObjectInspector baoi = (BinaryObjectInspector) poi;
                            BytesWritable bw = baoi.getPrimitiveWritableObject(obj);
                            int length = bw.getLength();
                            if (!skipLengthPrefix) {
                                LazyBinaryUtils.writeVInt(byteStream, length);
                            } else {
                                if (length == 0) {
                                    throw new RuntimeException("LazyBinaryColumnarSerde cannot serialize a non-null zero " + "length binary field. Consider using either LazyBinarySerde or ColumnarSerde.");
                                }
                            }
                            byteStream.write(bw.getBytes(), 0, length);
                            return;
                        }
                    case DATE:
                        {
                            DateWritableV2 d = ((DateObjectInspector) poi).getPrimitiveWritableObject(obj);
                            writeDateToByteStream(byteStream, d);
                            return;
                        }
                    case TIMESTAMP:
                        {
                            TimestampObjectInspector toi = (TimestampObjectInspector) poi;
                            TimestampWritableV2 t = toi.getPrimitiveWritableObject(obj);
                            t.writeToByteStream(byteStream);
                            return;
                        }
                    case TIMESTAMPLOCALTZ:
                        {
                            TimestampLocalTZWritable t = ((TimestampLocalTZObjectInspector) poi).getPrimitiveWritableObject(obj);
                            t.writeToByteStream(byteStream);
                            return;
                        }
                    case INTERVAL_YEAR_MONTH:
                        {
                            HiveIntervalYearMonthWritable intervalYearMonth = ((HiveIntervalYearMonthObjectInspector) poi).getPrimitiveWritableObject(obj);
                            intervalYearMonth.writeToByteStream(byteStream);
                            return;
                        }
                    case INTERVAL_DAY_TIME:
                        {
                            HiveIntervalDayTimeWritable intervalDayTime = ((HiveIntervalDayTimeObjectInspector) poi).getPrimitiveWritableObject(obj);
                            intervalDayTime.writeToByteStream(byteStream);
                            return;
                        }
                    case DECIMAL:
                        {
                            HiveDecimalObjectInspector bdoi = (HiveDecimalObjectInspector) poi;
                            HiveDecimalWritable t = bdoi.getPrimitiveWritableObject(obj);
                            if (t == null) {
                                return;
                            }
                            writeToByteStream(byteStream, t);
                            return;
                        }
                    default:
                        {
                            throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
                        }
                }
            }
        case LIST:
            {
                ListObjectInspector loi = (ListObjectInspector) objInspector;
                ObjectInspector eoi = loi.getListElementObjectInspector();
                int byteSizeStart = 0;
                int listStart = 0;
                if (!skipLengthPrefix) {
                    // 1/ reserve space for the byte size of the list,
                    // which is an integer and takes four bytes
                    byteSizeStart = byteStream.getLength();
                    byteStream.reserve(4);
                    listStart = byteStream.getLength();
                }
                // 2/ write the size of the list as a VInt
                int size = loi.getListLength(obj);
                LazyBinaryUtils.writeVInt(byteStream, size);
                // 3/ write the null bytes
                byte nullByte = 0;
                for (int eid = 0; eid < size; eid++) {
                    // set the bit to 1 if an element is not null
                    if (null != loi.getListElement(obj, eid)) {
                        nullByte |= 1 << (eid % 8);
                    }
                    // write the null byte every eight elements or if this is the last element
                    if (7 == eid % 8 || eid == size - 1) {
                        byteStream.write(nullByte);
                        nullByte = 0;
                    }
                }
                // 4/ write element by element from the list
                for (int eid = 0; eid < size; eid++) {
                    serialize(byteStream, loi.getListElement(obj, eid), eoi, false, warnedOnceNullMapKey);
                }
                if (!skipLengthPrefix) {
                    // 5/ update the list byte size
                    int listEnd = byteStream.getLength();
                    int listSize = listEnd - listStart;
                    writeSizeAtOffset(byteStream, byteSizeStart, listSize);
                }
                return;
            }
        case MAP:
            {
                MapObjectInspector moi = (MapObjectInspector) objInspector;
                ObjectInspector koi = moi.getMapKeyObjectInspector();
                ObjectInspector voi = moi.getMapValueObjectInspector();
                Map<?, ?> map = moi.getMap(obj);
                int byteSizeStart = 0;
                int mapStart = 0;
                if (!skipLengthPrefix) {
                    // 1/ reserve space for the byte size of the map,
                    // which is an integer and takes four bytes
                    byteSizeStart = byteStream.getLength();
                    byteStream.reserve(4);
                    mapStart = byteStream.getLength();
                }
                // 2/ write the size of the map which is a VInt
                int size = map.size();
                LazyBinaryUtils.writeVInt(byteStream, size);
                // 3/ write the null bytes
                int b = 0;
                byte nullByte = 0;
                for (Map.Entry<?, ?> entry : map.entrySet()) {
                    // set the bit to 1 if a key is not null
                    if (null != entry.getKey()) {
                        nullByte |= 1 << (b % 8);
                    } else if (warnedOnceNullMapKey != null) {
                        if (!warnedOnceNullMapKey.value) {
                            LOG.warn("Null map key encountered! Ignoring similar problems.");
                        }
                        warnedOnceNullMapKey.value = true;
                    }
                    b++;
                    // set the bit to 1 if a value is not null
                    if (null != entry.getValue()) {
                        nullByte |= 1 << (b % 8);
                    }
                    b++;
                    // write the null byte every eight entries or if this is the last key-value pair
                    if (0 == b % 8 || b == size * 2) {
                        byteStream.write(nullByte);
                        nullByte = 0;
                    }
                }
                // 4/ write key-value pairs one by one
                for (Map.Entry<?, ?> entry : map.entrySet()) {
                    serialize(byteStream, entry.getKey(), koi, false, warnedOnceNullMapKey);
                    serialize(byteStream, entry.getValue(), voi, false, warnedOnceNullMapKey);
                }
                if (!skipLengthPrefix) {
                    // 5/ update the byte size of the map
                    int mapEnd = byteStream.getLength();
                    int mapSize = mapEnd - mapStart;
                    writeSizeAtOffset(byteStream, byteSizeStart, mapSize);
                }
                return;
            }
        case STRUCT:
        case UNION:
            {
                int byteSizeStart = 0;
                int typeStart = 0;
                if (!skipLengthPrefix) {
                    // 1/ reserve space for the byte size of the struct,
                    // which is an integer and takes four bytes
                    byteSizeStart = byteStream.getLength();
                    byteStream.reserve(4);
                    typeStart = byteStream.getLength();
                }
                if (ObjectInspector.Category.STRUCT.equals(objInspector.getCategory())) {
                    // 2/ serialize the struct
                    serializeStruct(byteStream, obj, (StructObjectInspector) objInspector, warnedOnceNullMapKey);
                } else {
                    // 2/ serialize the union
                    serializeUnion(byteStream, obj, (UnionObjectInspector) objInspector, warnedOnceNullMapKey);
                }
                if (!skipLengthPrefix) {
                    // 3/ update the byte size of the struct
                    int typeEnd = byteStream.getLength();
                    int typeSize = typeEnd - typeStart;
                    writeSizeAtOffset(byteStream, byteSizeStart, typeSize);
                }
                return;
            }
        default:
            {
                throw new RuntimeException("Unrecognized type: " + objInspector.getCategory());
            }
    }
}
Also used : LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) TimestampLocalTZObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveIntervalYearMonthObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector) HiveIntervalDayTimeObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) TimestampLocalTZWritable(org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) Map(java.util.Map)
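
A minimal usage sketch (not part of the listing above): it feeds one DATE value through the static serialize() method shown in this example. It assumes Hive's standard PrimitiveObjectInspectorFactory and the ByteStream.Output buffer, which implements RandomAccessOutput; treat the class and field names here as illustrative rather than prescriptive.

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class LazyBinaryDateSerializeSketch {
    public static void main(String[] args) throws Exception {
        // Growable output buffer; ByteStream.Output implements RandomAccessOutput
        ByteStream.Output out = new ByteStream.Output();
        // Writable-backed DATE inspector (assumed factory field name)
        ObjectInspector dateOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
        // 2020-01-01 expressed as days since the epoch
        DateWritableV2 value = new DateWritableV2(Date.ofEpochDay(18262));
        // Primitives take no length prefix; a null BooleanRef disables null-map-key warning tracking
        LazyBinarySerDe.serialize(out, value, dateOI, false, null);
        System.out.println("serialized bytes: " + out.getLength());
    }
}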

Example 32 with DateObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector in project pxf by greenplum-db.

Class HiveResolver, method resolvePrimitive.

private void resolvePrimitive(Object o, PrimitiveObjectInspector oi, List<OneField> record, boolean toFlatten) {
    Object val;
    switch(oi.getPrimitiveCategory()) {
        case BOOLEAN:
            {
                val = (o != null) ? ((BooleanObjectInspector) oi).get(o) : null;
                addOneFieldToRecord(record, DataType.BOOLEAN, val);
                break;
            }
        case SHORT:
            {
                if (o == null) {
                    val = null;
                } else if (o.getClass().getSimpleName().equals("ByteWritable")) {
                    val = (short) ((ByteWritable) o).get();
                } else {
                    val = ((ShortObjectInspector) oi).get(o);
                }
                addOneFieldToRecord(record, DataType.SMALLINT, val);
                break;
            }
        case INT:
            {
                val = (o != null) ? ((IntObjectInspector) oi).get(o) : null;
                addOneFieldToRecord(record, DataType.INTEGER, val);
                break;
            }
        case LONG:
            {
                val = (o != null) ? ((LongObjectInspector) oi).get(o) : null;
                addOneFieldToRecord(record, DataType.BIGINT, val);
                break;
            }
        case FLOAT:
            {
                val = (o != null) ? ((FloatObjectInspector) oi).get(o) : null;
                addOneFieldToRecord(record, DataType.REAL, val);
                break;
            }
        case DOUBLE:
            {
                val = (o != null) ? ((DoubleObjectInspector) oi).get(o) : null;
                addOneFieldToRecord(record, DataType.FLOAT8, val);
                break;
            }
        case DECIMAL:
            {
                String sVal = null;
                if (o != null) {
                    HiveDecimal hd = ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o);
                    if (hd != null) {
                        BigDecimal bd = hd.bigDecimalValue();
                        sVal = bd.toString();
                    }
                }
                addOneFieldToRecord(record, DataType.NUMERIC, sVal);
                break;
            }
        case STRING:
            {
                val = (o != null) ? ((StringObjectInspector) oi).getPrimitiveJavaObject(o) : null;
                // for more complex types, we need to properly handle special characters by escaping the val
                val = toFlatten
                        ? (val != null ? String.format("\"%s\"", StringEscapeUtils.escapeJava(val.toString())) : "null")
                        : val;
                addOneFieldToRecord(record, DataType.TEXT, val);
                break;
            }
        case VARCHAR:
            val = (o != null) ? ((HiveVarcharObjectInspector) oi).getPrimitiveJavaObject(o) : null;
            addOneFieldToRecord(record, DataType.VARCHAR, toFlatten ? String.format("\"%s\"", val) : val);
            break;
        case CHAR:
            val = (o != null) ? ((HiveCharObjectInspector) oi).getPrimitiveJavaObject(o) : null;
            addOneFieldToRecord(record, DataType.BPCHAR, toFlatten ? String.format("\"%s\"", val) : val);
            break;
        case BINARY:
            {
                byte[] toEncode = null;
                if (o != null) {
                    BytesWritable bw = ((BinaryObjectInspector) oi).getPrimitiveWritableObject(o);
                    toEncode = new byte[bw.getLength()];
                    System.arraycopy(bw.getBytes(), 0, toEncode, 0, bw.getLength());
                }
                addOneFieldToRecord(record, DataType.BYTEA, toEncode);
                break;
            }
        case TIMESTAMP:
            {
                val = (o != null) ? ((TimestampObjectInspector) oi).getPrimitiveJavaObject(o) : null;
                addOneFieldToRecord(record, DataType.TIMESTAMP, val);
                break;
            }
        case DATE:
            val = (o != null) ? ((DateObjectInspector) oi).getPrimitiveJavaObject(o) : null;
            addOneFieldToRecord(record, DataType.DATE, val);
            break;
        case BYTE:
            {
                /* TINYINT */
                val = (o != null) ? (short) ((ByteObjectInspector) oi).get(o) : null;
                addOneFieldToRecord(record, DataType.SMALLINT, val);
                break;
            }
        default:
            {
                throw new UnsupportedTypeException(oi.getTypeName() + " conversion is not supported by " + getClass().getSimpleName());
            }
    }
}
Also used : ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) UnsupportedTypeException(org.greenplum.pxf.api.error.UnsupportedTypeException) BytesWritable(org.apache.hadoop.io.BytesWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) BigDecimal(java.math.BigDecimal)
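
The BINARY branch above copies exactly bw.getLength() bytes because BytesWritable.getBytes() exposes a backing array that may be longer than the logical content. A tiny standalone sketch of that detail, with made-up values:

import java.util.Arrays;

import org.apache.hadoop.io.BytesWritable;

public class BytesWritableCopySketch {
    public static void main(String[] args) {
        BytesWritable bw = new BytesWritable(new byte[] {1, 2, 3, 4});
        bw.setSize(2); // logical length shrinks; the backing array keeps its old capacity
        // Copy only the logical content, mirroring the System.arraycopy in the BINARY case above
        byte[] toEncode = Arrays.copyOf(bw.getBytes(), bw.getLength());
        System.out.println(Arrays.toString(toEncode)); // prints [1, 2]
    }
}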

Example 33 with DateObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector in project hetu-core by openlookeng.

Class TestDataWritableWriter, method writePrimitive.

/**
 * Writes the primitive value to the Parquet RecordConsumer.
 *
 * @param value The object that contains the primitive value.
 * @param inspector The object inspector used to get the correct value type.
 */
private void writePrimitive(final Object value, final PrimitiveObjectInspector inspector) {
    if (value == null) {
        return;
    }
    switch(inspector.getPrimitiveCategory()) {
        case VOID:
            return;
        case DOUBLE:
            recordConsumer.addDouble(((DoubleObjectInspector) inspector).get(value));
            break;
        case BOOLEAN:
            recordConsumer.addBoolean(((BooleanObjectInspector) inspector).get(value));
            break;
        case FLOAT:
            recordConsumer.addFloat(((FloatObjectInspector) inspector).get(value));
            break;
        case BYTE:
            recordConsumer.addInteger(((ByteObjectInspector) inspector).get(value));
            break;
        case INT:
            recordConsumer.addInteger(((IntObjectInspector) inspector).get(value));
            break;
        case LONG:
            recordConsumer.addLong(((LongObjectInspector) inspector).get(value));
            break;
        case SHORT:
            recordConsumer.addInteger(((ShortObjectInspector) inspector).get(value));
            break;
        case STRING:
            String v = ((StringObjectInspector) inspector).getPrimitiveJavaObject(value);
            recordConsumer.addBinary(Binary.fromString(v));
            break;
        case CHAR:
            String vChar = ((HiveCharObjectInspector) inspector).getPrimitiveJavaObject(value).getStrippedValue();
            recordConsumer.addBinary(Binary.fromString(vChar));
            break;
        case VARCHAR:
            String vVarchar = ((HiveVarcharObjectInspector) inspector).getPrimitiveJavaObject(value).getValue();
            recordConsumer.addBinary(Binary.fromString(vVarchar));
            break;
        case BINARY:
            byte[] vBinary = ((BinaryObjectInspector) inspector).getPrimitiveJavaObject(value);
            recordConsumer.addBinary(Binary.fromByteArray(vBinary));
            break;
        case TIMESTAMP:
            Timestamp ts = ((TimestampObjectInspector) inspector).getPrimitiveJavaObject(value);
            recordConsumer.addBinary(NanoTimeUtils.getNanoTime(ts, false).toBinary());
            break;
        case DECIMAL:
            HiveDecimal vDecimal = ((HiveDecimal) inspector.getPrimitiveJavaObject(value));
            DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) inspector.getTypeInfo();
            recordConsumer.addBinary(decimalToBinary(vDecimal, decTypeInfo));
            break;
        case DATE:
            Date vDate = ((DateObjectInspector) inspector).getPrimitiveJavaObject(value);
            recordConsumer.addInteger(vDate.toEpochDay());
            break;
        default:
            throw new IllegalArgumentException("Unsupported primitive data type: " + inspector.getPrimitiveCategory());
    }
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Date(org.apache.hadoop.hive.common.type.Date)
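
In the DATE branch above, the integer handed to recordConsumer.addInteger() is the day count since 1970-01-01. A small cross-check sketch, with an assumed sample date, comparing Hive's Date.toEpochDay() against java.time:

import java.time.LocalDate;

import org.apache.hadoop.hive.common.type.Date;

public class EpochDaySketch {
    public static void main(String[] args) {
        // Reference value computed with java.time: 2020-01-01 is epoch day 18262
        int expected = (int) LocalDate.of(2020, 1, 1).toEpochDay();
        Date d = Date.ofEpochDay(expected);
        // toEpochDay() is the value the DATE case writes to the Parquet RecordConsumer
        System.out.println(d.toEpochDay() == expected); // prints true
    }
}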

Example 34 with DateObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector in project iceberg by apache.

Class TestIcebergDateObjectInspectorHive3, method testIcebergDateObjectInspector.

@Test
public void testIcebergDateObjectInspector() {
    DateObjectInspector oi = IcebergDateObjectInspectorHive3.get();
    Assert.assertEquals(ObjectInspector.Category.PRIMITIVE, oi.getCategory());
    Assert.assertEquals(PrimitiveObjectInspector.PrimitiveCategory.DATE, oi.getPrimitiveCategory());
    Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
    Assert.assertEquals(TypeInfoFactory.dateTypeInfo.getTypeName(), oi.getTypeName());
    Assert.assertEquals(Date.class, oi.getJavaPrimitiveClass());
    Assert.assertEquals(DateWritableV2.class, oi.getPrimitiveWritableClass());
    Assert.assertNull(oi.copyObject(null));
    Assert.assertNull(oi.getPrimitiveJavaObject(null));
    Assert.assertNull(oi.getPrimitiveWritableObject(null));
    int epochDays = 5005;
    LocalDate local = LocalDate.ofEpochDay(epochDays);
    Date date = Date.ofEpochDay(epochDays);
    Assert.assertEquals(date, oi.getPrimitiveJavaObject(local));
    Assert.assertEquals(new DateWritableV2(date), oi.getPrimitiveWritableObject(local));
    Date copy = (Date) oi.copyObject(date);
    Assert.assertEquals(date, copy);
    Assert.assertNotSame(date, copy);
    Assert.assertFalse(oi.preferWritable());
}
Also used : DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) LocalDate(java.time.LocalDate) LocalDate(java.time.LocalDate) Date(org.apache.hadoop.hive.common.type.Date) Test(org.junit.Test)

Example 35 with DateObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector in project trino by trinodb.

Class TestDataWritableWriter, method writePrimitive.

/**
 * Writes the primitive value to the Parquet RecordConsumer.
 *
 * @param value The object that contains the primitive value.
 * @param inspector The object inspector used to get the correct value type.
 */
private void writePrimitive(Object value, PrimitiveObjectInspector inspector) {
    if (value == null) {
        return;
    }
    switch(inspector.getPrimitiveCategory()) {
        case VOID:
            return;
        case DOUBLE:
            recordConsumer.addDouble(((DoubleObjectInspector) inspector).get(value));
            break;
        case BOOLEAN:
            recordConsumer.addBoolean(((BooleanObjectInspector) inspector).get(value));
            break;
        case FLOAT:
            recordConsumer.addFloat(((FloatObjectInspector) inspector).get(value));
            break;
        case BYTE:
            recordConsumer.addInteger(((ByteObjectInspector) inspector).get(value));
            break;
        case INT:
            recordConsumer.addInteger(((IntObjectInspector) inspector).get(value));
            break;
        case LONG:
            recordConsumer.addLong(((LongObjectInspector) inspector).get(value));
            break;
        case SHORT:
            recordConsumer.addInteger(((ShortObjectInspector) inspector).get(value));
            break;
        case STRING:
            String v = ((StringObjectInspector) inspector).getPrimitiveJavaObject(value);
            recordConsumer.addBinary(Binary.fromString(v));
            break;
        case CHAR:
            String vChar = ((HiveCharObjectInspector) inspector).getPrimitiveJavaObject(value).getStrippedValue();
            recordConsumer.addBinary(Binary.fromString(vChar));
            break;
        case VARCHAR:
            String vVarchar = ((HiveVarcharObjectInspector) inspector).getPrimitiveJavaObject(value).getValue();
            recordConsumer.addBinary(Binary.fromString(vVarchar));
            break;
        case BINARY:
            byte[] vBinary = ((BinaryObjectInspector) inspector).getPrimitiveJavaObject(value);
            recordConsumer.addBinary(Binary.fromByteArray(vBinary));
            break;
        case TIMESTAMP:
            Timestamp ts = ((TimestampObjectInspector) inspector).getPrimitiveJavaObject(value);
            recordConsumer.addBinary(NanoTimeUtils.getNanoTime(ts, false).toBinary());
            break;
        case DECIMAL:
            HiveDecimal vDecimal = ((HiveDecimal) inspector.getPrimitiveJavaObject(value));
            DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) inspector.getTypeInfo();
            recordConsumer.addBinary(decimalToBinary(vDecimal, decTypeInfo));
            break;
        case DATE:
            Date vDate = ((DateObjectInspector) inspector).getPrimitiveJavaObject(value);
            recordConsumer.addInteger(vDate.toEpochDay());
            break;
        default:
            throw new IllegalArgumentException("Unsupported primitive data type: " + inspector.getPrimitiveCategory());
    }
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Date(org.apache.hadoop.hive.common.type.Date)

Aggregations

DateObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector): 37 usages
TimestampObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector): 29 usages
BinaryObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector): 28 usages
StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector): 25 usages
HiveCharObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector): 22 usages
HiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector): 21 usages
FloatObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector): 20 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 19 usages
LongObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector): 19 usages
HiveVarcharObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector): 18 usages
ByteObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector): 17 usages
DoubleObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector): 16 usages
IntObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector): 16 usages
ShortObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector): 16 usages
Map (java.util.Map): 14 usages
BooleanObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector): 14 usages
MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector): 13 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 13 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 13 usages
List (java.util.List): 12 usages