
Example 6 with HiveIntervalYearMonthWritable

Use of org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable in project hive by apache.

The class LazyBinarySerializeWrite, method writeHiveIntervalYearMonth.

/*
   * INTERVAL_YEAR_MONTH.
   */
@Override
public void writeHiveIntervalYearMonth(HiveIntervalYearMonth viyt) throws IOException {
    // Every 8 fields we write a NULL byte.
    if ((fieldIndex % 8) == 0) {
        if (fieldIndex > 0) {
            // Write back the previous 8 fields' NULL byte.
            output.writeByte(nullOffset, nullByte);
            nullByte = 0;
            nullOffset = output.getLength();
        }
        // Allocate next NULL byte.
        output.reserve(1);
    }
    // Set bit in NULL byte when a field is NOT NULL.
    nullByte |= 1 << (fieldIndex % 8);
    if (hiveIntervalYearMonthWritable == null) {
        hiveIntervalYearMonthWritable = new HiveIntervalYearMonthWritable();
    }
    hiveIntervalYearMonthWritable.set(viyt);
    hiveIntervalYearMonthWritable.writeToByteStream(output);
    fieldIndex++;
    if (fieldIndex == fieldCount) {
        // Write back the final NULL byte, which sits before the last fields.
        output.writeByte(nullOffset, nullByte);
    }
}
Also used : HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable)
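
The write method above tracks NULL flags one bit per field and flushes a packed byte every eight fields. A minimal standalone sketch of just that bit-packing arithmetic (all names here are illustrative, not part of the Hive API):

public class NullByteSketch {
    public static void main(String[] args) {
        // One flag per field; a set bit means the field is NOT NULL,
        // mirroring LazyBinarySerializeWrite above.
        boolean[] fieldIsNotNull = { true, false, true, true, false, true, true, true, true, false };
        byte nullByte = 0;
        for (int fieldIndex = 0; fieldIndex < fieldIsNotNull.length; fieldIndex++) {
            if (fieldIndex % 8 == 0 && fieldIndex > 0) {
                // A full byte of flags is ready; the real writer patches it
                // back into the stream at nullOffset.
                System.out.printf("flush nullByte = 0x%02X%n", nullByte & 0xFF);
                nullByte = 0;
            }
            if (fieldIsNotNull[fieldIndex]) {
                nullByte |= 1 << (fieldIndex % 8);
            }
        }
        // Final partial byte covering the trailing fields.
        System.out.printf("final nullByte = 0x%02X%n", nullByte & 0xFF);
    }
}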

Example 7 with HiveIntervalYearMonthWritable

Use of org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable in project hive by apache.

The class LazyBinarySerializeWrite, method writeHiveIntervalYearMonth.

@Override
public void writeHiveIntervalYearMonth(int totalMonths) throws IOException {
    // Every 8 fields we write a NULL byte.
    if ((fieldIndex % 8) == 0) {
        if (fieldIndex > 0) {
            // Write back the previous 8 fields' NULL byte.
            output.writeByte(nullOffset, nullByte);
            nullByte = 0;
            nullOffset = output.getLength();
        }
        // Allocate next NULL byte.
        output.reserve(1);
    }
    // Set bit in NULL byte when a field is NOT NULL.
    nullByte |= 1 << (fieldIndex % 8);
    if (hiveIntervalYearMonthWritable == null) {
        hiveIntervalYearMonthWritable = new HiveIntervalYearMonthWritable();
    }
    hiveIntervalYearMonthWritable.set(totalMonths);
    hiveIntervalYearMonthWritable.writeToByteStream(output);
    fieldIndex++;
    if (fieldIndex == fieldCount) {
        // Write back the final NULL byte, which sits before the last fields.
        output.writeByte(nullOffset, nullByte);
    }
}
Also used : HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable)
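
HiveIntervalYearMonth carries the interval internally as a total month count, so the int overload above and the object overload in Example 6 produce the same bytes for the same interval. A quick check, as a sketch assuming the standard years/months constructor of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth:

import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class TotalMonthsCheck {
    public static void main(String[] args) {
        // 1 year 6 months is 18 total months, which is the value the
        // writeHiveIntervalYearMonth(int totalMonths) overload expects.
        HiveIntervalYearMonth interval = new HiveIntervalYearMonth(1, 6);
        System.out.println(interval.getTotalMonths()); // expected: 18
    }
}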

Example 8 with HiveIntervalYearMonthWritable

Use of org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable in project hive by apache.

The class VerifyFast, method verifyDeserializeRead.

public static void verifyDeserializeRead(DeserializeRead deserializeRead, PrimitiveTypeInfo primitiveTypeInfo, Writable writable) throws IOException {
    boolean isNull;
    isNull = !deserializeRead.readNextField();
    if (isNull) {
        if (writable != null) {
            TestCase.fail("Field reports null but object is not null (class " + writable.getClass().getName() + ", " + writable.toString() + ")");
        }
        return;
    } else if (writable == null) {
        TestCase.fail("Field report not null but object is null");
    }
    switch(primitiveTypeInfo.getPrimitiveCategory()) {
        case BOOLEAN:
            {
                boolean value = deserializeRead.currentBoolean;
                if (!(writable instanceof BooleanWritable)) {
                    TestCase.fail("Boolean expected writable not Boolean");
                }
                boolean expected = ((BooleanWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case BYTE:
            {
                byte value = deserializeRead.currentByte;
                if (!(writable instanceof ByteWritable)) {
                    TestCase.fail("Byte expected writable not Byte");
                }
                byte expected = ((ByteWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
                }
            }
            break;
        case SHORT:
            {
                short value = deserializeRead.currentShort;
                if (!(writable instanceof ShortWritable)) {
                    TestCase.fail("Short expected writable not Short");
                }
                short expected = ((ShortWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case INT:
            {
                int value = deserializeRead.currentInt;
                if (!(writable instanceof IntWritable)) {
                    TestCase.fail("Integer expected writable not Integer");
                }
                int expected = ((IntWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case LONG:
            {
                long value = deserializeRead.currentLong;
                if (!(writable instanceof LongWritable)) {
                    TestCase.fail("Long expected writable not Long");
                }
                long expected = ((LongWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case FLOAT:
            {
                float value = deserializeRead.currentFloat;
                if (!(writable instanceof FloatWritable)) {
                    TestCase.fail("Float expected writable not Float");
                }
                float expected = ((FloatWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case DOUBLE:
            {
                double value = deserializeRead.currentDouble;
                if (!(writable instanceof DoubleWritable)) {
                    TestCase.fail("Double expected writable not Double");
                }
                double expected = ((DoubleWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case STRING:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                String expected = ((Text) writable).toString();
                if (!string.equals(expected)) {
                    TestCase.fail("String field mismatch (expected '" + expected + "' found '" + string + "')");
                }
            }
            break;
        case CHAR:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
                HiveChar expected = ((HiveCharWritable) writable).getHiveChar();
                if (!hiveChar.equals(expected)) {
                    TestCase.fail("Char field mismatch (expected '" + expected + "' found '" + hiveChar + "')");
                }
            }
            break;
        case VARCHAR:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                HiveVarchar expected = ((HiveVarcharWritable) writable).getHiveVarchar();
                if (!hiveVarchar.equals(expected)) {
                    TestCase.fail("Varchar field mismatch (expected '" + expected + "' found '" + hiveVarchar + "')");
                }
            }
            break;
        case DECIMAL:
            {
                HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
                if (value == null) {
                    TestCase.fail("Decimal field evaluated to NULL");
                }
                HiveDecimal expected = ((HiveDecimalWritable) writable).getHiveDecimal();
                if (!value.equals(expected)) {
                    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                    int precision = decimalTypeInfo.getPrecision();
                    int scale = decimalTypeInfo.getScale();
                    TestCase.fail("Decimal field mismatch (expected " + expected.toString() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
                }
            }
            break;
        case DATE:
            {
                Date value = deserializeRead.currentDateWritable.get();
                Date expected = ((DateWritable) writable).get();
                if (!value.equals(expected)) {
                    TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case TIMESTAMP:
            {
                Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
                Timestamp expected = ((TimestampWritable) writable).getTimestamp();
                if (!value.equals(expected)) {
                    TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case INTERVAL_YEAR_MONTH:
            {
                HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
                HiveIntervalYearMonth expected = ((HiveIntervalYearMonthWritable) writable).getHiveIntervalYearMonth();
                if (!value.equals(expected)) {
                    TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case INTERVAL_DAY_TIME:
            {
                HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
                HiveIntervalDayTime expected = ((HiveIntervalDayTimeWritable) writable).getHiveIntervalDayTime();
                if (!value.equals(expected)) {
                    TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case BINARY:
            {
                byte[] byteArray = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                BytesWritable bytesWritable = (BytesWritable) writable;
                byte[] expected = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength());
                if (byteArray.length != expected.length) {
                    TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                }
                for (int b = 0; b < byteArray.length; b++) {
                    if (byteArray[b] != expected[b]) {
                        TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                    }
                }
            }
            break;
        default:
            throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
    }
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(java.sql.Timestamp) Date(java.sql.Date) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)
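
The BINARY branch compares the two arrays length-first and then byte by byte. Since both sides are already copied into fresh arrays, java.util.Arrays.equals expresses the same check more compactly; a minimal sketch of that alternative (illustrative only, not how VerifyFast is written):

import java.util.Arrays;

public class BinaryCompareSketch {

    // Equivalent to the length check plus the per-element loop above:
    // Arrays.equals returns false on any length or byte mismatch.
    static void verifyBinary(byte[] found, byte[] expected) {
        if (!Arrays.equals(found, expected)) {
            throw new AssertionError("Byte Array field mismatch (expected "
                + Arrays.toString(expected) + " found " + Arrays.toString(found) + ")");
        }
    }

    public static void main(String[] args) {
        verifyBinary(new byte[] { 1, 2, 3 }, new byte[] { 1, 2, 3 }); // passes silently
    }
}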

Example 9 with HiveIntervalYearMonthWritable

Use of org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable in project hive by apache.

The class VectorizedBatchUtil, method setVector.

private static void setVector(Object row, StructObjectInspector oi, StructField field, VectorizedRowBatch batch, DataOutputBuffer buffer, int rowIndex, int colIndex, int offset) throws HiveException {
    Object fieldData = oi.getStructFieldData(row, field);
    ObjectInspector foi = field.getFieldObjectInspector();
    // Vectorization only supports PRIMITIVE data types. Assert the same
    assert (foi.getCategory() == Category.PRIMITIVE);
    // Get writable object
    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) foi;
    Object writableCol = poi.getPrimitiveWritableObject(fieldData);
    // For null entries we still write a default value into the column vector:
    // 1 for long-backed types and NaN for float/double. String types have no
    // default value for null.
    switch(poi.getPrimitiveCategory()) {
        case BOOLEAN:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((BooleanWritable) writableCol).get() ? 1 : 0;
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case BYTE:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((ByteWritable) writableCol).get();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case SHORT:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((ShortWritable) writableCol).get();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case INT:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((IntWritable) writableCol).get();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case LONG:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((LongWritable) writableCol).get();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case DATE:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((DateWritable) writableCol).getDays();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case FLOAT:
            {
                DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    dcv.vector[rowIndex] = ((FloatWritable) writableCol).get();
                    dcv.isNull[rowIndex] = false;
                } else {
                    dcv.vector[rowIndex] = Double.NaN;
                    setNullColIsNullValue(dcv, rowIndex);
                }
            }
            break;
        case DOUBLE:
            {
                DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    dcv.vector[rowIndex] = ((DoubleWritable) writableCol).get();
                    dcv.isNull[rowIndex] = false;
                } else {
                    dcv.vector[rowIndex] = Double.NaN;
                    setNullColIsNullValue(dcv, rowIndex);
                }
            }
            break;
        case TIMESTAMP:
            {
                TimestampColumnVector lcv = (TimestampColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.set(rowIndex, ((TimestampWritable) writableCol).getTimestamp());
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.setNullValue(rowIndex);
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case INTERVAL_YEAR_MONTH:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    HiveIntervalYearMonth i = ((HiveIntervalYearMonthWritable) writableCol).getHiveIntervalYearMonth();
                    lcv.vector[rowIndex] = i.getTotalMonths();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case INTERVAL_DAY_TIME:
            {
                IntervalDayTimeColumnVector icv = (IntervalDayTimeColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    HiveIntervalDayTime idt = ((HiveIntervalDayTimeWritable) writableCol).getHiveIntervalDayTime();
                    icv.set(rowIndex, idt);
                    icv.isNull[rowIndex] = false;
                } else {
                    icv.setNullValue(rowIndex);
                    setNullColIsNullValue(icv, rowIndex);
                }
            }
            break;
        case BINARY:
            {
                BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    bcv.isNull[rowIndex] = false;
                    BytesWritable bw = (BytesWritable) writableCol;
                    byte[] bytes = bw.getBytes();
                    int start = buffer.getLength();
                    int length = bw.getLength();
                    try {
                        buffer.write(bytes, 0, length);
                    } catch (IOException ioe) {
                        throw new IllegalStateException("bad write", ioe);
                    }
                    bcv.setRef(rowIndex, buffer.getData(), start, length);
                } else {
                    setNullColIsNullValue(bcv, rowIndex);
                }
            }
            break;
        case STRING:
            {
                BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    bcv.isNull[rowIndex] = false;
                    Text colText = (Text) writableCol;
                    int start = buffer.getLength();
                    int length = colText.getLength();
                    try {
                        buffer.write(colText.getBytes(), 0, length);
                    } catch (IOException ioe) {
                        throw new IllegalStateException("bad write", ioe);
                    }
                    bcv.setRef(rowIndex, buffer.getData(), start, length);
                } else {
                    setNullColIsNullValue(bcv, rowIndex);
                }
            }
            break;
        case CHAR:
            {
                BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    bcv.isNull[rowIndex] = false;
                    HiveChar colHiveChar = ((HiveCharWritable) writableCol).getHiveChar();
                    byte[] bytes = colHiveChar.getStrippedValue().getBytes();
                    // We assume the CHAR maximum length was enforced when the object was created.
                    int length = bytes.length;
                    int start = buffer.getLength();
                    try {
                        // In vector mode, we store CHAR as unpadded.
                        buffer.write(bytes, 0, length);
                    } catch (IOException ioe) {
                        throw new IllegalStateException("bad write", ioe);
                    }
                    bcv.setRef(rowIndex, buffer.getData(), start, length);
                } else {
                    setNullColIsNullValue(bcv, rowIndex);
                }
            }
            break;
        case VARCHAR:
            {
                BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    bcv.isNull[rowIndex] = false;
                    HiveVarchar colHiveVarchar = ((HiveVarcharWritable) writableCol).getHiveVarchar();
                    byte[] bytes = colHiveVarchar.getValue().getBytes();
                    // We assume the VARCHAR maximum length was enforced when the object was created.
                    int length = bytes.length;
                    int start = buffer.getLength();
                    try {
                        buffer.write(bytes, 0, length);
                    } catch (IOException ioe) {
                        throw new IllegalStateException("bad write", ioe);
                    }
                    bcv.setRef(rowIndex, buffer.getData(), start, length);
                } else {
                    setNullColIsNullValue(bcv, rowIndex);
                }
            }
            break;
        case DECIMAL:
            DecimalColumnVector dcv = (DecimalColumnVector) batch.cols[offset + colIndex];
            if (writableCol != null) {
                dcv.isNull[rowIndex] = false;
                HiveDecimalWritable wobj = (HiveDecimalWritable) writableCol;
                dcv.set(rowIndex, wobj);
            } else {
                setNullColIsNullValue(dcv, rowIndex);
            }
            break;
        default:
            throw new HiveException("Vectorizaton is not supported for datatype:" + poi.getPrimitiveCategory());
    }
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) Text(org.apache.hadoop.io.Text) IOException(java.io.IOException) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth)
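
In the INTERVAL_YEAR_MONTH branch above, the column vector is a plain LongColumnVector because the interval collapses to its total month count. A small sketch of that mapping (a sketch assuming the writable's interval-taking constructor; getHiveIntervalYearMonth and getTotalMonths appear in the example itself):

import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;

public class IntervalToVectorSketch {
    public static void main(String[] args) {
        // The vectorized path stores only getTotalMonths() in the long
        // vector, exactly as setVector does above.
        HiveIntervalYearMonthWritable writable =
            new HiveIntervalYearMonthWritable(new HiveIntervalYearMonth(2, 3));
        long vectorValue = writable.getHiveIntervalYearMonth().getTotalMonths();
        System.out.println(vectorValue); // expected: 27
    }
}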

Example 10 with HiveIntervalYearMonthWritable

Use of org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable in project hive by apache.

The class LazyBinarySerDe, method serialize.

/**
   * A recursive function that serializes an object to a byte buffer based on
   * its object inspector.
   *
   * @param byteStream
   *          the byte stream storing the serialized data
   * @param obj
   *          the object to serialize
   * @param objInspector
   *          the object inspector
   * @param skipLengthPrefix a boolean indicating whether a length prefix is
   *          needed for list/map/struct
   * @param warnedOnceNullMapKey a boolean indicating whether a warning
   *          has already been issued when encountering null map keys
   */
public static void serialize(RandomAccessOutput byteStream, Object obj, ObjectInspector objInspector, boolean skipLengthPrefix, BooleanRef warnedOnceNullMapKey) throws SerDeException {
    // do nothing for null object
    if (null == obj) {
        return;
    }
    switch(objInspector.getCategory()) {
        case PRIMITIVE:
            {
                PrimitiveObjectInspector poi = (PrimitiveObjectInspector) objInspector;
                switch(poi.getPrimitiveCategory()) {
                    case VOID:
                        {
                            return;
                        }
                    case BOOLEAN:
                        {
                            boolean v = ((BooleanObjectInspector) poi).get(obj);
                            byteStream.write((byte) (v ? 1 : 0));
                            return;
                        }
                    case BYTE:
                        {
                            ByteObjectInspector boi = (ByteObjectInspector) poi;
                            byte v = boi.get(obj);
                            byteStream.write(v);
                            return;
                        }
                    case SHORT:
                        {
                            ShortObjectInspector spoi = (ShortObjectInspector) poi;
                            short v = spoi.get(obj);
                            byteStream.write((byte) (v >> 8));
                            byteStream.write((byte) (v));
                            return;
                        }
                    case INT:
                        {
                            IntObjectInspector ioi = (IntObjectInspector) poi;
                            int v = ioi.get(obj);
                            LazyBinaryUtils.writeVInt(byteStream, v);
                            return;
                        }
                    case LONG:
                        {
                            LongObjectInspector loi = (LongObjectInspector) poi;
                            long v = loi.get(obj);
                            LazyBinaryUtils.writeVLong(byteStream, v);
                            return;
                        }
                    case FLOAT:
                        {
                            FloatObjectInspector foi = (FloatObjectInspector) poi;
                            int v = Float.floatToIntBits(foi.get(obj));
                            byteStream.write((byte) (v >> 24));
                            byteStream.write((byte) (v >> 16));
                            byteStream.write((byte) (v >> 8));
                            byteStream.write((byte) (v));
                            return;
                        }
                    case DOUBLE:
                        {
                            DoubleObjectInspector doi = (DoubleObjectInspector) poi;
                            LazyBinaryUtils.writeDouble(byteStream, doi.get(obj));
                            return;
                        }
                    case STRING:
                        {
                            StringObjectInspector soi = (StringObjectInspector) poi;
                            Text t = soi.getPrimitiveWritableObject(obj);
                            serializeText(byteStream, t, skipLengthPrefix);
                            return;
                        }
                    case CHAR:
                        {
                            HiveCharObjectInspector hcoi = (HiveCharObjectInspector) poi;
                            Text t = hcoi.getPrimitiveWritableObject(obj).getTextValue();
                            serializeText(byteStream, t, skipLengthPrefix);
                            return;
                        }
                    case VARCHAR:
                        {
                            HiveVarcharObjectInspector hcoi = (HiveVarcharObjectInspector) poi;
                            Text t = hcoi.getPrimitiveWritableObject(obj).getTextValue();
                            serializeText(byteStream, t, skipLengthPrefix);
                            return;
                        }
                    case BINARY:
                        {
                            BinaryObjectInspector baoi = (BinaryObjectInspector) poi;
                            BytesWritable bw = baoi.getPrimitiveWritableObject(obj);
                            int length = bw.getLength();
                            if (!skipLengthPrefix) {
                                LazyBinaryUtils.writeVInt(byteStream, length);
                            } else {
                                if (length == 0) {
                                    throw new RuntimeException("LazyBinaryColumnarSerde cannot serialize a non-null zero " + "length binary field. Consider using either LazyBinarySerde or ColumnarSerde.");
                                }
                            }
                            byteStream.write(bw.getBytes(), 0, length);
                            return;
                        }
                    case DATE:
                        {
                            DateWritable d = ((DateObjectInspector) poi).getPrimitiveWritableObject(obj);
                            writeDateToByteStream(byteStream, d);
                            return;
                        }
                    case TIMESTAMP:
                        {
                            TimestampObjectInspector toi = (TimestampObjectInspector) poi;
                            TimestampWritable t = toi.getPrimitiveWritableObject(obj);
                            t.writeToByteStream(byteStream);
                            return;
                        }
                    case INTERVAL_YEAR_MONTH:
                        {
                            HiveIntervalYearMonthWritable intervalYearMonth = ((HiveIntervalYearMonthObjectInspector) poi).getPrimitiveWritableObject(obj);
                            intervalYearMonth.writeToByteStream(byteStream);
                            return;
                        }
                    case INTERVAL_DAY_TIME:
                        {
                            HiveIntervalDayTimeWritable intervalDayTime = ((HiveIntervalDayTimeObjectInspector) poi).getPrimitiveWritableObject(obj);
                            intervalDayTime.writeToByteStream(byteStream);
                            return;
                        }
                    case DECIMAL:
                        {
                            HiveDecimalObjectInspector bdoi = (HiveDecimalObjectInspector) poi;
                            HiveDecimalWritable t = bdoi.getPrimitiveWritableObject(obj);
                            if (t == null) {
                                return;
                            }
                            writeToByteStream(byteStream, t);
                            return;
                        }
                    default:
                        {
                            throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
                        }
                }
            }
        case LIST:
            {
                ListObjectInspector loi = (ListObjectInspector) objInspector;
                ObjectInspector eoi = loi.getListElementObjectInspector();
                int byteSizeStart = 0;
                int listStart = 0;
                if (!skipLengthPrefix) {
                    // 1/ reserve space for the byte size of the list,
                    // which is an integer and takes four bytes
                    byteSizeStart = byteStream.getLength();
                    byteStream.reserve(4);
                    listStart = byteStream.getLength();
                }
                // 2/ write the size of the list as a VInt
                int size = loi.getListLength(obj);
                LazyBinaryUtils.writeVInt(byteStream, size);
                // 3/ write the null bytes
                byte nullByte = 0;
                for (int eid = 0; eid < size; eid++) {
                    // set the bit to 1 if an element is not null
                    if (null != loi.getListElement(obj, eid)) {
                        nullByte |= 1 << (eid % 8);
                    }
                    // store the null byte every eight elements or
                    // if this is the last element
                    if (7 == eid % 8 || eid == size - 1) {
                        byteStream.write(nullByte);
                        nullByte = 0;
                    }
                }
                // 4/ write element by element from the list
                for (int eid = 0; eid < size; eid++) {
                    serialize(byteStream, loi.getListElement(obj, eid), eoi, false, warnedOnceNullMapKey);
                }
                if (!skipLengthPrefix) {
                    // 5/ update the list byte size
                    int listEnd = byteStream.getLength();
                    int listSize = listEnd - listStart;
                    writeSizeAtOffset(byteStream, byteSizeStart, listSize);
                }
                return;
            }
        case MAP:
            {
                MapObjectInspector moi = (MapObjectInspector) objInspector;
                ObjectInspector koi = moi.getMapKeyObjectInspector();
                ObjectInspector voi = moi.getMapValueObjectInspector();
                Map<?, ?> map = moi.getMap(obj);
                int byteSizeStart = 0;
                int mapStart = 0;
                if (!skipLengthPrefix) {
                    // 1/ reserve space for the byte size of the map,
                    // which is an integer and takes four bytes
                    byteSizeStart = byteStream.getLength();
                    byteStream.reserve(4);
                    mapStart = byteStream.getLength();
                }
                // 2/ write the size of the map as a VInt
                int size = map.size();
                LazyBinaryUtils.writeVInt(byteStream, size);
                // 3/ write the null bytes
                int b = 0;
                byte nullByte = 0;
                for (Map.Entry<?, ?> entry : map.entrySet()) {
                    // set the bit to 1 if a key is not null
                    if (null != entry.getKey()) {
                        nullByte |= 1 << (b % 8);
                    } else if (warnedOnceNullMapKey != null) {
                        if (!warnedOnceNullMapKey.value) {
                            LOG.warn("Null map key encountered! Ignoring similar problems.");
                        }
                        warnedOnceNullMapKey.value = true;
                    }
                    b++;
                    // set the bit to 1 if a value is not null
                    if (null != entry.getValue()) {
                        nullByte |= 1 << (b % 8);
                    }
                    b++;
                    // write the null byte once eight bits are filled
                    // or if this is the last key-value pair
                    if (0 == b % 8 || b == size * 2) {
                        byteStream.write(nullByte);
                        nullByte = 0;
                    }
                }
                // 4/ write key-value pairs one by one
                for (Map.Entry<?, ?> entry : map.entrySet()) {
                    serialize(byteStream, entry.getKey(), koi, false, warnedOnceNullMapKey);
                    serialize(byteStream, entry.getValue(), voi, false, warnedOnceNullMapKey);
                }
                if (!skipLengthPrefix) {
                    // 5/ update the byte size of the map
                    int mapEnd = byteStream.getLength();
                    int mapSize = mapEnd - mapStart;
                    writeSizeAtOffset(byteStream, byteSizeStart, mapSize);
                }
                return;
            }
        case STRUCT:
        case UNION:
            {
                int byteSizeStart = 0;
                int typeStart = 0;
                if (!skipLengthPrefix) {
                    // 1/ reserve space for the byte size of the struct,
                    // which is an integer and takes four bytes
                    byteSizeStart = byteStream.getLength();
                    byteStream.reserve(4);
                    typeStart = byteStream.getLength();
                }
                if (ObjectInspector.Category.STRUCT.equals(objInspector.getCategory())) {
                    // 2/ serialize the struct
                    serializeStruct(byteStream, obj, (StructObjectInspector) objInspector, warnedOnceNullMapKey);
                } else {
                    // 2/ serialize the union
                    serializeUnion(byteStream, obj, (UnionObjectInspector) objInspector, warnedOnceNullMapKey);
                }
                if (!skipLengthPrefix) {
                    // 3/ update the byte size of the struct
                    int typeEnd = byteStream.getLength();
                    int typeSize = typeEnd - typeStart;
                    writeSizeAtOffset(byteStream, byteSizeStart, typeSize);
                }
                return;
            }
        default:
            {
                throw new RuntimeException("Unrecognized type: " + objInspector.getCategory());
            }
    }
}
Also used : LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) HiveIntervalYearMonthObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) HiveIntervalDayTimeObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) Map(java.util.Map)
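
The LIST, MAP, STRUCT, and UNION cases all use the same reserve-then-backpatch pattern: reserve four bytes for a size prefix, serialize the body, then write the final byte count back at the reserved offset. A self-contained sketch of that pattern over a plain byte array (hypothetical helper code, not the RandomAccessOutput API):

import java.io.ByteArrayOutputStream;

public class BackpatchSketch {
    public static void main(String[] args) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // 1/ reserve four bytes for the size prefix
        int sizeOffset = out.size();
        out.write(new byte[4], 0, 4);
        int bodyStart = out.size();
        // 2/ write the body
        byte[] body = "payload".getBytes();
        out.write(body, 0, body.length);
        // 3/ patch the size back in at the reserved offset, which is what
        //    writeSizeAtOffset does in LazyBinarySerDe above
        byte[] all = out.toByteArray();
        int size = out.size() - bodyStart;
        all[sizeOffset] = (byte) (size >> 24);
        all[sizeOffset + 1] = (byte) (size >> 16);
        all[sizeOffset + 2] = (byte) (size >> 8);
        all[sizeOffset + 3] = (byte) size;
        System.out.println("body size = " + size); // expected: 7
    }
}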

Aggregations

HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable): 22 usages
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 14 usages
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 13 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 12 usages
Text (org.apache.hadoop.io.Text): 12 usages
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 11 usages
HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable): 11 usages
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 11 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 10 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 10 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 10 usages
FloatWritable (org.apache.hadoop.io.FloatWritable): 10 usages
IntWritable (org.apache.hadoop.io.IntWritable): 10 usages
LongWritable (org.apache.hadoop.io.LongWritable): 10 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 9 usages
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 9 usages
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 8 usages
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 8 usages
Test (org.junit.Test): 8 usages
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 7 usages