Example 31 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class BinarySortableSerDe, method deserialize.

static Object deserialize(InputByteBuffer buffer, TypeInfo type, boolean invert, byte nullMarker, byte notNullMarker, Object reuse) throws IOException {
    // Is this field a null?
    byte isNull = buffer.read(invert);
    if (isNull == nullMarker) {
        return null;
    }
    assert (isNull == notNullMarker);
    switch(type.getCategory()) {
        case PRIMITIVE:
            {
                PrimitiveTypeInfo ptype = (PrimitiveTypeInfo) type;
                switch(ptype.getPrimitiveCategory()) {
                    case VOID:
                        {
                            return null;
                        }
                    case BOOLEAN:
                        {
                            BooleanWritable r = reuse == null ? new BooleanWritable() : (BooleanWritable) reuse;
                            byte b = buffer.read(invert);
                            assert (b == 1 || b == 2);
                            r.set(b == 2);
                            return r;
                        }
                    case BYTE:
                        {
                            ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
                            r.set((byte) (buffer.read(invert) ^ 0x80));
                            return r;
                        }
                    case SHORT:
                        {
                            ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
                            int v = buffer.read(invert) ^ 0x80;
                            v = (v << 8) + (buffer.read(invert) & 0xff);
                            r.set((short) v);
                            return r;
                        }
                    case INT:
                        {
                            IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
                            r.set(deserializeInt(buffer, invert));
                            return r;
                        }
                    case LONG:
                        {
                            LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
                            r.set(deserializeLong(buffer, invert));
                            return r;
                        }
                    case FLOAT:
                        {
                            FloatWritable r = reuse == null ? new FloatWritable() : (FloatWritable) reuse;
                            int v = 0;
                            for (int i = 0; i < 4; i++) {
                                v = (v << 8) + (buffer.read(invert) & 0xff);
                            }
                            if ((v & (1 << 31)) == 0) {
                                // negative number, flip all bits
                                v = ~v;
                            } else {
                                // positive number, flip the first bit
                                v = v ^ (1 << 31);
                            }
                            r.set(Float.intBitsToFloat(v));
                            return r;
                        }
                    case DOUBLE:
                        {
                            DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
                            long v = 0;
                            for (int i = 0; i < 8; i++) {
                                v = (v << 8) + (buffer.read(invert) & 0xff);
                            }
                            if ((v & (1L << 63)) == 0) {
                                // negative number, flip all bits
                                v = ~v;
                            } else {
                                // positive number, flip the first bit
                                v = v ^ (1L << 63);
                            }
                            r.set(Double.longBitsToDouble(v));
                            return r;
                        }
                    case STRING:
                        {
                            Text r = reuse == null ? new Text() : (Text) reuse;
                            return deserializeText(buffer, invert, r);
                        }
                    case CHAR:
                        {
                            HiveCharWritable r = reuse == null ? new HiveCharWritable() : (HiveCharWritable) reuse;
                            // Use internal text member to read value
                            deserializeText(buffer, invert, r.getTextValue());
                            r.enforceMaxLength(getCharacterMaxLength(type));
                            return r;
                        }
                    case VARCHAR:
                        {
                            HiveVarcharWritable r = reuse == null ? new HiveVarcharWritable() : (HiveVarcharWritable) reuse;
                            // Use HiveVarchar's internal Text member to read the value.
                            deserializeText(buffer, invert, r.getTextValue());
                            // If we cache helper data for deserialization we could avoid having
                            // to call getCharacterMaxLength() on every deserialize call.
                            r.enforceMaxLength(getCharacterMaxLength(type));
                            return r;
                        }
                    case BINARY:
                        {
                            BytesWritable bw = new BytesWritable();
                            // Get the actual length first
                            int start = buffer.tell();
                            int length = 0;
                            do {
                                byte b = buffer.read(invert);
                                if (b == 0) {
                                    // end of string
                                    break;
                                }
                                if (b == 1) {
                                    // the last char is an escape char. read the actual char
                                    buffer.read(invert);
                                }
                                length++;
                            } while (true);
                            if (length == buffer.tell() - start) {
                                // No escaping happened, so we are already done.
                                bw.set(buffer.getData(), start, length);
                            } else {
                                // Escaping happened, we need to copy byte-by-byte.
                                // 1. Set the length first.
                                bw.set(buffer.getData(), start, length);
                                // 2. Reset the pointer.
                                buffer.seek(start);
                                // 3. Copy the data.
                                byte[] rdata = bw.getBytes();
                                for (int i = 0; i < length; i++) {
                                    byte b = buffer.read(invert);
                                    if (b == 1) {
                                        // The last char is an escape char, read the actual char.
                                        // The serialization format escapes \0 to \1 and \1 to \2,
                                        // to make sure the string is null-terminated.
                                        b = (byte) (buffer.read(invert) - 1);
                                    }
                                    rdata[i] = b;
                                }
                                // 4. Read the null terminator.
                                byte b = buffer.read(invert);
                                assert (b == 0);
                            }
                            return bw;
                        }
                    case DATE:
                        {
                            DateWritable d = reuse == null ? new DateWritable() : (DateWritable) reuse;
                            d.set(deserializeInt(buffer, invert));
                            return d;
                        }
                    case TIMESTAMP:
                        TimestampWritable t = (reuse == null ? new TimestampWritable() : (TimestampWritable) reuse);
                        byte[] bytes = new byte[TimestampWritable.BINARY_SORTABLE_LENGTH];
                        for (int i = 0; i < bytes.length; i++) {
                            bytes[i] = buffer.read(invert);
                        }
                        t.setBinarySortable(bytes, 0);
                        return t;
                    case INTERVAL_YEAR_MONTH:
                        {
                            HiveIntervalYearMonthWritable i = reuse == null ? new HiveIntervalYearMonthWritable() : (HiveIntervalYearMonthWritable) reuse;
                            i.set(deserializeInt(buffer, invert));
                            return i;
                        }
                    case INTERVAL_DAY_TIME:
                        {
                            HiveIntervalDayTimeWritable i = reuse == null ? new HiveIntervalDayTimeWritable() : (HiveIntervalDayTimeWritable) reuse;
                            long totalSecs = deserializeLong(buffer, invert);
                            int nanos = deserializeInt(buffer, invert);
                            i.set(totalSecs, nanos);
                            return i;
                        }
                    case DECIMAL:
                        {
                            // See the decimal serialization code in this class for an explanation of the format.
                            HiveDecimalWritable bdw = (reuse == null ? new HiveDecimalWritable() : (HiveDecimalWritable) reuse);
                            int b = buffer.read(invert) - 1;
                            assert (b == 1 || b == -1 || b == 0);
                            boolean positive = b != -1;
                            int factor = buffer.read(invert) ^ 0x80;
                            for (int i = 0; i < 3; i++) {
                                factor = (factor << 8) + (buffer.read(invert) & 0xff);
                            }
                            if (!positive) {
                                factor = -factor;
                            }
                            int start = buffer.tell();
                            int length = 0;
                            do {
                                b = buffer.read(positive ? invert : !invert);
                                assert (b != 1);
                                if (b == 0) {
                                    // end of digits
                                    break;
                                }
                                length++;
                            } while (true);
                            final byte[] decimalBuffer = new byte[length];
                            buffer.seek(start);
                            for (int i = 0; i < length; ++i) {
                                decimalBuffer[i] = buffer.read(positive ? invert : !invert);
                            }
                            // read the null byte again
                            buffer.read(positive ? invert : !invert);
                            String digits = new String(decimalBuffer, 0, length, decimalCharSet);
                            BigInteger bi = new BigInteger(digits);
                            HiveDecimal bd = HiveDecimal.create(bi).scaleByPowerOfTen(factor - length);
                            if (!positive) {
                                bd = bd.negate();
                            }
                            bdw.set(bd);
                            return bdw;
                        }
                    default:
                        {
                            throw new RuntimeException("Unrecognized type: " + ptype.getPrimitiveCategory());
                        }
                }
            }
        case LIST:
            {
                ListTypeInfo ltype = (ListTypeInfo) type;
                TypeInfo etype = ltype.getListElementTypeInfo();
                // Create the list if needed
                ArrayList<Object> r = reuse == null ? new ArrayList<Object>() : (ArrayList<Object>) reuse;
                // Read the list
                int size = 0;
                while (true) {
                    int more = buffer.read(invert);
                    if (more == 0) {
                        // \0 to terminate
                        break;
                    }
                    // \1 followed by each element
                    assert (more == 1);
                    if (size == r.size()) {
                        r.add(null);
                    }
                    r.set(size, deserialize(buffer, etype, invert, nullMarker, notNullMarker, r.get(size)));
                    size++;
                }
                // Remove additional elements if the list is reused
                while (r.size() > size) {
                    r.remove(r.size() - 1);
                }
                return r;
            }
        case MAP:
            {
                MapTypeInfo mtype = (MapTypeInfo) type;
                TypeInfo ktype = mtype.getMapKeyTypeInfo();
                TypeInfo vtype = mtype.getMapValueTypeInfo();
                // Create the map if needed
                Map<Object, Object> r;
                if (reuse == null) {
                    r = new HashMap<Object, Object>();
                } else {
                    r = (HashMap<Object, Object>) reuse;
                    r.clear();
                }
                while (true) {
                    int more = buffer.read(invert);
                    if (more == 0) {
                        // \0 to terminate
                        break;
                    }
                    // \1 followed by each key and then each value
                    assert (more == 1);
                    Object k = deserialize(buffer, ktype, invert, nullMarker, notNullMarker, null);
                    Object v = deserialize(buffer, vtype, invert, nullMarker, notNullMarker, null);
                    r.put(k, v);
                }
                return r;
            }
        case STRUCT:
            {
                StructTypeInfo stype = (StructTypeInfo) type;
                List<TypeInfo> fieldTypes = stype.getAllStructFieldTypeInfos();
                int size = fieldTypes.size();
                // Create the struct if needed
                ArrayList<Object> r = reuse == null ? new ArrayList<Object>(size) : (ArrayList<Object>) reuse;
                assert (r.size() <= size);
                // Set the size of the struct
                while (r.size() < size) {
                    r.add(null);
                }
                // Read one field by one field
                for (int eid = 0; eid < size; eid++) {
                    r.set(eid, deserialize(buffer, fieldTypes.get(eid), invert, nullMarker, notNullMarker, r.get(eid)));
                }
                return r;
            }
        case UNION:
            {
                UnionTypeInfo utype = (UnionTypeInfo) type;
                StandardUnion r = reuse == null ? new StandardUnion() : (StandardUnion) reuse;
                // Read the tag
                byte tag = buffer.read(invert);
                r.setTag(tag);
                r.setObject(deserialize(buffer, utype.getAllUnionObjectTypeInfos().get(tag), invert, nullMarker, notNullMarker, null));
                return r;
            }
        default:
            {
                throw new RuntimeException("Unrecognized type: " + type.getCategory());
            }
    }
}
Also used: HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), List (java.util.List), LongWritable (org.apache.hadoop.io.LongWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable), HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), Text (org.apache.hadoop.io.Text), BytesWritable (org.apache.hadoop.io.BytesWritable), HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable), HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable), MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo), ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo), BaseCharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo), FloatWritable (org.apache.hadoop.io.FloatWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), StandardUnion (org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion), BigInteger (java.math.BigInteger), Map (java.util.Map)
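
For context, here is a minimal standalone sketch (my own illustration, not Hive code) of the order-preserving float encoding that the FLOAT branch above reverses: serialization flips the sign bit of non-negative values and all bits of negative values, so an unsigned byte-by-byte comparison of the four encoded bytes orders floats numerically.

public class SortableFloatDemo {

    // Encode: positive floats (sign bit 0) get the sign bit set;
    // negative floats get every bit flipped.
    static int encode(float f) {
        int v = Float.floatToIntBits(f);
        return (v & (1 << 31)) == 0 ? v ^ (1 << 31) : ~v;
    }

    // Decode mirrors the FLOAT branch of deserialize(): a leading 0 bit
    // means the original float was negative.
    static float decode(int v) {
        return Float.intBitsToFloat((v & (1 << 31)) == 0 ? ~v : v ^ (1 << 31));
    }

    public static void main(String[] args) {
        float[] xs = { -2.5f, -1.0f, 0.0f, 1.0f, 3.5f };
        for (float x : xs) {
            int e = encode(x);
            System.out.printf("%6.2f -> 0x%08X (round trip %6.2f)%n", x, e, decode(e));
        }
    }
}

The encoded values increase (compared as unsigned integers) in the same order as the source floats, which is the point of the "binary sortable" format: rows can be ordered with memcmp-style byte comparison. The DOUBLE branch applies the same trick to 64 bits.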

Example 32 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class VectorAssignRow, method assignRowColumn.

/**
   * Assign a row's column object to the ColumnVector at batchIndex in the VectorizedRowBatch.
   *
   * @param batch               the vectorized row batch being populated
   * @param batchIndex          the row position within the batch
   * @param logicalColumnIndex  the logical column index, mapped to a physical column via projectionColumnNums
   * @param object              the row column object whose type is the target data type
   */
public void assignRowColumn(VectorizedRowBatch batch, int batchIndex, int logicalColumnIndex, Object object) {
    Category targetCategory = targetCategories[logicalColumnIndex];
    if (targetCategory == null) {
        /*
       * This is a column that we don't want (i.e. not included) -- we are done.
       */
        return;
    }
    final int projectionColumnNum = projectionColumnNums[logicalColumnIndex];
    if (object == null) {
        VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
        return;
    }
    switch(targetCategory) {
        case PRIMITIVE:
            {
                PrimitiveCategory targetPrimitiveCategory = targetPrimitiveCategories[logicalColumnIndex];
                switch(targetPrimitiveCategory) {
                    case VOID:
                        VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
                        return;
                    case BOOLEAN:
                        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = (((BooleanWritable) object).get() ? 1 : 0);
                        break;
                    case BYTE:
                        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = ((ByteWritable) object).get();
                        break;
                    case SHORT:
                        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = ((ShortWritable) object).get();
                        break;
                    case INT:
                        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = ((IntWritable) object).get();
                        break;
                    case LONG:
                        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = ((LongWritable) object).get();
                        break;
                    case TIMESTAMP:
                        ((TimestampColumnVector) batch.cols[projectionColumnNum]).set(batchIndex, ((TimestampWritable) object).getTimestamp());
                        break;
                    case DATE:
                        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = ((DateWritable) object).getDays();
                        break;
                    case FLOAT:
                        ((DoubleColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = ((FloatWritable) object).get();
                        break;
                    case DOUBLE:
                        ((DoubleColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = ((DoubleWritable) object).get();
                        break;
                    case BINARY:
                        {
                            BytesWritable bw = (BytesWritable) object;
                            ((BytesColumnVector) batch.cols[projectionColumnNum]).setVal(batchIndex, bw.getBytes(), 0, bw.getLength());
                        }
                        break;
                    case STRING:
                        {
                            Text tw = (Text) object;
                            ((BytesColumnVector) batch.cols[projectionColumnNum]).setVal(batchIndex, tw.getBytes(), 0, tw.getLength());
                        }
                        break;
                    case VARCHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            // We store VARCHAR type stripped of pads.
                            HiveVarchar hiveVarchar;
                            if (object instanceof HiveVarchar) {
                                hiveVarchar = (HiveVarchar) object;
                            } else {
                                hiveVarchar = ((HiveVarcharWritable) object).getHiveVarchar();
                            }
                            // TODO: HIVE-13624 Do we need maxLength checking?
                            byte[] bytes = hiveVarchar.getValue().getBytes();
                            ((BytesColumnVector) batch.cols[projectionColumnNum]).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case CHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            // We store CHAR type stripped of pads.
                            HiveChar hiveChar;
                            if (object instanceof HiveChar) {
                                hiveChar = (HiveChar) object;
                            } else {
                                hiveChar = ((HiveCharWritable) object).getHiveChar();
                            }
                            // TODO: HIVE-13624 Do we need maxLength checking?
                            // We store CHAR in vector row batch with padding stripped.
                            byte[] bytes = hiveChar.getStrippedValue().getBytes();
                            ((BytesColumnVector) batch.cols[projectionColumnNum]).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case DECIMAL:
                        if (object instanceof HiveDecimal) {
                            ((DecimalColumnVector) batch.cols[projectionColumnNum]).set(batchIndex, (HiveDecimal) object);
                        } else {
                            ((DecimalColumnVector) batch.cols[projectionColumnNum]).set(batchIndex, (HiveDecimalWritable) object);
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth().getTotalMonths();
                        break;
                    case INTERVAL_DAY_TIME:
                        ((IntervalDayTimeColumnVector) batch.cols[projectionColumnNum]).set(batchIndex, ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime());
                        break;
                    default:
                        throw new RuntimeException("Primitive category " + targetPrimitiveCategory.name() + " not supported");
                }
            }
            break;
        default:
            throw new RuntimeException("Category " + targetCategory.name() + " not supported");
    }
    /*
     * We always set the null flag to false when there is a value.
     */
    batch.cols[projectionColumnNum].isNull[batchIndex] = false;
}
Also used: PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), BytesWritable (org.apache.hadoop.io.BytesWritable), Text (org.apache.hadoop.io.Text), HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), FloatWritable (org.apache.hadoop.io.FloatWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), LongWritable (org.apache.hadoop.io.LongWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable)
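
The LongColumnVector writes above rely on Hive's convention that all integer-like types, BOOLEAN included, share one long[] representation. A hedged, self-contained sketch of that storage layout (illustrative types, not Hive's API):

import org.apache.hadoop.io.BooleanWritable;

class MiniBooleanColumn {
    final long[] vector;
    final boolean[] isNull;

    MiniBooleanColumn(int size) {
        vector = new long[size];
        isNull = new boolean[size];
    }

    // Mirrors the BOOLEAN branch of assignRowColumn: booleans become 1/0
    // longs, and a non-null value always clears the null flag.
    void assign(int batchIndex, BooleanWritable value) {
        if (value == null) {
            isNull[batchIndex] = true;   // the vector slot is left as filler
        } else {
            vector[batchIndex] = value.get() ? 1 : 0;
            isNull[batchIndex] = false;
        }
    }
}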

Example 33 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class VectorColumnAssignFactory, method buildObjectAssign.

public static VectorColumnAssign buildObjectAssign(VectorizedRowBatch outputBatch, int outColIndex, PrimitiveCategory category) throws HiveException {
    VectorColumnAssign outVCA = null;
    ColumnVector destCol = outputBatch.cols[outColIndex];
    if (destCol == null) {
        switch(category) {
            case VOID:
                outVCA = new VectorLongColumnAssign() {

                    // This is a dummy assigner
                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        // This is no-op, there is no column to assign to and val is expected to be null
                        assert (val == null);
                    }
                };
                break;
            default:
                throw new HiveException("Incompatible (null) vector column and primitive category " + category);
        }
    } else if (destCol instanceof LongColumnVector) {
        switch(category) {
            case BOOLEAN:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            BooleanWritable bw = (BooleanWritable) val;
                            assignLong(bw.get() ? 1 : 0, destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case BYTE:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            ByteWritable bw = (ByteWritable) val;
                            assignLong(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case SHORT:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            ShortWritable bw = (ShortWritable) val;
                            assignLong(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case INT:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            IntWritable bw = (IntWritable) val;
                            assignLong(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case LONG:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            LongWritable bw = (LongWritable) val;
                            assignLong(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case TIMESTAMP:
                outVCA = new VectorTimestampColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            assignTimestamp((TimestampWritable) val, destIndex);
                        }
                    }
                }.init(outputBatch, (TimestampColumnVector) destCol);
                break;
            case DATE:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            DateWritable bw = (DateWritable) val;
                            assignLong(bw.getDays(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case INTERVAL_YEAR_MONTH:
                outVCA = new VectorLongColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            HiveIntervalYearMonthWritable bw = (HiveIntervalYearMonthWritable) val;
                            assignLong(bw.getHiveIntervalYearMonth().getTotalMonths(), destIndex);
                        }
                    }
                }.init(outputBatch, (LongColumnVector) destCol);
                break;
            case INTERVAL_DAY_TIME:
                outVCA = new VectorIntervalDayTimeColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            HiveIntervalDayTimeWritable bw = (HiveIntervalDayTimeWritable) val;
                            assignIntervalDayTime(bw.getHiveIntervalDayTime(), destIndex);
                        }
                    }
                }.init(outputBatch, (IntervalDayTimeColumnVector) destCol);
                break;
            default:
                throw new HiveException("Incompatible Long vector column and primitive category " + category);
        }
    } else if (destCol instanceof DoubleColumnVector) {
        switch(category) {
            case DOUBLE:
                outVCA = new VectorDoubleColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            DoubleWritable bw = (DoubleWritable) val;
                            assignDouble(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (DoubleColumnVector) destCol);
                break;
            case FLOAT:
                outVCA = new VectorDoubleColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            FloatWritable bw = (FloatWritable) val;
                            assignDouble(bw.get(), destIndex);
                        }
                    }
                }.init(outputBatch, (DoubleColumnVector) destCol);
                break;
            default:
                throw new HiveException("Incompatible Double vector column and primitive category " + category);
        }
    } else if (destCol instanceof BytesColumnVector) {
        switch(category) {
            case BINARY:
                outVCA = new VectorBytesColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            BytesWritable bw = (BytesWritable) val;
                            byte[] bytes = bw.getBytes();
                            assignBytes(bytes, 0, bw.getLength(), destIndex);
                        }
                    }
                }.init(outputBatch, (BytesColumnVector) destCol);
                break;
            case STRING:
                outVCA = new VectorBytesColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            Text bw = (Text) val;
                            byte[] bytes = bw.getBytes();
                            assignBytes(bytes, 0, bw.getLength(), destIndex);
                        }
                    }
                }.init(outputBatch, (BytesColumnVector) destCol);
                break;
            case VARCHAR:
                outVCA = new VectorBytesColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            // We store VARCHAR type stripped of pads.
                            HiveVarchar hiveVarchar;
                            if (val instanceof HiveVarchar) {
                                hiveVarchar = (HiveVarchar) val;
                            } else {
                                hiveVarchar = ((HiveVarcharWritable) val).getHiveVarchar();
                            }
                            byte[] bytes = hiveVarchar.getValue().getBytes();
                            assignBytes(bytes, 0, bytes.length, destIndex);
                        }
                    }
                }.init(outputBatch, (BytesColumnVector) destCol);
                break;
            case CHAR:
                outVCA = new VectorBytesColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            // We store CHAR type stripped of pads.
                            HiveChar hiveChar;
                            if (val instanceof HiveChar) {
                                hiveChar = (HiveChar) val;
                            } else {
                                hiveChar = ((HiveCharWritable) val).getHiveChar();
                            }
                            byte[] bytes = hiveChar.getStrippedValue().getBytes();
                            assignBytes(bytes, 0, bytes.length, destIndex);
                        }
                    }
                }.init(outputBatch, (BytesColumnVector) destCol);
                break;
            default:
                throw new HiveException("Incompatible Bytes vector column and primitive category " + category);
        }
    } else if (destCol instanceof DecimalColumnVector) {
        switch(category) {
            case DECIMAL:
                outVCA = new VectorDecimalColumnAssign() {

                    @Override
                    public void assignObjectValue(Object val, int destIndex) throws HiveException {
                        if (val == null) {
                            assignNull(destIndex);
                        } else {
                            if (val instanceof HiveDecimal) {
                                assignDecimal((HiveDecimal) val, destIndex);
                            } else {
                                assignDecimal((HiveDecimalWritable) val, destIndex);
                            }
                        }
                    }
                }.init(outputBatch, (DecimalColumnVector) destCol);
                break;
            default:
                throw new HiveException("Incompatible Decimal vector column and primitive category " + category);
        }
    } else {
        throw new HiveException("Unknown vector column type " + destCol.getClass().getName());
    }
    return outVCA;
}
Also used: HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), LongWritable (org.apache.hadoop.io.LongWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable), HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable), BytesWritable (org.apache.hadoop.io.BytesWritable), Text (org.apache.hadoop.io.Text), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable), FloatWritable (org.apache.hadoop.io.FloatWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable)
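
The factory pays the type-dispatch cost once per column and returns an assigner whose per-row path is a single call. A condensed sketch of the same idea, using an assumed functional interface in place of Hive's VectorColumnAssign hierarchy:

import org.apache.hadoop.io.BooleanWritable;

interface MiniColumnAssign {
    void assignObjectValue(Object val, int destIndex);
}

class MiniAssignFactory {

    // Resolve the BooleanWritable-to-long conversion once; every row then
    // runs only the null check and the store.
    static MiniColumnAssign buildBooleanAssign(long[] vector, boolean[] isNull) {
        return (val, destIndex) -> {
            if (val == null) {
                isNull[destIndex] = true;
            } else {
                vector[destIndex] = ((BooleanWritable) val).get() ? 1 : 0;
                isNull[destIndex] = false;
            }
        };
    }
}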

Example 34 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class VectorDeserializeRow, method convertRowColumn.

/**
   * Convert one row column value that is the current value in deserializeRead.
   *
   * We deserialize into a writable and then pass that writable to an instance of VectorAssignRow
   * to convert the writable to the target data type and assign it into the VectorizedRowBatch.
   *
   * @param batch               the vectorized row batch being populated
   * @param batchIndex          the row position within the batch
   * @param logicalColumnIndex  the logical column index of the value being converted
   * @throws IOException        if reading the current value from deserializeRead fails
   */
private void convertRowColumn(VectorizedRowBatch batch, int batchIndex, int logicalColumnIndex) throws IOException {
    final int projectionColumnNum = projectionColumnNums[logicalColumnIndex];
    Writable convertSourceWritable = convertSourceWritables[logicalColumnIndex];
    switch(sourceCategories[logicalColumnIndex]) {
        case PRIMITIVE:
            {
                switch(sourcePrimitiveCategories[logicalColumnIndex]) {
                    case VOID:
                        convertSourceWritable = null;
                        break;
                    case BOOLEAN:
                        ((BooleanWritable) convertSourceWritable).set(deserializeRead.currentBoolean);
                        break;
                    case BYTE:
                        ((ByteWritable) convertSourceWritable).set(deserializeRead.currentByte);
                        break;
                    case SHORT:
                        ((ShortWritable) convertSourceWritable).set(deserializeRead.currentShort);
                        break;
                    case INT:
                        ((IntWritable) convertSourceWritable).set(deserializeRead.currentInt);
                        break;
                    case LONG:
                        ((LongWritable) convertSourceWritable).set(deserializeRead.currentLong);
                        break;
                    case TIMESTAMP:
                        ((TimestampWritable) convertSourceWritable).set(deserializeRead.currentTimestampWritable);
                        break;
                    case DATE:
                        ((DateWritable) convertSourceWritable).set(deserializeRead.currentDateWritable);
                        break;
                    case FLOAT:
                        ((FloatWritable) convertSourceWritable).set(deserializeRead.currentFloat);
                        break;
                    case DOUBLE:
                        ((DoubleWritable) convertSourceWritable).set(deserializeRead.currentDouble);
                        break;
                    case BINARY:
                        if (deserializeRead.currentBytes == null) {
                            LOG.info("null binary entry: batchIndex " + batchIndex + " projection column num " + projectionColumnNum);
                        }
                        ((BytesWritable) convertSourceWritable).set(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesLength);
                        break;
                    case STRING:
                        if (deserializeRead.currentBytes == null) {
                            throw new RuntimeException("null string entry: batchIndex " + batchIndex + " projection column num " + projectionColumnNum);
                        }
                        // Use org.apache.hadoop.io.Text as our helper to go from byte[] to String.
                        ((Text) convertSourceWritable).set(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesLength);
                        break;
                    case VARCHAR:
                        {
                            // Use the basic STRING bytes read to get access, then use our optimal
                            // truncate/trim method that does not use Java String objects.
                            if (deserializeRead.currentBytes == null) {
                                throw new RuntimeException("null varchar entry: batchIndex " + batchIndex + " projection column num " + projectionColumnNum);
                            }
                            int adjustedLength = StringExpr.truncate(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesLength, maxLengths[logicalColumnIndex]);
                            ((HiveVarcharWritable) convertSourceWritable).set(new String(deserializeRead.currentBytes, deserializeRead.currentBytesStart, adjustedLength, Charsets.UTF_8), -1);
                        }
                        break;
                    case CHAR:
                        {
                            // Use the basic STRING bytes read to get access, then use our optimal
                            // truncate/trim method that does not use Java String objects.
                            if (deserializeRead.currentBytes == null) {
                                throw new RuntimeException("null char entry: batchIndex " + batchIndex + " projection column num " + projectionColumnNum);
                            }
                            int adjustedLength = StringExpr.rightTrimAndTruncate(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesLength, maxLengths[logicalColumnIndex]);
                            ((HiveCharWritable) convertSourceWritable).set(new String(deserializeRead.currentBytes, deserializeRead.currentBytesStart, adjustedLength, Charsets.UTF_8), -1);
                        }
                        break;
                    case DECIMAL:
                        ((HiveDecimalWritable) convertSourceWritable).set(deserializeRead.currentHiveDecimalWritable);
                        break;
                    case INTERVAL_YEAR_MONTH:
                        ((HiveIntervalYearMonthWritable) convertSourceWritable).set(deserializeRead.currentHiveIntervalYearMonthWritable);
                        break;
                    case INTERVAL_DAY_TIME:
                        ((HiveIntervalDayTimeWritable) convertSourceWritable).set(deserializeRead.currentHiveIntervalDayTimeWritable);
                        break;
                    default:
                        throw new RuntimeException("Primitive category " + sourcePrimitiveCategories[logicalColumnIndex] + " not supported");
                }
            }
            break;
        default:
            throw new RuntimeException("Category " + sourceCategories[logicalColumnIndex] + " not supported");
    }
    /*
     * Convert our source object we just read into the target object and store that in the
     * VectorizedRowBatch.
     */
    convertVectorAssignRow.assignConvertRowColumn(batch, batchIndex, logicalColumnIndex, convertSourceWritable);
}
Also used: ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), Writable (org.apache.hadoop.io.Writable), LongWritable (org.apache.hadoop.io.LongWritable), HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable), HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable), HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable), BytesWritable (org.apache.hadoop.io.BytesWritable), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), IntWritable (org.apache.hadoop.io.IntWritable), HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), FloatWritable (org.apache.hadoop.io.FloatWritable)
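
Note how convertRowColumn never allocates on the hot path: each column owns one preallocated Writable (convertSourceWritables) that is overwritten per row and handed to the assigner. A minimal sketch of that buffer-reuse pattern, with illustrative names that are not Hive's:

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.Writable;

class ReusableBooleanConverter {
    // One scratch writable per column, created once and reused for every row.
    private final BooleanWritable scratch = new BooleanWritable();

    Writable convert(boolean currentBoolean) {
        scratch.set(currentBoolean);   // overwrite instead of allocating
        return scratch;
    }
}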

Example 35 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class VectorExtractRow, method extractRowColumn.

/**
   * Extract a row's column object from the ColumnVector at batchIndex in the VectorizedRowBatch.
   *
   * @param batch               the vectorized row batch being read
   * @param batchIndex          the row position within the batch
   * @param logicalColumnIndex  the logical column index of the value being extracted
   * @return the column value as a reusable Writable, or null if the value is null or the column was not read
   */
public Object extractRowColumn(VectorizedRowBatch batch, int batchIndex, int logicalColumnIndex) {
    final int projectionColumnNum = projectionColumnNums[logicalColumnIndex];
    ColumnVector colVector = batch.cols[projectionColumnNum];
    if (colVector == null) {
        // The planner will not include unneeded columns for reading, but other parts of
        // execution may ask for them.
        return null;
    }
    int adjustedIndex = (colVector.isRepeating ? 0 : batchIndex);
    if (!colVector.noNulls && colVector.isNull[adjustedIndex]) {
        return null;
    }
    Category category = categories[logicalColumnIndex];
    switch(category) {
        case PRIMITIVE:
            {
                Writable primitiveWritable = primitiveWritables[logicalColumnIndex];
                PrimitiveCategory primitiveCategory = primitiveCategories[logicalColumnIndex];
                switch(primitiveCategory) {
                    case VOID:
                        return null;
                    case BOOLEAN:
                        ((BooleanWritable) primitiveWritable).set(((LongColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex] != 0);
                        return primitiveWritable;
                    case BYTE:
                        ((ByteWritable) primitiveWritable).set((byte) ((LongColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex]);
                        return primitiveWritable;
                    case SHORT:
                        ((ShortWritable) primitiveWritable).set((short) ((LongColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex]);
                        return primitiveWritable;
                    case INT:
                        ((IntWritable) primitiveWritable).set((int) ((LongColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex]);
                        return primitiveWritable;
                    case LONG:
                        ((LongWritable) primitiveWritable).set(((LongColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex]);
                        return primitiveWritable;
                    case TIMESTAMP:
                        ((TimestampWritable) primitiveWritable).set(((TimestampColumnVector) batch.cols[projectionColumnNum]).asScratchTimestamp(adjustedIndex));
                        return primitiveWritable;
                    case DATE:
                        ((DateWritable) primitiveWritable).set((int) ((LongColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex]);
                        return primitiveWritable;
                    case FLOAT:
                        ((FloatWritable) primitiveWritable).set((float) ((DoubleColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex]);
                        return primitiveWritable;
                    case DOUBLE:
                        ((DoubleWritable) primitiveWritable).set(((DoubleColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex]);
                        return primitiveWritable;
                    case BINARY:
                        {
                            BytesColumnVector bytesColVector = ((BytesColumnVector) batch.cols[projectionColumnNum]);
                            byte[] bytes = bytesColVector.vector[adjustedIndex];
                            int start = bytesColVector.start[adjustedIndex];
                            int length = bytesColVector.length[adjustedIndex];
                            if (bytes == null) {
                                LOG.info("null binary entry: batchIndex " + batchIndex + " projection column num " + projectionColumnNum);
                            }
                            BytesWritable bytesWritable = (BytesWritable) primitiveWritable;
                            bytesWritable.set(bytes, start, length);
                            return primitiveWritable;
                        }
                    case STRING:
                        {
                            BytesColumnVector bytesColVector = ((BytesColumnVector) batch.cols[projectionColumnNum]);
                            byte[] bytes = bytesColVector.vector[adjustedIndex];
                            int start = bytesColVector.start[adjustedIndex];
                            int length = bytesColVector.length[adjustedIndex];
                            if (bytes == null) {
                                nullBytesReadError(primitiveCategory, batchIndex, projectionColumnNum);
                            }
                            // Use org.apache.hadoop.io.Text as our helper to go from byte[] to String.
                            ((Text) primitiveWritable).set(bytes, start, length);
                            return primitiveWritable;
                        }
                    case VARCHAR:
                        {
                            BytesColumnVector bytesColVector = ((BytesColumnVector) batch.cols[projectionColumnNum]);
                            byte[] bytes = bytesColVector.vector[adjustedIndex];
                            int start = bytesColVector.start[adjustedIndex];
                            int length = bytesColVector.length[adjustedIndex];
                            if (bytes == null) {
                                nullBytesReadError(primitiveCategory, batchIndex, projectionColumnNum);
                            }
                            int adjustedLength = StringExpr.truncate(bytes, start, length, maxLengths[logicalColumnIndex]);
                            HiveVarcharWritable hiveVarcharWritable = (HiveVarcharWritable) primitiveWritable;
                            hiveVarcharWritable.set(new String(bytes, start, adjustedLength, Charsets.UTF_8), -1);
                            return primitiveWritable;
                        }
                    case CHAR:
                        {
                            BytesColumnVector bytesColVector = ((BytesColumnVector) batch.cols[projectionColumnNum]);
                            byte[] bytes = bytesColVector.vector[adjustedIndex];
                            int start = bytesColVector.start[adjustedIndex];
                            int length = bytesColVector.length[adjustedIndex];
                            if (bytes == null) {
                                nullBytesReadError(primitiveCategory, batchIndex, projectionColumnNum);
                            }
                            int adjustedLength = StringExpr.rightTrimAndTruncate(bytes, start, length, maxLengths[logicalColumnIndex]);
                            HiveCharWritable hiveCharWritable = (HiveCharWritable) primitiveWritable;
                            hiveCharWritable.set(new String(bytes, start, adjustedLength, Charsets.UTF_8), maxLengths[logicalColumnIndex]);
                            return primitiveWritable;
                        }
                    case DECIMAL:
                        // The HiveDecimalWritable set method will quickly copy the deserialized decimal writable fields.
                        ((HiveDecimalWritable) primitiveWritable).set(((DecimalColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex]);
                        return primitiveWritable;
                    case INTERVAL_YEAR_MONTH:
                        ((HiveIntervalYearMonthWritable) primitiveWritable).set((int) ((LongColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex]);
                        return primitiveWritable;
                    case INTERVAL_DAY_TIME:
                        ((HiveIntervalDayTimeWritable) primitiveWritable).set(((IntervalDayTimeColumnVector) batch.cols[projectionColumnNum]).asScratchIntervalDayTime(adjustedIndex));
                        return primitiveWritable;
                    default:
                        throw new RuntimeException("Primitive category " + primitiveCategory.name() + " not supported");
                }
            }
        default:
            throw new RuntimeException("Category " + category.name() + " not supported");
    }
}
Also used: PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), Writable (org.apache.hadoop.io.Writable), LongWritable (org.apache.hadoop.io.LongWritable), HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable), HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable), HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable), BytesWritable (org.apache.hadoop.io.BytesWritable), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), IntWritable (org.apache.hadoop.io.IntWritable), HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), FloatWritable (org.apache.hadoop.io.FloatWritable), Text (org.apache.hadoop.io.Text)
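
The adjustedIndex and isNull handling at the top of extractRowColumn encode the batch's repeating-value protocol: when isRepeating is true, only element 0 of the vector is meaningful, and isNull[] is consulted only when noNulls is false. A minimal sketch of that protocol over assumed simplified arrays:

class MiniLongReader {
    // Returns null for SQL NULL, otherwise the long value for this row.
    static Long read(long[] vector, boolean[] isNull,
                     boolean isRepeating, boolean noNulls, int batchIndex) {
        int adjustedIndex = isRepeating ? 0 : batchIndex;
        if (!noNulls && isNull[adjustedIndex]) {
            return null;
        }
        return vector[adjustedIndex];
    }
}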

Aggregations

BooleanWritable (org.apache.hadoop.io.BooleanWritable): 63
IntWritable (org.apache.hadoop.io.IntWritable): 41
LongWritable (org.apache.hadoop.io.LongWritable): 40
FloatWritable (org.apache.hadoop.io.FloatWritable): 37
Text (org.apache.hadoop.io.Text): 31
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 27
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 26
BytesWritable (org.apache.hadoop.io.BytesWritable): 26
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 25
Writable (org.apache.hadoop.io.Writable): 17
Test (org.junit.Test): 17
ArrayList (java.util.ArrayList): 15
Configuration (org.apache.hadoop.conf.Configuration): 12
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 12
Random (java.util.Random): 11
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 10
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 9
KeyValue (org.apache.hadoop.hbase.KeyValue): 7
Result (org.apache.hadoop.hbase.client.Result): 7
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 7