Example 31 with Category

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.

the class VectorAssignRow method assignConvertRowColumn.

private void assignConvertRowColumn(ColumnVector columnVector, int batchIndex, TypeInfo targetTypeInfo, ObjectInspector sourceObjectInspector, Writable convertTargetWritable, Object object) {
    final Category targetCategory = targetTypeInfo.getCategory();
    if (targetCategory == null) {
        /*
         * This is a column that we don't want (i.e. not included) -- we are done.
         */
        return;
    }
    if (object == null) {
        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
        return;
    }
    try {
        switch(targetCategory) {
            case PRIMITIVE:
                final PrimitiveObjectInspector sourcePrimitiveOI = (PrimitiveObjectInspector) sourceObjectInspector;
                final PrimitiveCategory targetPrimitiveCategory = ((PrimitiveTypeInfo) targetTypeInfo).getPrimitiveCategory();
                switch(targetPrimitiveCategory) {
                    case VOID:
                        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                        return;
                    case BOOLEAN:
                        ((LongColumnVector) columnVector).vector[batchIndex] = (PrimitiveObjectInspectorUtils.getBoolean(object, sourcePrimitiveOI) ? 1 : 0);
                        break;
                    case BYTE:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getByte(object, sourcePrimitiveOI);
                        break;
                    case SHORT:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getShort(object, sourcePrimitiveOI);
                        break;
                    case INT:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getInt(object, sourcePrimitiveOI);
                        break;
                    case LONG:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getLong(object, sourcePrimitiveOI);
                        break;
                    case TIMESTAMP:
                        {
                            final Timestamp timestamp = PrimitiveObjectInspectorUtils.getTimestamp(object, sourcePrimitiveOI);
                            if (timestamp == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((TimestampColumnVector) columnVector).set(batchIndex, timestamp);
                        }
                        break;
                    case DATE:
                        {
                            final Date date = PrimitiveObjectInspectorUtils.getDate(object, sourcePrimitiveOI);
                            if (date == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            DateWritable dateWritable = (DateWritable) convertTargetWritable;
                            if (dateWritable == null) {
                                dateWritable = new DateWritable();
                            }
                            dateWritable.set(date);
                            ((LongColumnVector) columnVector).vector[batchIndex] = dateWritable.getDays();
                        }
                        break;
                    case FLOAT:
                        ((DoubleColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getFloat(object, sourcePrimitiveOI);
                        break;
                    case DOUBLE:
                        ((DoubleColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getDouble(object, sourcePrimitiveOI);
                        break;
                    case BINARY:
                        {
                            final BytesWritable bytesWritable = PrimitiveObjectInspectorUtils.getBinary(object, sourcePrimitiveOI);
                            if (bytesWritable == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytesWritable.getBytes(), 0, bytesWritable.getLength());
                        }
                        break;
                    case STRING:
                        {
                            final String string = PrimitiveObjectInspectorUtils.getString(object, sourcePrimitiveOI);
                            if (string == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            Text text = (Text) convertTargetWritable;
                            if (text == null) {
                                text = new Text();
                            }
                            text.set(string);
                            ((BytesColumnVector) columnVector).setVal(batchIndex, text.getBytes(), 0, text.getLength());
                        }
                        break;
                    case VARCHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            final HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(object, sourcePrimitiveOI);
                            if (hiveVarchar == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            // TODO: Do we need maxLength checking?
                            byte[] bytes = hiveVarchar.getValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case CHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            final HiveChar hiveChar = PrimitiveObjectInspectorUtils.getHiveChar(object, sourcePrimitiveOI);
                            if (hiveChar == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            // We store CHAR in vector row batch with padding stripped.
                            // TODO: Do we need maxLength checking?
                            final byte[] bytes = hiveChar.getStrippedValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case DECIMAL:
                        {
                            final HiveDecimal hiveDecimal = PrimitiveObjectInspectorUtils.getHiveDecimal(object, sourcePrimitiveOI);
                            if (hiveDecimal == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            if (columnVector instanceof Decimal64ColumnVector) {
                                Decimal64ColumnVector dec64ColVector = (Decimal64ColumnVector) columnVector;
                                dec64ColVector.set(batchIndex, hiveDecimal);
                                if (dec64ColVector.isNull[batchIndex]) {
                                    return;
                                }
                            } else {
                                ((DecimalColumnVector) columnVector).set(batchIndex, hiveDecimal);
                            }
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        {
                            final HiveIntervalYearMonth intervalYearMonth = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(object, sourcePrimitiveOI);
                            if (intervalYearMonth == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((LongColumnVector) columnVector).vector[batchIndex] = intervalYearMonth.getTotalMonths();
                        }
                        break;
                    case INTERVAL_DAY_TIME:
                        {
                            final HiveIntervalDayTime intervalDayTime = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(object, sourcePrimitiveOI);
                            if (intervalDayTime == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((IntervalDayTimeColumnVector) columnVector).set(batchIndex, intervalDayTime);
                        }
                        break;
                    default:
                        throw new RuntimeException("Primitive category " + targetPrimitiveCategory.name() + " not supported");
                }
                break;
            case LIST:
                {
                    final ListColumnVector listColumnVector = (ListColumnVector) columnVector;
                    final ListObjectInspector sourceListOI = (ListObjectInspector) sourceObjectInspector;
                    final ObjectInspector sourceElementOI = sourceListOI.getListElementObjectInspector();
                    final int size = sourceListOI.getListLength(object);
                    final TypeInfo targetElementTypeInfo = ((ListTypeInfo) targetTypeInfo).getListElementTypeInfo();
                    listColumnVector.offsets[batchIndex] = listColumnVector.childCount;
                    listColumnVector.childCount += size;
                    listColumnVector.ensureSize(listColumnVector.childCount, true);
                    listColumnVector.lengths[batchIndex] = size;
                    for (int i = 0; i < size; i++) {
                        final Object element = sourceListOI.getListElement(object, i);
                        final int offset = (int) (listColumnVector.offsets[batchIndex] + i);
                        assignConvertRowColumn(listColumnVector.child, offset, targetElementTypeInfo, sourceElementOI, null, element);
                    }
                }
                break;
            case MAP:
                {
                    final MapColumnVector mapColumnVector = (MapColumnVector) columnVector;
                    final MapObjectInspector mapObjectInspector = (MapObjectInspector) sourceObjectInspector;
                    final MapTypeInfo mapTypeInfo = (MapTypeInfo) targetTypeInfo;
                    final Map<?, ?> map = mapObjectInspector.getMap(object);
                    for (Map.Entry<?, ?> entry : map.entrySet()) {
                        assignConvertRowColumn(mapColumnVector.keys, batchIndex, mapTypeInfo.getMapKeyTypeInfo(), mapObjectInspector.getMapKeyObjectInspector(), null, entry.getKey());
                        assignConvertRowColumn(mapColumnVector.values, batchIndex, mapTypeInfo.getMapValueTypeInfo(), mapObjectInspector.getMapValueObjectInspector(), null, entry.getValue());
                    }
                }
                break;
            case STRUCT:
                {
                    final StructColumnVector structColumnVector = (StructColumnVector) columnVector;
                    final StructObjectInspector sourceStructOI = (StructObjectInspector) sourceObjectInspector;
                    final List<? extends StructField> sourceFields = sourceStructOI.getAllStructFieldRefs();
                    final StructTypeInfo targetStructTypeInfo = (StructTypeInfo) targetTypeInfo;
                    final List<TypeInfo> targetTypeInfos = targetStructTypeInfo.getAllStructFieldTypeInfos();
                    final int size = targetTypeInfos.size();
                    for (int i = 0; i < size; i++) {
                        if (i < sourceFields.size()) {
                            final StructField sourceStructField = sourceFields.get(i);
                            final ObjectInspector sourceFieldOI = sourceStructField.getFieldObjectInspector();
                            final Object sourceData = sourceStructOI.getStructFieldData(object, sourceStructField);
                            assignConvertRowColumn(structColumnVector.fields[i], batchIndex, targetTypeInfos.get(i), sourceFieldOI, null, sourceData);
                        } else {
                            final ColumnVector fieldColumnVector = structColumnVector.fields[i];
                            VectorizedBatchUtil.setNullColIsNullValue(fieldColumnVector, batchIndex);
                        }
                    }
                }
                break;
            case UNION:
                {
                    final UnionColumnVector unionColumnVector = (UnionColumnVector) columnVector;
                    final UnionObjectInspector unionObjectInspector = (UnionObjectInspector) sourceObjectInspector;
                    final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) targetTypeInfo;
                    final int tag = unionObjectInspector.getTag(object);
                    assignConvertRowColumn(unionColumnVector.fields[tag], batchIndex, unionTypeInfo.getAllUnionObjectTypeInfos().get(tag), unionObjectInspector.getObjectInspectors().get(tag), null, unionObjectInspector.getField(tag));
                }
                break;
            default:
                throw new RuntimeException("Category " + targetCategory.name() + " not supported");
        }
    } catch (NumberFormatException e) {
        // Some of the conversion methods throw this exception on numeric parsing errors.
        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
        return;
    }
    // We always set the null flag to false when there is a value.
    columnVector.isNull[batchIndex] = false;
}
Also used : PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) Category(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) Timestamp(java.sql.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) List(java.util.List) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) Text(org.apache.hadoop.io.Text) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) Date(java.sql.Date) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) Map(java.util.Map)
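
For orientation, assignConvertRowColumn is the per-column workhorse: a caller walks the columns of a VectorizedRowBatch and hands each source value to it. The loop below is a minimal sketch of such a driver; sourceObjectInspectors, targetTypeInfos, and convertTargetWritables are hypothetical parallel arrays standing in for whatever per-column state the real VectorAssignRow keeps, not its exact fields or public API.

// Hypothetical driver, a minimal sketch: convert and assign every field of one
// deserialized row into slot batchIndex of the batch.
private void assignConvertRow(VectorizedRowBatch batch, int batchIndex, Object[] rowFields) {
    for (int c = 0; c < rowFields.length; c++) {
        // Each column gets its own source inspector, target type, and scratch Writable.
        assignConvertRowColumn(batch.cols[c], batchIndex, targetTypeInfos[c],
                sourceObjectInspectors[c], convertTargetWritables[c], rowFields[c]);
    }
}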

Example 32 with Category

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.

the class VectorExtractRow method extractRowColumn.

public Object extractRowColumn(ColumnVector colVector, TypeInfo typeInfo, ObjectInspector objectInspector, int batchIndex) {
    if (colVector == null) {
        // The planner will not include unneeded columns for reading, but other parts of
        // execution may ask for them.
        return null;
    }
    final int adjustedIndex = (colVector.isRepeating ? 0 : batchIndex);
    if (!colVector.noNulls && colVector.isNull[adjustedIndex]) {
        return null;
    }
    final Category category = typeInfo.getCategory();
    switch(category) {
        case PRIMITIVE:
            {
                final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
                final PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
                final Writable primitiveWritable = VectorizedBatchUtil.getPrimitiveWritable(primitiveCategory);
                switch(primitiveCategory) {
                    case VOID:
                        return null;
                    case BOOLEAN:
                        ((BooleanWritable) primitiveWritable).set(((LongColumnVector) colVector).vector[adjustedIndex] != 0);
                        return primitiveWritable;
                    case BYTE:
                        ((ByteWritable) primitiveWritable).set((byte) ((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case SHORT:
                        ((ShortWritable) primitiveWritable).set((short) ((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case INT:
                        ((IntWritable) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case LONG:
                        ((LongWritable) primitiveWritable).set(((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case TIMESTAMP:
                        ((TimestampWritable) primitiveWritable).set(((TimestampColumnVector) colVector).asScratchTimestamp(adjustedIndex));
                        return primitiveWritable;
                    case DATE:
                        ((DateWritable) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case FLOAT:
                        ((FloatWritable) primitiveWritable).set((float) ((DoubleColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case DOUBLE:
                        ((DoubleWritable) primitiveWritable).set(((DoubleColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case BINARY:
                        {
                            final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
                            final byte[] bytes = bytesColVector.vector[adjustedIndex];
                            final int start = bytesColVector.start[adjustedIndex];
                            final int length = bytesColVector.length[adjustedIndex];
                            if (bytesColVector.isRepeating) {
                                if (!bytesColVector.isNull[0] && bytes == null) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                            } else {
                                if ((bytesColVector.noNulls || !bytesColVector.isNull[batchIndex]) && bytes == null) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                            }
                            BytesWritable bytesWritable = (BytesWritable) primitiveWritable;
                            bytesWritable.set(bytes, start, length);
                            return primitiveWritable;
                        }
                    case STRING:
                        {
                            final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
                            final byte[] bytes = bytesColVector.vector[adjustedIndex];
                            final int start = bytesColVector.start[adjustedIndex];
                            final int length = bytesColVector.length[adjustedIndex];
                            if (bytesColVector.isRepeating) {
                                if (!bytesColVector.isNull[0] && bytes == null) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                            } else {
                                if ((bytesColVector.noNulls || !bytesColVector.isNull[batchIndex]) && bytes == null) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                            }
                            // Use org.apache.hadoop.io.Text as our helper to go from byte[] to String.
                            ((Text) primitiveWritable).set(bytes, start, length);
                            return primitiveWritable;
                        }
                    case VARCHAR:
                        {
                            final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
                            final byte[] bytes = bytesColVector.vector[adjustedIndex];
                            final int start = bytesColVector.start[adjustedIndex];
                            final int length = bytesColVector.length[adjustedIndex];
                            if (bytesColVector.isRepeating) {
                                if (!bytesColVector.isNull[0] && bytes == null) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                            } else {
                                if ((bytesColVector.noNulls || !bytesColVector.isNull[batchIndex]) && bytes == null) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                            }
                            final int adjustedLength = StringExpr.truncate(bytes, start, length, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                            final HiveVarcharWritable hiveVarcharWritable = (HiveVarcharWritable) primitiveWritable;
                            hiveVarcharWritable.set(new String(bytes, start, adjustedLength, Charsets.UTF_8), -1);
                            return primitiveWritable;
                        }
                    case CHAR:
                        {
                            final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
                            final byte[] bytes = bytesColVector.vector[adjustedIndex];
                            final int start = bytesColVector.start[adjustedIndex];
                            final int length = bytesColVector.length[adjustedIndex];
                            if (bytesColVector.isRepeating) {
                                if (!bytesColVector.isNull[0] && bytes == null) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                            } else {
                                if ((bytesColVector.noNulls || !bytesColVector.isNull[batchIndex]) && bytes == null) {
                                    nullBytesReadError(primitiveCategory, batchIndex);
                                }
                            }
                            final int adjustedLength = StringExpr.rightTrimAndTruncate(bytes, start, length, ((CharTypeInfo) primitiveTypeInfo).getLength());
                            final HiveCharWritable hiveCharWritable = (HiveCharWritable) primitiveWritable;
                            hiveCharWritable.set(new String(bytes, start, adjustedLength, Charsets.UTF_8), ((CharTypeInfo) primitiveTypeInfo).getLength());
                            return primitiveWritable;
                        }
                    case DECIMAL:
                        if (colVector instanceof Decimal64ColumnVector) {
                            Decimal64ColumnVector dec64ColVector = (Decimal64ColumnVector) colVector;
                            ((HiveDecimalWritable) primitiveWritable).deserialize64(dec64ColVector.vector[adjustedIndex], dec64ColVector.scale);
                        } else {
                            // The HiveDecimalWritable set method will quickly copy the deserialized decimal writable fields.
                            ((HiveDecimalWritable) primitiveWritable).set(((DecimalColumnVector) colVector).vector[adjustedIndex]);
                        }
                        return primitiveWritable;
                    case INTERVAL_YEAR_MONTH:
                        ((HiveIntervalYearMonthWritable) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
                        return primitiveWritable;
                    case INTERVAL_DAY_TIME:
                        ((HiveIntervalDayTimeWritable) primitiveWritable).set(((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(adjustedIndex));
                        return primitiveWritable;
                    default:
                        throw new RuntimeException("Primitive category " + primitiveCategory.name() + " not supported");
                }
            }
        case LIST:
            {
                final ListColumnVector listColumnVector = (ListColumnVector) colVector;
                final ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
                final ListObjectInspector listObjectInspector = (ListObjectInspector) objectInspector;
                final int offset = (int) listColumnVector.offsets[adjustedIndex];
                final int size = (int) listColumnVector.lengths[adjustedIndex];
                final List<Object> list = new ArrayList<>();
                for (int i = 0; i < size; i++) {
                    list.add(extractRowColumn(listColumnVector.child, listTypeInfo.getListElementTypeInfo(), listObjectInspector.getListElementObjectInspector(), offset + i));
                }
                return list;
            }
        case MAP:
            {
                final MapColumnVector mapColumnVector = (MapColumnVector) colVector;
                final MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
                final MapObjectInspector mapObjectInspector = (MapObjectInspector) objectInspector;
                final int offset = (int) mapColumnVector.offsets[adjustedIndex];
                final int size = (int) mapColumnVector.lengths[adjustedIndex];
                final Map<Object, Object> map = new HashMap<>();
                for (int i = 0; i < size; i++) {
                    final Object key = extractRowColumn(mapColumnVector.keys, mapTypeInfo.getMapKeyTypeInfo(), mapObjectInspector.getMapKeyObjectInspector(), offset + i);
                    final Object value = extractRowColumn(mapColumnVector.values, mapTypeInfo.getMapValueTypeInfo(), mapObjectInspector.getMapValueObjectInspector(), offset + i);
                    map.put(key, value);
                }
                return map;
            }
        case STRUCT:
            {
                final StructColumnVector structColumnVector = (StructColumnVector) colVector;
                final StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
                final StandardStructObjectInspector structInspector = (StandardStructObjectInspector) objectInspector;
                final List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
                final int size = fieldTypeInfos.size();
                final List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
                final Object struct = structInspector.create();
                for (int i = 0; i < size; i++) {
                    final StructField structField = structFields.get(i);
                    final TypeInfo fieldTypeInfo = fieldTypeInfos.get(i);
                    final Object value = extractRowColumn(structColumnVector.fields[i], fieldTypeInfo, structField.getFieldObjectInspector(), adjustedIndex);
                    structInspector.setStructFieldData(struct, structField, value);
                }
                return struct;
            }
        case UNION:
            {
                final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
                final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
                final UnionObjectInspector unionInspector = (UnionObjectInspector) objectInspector;
                final List<ObjectInspector> unionInspectors = unionInspector.getObjectInspectors();
                final UnionColumnVector unionColumnVector = (UnionColumnVector) colVector;
                final byte tag = (byte) unionColumnVector.tags[adjustedIndex];
                final Object object = extractRowColumn(unionColumnVector.fields[tag], objectTypeInfos.get(tag), unionInspectors.get(tag), adjustedIndex);
                final StandardUnion standardUnion = new StandardUnion();
                standardUnion.setTag(tag);
                standardUnion.setObject(object);
                return standardUnion;
            }
        default:
            throw new RuntimeException("Category " + category.name() + " not supported");
    }
}
Also used : PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) Category(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) List(java.util.List) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) StandardUnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector) Text(org.apache.hadoop.io.Text) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) StandardUnion(org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) Map(java.util.Map)
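
As a usage sketch, extracting a whole row is just this method applied column by column. The helper below is illustrative, assuming typeInfos and objectInspectors are parallel per-column metadata arrays; it is not the exact API of VectorExtractRow.

// Hypothetical helper, a minimal sketch: materialize row batchIndex as an Object[]
// of Writables and standard Java containers, matching the per-category returns above.
public Object[] extractRow(VectorizedRowBatch batch, int batchIndex,
        TypeInfo[] typeInfos, ObjectInspector[] objectInspectors) {
    final Object[] row = new Object[batch.numCols];
    for (int c = 0; c < batch.numCols; c++) {
        row[c] = extractRowColumn(batch.cols[c], typeInfos[c], objectInspectors[c], batchIndex);
    }
    return row;
}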

Example 33 with Category

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.

the class VectorSerializeRow method serializeWrite.

private void serializeWrite(ColumnVector colVector, Field field, int batchIndex) throws IOException {
    final int adjustedBatchIndex = colVector.isRepeating ? 0 : batchIndex;
    if (!colVector.noNulls && colVector.isNull[adjustedBatchIndex]) {
        serializeWrite.writeNull();
        hasAnyNulls = true;
        return;
    }
    isAllNulls = false;
    if (field.isPrimitive) {
        serializePrimitiveWrite(colVector, field, adjustedBatchIndex);
        return;
    }
    final Category category = field.category;
    switch(category) {
        case LIST:
            serializeListWrite((ListColumnVector) colVector, field, adjustedBatchIndex);
            break;
        case MAP:
            serializeMapWrite((MapColumnVector) colVector, field, adjustedBatchIndex);
            break;
        case STRUCT:
            serializeStructWrite((StructColumnVector) colVector, field, adjustedBatchIndex);
            break;
        case UNION:
            serializeUnionWrite((UnionColumnVector) colVector, field, adjustedBatchIndex);
            break;
        default:
            throw new RuntimeException("Unexpected category " + category);
    }
}
Also used : PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) Category(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category)
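
To see the Category dispatch in context: a row serializer calls serializeWrite once per root field, and the complex categories recurse from there. The driver below is a hedged sketch; the fields array and its columnIndex member are assumed names for per-column Field descriptors, not necessarily what VectorSerializeRow defines.

// Hypothetical driver, a minimal sketch: serialize one row, relying on serializeWrite
// to maintain the hasAnyNulls / isAllNulls bookkeeping seen above.
private void serializeRow(VectorizedRowBatch batch, int batchIndex) throws IOException {
    hasAnyNulls = false;
    isAllNulls = true;
    for (Field field : fields) {
        serializeWrite(batch.cols[field.columnIndex], field, batchIndex);  // columnIndex is assumed
    }
}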

Example 34 with Category

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.

the class VectorAssignRow method assignRowColumn.

private void assignRowColumn(ColumnVector columnVector, int batchIndex, TypeInfo targetTypeInfo, Object object) {
    if (object == null) {
        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
        return;
    }
    switch(targetTypeInfo.getCategory()) {
        case PRIMITIVE:
            {
                final PrimitiveCategory targetPrimitiveCategory = ((PrimitiveTypeInfo) targetTypeInfo).getPrimitiveCategory();
                switch(targetPrimitiveCategory) {
                    case VOID:
                        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                        return;
                    case BOOLEAN:
                        if (object instanceof Boolean) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = (((Boolean) object) ? 1 : 0);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = (((BooleanWritable) object).get() ? 1 : 0);
                        }
                        break;
                    case BYTE:
                        if (object instanceof Byte) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Byte) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((ByteWritable) object).get();
                        }
                        break;
                    case SHORT:
                        if (object instanceof Short) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Short) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((ShortWritable) object).get();
                        }
                        break;
                    case INT:
                        if (object instanceof Integer) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Integer) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((IntWritable) object).get();
                        }
                        break;
                    case LONG:
                        if (object instanceof Long) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((Long) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((LongWritable) object).get();
                        }
                        break;
                    case TIMESTAMP:
                        if (object instanceof Timestamp) {
                            ((TimestampColumnVector) columnVector).set(batchIndex, ((Timestamp) object));
                        } else {
                            ((TimestampColumnVector) columnVector).set(batchIndex, ((TimestampWritable) object).getTimestamp());
                        }
                        break;
                    case DATE:
                        if (object instanceof Date) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = DateWritable.dateToDays((Date) object);
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((DateWritable) object).getDays();
                        }
                        break;
                    case FLOAT:
                        if (object instanceof Float) {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((Float) object);
                        } else {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((FloatWritable) object).get();
                        }
                        break;
                    case DOUBLE:
                        if (object instanceof Double) {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((Double) object);
                        } else {
                            ((DoubleColumnVector) columnVector).vector[batchIndex] = ((DoubleWritable) object).get();
                        }
                        break;
                    case BINARY:
                        {
                            if (object instanceof byte[]) {
                                byte[] bytes = (byte[]) object;
                                ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                            } else {
                                BytesWritable bw = (BytesWritable) object;
                                ((BytesColumnVector) columnVector).setVal(batchIndex, bw.getBytes(), 0, bw.getLength());
                            }
                        }
                        break;
                    case STRING:
                        {
                            if (object instanceof String) {
                                String string = (String) object;
                                byte[] bytes = string.getBytes();
                                ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                            } else {
                                Text tw = (Text) object;
                                ((BytesColumnVector) columnVector).setVal(batchIndex, tw.getBytes(), 0, tw.getLength());
                            }
                        }
                        break;
                    case VARCHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            // We store VARCHAR type stripped of pads.
                            HiveVarchar hiveVarchar;
                            if (object instanceof HiveVarchar) {
                                hiveVarchar = (HiveVarchar) object;
                            } else {
                                hiveVarchar = ((HiveVarcharWritable) object).getHiveVarchar();
                            }
                            // TODO: HIVE-13624 Do we need maxLength checking?
                            byte[] bytes = hiveVarchar.getValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case CHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            // We store CHAR type stripped of pads.
                            HiveChar hiveChar;
                            if (object instanceof HiveChar) {
                                hiveChar = (HiveChar) object;
                            } else {
                                hiveChar = ((HiveCharWritable) object).getHiveChar();
                            }
                            // TODO: HIVE-13624 Do we need maxLength checking?
                            // We store CHAR in vector row batch with padding stripped.
                            byte[] bytes = hiveChar.getStrippedValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case DECIMAL:
                        if (object instanceof HiveDecimal) {
                            ((DecimalColumnVector) columnVector).set(batchIndex, (HiveDecimal) object);
                        } else {
                            ((DecimalColumnVector) columnVector).set(batchIndex, (HiveDecimalWritable) object);
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        if (object instanceof HiveIntervalYearMonth) {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((HiveIntervalYearMonth) object).getTotalMonths();
                        } else {
                            ((LongColumnVector) columnVector).vector[batchIndex] = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth().getTotalMonths();
                        }
                        break;
                    case INTERVAL_DAY_TIME:
                        if (object instanceof HiveIntervalDayTime) {
                            ((IntervalDayTimeColumnVector) columnVector).set(batchIndex, (HiveIntervalDayTime) object);
                        } else {
                            ((IntervalDayTimeColumnVector) columnVector).set(batchIndex, ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime());
                        }
                        break;
                    default:
                        throw new RuntimeException("Primitive category " + targetPrimitiveCategory.name() + " not supported");
                }
            }
            break;
        case LIST:
            {
                final ListColumnVector listColumnVector = (ListColumnVector) columnVector;
                final ListTypeInfo listTypeInfo = (ListTypeInfo) targetTypeInfo;
                final TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
                final List list = (List) object;
                final int size = list.size();
                final int childCount = listColumnVector.childCount;
                listColumnVector.offsets[batchIndex] = childCount;
                listColumnVector.lengths[batchIndex] = size;
                listColumnVector.childCount = childCount + size;
                listColumnVector.child.ensureSize(childCount + size, true);
                for (int i = 0; i < size; i++) {
                    assignRowColumn(listColumnVector.child, childCount + i, elementTypeInfo, list.get(i));
                }
            }
            break;
        case MAP:
            {
                final MapColumnVector mapColumnVector = (MapColumnVector) columnVector;
                final MapTypeInfo mapTypeInfo = (MapTypeInfo) targetTypeInfo;
                final Map<Object, Object> map = (Map<Object, Object>) object;
                final int size = map.size();
                int childCount = mapColumnVector.childCount;
                mapColumnVector.offsets[batchIndex] = childCount;
                mapColumnVector.lengths[batchIndex] = size;
                mapColumnVector.keys.ensureSize(childCount + size, true);
                mapColumnVector.values.ensureSize(childCount + size, true);
                for (Map.Entry<Object, Object> entry : map.entrySet()) {
                    assignRowColumn(mapColumnVector.keys, childCount, mapTypeInfo.getMapKeyTypeInfo(), entry.getKey());
                    assignRowColumn(mapColumnVector.values, childCount, mapTypeInfo.getMapValueTypeInfo(), entry.getValue());
                    childCount++;
                }
                mapColumnVector.childCount = childCount;
            }
            break;
        case STRUCT:
            {
                final StructColumnVector structColumnVector = (StructColumnVector) columnVector;
                final StructTypeInfo targetStructTypeInfo = (StructTypeInfo) targetTypeInfo;
                final List<TypeInfo> targetFieldTypeInfos = targetStructTypeInfo.getAllStructFieldTypeInfos();
                final int size = targetFieldTypeInfos.size();
                if (object instanceof List) {
                    final List struct = (List) object;
                    for (int i = 0; i < size; i++) {
                        assignRowColumn(structColumnVector.fields[i], batchIndex, targetFieldTypeInfos.get(i), struct.get(i));
                    }
                } else {
                    final Object[] array = (Object[]) object;
                    for (int i = 0; i < size; i++) {
                        assignRowColumn(structColumnVector.fields[i], batchIndex, targetFieldTypeInfos.get(i), array[i]);
                    }
                }
            }
            break;
        case UNION:
            {
                final StandardUnion union = (StandardUnion) object;
                final UnionColumnVector unionColumnVector = (UnionColumnVector) columnVector;
                final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) targetTypeInfo;
                final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
                final byte tag = union.getTag();
                unionColumnVector.tags[batchIndex] = tag;
                assignRowColumn(unionColumnVector.fields[tag], batchIndex, objectTypeInfos.get(tag), union.getObject());
            }
            break;
        default:
            throw new RuntimeException("Category " + targetTypeInfo.getCategory().name() + " not supported");
    }
    /*
     * We always set the null flag to false when there is a value.
     */
    columnVector.isNull[batchIndex] = false;
}
Also used : HiveChar(org.apache.hadoop.hive.common.type.HiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(java.sql.Timestamp) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) List(java.util.List) LongWritable(org.apache.hadoop.io.LongWritable) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) Text(org.apache.hadoop.io.Text) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) Date(java.sql.Date) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable) StandardUnion(org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion) Map(java.util.Map)
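
A short usage sketch: because each case accepts either the Java-native value or its Writable, callers can pass whichever form they hold. The batch, column indices, and type infos below are made up for illustration.

// Hypothetical usage, a minimal sketch: column 0 is BOOLEAN, column 1 is a struct.
// booleanTypeInfo and structTypeInfo are assumed to describe those target columns.
assignRowColumn(batch.cols[0], batchIndex, booleanTypeInfo, Boolean.TRUE);              // native value
assignRowColumn(batch.cols[0], batchIndex, booleanTypeInfo, new BooleanWritable(true)); // Writable form
assignRowColumn(batch.cols[1], batchIndex, structTypeInfo,
        java.util.Arrays.asList(42, "forty-two")); // STRUCT also accepts an Object[] of field values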

Example 35 with Category

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.

the class VectorAssignRow method initConvertSourceEntry.

/*
 * Initialize one column's source conversion related arrays.
 * Assumes initTargetEntry has already been called.
 */
private void initConvertSourceEntry(int logicalColumnIndex, TypeInfo convertSourceTypeInfo) {
    isConvert[logicalColumnIndex] = true;
    final Category convertSourceCategory = convertSourceTypeInfo.getCategory();
    convertSourceOI[logicalColumnIndex] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(convertSourceTypeInfo);
    if (convertSourceCategory == Category.PRIMITIVE) {
        // These need to be based on the target.
        final PrimitiveCategory targetPrimitiveCategory = ((PrimitiveTypeInfo) targetTypeInfos[logicalColumnIndex]).getPrimitiveCategory();
        switch(targetPrimitiveCategory) {
            case DATE:
                convertTargetWritables[logicalColumnIndex] = new DateWritable();
                break;
            case STRING:
                convertTargetWritables[logicalColumnIndex] = new Text();
                break;
            default:
                // No additional data type specific setting.
                break;
        }
    }
}
Also used : PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) Category(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Text(org.apache.hadoop.io.Text) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
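
For context, a plausible initialization pass flags a column for conversion only when its declared source type differs from the target type, then calls this method. The loop is a sketch under that assumption; sourceTypeInfos is a hypothetical array parallel to targetTypeInfos, and initTargetEntry's signature is inferred from the comment above.

// Hypothetical setup loop, a minimal sketch: columns whose source type differs from
// the target schema are routed through the convert path at assign time.
for (int i = 0; i < targetTypeInfos.length; i++) {
    initTargetEntry(i, targetTypeInfos[i]);  // assumed per-column target initialization
    if (!sourceTypeInfos[i].equals(targetTypeInfos[i])) {
        initConvertSourceEntry(i, sourceTypeInfos[i]);
    }
}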

Aggregations

PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 44 usages
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category): 33 usages
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 31 usages
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 27 usages
StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo): 26 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 26 usages
ArrayList (java.util.ArrayList): 22 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 20 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 19 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 18 usages
ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo): 18 usages
MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo): 18 usages
Text (org.apache.hadoop.io.Text): 18 usages
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 17 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 17 usages
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 16 usages
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 16 usages
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 16 usages
UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo): 16 usages
IntWritable (org.apache.hadoop.io.IntWritable): 16 usages