Search in sources:

Example 16 with Category

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.

the class VectorRandomRowSource method getWritableObject.

public static Object getWritableObject(int column, Object object, List<ObjectInspector> primitiveObjectInspectorList, PrimitiveCategory[] primitiveCategories, PrimitiveTypeInfo[] primitiveTypeInfos) {
    ObjectInspector objectInspector = primitiveObjectInspectorList.get(column);
    PrimitiveCategory primitiveCategory = primitiveCategories[column];
    PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
    switch(primitiveCategory) {
        case BOOLEAN:
            return ((WritableBooleanObjectInspector) objectInspector).create((boolean) object);
        case BYTE:
            return ((WritableByteObjectInspector) objectInspector).create((byte) object);
        case SHORT:
            return ((WritableShortObjectInspector) objectInspector).create((short) object);
        case INT:
            return ((WritableIntObjectInspector) objectInspector).create((int) object);
        case LONG:
            return ((WritableLongObjectInspector) objectInspector).create((long) object);
        case DATE:
            return ((WritableDateObjectInspector) objectInspector).create((Date) object);
        case FLOAT:
            return ((WritableFloatObjectInspector) objectInspector).create((float) object);
        case DOUBLE:
            return ((WritableDoubleObjectInspector) objectInspector).create((double) object);
        case STRING:
            return ((WritableStringObjectInspector) objectInspector).create((String) object);
        case CHAR:
            {
                WritableHiveCharObjectInspector writableCharObjectInspector = new WritableHiveCharObjectInspector((CharTypeInfo) primitiveTypeInfo);
                return writableCharObjectInspector.create((HiveChar) object);
            }
        case VARCHAR:
            {
                WritableHiveVarcharObjectInspector writableVarcharObjectInspector = new WritableHiveVarcharObjectInspector((VarcharTypeInfo) primitiveTypeInfo);
                return writableVarcharObjectInspector.create((HiveVarchar) object);
            }
        case BINARY:
            return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create((byte[]) object);
        case TIMESTAMP:
            return ((WritableTimestampObjectInspector) objectInspector).create((Timestamp) object);
        case INTERVAL_YEAR_MONTH:
            return ((WritableHiveIntervalYearMonthObjectInspector) objectInspector).create((HiveIntervalYearMonth) object);
        case INTERVAL_DAY_TIME:
            return ((WritableHiveIntervalDayTimeObjectInspector) objectInspector).create((HiveIntervalDayTime) object);
        case DECIMAL:
            {
                WritableHiveDecimalObjectInspector writableDecimalObjectInspector = new WritableHiveDecimalObjectInspector((DecimalTypeInfo) primitiveTypeInfo);
                HiveDecimalWritable result = (HiveDecimalWritable) writableDecimalObjectInspector.create((HiveDecimal) object);
                return result;
            }
        default:
            throw new Error("Unknown primitive category " + primitiveCategory);
    }
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) WritableHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector) WritableLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjectInspector) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) WritableHiveIntervalDayTimeObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalDayTimeObjectInspector) WritableHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) WritableFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableFloatObjectInspector) WritableStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector) WritableShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) WritableHiveIntervalYearMonthObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalYearMonthObjectInspector) WritableHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector) WritableIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector) WritableByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableByteObjectInspector) WritableBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBooleanObjectInspector) WritableTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector) WritableDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDoubleObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) WritableDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)
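As a quick illustration of how the parallel arrays feed getWritableObject, here is a minimal, hypothetical driver for a single INT column. It is a sketch, not Hive code: it assumes hive-exec and Hive's ql test sources (which contain VectorRandomRowSource) are on the classpath, and the demo class name and the literal 42 are invented for illustration.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class GetWritableObjectDemo {
    public static void main(String[] args) {
        // One INT column: all four parallel structures describe column 0.
        PrimitiveTypeInfo intTypeInfo = TypeInfoFactory.intTypeInfo;
        List<ObjectInspector> inspectors = Arrays.asList(
                (ObjectInspector) PrimitiveObjectInspectorFactory
                        .getPrimitiveWritableObjectInspector(intTypeInfo));
        PrimitiveCategory[] categories = { intTypeInfo.getPrimitiveCategory() };
        PrimitiveTypeInfo[] typeInfos = { intTypeInfo };

        // Raw Java Integer in, Hadoop IntWritable out.
        Object writable = VectorRandomRowSource.getWritableObject(
                0, 42, inspectors, categories, typeInfos);
        System.out.println(writable.getClass().getSimpleName() + ": " + writable);
    }
}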

Example 17 with Category

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.

the class VectorRandomRowSource method randomObject.

public static Object randomObject(int column, Random r, PrimitiveCategory[] primitiveCategories, PrimitiveTypeInfo[] primitiveTypeInfos, String[] alphabets, boolean addEscapables, String needsEscapeStr) {
    PrimitiveCategory primitiveCategory = primitiveCategories[column];
    PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
    try {
        switch(primitiveCategory) {
            case BOOLEAN:
                return Boolean.valueOf(r.nextInt(2) == 1);
            case BYTE:
                return Byte.valueOf((byte) r.nextInt());
            case SHORT:
                return Short.valueOf((short) r.nextInt());
            case INT:
                return Integer.valueOf(r.nextInt());
            case LONG:
                return Long.valueOf(r.nextLong());
            case DATE:
                return RandomTypeUtil.getRandDate(r);
            case FLOAT:
                return Float.valueOf(r.nextFloat() * 10 - 5);
            case DOUBLE:
                return Double.valueOf(r.nextDouble() * 10 - 5);
            case STRING:
            case CHAR:
            case VARCHAR:
                {
                    String result;
                    if (alphabets != null && alphabets[column] != null) {
                        result = RandomTypeUtil.getRandString(r, alphabets[column], r.nextInt(10));
                    } else {
                        result = RandomTypeUtil.getRandString(r);
                    }
                    if (addEscapables && result.length() > 0) {
                        int escapeCount = 1 + r.nextInt(2);
                        for (int i = 0; i < escapeCount; i++) {
                            int index = r.nextInt(result.length());
                            String begin = result.substring(0, index);
                            String end = result.substring(index);
                            Character needsEscapeChar = needsEscapeStr.charAt(r.nextInt(needsEscapeStr.length()));
                            result = begin + needsEscapeChar + end;
                        }
                    }
                    switch(primitiveCategory) {
                        case STRING:
                            return result;
                        case CHAR:
                            return new HiveChar(result, ((CharTypeInfo) primitiveTypeInfo).getLength());
                        case VARCHAR:
                            return new HiveVarchar(result, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                        default:
                            throw new Error("Unknown primitive category " + primitiveCategory);
                    }
                }
            case BINARY:
                return getRandBinary(r, 1 + r.nextInt(100));
            case TIMESTAMP:
                return RandomTypeUtil.getRandTimestamp(r);
            case INTERVAL_YEAR_MONTH:
                return getRandIntervalYearMonth(r);
            case INTERVAL_DAY_TIME:
                return getRandIntervalDayTime(r);
            case DECIMAL:
                return getRandHiveDecimal(r, (DecimalTypeInfo) primitiveTypeInfo);
            default:
                throw new Error("Unknown primitive category " + primitiveCategory);
        }
    } catch (Exception e) {
        throw new RuntimeException("randomObject failed on column " + column + " type " + primitiveCategory, e);
    }
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)
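A similarly hypothetical driver shows the escape machinery at work: for a VARCHAR(10) column with addEscapables enabled, each generated value has one or two characters from needsEscapeStr spliced in at random positions before being wrapped in a HiveVarchar. The demo class and the seed are invented; the method signature is the one shown above, and VectorRandomRowSource again comes from Hive's ql test sources.

import java.util.Random;

import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class RandomObjectDemo {
    public static void main(String[] args) {
        PrimitiveTypeInfo varchar10 = TypeInfoFactory.getVarcharTypeInfo(10);
        PrimitiveCategory[] categories = { varchar10.getPrimitiveCategory() };
        PrimitiveTypeInfo[] typeInfos = { varchar10 };
        Random r = new Random(1234L);

        for (int i = 0; i < 3; i++) {
            // null alphabets -> default random strings; addEscapables=true
            // splices 1-2 characters from needsEscapeStr into each value.
            Object value = VectorRandomRowSource.randomObject(
                    0, r, categories, typeInfos, null, true, "\\\t\n");
            System.out.println("HiveVarchar: " + value);
        }
    }
}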

Example 18 with Category

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.

the class LazyBinaryDeserializeRead method readNextField.

/*
   * Reads the next field.
   *
   * Afterwards, reading is positioned to the next field.
   *
   * @return  Return true when the field was not null and data is put in the appropriate
   *          current* member.
   *          Otherwise, false when the field is null.
   *
   */
@Override
public boolean readNextField() throws IOException {
    if (fieldIndex >= fieldCount) {
        return false;
    }
    fieldStart = offset;
    if (fieldIndex == 0) {
        // the NULL byte.
        if (offset >= end) {
            throw new EOFException();
        }
        nullByte = bytes[offset++];
    }
    // NOTE: The bit is set to 1 if a field is NOT NULL.
    if ((nullByte & (1 << (fieldIndex % 8))) == 0) {
        // Logically move past this field.
        fieldIndex++;
        // Every 8 fields we read a new NULL byte.
        if (fieldIndex < fieldCount) {
            if ((fieldIndex % 8) == 0) {
                // Get next null byte.
                if (offset >= end) {
                    throw new EOFException();
                }
                nullByte = bytes[offset++];
            }
        }
        return false;
    } else {
        // Make sure there is at least one byte that can be read for a value.
        if (offset >= end) {
            throw new EOFException();
        }
        /*
       * We have a field and are positioned to it.  Read it.
       */
        switch(primitiveCategories[fieldIndex]) {
            case BOOLEAN:
                // No check needed for single byte read.
                currentBoolean = (bytes[offset++] != 0);
                break;
            case BYTE:
                // No check needed for single byte read.
                currentByte = bytes[offset++];
                break;
            case SHORT:
                // Last item -- ok to be at end.
                if (offset + 2 > end) {
                    throw new EOFException();
                }
                currentShort = LazyBinaryUtils.byteArrayToShort(bytes, offset);
                offset += 2;
                break;
            case INT:
                // Parse the first byte of a vint/vlong to determine the number of bytes.
                if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
                    throw new EOFException();
                }
                LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
                offset += tempVInt.length;
                currentInt = tempVInt.value;
                break;
            case LONG:
                // Parse the first byte of a vint/vlong to determine the number of bytes.
                if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
                    throw new EOFException();
                }
                LazyBinaryUtils.readVLong(bytes, offset, tempVLong);
                offset += tempVLong.length;
                currentLong = tempVLong.value;
                break;
            case FLOAT:
                // Last item -- ok to be at end.
                if (offset + 4 > end) {
                    throw new EOFException();
                }
                currentFloat = Float.intBitsToFloat(LazyBinaryUtils.byteArrayToInt(bytes, offset));
                offset += 4;
                break;
            case DOUBLE:
                // Last item -- ok to be at end.
                if (offset + 8 > end) {
                    throw new EOFException();
                }
                currentDouble = Double.longBitsToDouble(LazyBinaryUtils.byteArrayToLong(bytes, offset));
                offset += 8;
                break;
            case BINARY:
            case STRING:
            case CHAR:
            case VARCHAR:
                {
                    // Parse the first byte of a vint/vlong to determine the number of bytes.
                    if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
                        throw new EOFException();
                    }
                    LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
                    offset += tempVInt.length;
                    int saveStart = offset;
                    int length = tempVInt.value;
                    offset += length;
                    // Last item -- ok to be at end.
                    if (offset > end) {
                        throw new EOFException();
                    }
                    currentBytes = bytes;
                    currentBytesStart = saveStart;
                    currentBytesLength = length;
                }
                break;
            case DATE:
                // Parse the first byte of a vint/vlong to determine the number of bytes.
                if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
                    throw new EOFException();
                }
                LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
                offset += tempVInt.length;
                currentDateWritable.set(tempVInt.value);
                break;
            case TIMESTAMP:
                {
                    int length = TimestampWritable.getTotalLength(bytes, offset);
                    int saveStart = offset;
                    offset += length;
                    // Last item -- ok to be at end.
                    if (offset > end) {
                        throw new EOFException();
                    }
                    currentTimestampWritable.set(bytes, saveStart);
                }
                break;
            case INTERVAL_YEAR_MONTH:
                // Parse the first byte of a vint/vlong to determine the number of bytes.
                if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
                    throw new EOFException();
                }
                LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
                offset += tempVInt.length;
                currentHiveIntervalYearMonthWritable.set(tempVInt.value);
                break;
            case INTERVAL_DAY_TIME:
                // Parse the first byte of a vint/vlong to determine the number of bytes.
                // A vint (the nanos) follows this vlong, so at least one more byte
                // must remain afterwards (hence >= rather than >).
                if (offset + WritableUtils.decodeVIntSize(bytes[offset]) >= end) {
                    throw new EOFException();
                }
                LazyBinaryUtils.readVLong(bytes, offset, tempVLong);
                offset += tempVLong.length;
                // Parse the first byte of a vint/vlong to determine the number of bytes.
                if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
                    throw new EOFException();
                }
                LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
                offset += tempVInt.length;
                currentHiveIntervalDayTimeWritable.set(tempVLong.value, tempVInt.value);
                break;
            case DECIMAL:
                {
                    // Parse the first byte of a vint/vlong to determine the number of bytes.
                    // Another vint (the byte length) follows this one, so at least one
                    // more byte must remain afterwards (hence >= rather than >).
                    if (offset + WritableUtils.decodeVIntSize(bytes[offset]) >= end) {
                        throw new EOFException();
                    }
                    LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
                    offset += tempVInt.length;
                    int readScale = tempVInt.value;
                    // Parse the first byte of a vint/vlong to determine the number of bytes.
                    if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
                        throw new EOFException();
                    }
                    LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
                    offset += tempVInt.length;
                    int saveStart = offset;
                    offset += tempVInt.value;
                    // Last item -- ok to be at end.
                    if (offset > end) {
                        throw new EOFException();
                    }
                    int length = offset - saveStart;
                    // Example encoding (scale = 2, length = 6, value = -6065716379.11):
                    //   \002\006\255\114\197\131\083\105
                    //           \255\114\197\131\083\105
                    currentHiveDecimalWritable.setFromBigIntegerBytesAndScale(bytes, saveStart, length, readScale);
                    boolean decimalIsNull = !currentHiveDecimalWritable.isSet();
                    if (!decimalIsNull) {
                        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfos[fieldIndex];
                        int precision = decimalTypeInfo.getPrecision();
                        int scale = decimalTypeInfo.getScale();
                        decimalIsNull = !currentHiveDecimalWritable.mutateEnforcePrecisionScale(precision, scale);
                    }
                    if (decimalIsNull) {
                        // Logically move past this field.
                        fieldIndex++;
                        // Every 8 fields we read a new NULL byte.
                        if (fieldIndex < fieldCount) {
                            if ((fieldIndex % 8) == 0) {
                                // Get next null byte.
                                if (offset >= end) {
                                    throw new EOFException();
                                }
                                nullByte = bytes[offset++];
                            }
                        }
                        return false;
                    }
                }
                break;
            default:
                throw new Error("Unexpected primitive category " + primitiveCategories[fieldIndex].name());
        }
    }
    // Logically move past this field.
    fieldIndex++;
    // Every 8 fields we read a new NULL byte.
    if (fieldIndex < fieldCount) {
        if ((fieldIndex % 8) == 0) {
            // Get next null byte.
            if (offset >= end) {
                throw new EOFException();
            }
            nullByte = bytes[offset++];
        }
    }
    return true;
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) EOFException(java.io.EOFException)
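The NULL-byte bookkeeping that readNextField repeats in three places is easiest to see in isolation. Below is a self-contained sketch (plain Java, not Hive code) of the layout the method assumes: one bit per field, a set bit means the field is NOT NULL, and a fresh byte starts every group of eight fields.

public class NullByteDemo {
    public static void main(String[] args) {
        int fieldCount = 10;
        // Fields 0, 2, and 9 are not null: bits 0 and 2 of byte 0,
        // and bit 1 of byte 1 (field 9 -> 9 % 8 == 1).
        byte[] nullBytes = { (byte) 0b0000_0101, (byte) 0b0000_0010 };
        for (int fieldIndex = 0; fieldIndex < fieldCount; fieldIndex++) {
            byte nullByte = nullBytes[fieldIndex / 8];
            boolean notNull = (nullByte & (1 << (fieldIndex % 8))) != 0;
            System.out.println("field " + fieldIndex + " -> "
                    + (notNull ? "value" : "NULL"));
        }
    }
}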

Example 19 with Category

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.

the class AccumuloRowSerializer method writeWithLevel.

/**
 * Recursively serialize an Object using its {@link ObjectInspector}, respecting the
 * separators defined by the {@link LazySerDeParameters}.
 * @param oi ObjectInspector for the current object
 * @param value The current object
 * @param output The buffer that output is written to
 * @param mapping The mapping for this Hive column
 * @param level The current level/offset for the SerDe separator
 * @throws IOException
 */
protected void writeWithLevel(ObjectInspector oi, Object value, ByteStream.Output output, ColumnMapping mapping, int level) throws IOException {
    switch(oi.getCategory()) {
        case PRIMITIVE:
            if (mapping.getEncoding() == ColumnEncoding.BINARY) {
                this.writeBinary(output, value, (PrimitiveObjectInspector) oi);
            } else {
                this.writeString(output, value, (PrimitiveObjectInspector) oi);
            }
            return;
        case LIST:
            char separator = (char) serDeParams.getSeparators()[level];
            ListObjectInspector loi = (ListObjectInspector) oi;
            List<?> list = loi.getList(value);
            ObjectInspector eoi = loi.getListElementObjectInspector();
            if (list == null) {
                log.debug("No objects found when serializing list");
                return;
            } else {
                for (int i = 0; i < list.size(); i++) {
                    if (i > 0) {
                        output.write(separator);
                    }
                    writeWithLevel(eoi, list.get(i), output, mapping, level + 1);
                }
            }
            return;
        case MAP:
            char sep = (char) serDeParams.getSeparators()[level];
            char keyValueSeparator = (char) serDeParams.getSeparators()[level + 1];
            MapObjectInspector moi = (MapObjectInspector) oi;
            ObjectInspector koi = moi.getMapKeyObjectInspector();
            ObjectInspector voi = moi.getMapValueObjectInspector();
            Map<?, ?> map = moi.getMap(value);
            if (map == null) {
                log.debug("No object found when serializing map");
                return;
            } else {
                boolean first = true;
                for (Map.Entry<?, ?> entry : map.entrySet()) {
                    if (first) {
                        first = false;
                    } else {
                        output.write(sep);
                    }
                    writeWithLevel(koi, entry.getKey(), output, mapping, level + 2);
                    output.write(keyValueSeparator);
                    writeWithLevel(voi, entry.getValue(), output, mapping, level + 2);
                }
            }
            return;
        case STRUCT:
            sep = (char) serDeParams.getSeparators()[level];
            StructObjectInspector soi = (StructObjectInspector) oi;
            List<? extends StructField> fields = soi.getAllStructFieldRefs();
            list = soi.getStructFieldsDataAsList(value);
            if (list == null) {
                log.debug("No object found when serializing struct");
                return;
            } else {
                for (int i = 0; i < list.size(); i++) {
                    if (i > 0) {
                        output.write(sep);
                    }
                    writeWithLevel(fields.get(i).getFieldObjectInspector(), list.get(i), output, mapping, level + 1);
                }
            }
            return;
        default:
            throw new RuntimeException("Unknown category type: " + oi.getCategory());
    }
}
Also used : ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) Map(java.util.Map) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
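The per-level separator scheme is the crux of writeWithLevel: each nesting level consumes the next separator from LazySerDeParameters, and a map burns two (one between entries, one between key and value). The following standalone sketch mimics that logic with plain Java collections and printable stand-in separators; none of it is Hive API.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class LevelSeparatorDemo {
    // Printable stand-ins for Hive's default control-character separators
    // (^A, ^B, ^C, ...): index 0 is level 0, and so on.
    static final char[] SEPARATORS = { ',', '#', ':' };

    static String write(Object value, int level) {
        StringBuilder sb = new StringBuilder();
        if (value instanceof List) {
            List<?> list = (List<?>) value;
            for (int i = 0; i < list.size(); i++) {
                if (i > 0) sb.append(SEPARATORS[level]);
                sb.append(write(list.get(i), level + 1)); // elements one level down
            }
        } else if (value instanceof Map) {
            boolean first = true;
            for (Map.Entry<?, ?> e : ((Map<?, ?>) value).entrySet()) {
                if (!first) sb.append(SEPARATORS[level]);
                first = false;
                sb.append(write(e.getKey(), level + 2));   // key and value sit two
                sb.append(SEPARATORS[level + 1]);          // levels down, past the
                sb.append(write(e.getValue(), level + 2)); // key/value separator
            }
        } else {
            sb.append(value); // primitive leaf
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        Map<String, Integer> m1 = new LinkedHashMap<>();
        m1.put("a", 1);
        Map<String, Integer> m2 = new LinkedHashMap<>();
        m2.put("b", 2);
        m2.put("c", 3);
        // list<map<string,int>> prints: a:1,b:2#c:3
        System.out.println(write(Arrays.asList(m1, m2), 0));
    }
}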

Example 20 with Category

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.

the class HBaseRowSerializer method serialize.

private boolean serialize(Object obj, ObjectInspector objInspector, int level, ByteStream.Output ss) throws IOException {
    switch(objInspector.getCategory()) {
        case PRIMITIVE:
            LazyUtils.writePrimitiveUTF8(ss, obj, (PrimitiveObjectInspector) objInspector, escaped, escapeChar, needsEscape);
            return true;
        case LIST:
            char separator = (char) separators[level];
            ListObjectInspector loi = (ListObjectInspector) objInspector;
            List<?> list = loi.getList(obj);
            ObjectInspector eoi = loi.getListElementObjectInspector();
            if (list == null) {
                return false;
            } else {
                for (int i = 0; i < list.size(); i++) {
                    if (i > 0) {
                        ss.write(separator);
                    }
                    Object currentItem = list.get(i);
                    if (currentItem != null) {
                        serialize(currentItem, eoi, level + 1, ss);
                    }
                }
            }
            return true;
        case MAP:
            char sep = (char) separators[level];
            char keyValueSeparator = (char) separators[level + 1];
            MapObjectInspector moi = (MapObjectInspector) objInspector;
            ObjectInspector koi = moi.getMapKeyObjectInspector();
            ObjectInspector voi = moi.getMapValueObjectInspector();
            Map<?, ?> map = moi.getMap(obj);
            if (map == null) {
                return false;
            } else {
                boolean first = true;
                for (Map.Entry<?, ?> entry : map.entrySet()) {
                    if (first) {
                        first = false;
                    } else {
                        ss.write(sep);
                    }
                    serialize(entry.getKey(), koi, level + 2, ss);
                    Object currentValue = entry.getValue();
                    if (currentValue != null) {
                        ss.write(keyValueSeparator);
                        serialize(currentValue, voi, level + 2, ss);
                    }
                }
            }
            return true;
        case STRUCT:
            sep = (char) separators[level];
            StructObjectInspector soi = (StructObjectInspector) objInspector;
            List<? extends StructField> fields = soi.getAllStructFieldRefs();
            list = soi.getStructFieldsDataAsList(obj);
            if (list == null) {
                return false;
            } else {
                for (int i = 0; i < list.size(); i++) {
                    if (i > 0) {
                        ss.write(sep);
                    }
                    Object currentItem = list.get(i);
                    if (currentItem != null) {
                        serialize(currentItem, fields.get(i).getFieldObjectInspector(), level + 1, ss);
                    }
                }
            }
            return true;
        case UNION:
            // Union type is not fully supported yet; see HIVE-2390.
            return false;
        default:
            throw new RuntimeException("Unknown category type: " + objInspector.getCategory());
    }
}
Also used : ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) Map(java.util.Map) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
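Where writeWithLevel logs and returns on a null container, serialize above quietly skips null elements: a null list item leaves an empty slot between separators, and a null map value suppresses the key/value separator entirely. A small plain-Java illustration of just those null branches (not Hive code):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class NullHandlingDemo {
    public static void main(String[] args) {
        // List with a null element: separator slots are preserved.
        List<String> list = Arrays.asList("a", null, "c");
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < list.size(); i++) {
            if (i > 0) sb.append(',');
            if (list.get(i) != null) sb.append(list.get(i));
        }
        System.out.println(sb); // a,,c

        // Map with a null value: the key is written, ":value" is not.
        Map<String, String> map = new LinkedHashMap<>();
        map.put("k1", "v1");
        map.put("k2", null);
        sb = new StringBuilder();
        boolean first = true;
        for (Map.Entry<String, String> e : map.entrySet()) {
            if (!first) sb.append(',');
            first = false;
            sb.append(e.getKey());
            if (e.getValue() != null) sb.append(':').append(e.getValue());
        }
        System.out.println(sb); // k1:v1,k2
    }
}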

Aggregations

PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 44
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category): 33
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 31
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 27
StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo): 26
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 26
ArrayList (java.util.ArrayList): 22
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 20
BytesWritable (org.apache.hadoop.io.BytesWritable): 19
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 18
ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo): 18
MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo): 18
Text (org.apache.hadoop.io.Text): 18
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 17
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 17
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 16
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 16
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 16
UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo): 16
IntWritable (org.apache.hadoop.io.IntWritable): 16