Example 16 with MapObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector in project hive by apache.

Class HBaseRowSerializer, method serialize.

private boolean serialize(Object obj, ObjectInspector objInspector, int level, ByteStream.Output ss) throws IOException {
    switch(objInspector.getCategory()) {
        case PRIMITIVE:
            LazyUtils.writePrimitiveUTF8(ss, obj, (PrimitiveObjectInspector) objInspector, escaped, escapeChar, needsEscape);
            return true;
        case LIST:
            char separator = (char) separators[level];
            ListObjectInspector loi = (ListObjectInspector) objInspector;
            List<?> list = loi.getList(obj);
            ObjectInspector eoi = loi.getListElementObjectInspector();
            if (list == null) {
                return false;
            } else {
                for (int i = 0; i < list.size(); i++) {
                    if (i > 0) {
                        ss.write(separator);
                    }
                    Object currentItem = list.get(i);
                    if (currentItem != null) {
                        serialize(currentItem, eoi, level + 1, ss);
                    }
                }
            }
            return true;
        case MAP:
            char sep = (char) separators[level];
            char keyValueSeparator = (char) separators[level + 1];
            MapObjectInspector moi = (MapObjectInspector) objInspector;
            ObjectInspector koi = moi.getMapKeyObjectInspector();
            ObjectInspector voi = moi.getMapValueObjectInspector();
            Map<?, ?> map = moi.getMap(obj);
            if (map == null) {
                return false;
            } else {
                boolean first = true;
                for (Map.Entry<?, ?> entry : map.entrySet()) {
                    if (first) {
                        first = false;
                    } else {
                        ss.write(sep);
                    }
                    serialize(entry.getKey(), koi, level + 2, ss);
                    Object currentValue = entry.getValue();
                    if (currentValue != null) {
                        ss.write(keyValueSeparator);
                        serialize(currentValue, voi, level + 2, ss);
                    }
                }
            }
            return true;
        case STRUCT:
            sep = (char) separators[level];
            StructObjectInspector soi = (StructObjectInspector) objInspector;
            List<? extends StructField> fields = soi.getAllStructFieldRefs();
            list = soi.getStructFieldsDataAsList(obj);
            if (list == null) {
                return false;
            } else {
                for (int i = 0; i < list.size(); i++) {
                    if (i > 0) {
                        ss.write(sep);
                    }
                    Object currentItem = list.get(i);
                    if (currentItem != null) {
                        serialize(currentItem, fields.get(i).getFieldObjectInspector(), level + 1, ss);
                    }
                }
            }
            return true;
        case UNION:
            // union type currently not totally supported. See HIVE-2390
            return false;
        default:
            throw new RuntimeException("Unknown category type: " + objInspector.getCategory());
    }
}
Also used: ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector), MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), LongObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector), Map (java.util.Map)
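
A minimal, self-contained sketch of the MAP branch above (the class and variable names here are invented, not from the Hive source): it walks a java.util.Map through a standard MapObjectInspector and joins entries with the same separator / key-value-separator pattern.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class MapSeparatorDemo {
    public static void main(String[] args) {
        Map<String, Integer> data = new LinkedHashMap<>();
        data.put("a", 1);
        data.put("b", 2);

        // Standard inspector over a Java map with string keys and int values.
        MapObjectInspector moi = ObjectInspectorFactory.getStandardMapObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaIntObjectInspector);

        char separator = ',';          // plays the role of separators[level]
        char keyValueSeparator = ':';  // plays the role of separators[level + 1]

        StringBuilder out = new StringBuilder();
        boolean first = true;
        for (Map.Entry<?, ?> entry : moi.getMap(data).entrySet()) {
            if (first) {
                first = false;
            } else {
                out.append(separator);
            }
            out.append(entry.getKey()).append(keyValueSeparator).append(entry.getValue());
        }
        System.out.println(out);  // prints: a:1,b:2
    }
}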

Example 17 with MapObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector in project hive by apache.

Class HBaseRowSerializer, method serializeField.

private void serializeField(Object value, StructField field, ColumnMapping colMap, Put put) throws IOException {
    if (value == null) {
        // a null object, we do not serialize it
        return;
    }
    // Get the field objectInspector and the field object.
    ObjectInspector foi = field.getFieldObjectInspector();
    // If the field corresponds to a column family in HBase
    if (colMap.qualifierName == null) {
        MapObjectInspector moi = (MapObjectInspector) foi;
        Map<?, ?> map = moi.getMap(value);
        if (map == null) {
            return;
        }
        ObjectInspector koi = moi.getMapKeyObjectInspector();
        ObjectInspector voi = moi.getMapValueObjectInspector();
        for (Map.Entry<?, ?> entry : map.entrySet()) {
            // Get the Key
            // Map keys are required to be primitive and may be serialized in binary format
            byte[] columnQualifierBytes = serialize(entry.getKey(), koi, 3, colMap.binaryStorage.get(0));
            if (columnQualifierBytes == null) {
                continue;
            }
            // Map values may be serialized in binary format when they are primitive and binary
            // serialization is the option selected
            byte[] bytes = serialize(entry.getValue(), voi, 3, colMap.binaryStorage.get(1));
            if (bytes == null) {
                continue;
            }
            put.addColumn(colMap.familyNameBytes, columnQualifierBytes, bytes);
        }
    } else {
        byte[] bytes;
        // If the field is not a primitive but the column mapping declares a primitive type,
        // fall back to a JSON string; otherwise serialize it the delimited (or binary) way.
        if (!foi.getCategory().equals(ObjectInspector.Category.PRIMITIVE) && colMap.isCategory(ObjectInspector.Category.PRIMITIVE)) {
            // we always serialize the String type using the escaped algorithm for LazyString
            bytes = serialize(SerDeUtils.getJSONString(value, foi), PrimitiveObjectInspectorFactory.javaStringObjectInspector, 1, false);
        } else {
            // use the serialization option switch to write primitive values as either a variable
            // length UTF8 string or a fixed width bytes if serializing in binary format
            bytes = serialize(value, foi, 1, colMap.binaryStorage.get(0));
        }
        if (bytes == null) {
            return;
        }
        put.addColumn(colMap.familyNameBytes, colMap.qualifierNameBytes, bytes);
    }
}
Also used: ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector), MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), LongObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector), Map (java.util.Map)
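
A hedged, standalone sketch of the column-family branch above (the class name, row key, and family name are made up for illustration): every entry of a map-typed field becomes one qualifier/value cell in the HBase Put.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ColumnFamilyMapDemo {
    public static void main(String[] args) {
        Map<String, String> field = new LinkedHashMap<>();
        field.put("city", "Berlin");
        field.put("zip", "10115");

        MapObjectInspector moi = ObjectInspectorFactory.getStandardMapObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector);

        byte[] family = Bytes.toBytes("info");       // hypothetical column family
        Put put = new Put(Bytes.toBytes("row-1"));   // hypothetical row key

        // Each map entry contributes one cell: key -> qualifier, value -> cell value.
        for (Map.Entry<?, ?> entry : moi.getMap(field).entrySet()) {
            put.addColumn(family,
                Bytes.toBytes(entry.getKey().toString()),
                Bytes.toBytes(entry.getValue().toString()));
        }
        System.out.println(put);
    }
}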

Example 18 with MapObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector in project hive by apache.

Class VectorAssignRow, method assignConvertRowColumn.

private void assignConvertRowColumn(ColumnVector columnVector, int batchIndex, TypeInfo targetTypeInfo, ObjectInspector sourceObjectInspector, Writable convertTargetWritable, Object object) {
    final Category targetCategory = targetTypeInfo.getCategory();
    if (targetCategory == null) {
        /*
         * This is a column that we don't want (i.e. not included) -- we are done.
         */
        return;
    }
    if (object == null) {
        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
        return;
    }
    try {
        switch(targetCategory) {
            case PRIMITIVE:
                final PrimitiveObjectInspector sourcePrimitiveOI = (PrimitiveObjectInspector) sourceObjectInspector;
                final PrimitiveCategory targetPrimitiveCategory = ((PrimitiveTypeInfo) targetTypeInfo).getPrimitiveCategory();
                switch(targetPrimitiveCategory) {
                    case VOID:
                        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                        return;
                    case BOOLEAN:
                        ((LongColumnVector) columnVector).vector[batchIndex] = (PrimitiveObjectInspectorUtils.getBoolean(object, sourcePrimitiveOI) ? 1 : 0);
                        break;
                    case BYTE:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getByte(object, sourcePrimitiveOI);
                        break;
                    case SHORT:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getShort(object, sourcePrimitiveOI);
                        break;
                    case INT:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getInt(object, sourcePrimitiveOI);
                        break;
                    case LONG:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getLong(object, sourcePrimitiveOI);
                        break;
                    case TIMESTAMP:
                        {
                            final Timestamp timestamp = PrimitiveObjectInspectorUtils.getTimestamp(object, sourcePrimitiveOI);
                            if (timestamp == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((TimestampColumnVector) columnVector).set(batchIndex, timestamp.toSqlTimestamp());
                        }
                        break;
                    case DATE:
                        {
                            final Date date = PrimitiveObjectInspectorUtils.getDate(object, sourcePrimitiveOI);
                            if (date == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            DateWritableV2 dateWritable = (DateWritableV2) convertTargetWritable;
                            if (dateWritable == null) {
                                dateWritable = new DateWritableV2();
                            }
                            dateWritable.set(date);
                            ((LongColumnVector) columnVector).vector[batchIndex] = dateWritable.getDays();
                        }
                        break;
                    case FLOAT:
                        ((DoubleColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getFloat(object, sourcePrimitiveOI);
                        break;
                    case DOUBLE:
                        ((DoubleColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getDouble(object, sourcePrimitiveOI);
                        break;
                    case BINARY:
                        {
                            final BytesWritable bytesWritable = PrimitiveObjectInspectorUtils.getBinary(object, sourcePrimitiveOI);
                            if (bytesWritable == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytesWritable.getBytes(), 0, bytesWritable.getLength());
                        }
                        break;
                    case STRING:
                        {
                            final String string = PrimitiveObjectInspectorUtils.getString(object, sourcePrimitiveOI);
                            if (string == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            Text text = (Text) convertTargetWritable;
                            if (text == null) {
                                text = new Text();
                            }
                            text.set(string);
                            ((BytesColumnVector) columnVector).setVal(batchIndex, text.getBytes(), 0, text.getLength());
                        }
                        break;
                    case VARCHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            final HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(object, sourcePrimitiveOI);
                            if (hiveVarchar == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            // TODO: Do we need maxLength checking?
                            byte[] bytes = hiveVarchar.getValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case CHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            final HiveChar hiveChar = PrimitiveObjectInspectorUtils.getHiveChar(object, sourcePrimitiveOI);
                            if (hiveChar == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            // We store CHAR in vector row batch with padding stripped.
                            // TODO: Do we need maxLength checking?
                            final byte[] bytes = hiveChar.getStrippedValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case DECIMAL:
                        {
                            final HiveDecimal hiveDecimal = PrimitiveObjectInspectorUtils.getHiveDecimal(object, sourcePrimitiveOI);
                            if (hiveDecimal == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            if (columnVector instanceof Decimal64ColumnVector) {
                                Decimal64ColumnVector dec64ColVector = (Decimal64ColumnVector) columnVector;
                                dec64ColVector.set(batchIndex, hiveDecimal);
                                if (dec64ColVector.isNull[batchIndex]) {
                                    return;
                                }
                            } else {
                                ((DecimalColumnVector) columnVector).set(batchIndex, hiveDecimal);
                            }
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        {
                            final HiveIntervalYearMonth intervalYearMonth = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(object, sourcePrimitiveOI);
                            if (intervalYearMonth == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((LongColumnVector) columnVector).vector[batchIndex] = intervalYearMonth.getTotalMonths();
                        }
                        break;
                    case INTERVAL_DAY_TIME:
                        {
                            final HiveIntervalDayTime intervalDayTime = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(object, sourcePrimitiveOI);
                            if (intervalDayTime == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((IntervalDayTimeColumnVector) columnVector).set(batchIndex, intervalDayTime);
                        }
                        break;
                    default:
                        throw new RuntimeException("Primitive category " + targetPrimitiveCategory.name() + " not supported");
                }
                break;
            case LIST:
                {
                    final ListColumnVector listColumnVector = (ListColumnVector) columnVector;
                    final ListObjectInspector sourceListOI = (ListObjectInspector) sourceObjectInspector;
                    final ObjectInspector sourceElementOI = sourceListOI.getListElementObjectInspector();
                    final int size = sourceListOI.getListLength(object);
                    final TypeInfo targetElementTypeInfo = ((ListTypeInfo) targetTypeInfo).getListElementTypeInfo();
                    listColumnVector.offsets[batchIndex] = listColumnVector.childCount;
                    listColumnVector.childCount += size;
                    listColumnVector.ensureSize(listColumnVector.childCount, true);
                    listColumnVector.lengths[batchIndex] = size;
                    for (int i = 0; i < size; i++) {
                        final Object element = sourceListOI.getListElement(object, i);
                        final int offset = (int) (listColumnVector.offsets[batchIndex] + i);
                        assignConvertRowColumn(listColumnVector.child, offset, targetElementTypeInfo, sourceElementOI, null, element);
                    }
                }
                break;
            case MAP:
                {
                    final MapColumnVector mapColumnVector = (MapColumnVector) columnVector;
                    final MapObjectInspector mapObjectInspector = (MapObjectInspector) sourceObjectInspector;
                    final MapTypeInfo mapTypeInfo = (MapTypeInfo) targetTypeInfo;
                    final Map<?, ?> map = mapObjectInspector.getMap(object);
                    for (Map.Entry<?, ?> entry : map.entrySet()) {
                        assignConvertRowColumn(mapColumnVector.keys, batchIndex, mapTypeInfo.getMapKeyTypeInfo(), mapObjectInspector.getMapKeyObjectInspector(), null, entry.getKey());
                        assignConvertRowColumn(mapColumnVector.values, batchIndex, mapTypeInfo.getMapValueTypeInfo(), mapObjectInspector.getMapValueObjectInspector(), null, entry.getValue());
                    }
                }
                break;
            case STRUCT:
                {
                    final StructColumnVector structColumnVector = (StructColumnVector) columnVector;
                    final StructObjectInspector sourceStructOI = (StructObjectInspector) sourceObjectInspector;
                    final List<? extends StructField> sourceFields = sourceStructOI.getAllStructFieldRefs();
                    final StructTypeInfo targetStructTypeInfo = (StructTypeInfo) targetTypeInfo;
                    final List<TypeInfo> targetTypeInfos = targetStructTypeInfo.getAllStructFieldTypeInfos();
                    final int size = targetTypeInfos.size();
                    for (int i = 0; i < size; i++) {
                        if (i < sourceFields.size()) {
                            final StructField sourceStructField = sourceFields.get(i);
                            final ObjectInspector sourceFieldOI = sourceStructField.getFieldObjectInspector();
                            final Object sourceData = sourceStructOI.getStructFieldData(object, sourceStructField);
                            assignConvertRowColumn(structColumnVector.fields[i], batchIndex, targetTypeInfos.get(i), sourceFieldOI, null, sourceData);
                        } else {
                            final ColumnVector fieldColumnVector = structColumnVector.fields[i];
                            VectorizedBatchUtil.setNullColIsNullValue(fieldColumnVector, batchIndex);
                        }
                    }
                }
                break;
            case UNION:
                {
                    final UnionColumnVector unionColumnVector = (UnionColumnVector) columnVector;
                    final UnionObjectInspector unionObjectInspector = (UnionObjectInspector) sourceObjectInspector;
                    final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) targetTypeInfo;
                    final int tag = unionObjectInspector.getTag(object);
                    assignConvertRowColumn(unionColumnVector.fields[tag], batchIndex, unionTypeInfo.getAllUnionObjectTypeInfos().get(tag), unionObjectInspector.getObjectInspectors().get(tag), null, unionObjectInspector.getField(tag));
                }
                break;
            default:
                throw new RuntimeException("Category " + targetCategory.name() + " not supported");
        }
    } catch (NumberFormatException e) {
        // Some of the conversion methods throw this exception on numeric parsing errors.
        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
        return;
    }
    // We always set the null flag to false when there is a value.
    columnVector.isNull[batchIndex] = false;
}
Also used: PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo), Timestamp (org.apache.hadoop.hive.common.type.Timestamp), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), StructField (org.apache.hadoop.hive.serde2.objectinspector.StructField), MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector), ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), List (java.util.List), ArrayList (java.util.ArrayList), HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime), UnionObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2), BytesWritable (org.apache.hadoop.io.BytesWritable), Text (org.apache.hadoop.io.Text), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo), VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo), ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo), CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo), Date (org.apache.hadoop.hive.common.type.Date), HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth), Map (java.util.Map)
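
The catch block above notes that some conversion helpers throw NumberFormatException on bad numeric input. The following standalone sketch (class name invented) shows one such conversion through PrimitiveObjectInspectorUtils with a string source inspector, the situation that assignConvertRowColumn maps to a null column entry.

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;

public class PrimitiveConversionDemo {
    public static void main(String[] args) {
        PrimitiveObjectInspector stringOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;

        // A well-formed value converts to the target primitive.
        int ok = PrimitiveObjectInspectorUtils.getInt("42", stringOI);
        System.out.println("parsed: " + ok);

        // A malformed value surfaces as NumberFormatException, which assignConvertRowColumn above
        // turns into a null column entry via setNullColIsNullValue.
        try {
            PrimitiveObjectInspectorUtils.getInt("not-a-number", stringOI);
        } catch (NumberFormatException e) {
            System.out.println("would be assigned as null: " + e.getMessage());
        }
    }
}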

Example 19 with MapObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector in project hive by apache.

Class LlapRowRecordReader, method convertValue.

static Object convertValue(Object val, ObjectInspector oi) {
    if (val == null) {
        return null;
    }
    Object convertedVal = null;
    ObjectInspector.Category oiCategory = oi.getCategory();
    switch(oiCategory) {
        case PRIMITIVE:
            convertedVal = convertPrimitive(val, (PrimitiveObjectInspector) oi);
            break;
        case LIST:
            ListObjectInspector loi = (ListObjectInspector) oi;
            int listSize = loi.getListLength(val);
            // Per ListObjectInspector.getListLength(), -1 length means null list.
            if (listSize < 0) {
                return null;
            }
            List<Object> convertedList = new ArrayList<Object>(listSize);
            ObjectInspector listElementOI = loi.getListElementObjectInspector();
            for (int idx = 0; idx < listSize; ++idx) {
                convertedList.add(convertValue(loi.getListElement(val, idx), listElementOI));
            }
            convertedVal = convertedList;
            break;
        case MAP:
            MapObjectInspector moi = (MapObjectInspector) oi;
            int mapSize = moi.getMapSize(val);
            // Per MapObjectInspector.getMapSize(), -1 length means null map.
            if (mapSize < 0) {
                return null;
            }
            Map<Object, Object> convertedMap = new LinkedHashMap<Object, Object>(mapSize);
            ObjectInspector mapKeyOI = moi.getMapKeyObjectInspector();
            ObjectInspector mapValOI = moi.getMapValueObjectInspector();
            Map<?, ?> mapCol = moi.getMap(val);
            for (Object mapKey : mapCol.keySet()) {
                Object convertedMapKey = convertValue(mapKey, mapKeyOI);
                Object convertedMapVal = convertValue(mapCol.get(mapKey), mapValOI);
                convertedMap.put(convertedMapKey, convertedMapVal);
            }
            convertedVal = convertedMap;
            break;
        case STRUCT:
            StructObjectInspector soi = (StructObjectInspector) oi;
            List<Object> convertedRow = new ArrayList<Object>();
            for (StructField structField : soi.getAllStructFieldRefs()) {
                Object convertedFieldValue = convertValue(soi.getStructFieldData(val, structField), structField.getFieldObjectInspector());
                convertedRow.add(convertedFieldValue);
            }
            convertedVal = convertedRow;
            break;
        default:
            throw new IllegalArgumentException("Cannot convert type " + oiCategory);
    }
    return convertedVal;
}
Also used: ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), ArrayList (java.util.ArrayList), LinkedHashMap (java.util.LinkedHashMap), StructField (org.apache.hadoop.hive.serde2.objectinspector.StructField), Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category)
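
A minimal standalone sketch of the MAP branch above (class and method names invented): it rebuilds a map through a standard MapObjectInspector as a LinkedHashMap, returning null when the inspector reports a negative size, as convertValue does. Unlike the real method, keys and values are copied as-is rather than converted recursively.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ConvertMapDemo {
    static Map<Object, Object> convertMap(Object val, MapObjectInspector moi) {
        int mapSize = moi.getMapSize(val);
        if (mapSize < 0) {
            return null;  // per MapObjectInspector.getMapSize(), -1 means a null map
        }
        Map<Object, Object> converted = new LinkedHashMap<>(mapSize);
        Map<?, ?> mapCol = moi.getMap(val);
        for (Map.Entry<?, ?> entry : mapCol.entrySet()) {
            // Keys and values are primitives here, so they are copied as-is; the real convertValue
            // recurses with the key and value ObjectInspectors instead.
            converted.put(entry.getKey(), entry.getValue());
        }
        return converted;
    }

    public static void main(String[] args) {
        MapObjectInspector moi = ObjectInspectorFactory.getStandardMapObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaLongObjectInspector);

        Map<String, Long> source = new LinkedHashMap<>();
        source.put("rows", 10L);
        System.out.println(convertMap(source, moi));  // {rows=10}
        System.out.println(convertMap(null, moi));    // null
    }
}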

Example 20 with MapObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector in project hive by apache.

Class InternalUtil, method getObjectInspector.

private static ObjectInspector getObjectInspector(TypeInfo type) throws IOException {
    switch(type.getCategory()) {
        case PRIMITIVE:
            PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) type;
            return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(primitiveType);
        case MAP:
            MapTypeInfo mapType = (MapTypeInfo) type;
            MapObjectInspector mapInspector = ObjectInspectorFactory.getStandardMapObjectInspector(getObjectInspector(mapType.getMapKeyTypeInfo()), getObjectInspector(mapType.getMapValueTypeInfo()));
            return mapInspector;
        case LIST:
            ListTypeInfo listType = (ListTypeInfo) type;
            ListObjectInspector listInspector = ObjectInspectorFactory.getStandardListObjectInspector(getObjectInspector(listType.getListElementTypeInfo()));
            return listInspector;
        case STRUCT:
            StructTypeInfo structType = (StructTypeInfo) type;
            List<TypeInfo> fieldTypes = structType.getAllStructFieldTypeInfos();
            List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
            for (TypeInfo fieldType : fieldTypes) {
                fieldInspectors.add(getObjectInspector(fieldType));
            }
            StructObjectInspector structInspector = ObjectInspectorFactory.getStandardStructObjectInspector(structType.getAllStructFieldNames(), fieldInspectors);
            return structInspector;
        default:
            throw new IOException("Unknown field schema type");
    }
}
Also used: ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), ArrayList (java.util.ArrayList), StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo), IOException (java.io.IOException), MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo), ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
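
A hedged usage sketch (class name invented): it parses a Hive type string with TypeInfoUtils and then builds a standard MapObjectInspector from the resulting MapTypeInfo, using the same factory calls as getObjectInspector above.

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeInfoToInspectorDemo {
    public static void main(String[] args) {
        // Parse a map type; getMapKeyTypeInfo()/getMapValueTypeInfo() give the component types.
        MapTypeInfo mapType = (MapTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("map<string,int>");

        MapObjectInspector mapInspector = ObjectInspectorFactory.getStandardMapObjectInspector(
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                (PrimitiveTypeInfo) mapType.getMapKeyTypeInfo()),
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                (PrimitiveTypeInfo) mapType.getMapValueTypeInfo()));

        System.out.println(mapInspector.getTypeName());  // expected: map<string,int>
    }
}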

Aggregations

MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector): 55
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector): 47
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 46
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 45
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 34
Map (java.util.Map): 28
ArrayList (java.util.ArrayList): 24
LongObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector): 23
BinaryObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector): 21
ByteObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector): 21
DoubleObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector): 21
FloatObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector): 21
IntObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector): 21
ShortObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector): 21
TimestampObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector): 21
BooleanObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector): 20
StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector): 20
DateObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector): 19
HiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector): 19
StructField (org.apache.hadoop.hive.serde2.objectinspector.StructField): 17