
Example 51 with MapObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector in project metacat by Netflix.

The class HiveTypeConverter, method getCanonicalType.

/**
 * Returns the canonical type.
 *
 * @param fieldInspector inspector
 * @return type
 */
Type getCanonicalType(final ObjectInspector fieldInspector) {
    switch(fieldInspector.getCategory()) {
        case PRIMITIVE:
            return getPrimitiveType(fieldInspector);
        case MAP:
            final MapObjectInspector mapObjectInspector = TypeUtils.checkType(fieldInspector, MapObjectInspector.class, "fieldInspector");
            final Type keyType = getCanonicalType(mapObjectInspector.getMapKeyObjectInspector());
            final Type valueType = getCanonicalType(mapObjectInspector.getMapValueObjectInspector());
            if (keyType == null || valueType == null) {
                return null;
            }
            return TypeRegistry.getTypeRegistry().getParameterizedType(TypeEnum.MAP, ImmutableList.of(keyType.getTypeSignature(), valueType.getTypeSignature()), ImmutableList.of());
        case LIST:
            final ListObjectInspector listObjectInspector = TypeUtils.checkType(fieldInspector, ListObjectInspector.class, "fieldInspector");
            final Type elementType = getCanonicalType(listObjectInspector.getListElementObjectInspector());
            if (elementType == null) {
                return null;
            }
            return TypeRegistry.getTypeRegistry().getParameterizedType(TypeEnum.ARRAY, ImmutableList.of(elementType.getTypeSignature()), ImmutableList.of());
        case STRUCT:
            final StructObjectInspector structObjectInspector = TypeUtils.checkType(fieldInspector, StructObjectInspector.class, "fieldInspector");
            final List<TypeSignature> fieldTypes = new ArrayList<>();
            final List<Object> fieldNames = new ArrayList<>();
            for (StructField field : structObjectInspector.getAllStructFieldRefs()) {
                fieldNames.add(field.getFieldName());
                final Type fieldType = getCanonicalType(field.getFieldObjectInspector());
                if (fieldType == null) {
                    return null;
                }
                fieldTypes.add(fieldType.getTypeSignature());
            }
            return TypeRegistry.getTypeRegistry().getParameterizedType(TypeEnum.ROW, fieldTypes, fieldNames);
        default:
            log.info("Currently unsupported type {}, returning Unknown type", fieldInspector.getTypeName());
            return BaseType.UNKNOWN;
    }
}
Also used : DecimalType(com.netflix.metacat.common.type.DecimalType) Type(com.netflix.metacat.common.type.Type) CharType(com.netflix.metacat.common.type.CharType) VarcharType(com.netflix.metacat.common.type.VarcharType) ParametricType(com.netflix.metacat.common.type.ParametricType) BaseType(com.netflix.metacat.common.type.BaseType) RowType(com.netflix.metacat.common.type.RowType) MapType(com.netflix.metacat.common.type.MapType) TypeSignature(com.netflix.metacat.common.type.TypeSignature) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) ArrayList(java.util.ArrayList) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
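
A minimal usage sketch (not part of the example above; the converter instance and names are assumptions): build a standard inspector for a Hive map<string,int> with ObjectInspectorFactory and PrimitiveObjectInspectorFactory and hand it to getCanonicalType. Since getCanonicalType is package-private, the caller is assumed to live in the same package as HiveTypeConverter.

// Sketch only: "converter" is an assumed, already-constructed HiveTypeConverter instance.
Type canonicalMapType(final HiveTypeConverter converter) {
    final MapObjectInspector mapInspector = ObjectInspectorFactory.getStandardMapObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    // Recurses into the key and value inspectors and returns the canonical MAP type for map<string,int>.
    return converter.getCanonicalType(mapInspector);
}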

Example 52 with MapObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector in project presto-hive-apache by prestodb.

The class JsonSerDe, method buildJSONString.

// TODO : code section copied over from SerDeUtils because of the non-standard JSON production there;
// it should use quotes for all field names. We should fix this in SerDeUtils, and then remove this copy.
// See http://jackson.codehaus.org/1.7.3/javadoc/org/codehaus/jackson/JsonParser.Feature.html#ALLOW_UNQUOTED_FIELD_NAMES
// for details. Trying to enable Jackson to ignore unquoted field names doesn't seem to work (compilation failure
// when attempting to use that feature), so the production itself has to be changed.
// Also, this method throws IOException when a BINARY value is detected.
private static void buildJSONString(StringBuilder sb, Object o, ObjectInspector oi) throws IOException {
    switch(oi.getCategory()) {
        case PRIMITIVE:
            {
                PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
                if (o == null) {
                    sb.append("null");
                } else {
                    switch(poi.getPrimitiveCategory()) {
                        case BOOLEAN:
                            {
                                boolean b = ((BooleanObjectInspector) poi).get(o);
                                sb.append(b ? "true" : "false");
                                break;
                            }
                        case BYTE:
                            {
                                sb.append(((ByteObjectInspector) poi).get(o));
                                break;
                            }
                        case SHORT:
                            {
                                sb.append(((ShortObjectInspector) poi).get(o));
                                break;
                            }
                        case INT:
                            {
                                sb.append(((IntObjectInspector) poi).get(o));
                                break;
                            }
                        case LONG:
                            {
                                sb.append(((LongObjectInspector) poi).get(o));
                                break;
                            }
                        case FLOAT:
                            {
                                sb.append(((FloatObjectInspector) poi).get(o));
                                break;
                            }
                        case DOUBLE:
                            {
                                sb.append(((DoubleObjectInspector) poi).get(o));
                                break;
                            }
                        case STRING:
                            {
                                String s = SerDeUtils.escapeString(((StringObjectInspector) poi).getPrimitiveJavaObject(o));
                                appendWithQuotes(sb, s);
                                break;
                            }
                        case BINARY:
                            {
                                throw new IOException("JsonSerDe does not support BINARY type");
                            }
                        case DATE:
                            Date d = ((DateObjectInspector) poi).getPrimitiveJavaObject(o);
                            appendWithQuotes(sb, d.toString());
                            break;
                        case TIMESTAMP:
                            {
                                Timestamp t = ((TimestampObjectInspector) poi).getPrimitiveJavaObject(o);
                                appendWithQuotes(sb, t.toString());
                                break;
                            }
                        case DECIMAL:
                            sb.append(((HiveDecimalObjectInspector) poi).getPrimitiveJavaObject(o));
                            break;
                        case VARCHAR:
                            {
                                String s = SerDeUtils.escapeString(((HiveVarcharObjectInspector) poi).getPrimitiveJavaObject(o).toString());
                                appendWithQuotes(sb, s);
                                break;
                            }
                        case CHAR:
                            {
                                // this should use HiveChar.getPaddedValue() but it's protected; currently (v0.13)
                                // HiveChar.toString() returns getPaddedValue()
                                String s = SerDeUtils.escapeString(((HiveCharObjectInspector) poi).getPrimitiveJavaObject(o).toString());
                                appendWithQuotes(sb, s);
                                break;
                            }
                        default:
                            throw new RuntimeException("Unknown primitive type: " + poi.getPrimitiveCategory());
                    }
                }
                break;
            }
        case LIST:
            {
                ListObjectInspector loi = (ListObjectInspector) oi;
                ObjectInspector listElementObjectInspector = loi.getListElementObjectInspector();
                List<?> olist = loi.getList(o);
                if (olist == null) {
                    sb.append("null");
                } else {
                    sb.append(SerDeUtils.LBRACKET);
                    for (int i = 0; i < olist.size(); i++) {
                        if (i > 0) {
                            sb.append(SerDeUtils.COMMA);
                        }
                        buildJSONString(sb, olist.get(i), listElementObjectInspector);
                    }
                    sb.append(SerDeUtils.RBRACKET);
                }
                break;
            }
        case MAP:
            {
                MapObjectInspector moi = (MapObjectInspector) oi;
                ObjectInspector mapKeyObjectInspector = moi.getMapKeyObjectInspector();
                ObjectInspector mapValueObjectInspector = moi.getMapValueObjectInspector();
                Map<?, ?> omap = moi.getMap(o);
                if (omap == null) {
                    sb.append("null");
                } else {
                    sb.append(SerDeUtils.LBRACE);
                    boolean first = true;
                    for (Object entry : omap.entrySet()) {
                        if (first) {
                            first = false;
                        } else {
                            sb.append(SerDeUtils.COMMA);
                        }
                        Map.Entry<?, ?> e = (Map.Entry<?, ?>) entry;
                        StringBuilder keyBuilder = new StringBuilder();
                        buildJSONString(keyBuilder, e.getKey(), mapKeyObjectInspector);
                        String keyString = keyBuilder.toString().trim();
                        if ((!keyString.isEmpty()) && (keyString.charAt(0) != SerDeUtils.QUOTE)) {
                            appendWithQuotes(sb, keyString);
                        } else {
                            sb.append(keyString);
                        }
                        sb.append(SerDeUtils.COLON);
                        buildJSONString(sb, e.getValue(), mapValueObjectInspector);
                    }
                    sb.append(SerDeUtils.RBRACE);
                }
                break;
            }
        case STRUCT:
            {
                StructObjectInspector soi = (StructObjectInspector) oi;
                List<? extends StructField> structFields = soi.getAllStructFieldRefs();
                if (o == null) {
                    sb.append("null");
                } else {
                    sb.append(SerDeUtils.LBRACE);
                    for (int i = 0; i < structFields.size(); i++) {
                        if (i > 0) {
                            sb.append(SerDeUtils.COMMA);
                        }
                        appendWithQuotes(sb, structFields.get(i).getFieldName());
                        sb.append(SerDeUtils.COLON);
                        buildJSONString(sb, soi.getStructFieldData(o, structFields.get(i)), structFields.get(i).getFieldObjectInspector());
                    }
                    sb.append(SerDeUtils.RBRACE);
                }
                break;
            }
        case UNION:
            {
                UnionObjectInspector uoi = (UnionObjectInspector) oi;
                if (o == null) {
                    sb.append("null");
                } else {
                    sb.append(SerDeUtils.LBRACE);
                    sb.append(uoi.getTag(o));
                    sb.append(SerDeUtils.COLON);
                    buildJSONString(sb, uoi.getField(o), uoi.getObjectInspectors().get(uoi.getTag(o)));
                    sb.append(SerDeUtils.RBRACE);
                }
                break;
            }
        default:
            throw new RuntimeException("Unknown type in ObjectInspector!");
    }
}
Also used : UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) IOException(java.io.IOException) Timestamp(java.sql.Timestamp) Date(java.sql.Date) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) List(java.util.List) ArrayList(java.util.ArrayList) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
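
The method above leans on an appendWithQuotes helper that is not shown in this excerpt. A minimal sketch of its assumed shape (the real JsonSerDe helper may differ slightly): wrap a value that has already been escaped in SerDeUtils.QUOTE characters.

// Assumed helper shape: quotes a value already escaped by SerDeUtils.escapeString().
private static StringBuilder appendWithQuotes(StringBuilder sb, String value) {
    return sb == null ? null : sb.append(SerDeUtils.QUOTE).append(value).append(SerDeUtils.QUOTE);
}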

Example 53 with MapObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector in project presto by prestodb.

The class TestDataWritableWriter, method writeValue.

/**
 * It writes the field value to the Parquet RecordConsumer. It detects the field type, and calls
 * the correct write function.
 *
 * @param value The writable object that contains the value.
 * @param inspector The object inspector used to get the correct value type.
 * @param type Type that contains information about the type schema.
 */
private void writeValue(final Object value, final ObjectInspector inspector, final Type type) {
    if (type.isPrimitive()) {
        checkInspectorCategory(inspector, ObjectInspector.Category.PRIMITIVE);
        writePrimitive(value, (PrimitiveObjectInspector) inspector);
    } else {
        GroupType groupType = type.asGroupType();
        OriginalType originalType = type.getOriginalType();
        if (originalType != null && originalType.equals(OriginalType.LIST)) {
            checkInspectorCategory(inspector, ObjectInspector.Category.LIST);
            if (singleLevelArray) {
                writeSingleLevelArray(value, (ListObjectInspector) inspector, groupType);
            } else {
                writeArray(value, (ListObjectInspector) inspector, groupType);
            }
        } else if (originalType != null && (originalType.equals(OriginalType.MAP) || originalType.equals(OriginalType.MAP_KEY_VALUE))) {
            checkInspectorCategory(inspector, ObjectInspector.Category.MAP);
            writeMap(value, (MapObjectInspector) inspector, groupType);
        } else {
            checkInspectorCategory(inspector, ObjectInspector.Category.STRUCT);
            writeGroup(value, (StructObjectInspector) inspector, groupType);
        }
    }
}
Also used : OriginalType(org.apache.parquet.schema.OriginalType) GroupType(org.apache.parquet.schema.GroupType) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
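
writeValue() delegates type validation to checkInspectorCategory, which is not shown in this excerpt. A minimal sketch of its assumed behavior: reject the value when the Hive inspector category does not match the category implied by the Parquet type.

// Assumed shape of the guard used above; the real implementation may word the error differently.
private void checkInspectorCategory(ObjectInspector inspector, ObjectInspector.Category category) {
    if (!inspector.getCategory().equals(category)) {
        throw new IllegalArgumentException("Invalid data type: expected " + category
                + " category, but found " + inspector.getCategory());
    }
}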

Example 54 with MapObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector in project presto by prestodb.

The class TestDataWritableWriter, method writeMap.

/**
 * It writes a map type and its key-value pairs to the Parquet RecordConsumer.
 * This is called when the original type (MAP) is detected by writeValue().
 * This function assumes the following schema:
 * optional group mapCol (MAP) {
 *   repeated group map (MAP_KEY_VALUE) {
 *     required TYPE key;
 *     optional TYPE value;
 *   }
 * }
 *
 * @param value The object that contains the map key-values.
 * @param inspector The object inspector used to get the correct value type.
 * @param type Type that contains information about the group (MAP) schema.
 */
private void writeMap(final Object value, final MapObjectInspector inspector, final GroupType type) {
    // Get the internal map structure (MAP_KEY_VALUE)
    GroupType repeatedType = type.getType(0).asGroupType();
    recordConsumer.startGroup();
    Map<?, ?> mapValues = inspector.getMap(value);
    if (mapValues != null && mapValues.size() > 0) {
        recordConsumer.startField(repeatedType.getName(), 0);
        Type keyType = repeatedType.getType(0);
        String keyName = keyType.getName();
        ObjectInspector keyInspector = inspector.getMapKeyObjectInspector();
        Type valuetype = repeatedType.getType(1);
        String valueName = valuetype.getName();
        ObjectInspector valueInspector = inspector.getMapValueObjectInspector();
        for (Map.Entry<?, ?> keyValue : mapValues.entrySet()) {
            recordConsumer.startGroup();
            if (keyValue != null) {
                // write key element
                Object keyElement = keyValue.getKey();
                recordConsumer.startField(keyName, 0);
                writeValue(keyElement, keyInspector, keyType);
                recordConsumer.endField(keyName, 0);
                // write value element
                Object valueElement = keyValue.getValue();
                if (valueElement != null) {
                    recordConsumer.startField(valueName, 1);
                    writeValue(valueElement, valueInspector, valuetype);
                    recordConsumer.endField(valueName, 1);
                }
            }
            recordConsumer.endGroup();
        }
        recordConsumer.endField(repeatedType.getName(), 0);
    }
    recordConsumer.endGroup();
}
Also used : OriginalType(org.apache.parquet.schema.OriginalType) GroupType(org.apache.parquet.schema.GroupType) Type(org.apache.parquet.schema.Type) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) GroupType(org.apache.parquet.schema.GroupType) Map(java.util.Map)
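
To make the assumed layout concrete, here is a hedged sketch of a map<string,int> column in the MAP / MAP_KEY_VALUE shape that writeMap() expects, built with Parquet's org.apache.parquet.schema.Types builder (PrimitiveTypeName comes from org.apache.parquet.schema.PrimitiveType; the column and field names are illustrative):

// Hypothetical column: optional group mapCol (MAP) { repeated group map (MAP_KEY_VALUE) { key, value } }
private GroupType exampleMapColumn() {
    return Types.optionalGroup().as(OriginalType.MAP)
            .repeatedGroup().as(OriginalType.MAP_KEY_VALUE)
                .required(PrimitiveTypeName.BINARY).as(OriginalType.UTF8).named("key")
                .optional(PrimitiveTypeName.INT32).named("value")
            .named("map")
            .named("mapCol");
}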

Example 55 with MapObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector in project flink by apache.

The class HiveInspectors, method getConversion.

/**
 * Get a conversion for converting a Flink object to a Hive object, from an ObjectInspector and the
 * corresponding Flink LogicalType.
 */
public static HiveObjectConversion getConversion(ObjectInspector inspector, LogicalType dataType, HiveShim hiveShim) {
    if (inspector instanceof PrimitiveObjectInspector) {
        HiveObjectConversion conversion;
        if (inspector instanceof BooleanObjectInspector || inspector instanceof StringObjectInspector || inspector instanceof ByteObjectInspector || inspector instanceof ShortObjectInspector || inspector instanceof IntObjectInspector || inspector instanceof LongObjectInspector || inspector instanceof FloatObjectInspector || inspector instanceof DoubleObjectInspector || inspector instanceof BinaryObjectInspector || inspector instanceof VoidObjectInspector) {
            conversion = IdentityConversion.INSTANCE;
        } else if (inspector instanceof DateObjectInspector) {
            conversion = hiveShim::toHiveDate;
        } else if (inspector instanceof TimestampObjectInspector) {
            conversion = hiveShim::toHiveTimestamp;
        } else if (inspector instanceof HiveCharObjectInspector) {
            conversion = o -> o == null ? null : new HiveChar((String) o, ((CharType) dataType).getLength());
        } else if (inspector instanceof HiveVarcharObjectInspector) {
            conversion = o -> o == null ? null : new HiveVarchar((String) o, ((VarCharType) dataType).getLength());
        } else if (inspector instanceof HiveDecimalObjectInspector) {
            conversion = o -> o == null ? null : HiveDecimal.create((BigDecimal) o);
        } else {
            throw new FlinkHiveUDFException("Unsupported primitive object inspector " + inspector.getClass().getName());
        }
        // currently this happens for constant arguments for UDFs
        if (((PrimitiveObjectInspector) inspector).preferWritable()) {
            conversion = new WritableHiveObjectConversion(conversion, hiveShim);
        }
        return conversion;
    }
    if (inspector instanceof ListObjectInspector) {
        HiveObjectConversion eleConvert = getConversion(((ListObjectInspector) inspector).getListElementObjectInspector(), ((ArrayType) dataType).getElementType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Object[] array = (Object[]) o;
            List<Object> result = new ArrayList<>();
            for (Object ele : array) {
                result.add(eleConvert.toHiveObject(ele));
            }
            return result;
        };
    }
    if (inspector instanceof MapObjectInspector) {
        MapObjectInspector mapInspector = (MapObjectInspector) inspector;
        MapType kvType = (MapType) dataType;
        HiveObjectConversion keyConversion = getConversion(mapInspector.getMapKeyObjectInspector(), kvType.getKeyType(), hiveShim);
        HiveObjectConversion valueConversion = getConversion(mapInspector.getMapValueObjectInspector(), kvType.getValueType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Map<Object, Object> map = (Map) o;
            Map<Object, Object> result = new HashMap<>(map.size());
            for (Map.Entry<Object, Object> entry : map.entrySet()) {
                result.put(keyConversion.toHiveObject(entry.getKey()), valueConversion.toHiveObject(entry.getValue()));
            }
            return result;
        };
    }
    if (inspector instanceof StructObjectInspector) {
        StructObjectInspector structInspector = (StructObjectInspector) inspector;
        List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
        List<RowType.RowField> rowFields = ((RowType) dataType).getFields();
        HiveObjectConversion[] conversions = new HiveObjectConversion[structFields.size()];
        for (int i = 0; i < structFields.size(); i++) {
            conversions[i] = getConversion(structFields.get(i).getFieldObjectInspector(), rowFields.get(i).getType(), hiveShim);
        }
        return o -> {
            if (o == null) {
                return null;
            }
            Row row = (Row) o;
            List<Object> result = new ArrayList<>(row.getArity());
            for (int i = 0; i < row.getArity(); i++) {
                result.add(conversions[i].toHiveObject(row.getField(i)));
            }
            return result;
        };
    }
    throw new FlinkHiveUDFException(String.format("Flink doesn't support object conversion for %s yet", inspector));
}
Also used : DataType(org.apache.flink.table.types.DataType) VoidObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector) Array(java.lang.reflect.Array) WritableConstantHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveVarcharObjectInspector) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) WritableConstantTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector) MapType(org.apache.flink.table.types.logical.MapType) WritableConstantDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDateObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) WritableConstantByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector) CharType(org.apache.flink.table.types.logical.CharType) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) BigDecimal(java.math.BigDecimal) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) Map(java.util.Map) WritableConstantLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveTypeUtil(org.apache.flink.table.catalog.hive.util.HiveTypeUtil) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) WritableConstantHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveCharObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) WritableConstantBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBinaryObjectInspector) List(java.util.List) HiveReflectionUtils(org.apache.flink.table.catalog.hive.util.HiveReflectionUtils) LogicalType(org.apache.flink.table.types.logical.LogicalType) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) WritableConstantDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector) WritableConstantFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantFloatObjectInspector) WritableConstantBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector) Row(org.apache.flink.types.Row) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) TypeInfoUtils(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) 
HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) HashMap(java.util.HashMap) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) RowType(org.apache.flink.table.types.logical.RowType) WritableConstantHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveDecimalObjectInspector) Constructor(java.lang.reflect.Constructor) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) ArrayList(java.util.ArrayList) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) Nonnull(javax.annotation.Nonnull) WritableConstantShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) WritableConstantStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector) VarCharType(org.apache.flink.table.types.logical.VarCharType) ArrayType(org.apache.flink.table.types.logical.ArrayType) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FlinkHiveUDFException(org.apache.flink.table.functions.hive.FlinkHiveUDFException) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) WritableConstantIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector) ObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory) PrimitiveObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) Internal(org.apache.flink.annotation.Internal) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) WritableConstantLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) WritableConstantDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDateObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) WritableConstantIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) WritableConstantBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBinaryObjectInspector) 
BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) RowType(org.apache.flink.table.types.logical.RowType) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) WritableConstantStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector) MapType(org.apache.flink.table.types.logical.MapType) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) WritableConstantFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantFloatObjectInspector) WritableConstantByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) WritableConstantTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) WritableConstantShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) WritableConstantHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveDecimalObjectInspector) List(java.util.List) ArrayList(java.util.ArrayList) WritableConstantHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveCharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) FlinkHiveUDFException(org.apache.flink.table.functions.hive.FlinkHiveUDFException) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) BigDecimal(java.math.BigDecimal) WritableConstantDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) WritableConstantHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveVarcharObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) VoidObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector) CharType(org.apache.flink.table.types.logical.CharType) VarCharType(org.apache.flink.table.types.logical.VarCharType) Row(org.apache.flink.types.Row) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) WritableConstantBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector) Map(java.util.Map) HashMap(java.util.HashMap) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) 
StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
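
A hedged usage sketch (the surrounding wiring is an assumption, not part of the example above): pair a standard Hive inspector for map<string,int> with the corresponding Flink MapType and ask getConversion for the map-level HiveObjectConversion. The HiveShim is assumed to be supplied by the caller, for instance obtained through HiveShimLoader.

// Sketch only: returns a conversion that turns a Flink Map<String, Integer> into a Hive map value.
HiveObjectConversion mapStringIntConversion(HiveShim hiveShim) {
    MapObjectInspector inspector = ObjectInspectorFactory.getStandardMapObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    LogicalType flinkType = new MapType(new VarCharType(VarCharType.MAX_LENGTH), new IntType());
    return HiveInspectors.getConversion(inspector, flinkType, hiveShim);
}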

Aggregations

MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector): 50
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector): 42
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 41
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 40
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 30
Map (java.util.Map): 25
ArrayList (java.util.ArrayList): 22
LongObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector): 21
ByteObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector): 19
DoubleObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector): 19
FloatObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector): 19
IntObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector): 19
ShortObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector): 19
TimestampObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector): 19
BinaryObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector): 18
BooleanObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector): 18
StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector): 18
DateObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector): 17
HiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector): 17
StructField (org.apache.hadoop.hive.serde2.objectinspector.StructField): 16