
Example 6 with CollectionDataType

Use of org.apache.flink.table.types.CollectionDataType in project flink by apache.

From the class DataTypeJsonDeserializer, method deserializeClass:

private static DataType deserializeClass(LogicalType logicalType, @Nullable JsonNode parentNode, SerdeContext serdeContext) {
    if (parentNode == null) {
        return DataTypes.of(logicalType);
    }
    final DataType dataType;
    switch(logicalType.getTypeRoot()) {
        case ARRAY:
        case MULTISET:
            final DataType elementDataType = deserializeClass(logicalType.getChildren().get(0), parentNode.get(FIELD_NAME_ELEMENT_CLASS), serdeContext);
            dataType = new CollectionDataType(logicalType, elementDataType);
            break;
        case MAP:
            final MapType mapType = (MapType) logicalType;
            final DataType keyDataType = deserializeClass(mapType.getKeyType(), parentNode.get(FIELD_NAME_KEY_CLASS), serdeContext);
            final DataType valueDataType = deserializeClass(mapType.getValueType(), parentNode.get(FIELD_NAME_VALUE_CLASS), serdeContext);
            dataType = new KeyValueDataType(mapType, keyDataType, valueDataType);
            break;
        case ROW:
        case STRUCTURED_TYPE:
            final List<String> fieldNames = LogicalTypeChecks.getFieldNames(logicalType);
            final List<LogicalType> fieldTypes = LogicalTypeChecks.getFieldTypes(logicalType);
            final ArrayNode fieldNodes = (ArrayNode) parentNode.get(FIELD_NAME_FIELDS);
            final Map<String, JsonNode> fieldNodesByName = new HashMap<>();
            if (fieldNodes != null) {
                fieldNodes.forEach(fieldNode -> fieldNodesByName.put(fieldNode.get(FIELD_NAME_FIELD_NAME).asText(), fieldNode));
            }
            final List<DataType> fieldDataTypes = IntStream.range(0, fieldNames.size()).mapToObj(i -> {
                final String fieldName = fieldNames.get(i);
                final LogicalType fieldType = fieldTypes.get(i);
                return deserializeClass(fieldType, fieldNodesByName.get(fieldName), serdeContext);
            }).collect(Collectors.toList());
            dataType = new FieldsDataType(logicalType, fieldDataTypes);
            break;
        case DISTINCT_TYPE:
            final DistinctType distinctType = (DistinctType) logicalType;
            dataType = deserializeClass(distinctType.getSourceType(), parentNode, serdeContext);
            break;
        default:
            dataType = DataTypes.of(logicalType);
    }
    if (!parentNode.has(FIELD_NAME_CONVERSION_CLASS)) {
        return dataType;
    }
    final Class<?> conversionClass = loadClass(parentNode.get(FIELD_NAME_CONVERSION_CLASS).asText(), serdeContext, String.format("conversion class of data type '%s'", dataType));
    return dataType.bridgedTo(conversionClass);
}
Also used: IntStream(java.util.stream.IntStream) FIELD_NAME_ELEMENT_CLASS(org.apache.flink.table.planner.plan.nodes.exec.serde.DataTypeJsonSerializer.FIELD_NAME_ELEMENT_CLASS) DataType(org.apache.flink.table.types.DataType) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) FIELD_NAME_VALUE_CLASS(org.apache.flink.table.planner.plan.nodes.exec.serde.DataTypeJsonSerializer.FIELD_NAME_VALUE_CLASS) JsonParser(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonParser) HashMap(java.util.HashMap) JsonNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode) MapType(org.apache.flink.table.types.logical.MapType) ArrayNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode) DeserializationContext(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.DeserializationContext) FIELD_NAME_FIELD_NAME(org.apache.flink.table.planner.plan.nodes.exec.serde.DataTypeJsonSerializer.FIELD_NAME_FIELD_NAME) FIELD_NAME_CONVERSION_CLASS(org.apache.flink.table.planner.plan.nodes.exec.serde.DataTypeJsonSerializer.FIELD_NAME_CONVERSION_CLASS) FIELD_NAME_FIELDS(org.apache.flink.table.planner.plan.nodes.exec.serde.DataTypeJsonSerializer.FIELD_NAME_FIELDS) FieldsDataType(org.apache.flink.table.types.FieldsDataType) Map(java.util.Map) FIELD_NAME_KEY_CLASS(org.apache.flink.table.planner.plan.nodes.exec.serde.DataTypeJsonSerializer.FIELD_NAME_KEY_CLASS) Nullable(javax.annotation.Nullable) DataTypes(org.apache.flink.table.api.DataTypes) IOException(java.io.IOException) Collectors(java.util.stream.Collectors) List(java.util.List) FIELD_NAME_TYPE(org.apache.flink.table.planner.plan.nodes.exec.serde.DataTypeJsonSerializer.FIELD_NAME_TYPE) CollectionDataType(org.apache.flink.table.types.CollectionDataType) DistinctType(org.apache.flink.table.types.logical.DistinctType) LogicalType(org.apache.flink.table.types.logical.LogicalType) JsonSerdeUtil.loadClass(org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeUtil.loadClass) Internal(org.apache.flink.annotation.Internal) StdDeserializer(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.deser.std.StdDeserializer) LogicalTypeChecks(org.apache.flink.table.types.logical.utils.LogicalTypeChecks)
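
For orientation, a minimal, hypothetical sketch of the kind of nested DataType this deserializer reconstructs; only the DataTypes/bridgedTo API is actual Flink API, while the class name and field name below are invented for illustration:

import java.sql.Timestamp;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class ConversionClassSketch {

    public static void main(String[] args) {
        // ARRAY<TIMESTAMP(3)> whose element type bridges to java.sql.Timestamp;
        // deserializeClass rebuilds such a type as a CollectionDataType whose element
        // data type carries the conversion class read from FIELD_NAME_ELEMENT_CLASS.
        DataType arrayType = DataTypes.ARRAY(DataTypes.TIMESTAMP(3).bridgedTo(Timestamp.class));

        // ROW with a single field; fields with custom conversion classes are matched
        // by name against the FIELD_NAME_FIELDS entries during deserialization.
        DataType rowType = DataTypes.ROW(DataTypes.FIELD("events", arrayType));

        // The conversion classes that the JSON serde has to preserve and restore.
        System.out.println(arrayType.getChildren().get(0).getConversionClass()); // class java.sql.Timestamp
        System.out.println(rowType.getChildren().get(0));                        // the field's data type
    }
}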

Example 7 with CollectionDataType

Use of org.apache.flink.table.types.CollectionDataType in project flink by apache.

From the class DataTypeJsonSerializer, method serializeClass:

private static void serializeClass(DataType dataType, JsonGenerator jsonGenerator) throws IOException {
    // skip the conversion class if only nested types contain custom conversion classes
    if (!isDefaultClass(dataType)) {
        jsonGenerator.writeStringField(FIELD_NAME_CONVERSION_CLASS, dataType.getConversionClass().getName());
    }
    // internal classes only contain nested internal classes
    if (isInternal(dataType, false)) {
        return;
    }
    switch(dataType.getLogicalType().getTypeRoot()) {
        case ARRAY:
        case MULTISET:
            final CollectionDataType collectionDataType = (CollectionDataType) dataType;
            serializeFieldIfNotDefaultClass(collectionDataType.getElementDataType(), FIELD_NAME_ELEMENT_CLASS, jsonGenerator);
            break;
        case MAP:
            final KeyValueDataType keyValueDataType = (KeyValueDataType) dataType;
            serializeFieldIfNotDefaultClass(keyValueDataType.getKeyDataType(), FIELD_NAME_KEY_CLASS, jsonGenerator);
            serializeFieldIfNotDefaultClass(keyValueDataType.getValueDataType(), FIELD_NAME_VALUE_CLASS, jsonGenerator);
            break;
        case ROW:
        case STRUCTURED_TYPE:
            final List<Field> nonDefaultFields = DataType.getFields(dataType).stream().filter(field -> !isDefaultClassNested(field.getDataType())).collect(Collectors.toList());
            if (nonDefaultFields.isEmpty()) {
                break;
            }
            jsonGenerator.writeFieldName(FIELD_NAME_FIELDS);
            jsonGenerator.writeStartArray();
            for (Field nonDefaultField : nonDefaultFields) {
                jsonGenerator.writeStartObject();
                jsonGenerator.writeStringField(FIELD_NAME_FIELD_NAME, nonDefaultField.getName());
                serializeClass(nonDefaultField.getDataType(), jsonGenerator);
                jsonGenerator.writeEndObject();
            }
            jsonGenerator.writeEndArray();
            break;
        case DISTINCT_TYPE:
            final DataType sourceDataType = dataType.getChildren().get(0);
            if (!isDefaultClassNested(sourceDataType)) {
                serializeClass(sourceDataType, jsonGenerator);
            }
            break;
        default:
    }
}
Also used: DataType(org.apache.flink.table.types.DataType) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) JsonGenerator(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator) IOException(java.io.IOException) Collectors(java.util.stream.Collectors) Field(org.apache.flink.table.api.DataTypes.Field) Objects(java.util.Objects) List(java.util.List) CollectionDataType(org.apache.flink.table.types.CollectionDataType) DataTypeUtils.isInternal(org.apache.flink.table.types.utils.DataTypeUtils.isInternal) SerializerProvider(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.SerializerProvider) StdSerializer(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ser.std.StdSerializer) Internal(org.apache.flink.annotation.Internal)
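
As a rough, hypothetical counterpart to the private isDefaultClass/isDefaultClassNested helpers used above (their exact implementation is not shown here), the idea can be sketched as follows: a conversion class counts as default when it matches what DataTypes.of would assign to the bare logical type, checked recursively over all children:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public final class DefaultClassCheck {

    private DefaultClassCheck() {
    }

    // True if this data type and all nested data types still use the conversion
    // classes that DataTypes.of(...) would pick for the plain logical type.
    public static boolean usesDefaultClassesNested(DataType dataType) {
        final DataType defaultType = DataTypes.of(dataType.getLogicalType());
        if (!dataType.getConversionClass().equals(defaultType.getConversionClass())) {
            return false;
        }
        return dataType.getChildren().stream()
                .allMatch(DefaultClassCheck::usesDefaultClassesNested);
    }
}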

Example 8 with CollectionDataType

Use of org.apache.flink.table.types.CollectionDataType in project flink by apache.

From the class TypeInfoDataTypeConverter, method fromDataTypeToTypeInfo:

public static TypeInformation<?> fromDataTypeToTypeInfo(DataType dataType) {
    Class<?> clazz = dataType.getConversionClass();
    if (clazz.isPrimitive()) {
        final TypeInformation<?> foundTypeInfo = primitiveDataTypeTypeInfoMap.get(clazz.getName());
        if (foundTypeInfo != null) {
            return foundTypeInfo;
        }
    }
    LogicalType logicalType = fromDataTypeToLogicalType(dataType);
    switch(logicalType.getTypeRoot()) {
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            TimestampType timestampType = (TimestampType) logicalType;
            int precision = timestampType.getPrecision();
            if (timestampType.getKind() == TimestampKind.REGULAR) {
                return clazz == TimestampData.class
                        ? new TimestampDataTypeInfo(precision)
                        : (clazz == LocalDateTime.class
                                ? ((3 == precision) ? Types.LOCAL_DATE_TIME : new LegacyLocalDateTimeTypeInfo(precision))
                                : ((3 == precision) ? Types.SQL_TIMESTAMP : new LegacyTimestampTypeInfo(precision)));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            LocalZonedTimestampType lzTs = (LocalZonedTimestampType) logicalType;
            int precisionLzTs = lzTs.getPrecision();
            if (lzTs.getKind() == TimestampKind.REGULAR) {
                return clazz == TimestampData.class
                        ? new TimestampDataTypeInfo(precisionLzTs)
                        : (clazz == Instant.class
                                ? ((3 == precisionLzTs) ? Types.INSTANT : new LegacyInstantTypeInfo(precisionLzTs))
                                : TypeConversions.fromDataTypeToLegacyInfo(dataType));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case DECIMAL:
            DecimalType decimalType = (DecimalType) logicalType;
            return clazz == DecimalData.class ? new DecimalDataTypeInfo(decimalType.getPrecision(), decimalType.getScale()) : new BigDecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
        case CHAR:
        case VARCHAR: // ignore precision
            return clazz == StringData.class ? StringDataTypeInfo.INSTANCE : BasicTypeInfo.STRING_TYPE_INFO;
        case BINARY:
        case VARBINARY: // ignore precision
            return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
        case INTERVAL_YEAR_MONTH:
            return TimeIntervalTypeInfo.INTERVAL_MONTHS;
        case INTERVAL_DAY_TIME:
            return TimeIntervalTypeInfo.INTERVAL_MILLIS;
        case ARRAY:
            if (dataType instanceof CollectionDataType && !isPrimitive(((CollectionDataType) dataType).getElementDataType().getLogicalType())) {
                return ObjectArrayTypeInfo.getInfoFor(fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case MAP:
            KeyValueDataType mapType = (KeyValueDataType) dataType;
            return new MapTypeInfo(fromDataTypeToTypeInfo(mapType.getKeyDataType()), fromDataTypeToTypeInfo(mapType.getValueDataType()));
        case MULTISET:
            return MultisetTypeInfo.getInfoFor(fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
        case ROW:
            if (RowData.class.isAssignableFrom(dataType.getConversionClass())) {
                return InternalTypeInfo.of((RowType) fromDataTypeToLogicalType(dataType));
            } else if (Row.class == dataType.getConversionClass()) {
                RowType logicalRowType = (RowType) logicalType;
                return new RowTypeInfo(dataType.getChildren().stream().map(TypeInfoDataTypeConverter::fromDataTypeToTypeInfo).toArray(TypeInformation[]::new), logicalRowType.getFieldNames().toArray(new String[0]));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case RAW:
            if (logicalType instanceof RawType) {
                return ExternalTypeInfo.of(dataType);
            }
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
        default:
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
    }
}
Also used: LocalDateTime(java.time.LocalDateTime) TimestampData(org.apache.flink.table.data.TimestampData) DecimalDataTypeInfo(org.apache.flink.table.runtime.typeutils.DecimalDataTypeInfo) CollectionDataType(org.apache.flink.table.types.CollectionDataType) LogicalTypeDataTypeConverter.fromDataTypeToLogicalType(org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromDataTypeToLogicalType) LogicalType(org.apache.flink.table.types.logical.LogicalType) RowType(org.apache.flink.table.types.logical.RowType) RowTypeInfo(org.apache.flink.api.java.typeutils.RowTypeInfo) LegacyInstantTypeInfo(org.apache.flink.table.runtime.typeutils.LegacyInstantTypeInfo) DecimalData(org.apache.flink.table.data.DecimalData) TimestampType(org.apache.flink.table.types.logical.TimestampType) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) BigDecimalTypeInfo(org.apache.flink.table.runtime.typeutils.BigDecimalTypeInfo) RawType(org.apache.flink.table.types.logical.RawType) TimestampDataTypeInfo(org.apache.flink.table.runtime.typeutils.TimestampDataTypeInfo) Instant(java.time.Instant) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) LegacyLocalDateTimeTypeInfo(org.apache.flink.table.runtime.typeutils.LegacyLocalDateTimeTypeInfo) MapTypeInfo(org.apache.flink.api.java.typeutils.MapTypeInfo) LegacyTimestampTypeInfo(org.apache.flink.table.runtime.typeutils.LegacyTimestampTypeInfo) DecimalType(org.apache.flink.table.types.logical.DecimalType) Row(org.apache.flink.types.Row) StringData(org.apache.flink.table.data.StringData)
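
A hypothetical usage sketch; it assumes the converter shown above is the planner's org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter (as the LogicalTypeDataTypeConverter import suggests) and that it is on the classpath. An array of a non-primitive element type should come back as an ObjectArrayTypeInfo, a map as a MapTypeInfo of the converted key and value types:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.api.DataTypes;

import static org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter.fromDataTypeToTypeInfo;

public class TypeInfoConversionSketch {

    public static void main(String[] args) {
        // ARRAY<STRING>: the element type is not primitive, so the ObjectArrayTypeInfo branch applies.
        TypeInformation<?> arrayInfo = fromDataTypeToTypeInfo(DataTypes.ARRAY(DataTypes.STRING()));

        // MAP<STRING, BIGINT>: key and value are converted recursively into a MapTypeInfo.
        TypeInformation<?> mapInfo =
                fromDataTypeToTypeInfo(DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT()));

        System.out.println(arrayInfo); // expected: an ObjectArrayTypeInfo over String
        System.out.println(mapInfo);   // expected: a MapTypeInfo<String, Long>
    }
}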

Aggregations

CollectionDataType (org.apache.flink.table.types.CollectionDataType) 8
KeyValueDataType (org.apache.flink.table.types.KeyValueDataType) 7
DataType (org.apache.flink.table.types.DataType) 5
LogicalType (org.apache.flink.table.types.logical.LogicalType) 5
LocalDateTime (java.time.LocalDateTime) 3
IOException (java.io.IOException) 2
List (java.util.List) 2
Collectors (java.util.stream.Collectors) 2
Internal (org.apache.flink.annotation.Internal) 2
DecimalData (org.apache.flink.table.data.DecimalData) 2
StringData (org.apache.flink.table.data.StringData) 2
FieldsDataType (org.apache.flink.table.types.FieldsDataType) 2
Timestamp (java.sql.Timestamp) 1
Instant (java.time.Instant) 1
LocalTime (java.time.LocalTime) 1
HashMap (java.util.HashMap) 1
Map (java.util.Map) 1
Objects (java.util.Objects) 1
IntStream (java.util.stream.IntStream) 1
Nullable (javax.annotation.Nullable) 1