Use of org.apache.flink.table.types.CollectionDataType in project flink by apache.
The class DataTypeJsonDeserializer, method deserializeClass:
private static DataType deserializeClass(
        LogicalType logicalType, @Nullable JsonNode parentNode, SerdeContext serdeContext) {
    if (parentNode == null) {
        // no custom conversion classes recorded for this subtree, use the defaults
        return DataTypes.of(logicalType);
    }
    final DataType dataType;
    switch (logicalType.getTypeRoot()) {
        case ARRAY:
        case MULTISET:
            final DataType elementDataType =
                    deserializeClass(
                            logicalType.getChildren().get(0),
                            parentNode.get(FIELD_NAME_ELEMENT_CLASS),
                            serdeContext);
            dataType = new CollectionDataType(logicalType, elementDataType);
            break;
        case MAP:
            final MapType mapType = (MapType) logicalType;
            final DataType keyDataType =
                    deserializeClass(
                            mapType.getKeyType(),
                            parentNode.get(FIELD_NAME_KEY_CLASS),
                            serdeContext);
            final DataType valueDataType =
                    deserializeClass(
                            mapType.getValueType(),
                            parentNode.get(FIELD_NAME_VALUE_CLASS),
                            serdeContext);
            dataType = new KeyValueDataType(mapType, keyDataType, valueDataType);
            break;
        case ROW:
        case STRUCTURED_TYPE:
            final List<String> fieldNames = LogicalTypeChecks.getFieldNames(logicalType);
            final List<LogicalType> fieldTypes = LogicalTypeChecks.getFieldTypes(logicalType);
            // index the serialized field nodes by name; only fields with custom
            // conversion classes are present in the JSON
            final ArrayNode fieldNodes = (ArrayNode) parentNode.get(FIELD_NAME_FIELDS);
            final Map<String, JsonNode> fieldNodesByName = new HashMap<>();
            if (fieldNodes != null) {
                fieldNodes.forEach(
                        fieldNode ->
                                fieldNodesByName.put(
                                        fieldNode.get(FIELD_NAME_FIELD_NAME).asText(),
                                        fieldNode));
            }
            final List<DataType> fieldDataTypes =
                    IntStream.range(0, fieldNames.size())
                            .mapToObj(
                                    i -> {
                                        final String fieldName = fieldNames.get(i);
                                        final LogicalType fieldType = fieldTypes.get(i);
                                        return deserializeClass(
                                                fieldType,
                                                fieldNodesByName.get(fieldName),
                                                serdeContext);
                                    })
                            .collect(Collectors.toList());
            dataType = new FieldsDataType(logicalType, fieldDataTypes);
            break;
        case DISTINCT_TYPE:
            final DistinctType distinctType = (DistinctType) logicalType;
            dataType = deserializeClass(distinctType.getSourceType(), parentNode, serdeContext);
            break;
        default:
            dataType = DataTypes.of(logicalType);
    }
    if (!parentNode.has(FIELD_NAME_CONVERSION_CLASS)) {
        return dataType;
    }
    final Class<?> conversionClass =
            loadClass(
                    parentNode.get(FIELD_NAME_CONVERSION_CLASS).asText(),
                    serdeContext,
                    String.format("conversion class of data type '%s'", dataType));
    return dataType.bridgedTo(conversionClass);
}
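For context, a minimal sketch of the shape this deserializer reconstructs, using only the public DataTypes API (the class name here is hypothetical; the printed values are illustrative): an ARRAY type bridged to a primitive array carries a custom outer conversion class while its element keeps the default one.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.CollectionDataType;
import org.apache.flink.table.types.DataType;

public class DeserializedShapeSketch {
    public static void main(String[] args) {
        // ARRAY<INT NOT NULL> bridged to int[]; the element keeps its default class
        DataType arrayType =
                DataTypes.ARRAY(DataTypes.INT().notNull()).bridgedTo(int[].class);
        CollectionDataType collectionType = (CollectionDataType) arrayType;

        // "[I" -- the JVM name of int[], i.e. what would be recorded under
        // FIELD_NAME_CONVERSION_CLASS
        System.out.println(arrayType.getConversionClass().getName());
        // "java.lang.Integer" -- the default element conversion class, so no
        // FIELD_NAME_ELEMENT_CLASS entry would exist for this element
        System.out.println(
                collectionType.getElementDataType().getConversionClass().getName());
    }
}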
Use of org.apache.flink.table.types.CollectionDataType in project flink by apache.
The class DataTypeJsonSerializer, method serializeClass:
private static void serializeClass(DataType dataType, JsonGenerator jsonGenerator)
        throws IOException {
    // skip the conversion class if only nested types contain custom conversion classes
    if (!isDefaultClass(dataType)) {
        jsonGenerator.writeStringField(
                FIELD_NAME_CONVERSION_CLASS, dataType.getConversionClass().getName());
    }
    // internal classes only contain nested internal classes
    if (isInternal(dataType, false)) {
        return;
    }
    switch (dataType.getLogicalType().getTypeRoot()) {
        case ARRAY:
        case MULTISET:
            final CollectionDataType collectionDataType = (CollectionDataType) dataType;
            serializeFieldIfNotDefaultClass(
                    collectionDataType.getElementDataType(),
                    FIELD_NAME_ELEMENT_CLASS,
                    jsonGenerator);
            break;
        case MAP:
            final KeyValueDataType keyValueDataType = (KeyValueDataType) dataType;
            serializeFieldIfNotDefaultClass(
                    keyValueDataType.getKeyDataType(), FIELD_NAME_KEY_CLASS, jsonGenerator);
            serializeFieldIfNotDefaultClass(
                    keyValueDataType.getValueDataType(), FIELD_NAME_VALUE_CLASS, jsonGenerator);
            break;
        case ROW:
        case STRUCTURED_TYPE:
            // write only those fields that carry a non-default conversion class
            // somewhere in their (possibly nested) data type
            final List<Field> nonDefaultFields =
                    DataType.getFields(dataType).stream()
                            .filter(field -> !isDefaultClassNested(field.getDataType()))
                            .collect(Collectors.toList());
            if (nonDefaultFields.isEmpty()) {
                break;
            }
            jsonGenerator.writeFieldName(FIELD_NAME_FIELDS);
            jsonGenerator.writeStartArray();
            for (Field nonDefaultField : nonDefaultFields) {
                jsonGenerator.writeStartObject();
                jsonGenerator.writeStringField(
                        FIELD_NAME_FIELD_NAME, nonDefaultField.getName());
                serializeClass(nonDefaultField.getDataType(), jsonGenerator);
                jsonGenerator.writeEndObject();
            }
            jsonGenerator.writeEndArray();
            break;
        case DISTINCT_TYPE:
            final DataType sourceDataType = dataType.getChildren().get(0);
            if (!isDefaultClassNested(sourceDataType)) {
                serializeClass(sourceDataType, jsonGenerator);
            }
            break;
        default:
            // nothing further to write for atomic types
    }
}
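To see the pruning in effect, here is a minimal sketch (standard Flink Table API only; the class name is hypothetical, the printed output illustrative) of a ROW where just one field deviates from its default conversion class, so only that field would be written under FIELD_NAME_FIELDS:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class SerializerPruningSketch {
    public static void main(String[] args) {
        // ROW<a INT, b INT> where only "b" deviates from the default conversion class
        DataType rowType =
                DataTypes.ROW(
                        DataTypes.FIELD("a", DataTypes.INT()),
                        DataTypes.FIELD("b", DataTypes.INT().notNull().bridgedTo(int.class)));

        // the ROW branch above would keep only field "b", since "a" uses the
        // default java.lang.Integer conversion everywhere
        DataType.getFields(rowType)
                .forEach(
                        f ->
                                System.out.println(
                                        f.getName()
                                                + " -> "
                                                + f.getDataType()
                                                        .getConversionClass()
                                                        .getName()));
    }
}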
Use of org.apache.flink.table.types.CollectionDataType in project flink by apache.
The class TypeInfoDataTypeConverter, method fromDataTypeToTypeInfo:
public static TypeInformation<?> fromDataTypeToTypeInfo(DataType dataType) {
    Class<?> clazz = dataType.getConversionClass();
    if (clazz.isPrimitive()) {
        final TypeInformation<?> foundTypeInfo =
                primitiveDataTypeTypeInfoMap.get(clazz.getName());
        if (foundTypeInfo != null) {
            return foundTypeInfo;
        }
    }
    LogicalType logicalType = fromDataTypeToLogicalType(dataType);
    switch (logicalType.getTypeRoot()) {
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            TimestampType timestampType = (TimestampType) logicalType;
            int precision = timestampType.getPrecision();
            if (timestampType.getKind() == TimestampKind.REGULAR) {
                return clazz == TimestampData.class
                        ? new TimestampDataTypeInfo(precision)
                        : (clazz == LocalDateTime.class
                                ? ((3 == precision)
                                        ? Types.LOCAL_DATE_TIME
                                        : new LegacyLocalDateTimeTypeInfo(precision))
                                : ((3 == precision)
                                        ? Types.SQL_TIMESTAMP
                                        : new LegacyTimestampTypeInfo(precision)));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            LocalZonedTimestampType lzTs = (LocalZonedTimestampType) logicalType;
            int precisionLzTs = lzTs.getPrecision();
            if (lzTs.getKind() == TimestampKind.REGULAR) {
                return clazz == TimestampData.class
                        ? new TimestampDataTypeInfo(precisionLzTs)
                        : (clazz == Instant.class
                                ? ((3 == precisionLzTs)
                                        ? Types.INSTANT
                                        : new LegacyInstantTypeInfo(precisionLzTs))
                                : TypeConversions.fromDataTypeToLegacyInfo(dataType));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case DECIMAL:
            DecimalType decimalType = (DecimalType) logicalType;
            return clazz == DecimalData.class
                    ? new DecimalDataTypeInfo(
                            decimalType.getPrecision(), decimalType.getScale())
                    : new BigDecimalTypeInfo(
                            decimalType.getPrecision(), decimalType.getScale());
        case CHAR:
        case VARCHAR: // ignore precision
            return clazz == StringData.class
                    ? StringDataTypeInfo.INSTANCE
                    : BasicTypeInfo.STRING_TYPE_INFO;
        case BINARY:
        case VARBINARY: // ignore precision
            return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
        case INTERVAL_YEAR_MONTH:
            return TimeIntervalTypeInfo.INTERVAL_MONTHS;
        case INTERVAL_DAY_TIME:
            return TimeIntervalTypeInfo.INTERVAL_MILLIS;
        case ARRAY:
            // arrays of non-primitive elements map to ObjectArrayTypeInfo
            if (dataType instanceof CollectionDataType
                    && !isPrimitive(
                            ((CollectionDataType) dataType)
                                    .getElementDataType()
                                    .getLogicalType())) {
                return ObjectArrayTypeInfo.getInfoFor(
                        fromDataTypeToTypeInfo(
                                ((CollectionDataType) dataType).getElementDataType()));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case MAP:
            KeyValueDataType mapType = (KeyValueDataType) dataType;
            return new MapTypeInfo(
                    fromDataTypeToTypeInfo(mapType.getKeyDataType()),
                    fromDataTypeToTypeInfo(mapType.getValueDataType()));
        case MULTISET:
            return MultisetTypeInfo.getInfoFor(
                    fromDataTypeToTypeInfo(
                            ((CollectionDataType) dataType).getElementDataType()));
        case ROW:
            if (RowData.class.isAssignableFrom(dataType.getConversionClass())) {
                return InternalTypeInfo.of((RowType) fromDataTypeToLogicalType(dataType));
            } else if (Row.class == dataType.getConversionClass()) {
                RowType logicalRowType = (RowType) logicalType;
                return new RowTypeInfo(
                        dataType.getChildren().stream()
                                .map(TypeInfoDataTypeConverter::fromDataTypeToTypeInfo)
                                .toArray(TypeInformation[]::new),
                        logicalRowType.getFieldNames().toArray(new String[0]));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case RAW:
            if (logicalType instanceof RawType) {
                return ExternalTypeInfo.of(dataType);
            }
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
        default:
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
    }
}
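A minimal usage sketch of the ARRAY branch; note that the converter's import path is an assumption (in the Blink planner it lives under org.apache.flink.table.runtime.types), and the printed output is illustrative.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter;
import org.apache.flink.table.types.DataType;

public class TypeInfoConversionSketch {
    public static void main(String[] args) {
        // ARRAY<STRING> keeps its default conversion class (String[]); the element
        // type is non-primitive, so the ARRAY branch yields an ObjectArrayTypeInfo
        // rather than falling back to a legacy type info
        DataType arrayOfStrings = DataTypes.ARRAY(DataTypes.STRING());
        TypeInformation<?> typeInfo =
                TypeInfoDataTypeConverter.fromDataTypeToTypeInfo(arrayOfStrings);
        System.out.println(typeInfo); // e.g. ObjectArrayTypeInfo<String>
    }
}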