Use of org.apache.flink.table.types.KeyValueDataType in project flink by apache.
The class DataFormatConverters, method getConverterForDataType.
/**
* Get {@link DataFormatConverter} for {@link DataType}.
*
* @param originDataType DataFormatConverter is oriented to Java format, while LogicalType has
* lost its specific Java format. Only DataType retains all its Java format information.
*/
public static DataFormatConverter getConverterForDataType(DataType originDataType) {
    DataType dataType = originDataType.nullable();
    DataFormatConverter converter = TYPE_TO_CONVERTER.get(dataType);
    if (converter != null) {
        return converter;
    }
    Class<?> clazz = dataType.getConversionClass();
    LogicalType logicalType = dataType.getLogicalType();
    switch (logicalType.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
            if (clazz == String.class) {
                return StringConverter.INSTANCE;
            } else if (clazz == StringData.class) {
                return StringDataConverter.INSTANCE;
            } else {
                throw new RuntimeException("Not support class for VARCHAR: " + clazz);
            }
        case BINARY:
        case VARBINARY:
            return PrimitiveByteArrayConverter.INSTANCE;
        case DECIMAL:
            Tuple2<Integer, Integer> ps = getPrecision(logicalType);
            if (clazz == BigDecimal.class) {
                return new BigDecimalConverter(ps.f0, ps.f1);
            } else if (clazz == DecimalData.class) {
                return new DecimalDataConverter(ps.f0, ps.f1);
            } else {
                throw new RuntimeException("Not support conversion class for DECIMAL: " + clazz);
            }
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            int precisionOfTS = getDateTimePrecision(logicalType);
            if (clazz == Timestamp.class) {
                return new TimestampConverter(precisionOfTS);
            } else if (clazz == LocalDateTime.class) {
                return new LocalDateTimeConverter(precisionOfTS);
            } else if (clazz == TimestampData.class) {
                return new TimestampDataConverter(precisionOfTS);
            } else {
                throw new RuntimeException(
                        "Not support conversion class for TIMESTAMP WITHOUT TIME ZONE: " + clazz);
            }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            int precisionOfLZTS = getDateTimePrecision(logicalType);
            if (clazz == Instant.class) {
                return new InstantConverter(precisionOfLZTS);
            } else if (clazz == Long.class || clazz == long.class) {
                return new LongTimestampDataConverter(precisionOfLZTS);
            } else if (clazz == TimestampData.class) {
                return new TimestampDataConverter(precisionOfLZTS);
            } else if (clazz == Timestamp.class) {
                return new TimestampLtzConverter(precisionOfLZTS);
            } else {
                throw new RuntimeException(
                        "Not support conversion class for TIMESTAMP WITH LOCAL TIME ZONE: " + clazz);
            }
        case ARRAY:
            if (clazz == ArrayData.class) {
                return ArrayDataConverter.INSTANCE;
            } else if (clazz == boolean[].class) {
                return PrimitiveBooleanArrayConverter.INSTANCE;
            } else if (clazz == short[].class) {
                return PrimitiveShortArrayConverter.INSTANCE;
            } else if (clazz == int[].class) {
                return PrimitiveIntArrayConverter.INSTANCE;
            } else if (clazz == long[].class) {
                return PrimitiveLongArrayConverter.INSTANCE;
            } else if (clazz == float[].class) {
                return PrimitiveFloatArrayConverter.INSTANCE;
            } else if (clazz == double[].class) {
                return PrimitiveDoubleArrayConverter.INSTANCE;
            }
            if (dataType instanceof CollectionDataType) {
                return new ObjectArrayConverter(
                        ((CollectionDataType) dataType)
                                .getElementDataType()
                                .bridgedTo(clazz.getComponentType()));
            } else {
                BasicArrayTypeInfo typeInfo =
                        (BasicArrayTypeInfo)
                                ((LegacyTypeInformationType) dataType.getLogicalType())
                                        .getTypeInformation();
                return new ObjectArrayConverter(
                        fromLegacyInfoToDataType(typeInfo.getComponentInfo())
                                .bridgedTo(clazz.getComponentType()));
            }
        case MAP:
            if (clazz == MapData.class) {
                return MapDataConverter.INSTANCE;
            }
            KeyValueDataType keyValueDataType = (KeyValueDataType) dataType;
            return new MapConverter(
                    keyValueDataType.getKeyDataType(), keyValueDataType.getValueDataType());
        case MULTISET:
            if (clazz == MapData.class) {
                return MapDataConverter.INSTANCE;
            }
            CollectionDataType collectionDataType = (CollectionDataType) dataType;
            return new MapConverter(
                    collectionDataType.getElementDataType(),
                    DataTypes.INT().bridgedTo(Integer.class));
        case ROW:
        case STRUCTURED_TYPE:
            TypeInformation<?> asTypeInfo = fromDataTypeToTypeInfo(dataType);
            if (asTypeInfo instanceof InternalTypeInfo && clazz == RowData.class) {
                LogicalType realLogicalType = ((InternalTypeInfo<?>) asTypeInfo).toLogicalType();
                return new RowDataConverter(getFieldCount(realLogicalType));
            }
            // legacy
            CompositeType compositeType = (CompositeType) asTypeInfo;
            DataType[] fieldTypes =
                    Stream.iterate(0, x -> x + 1)
                            .limit(compositeType.getArity())
                            .map((Function<Integer, TypeInformation>) compositeType::getTypeAt)
                            .map(TypeConversions::fromLegacyInfoToDataType)
                            .toArray(DataType[]::new);
            if (clazz == RowData.class) {
                return new RowDataConverter(compositeType.getArity());
            } else if (clazz == Row.class) {
                return new RowConverter(fieldTypes);
            } else if (Tuple.class.isAssignableFrom(clazz)) {
                return new TupleConverter((Class<Tuple>) clazz, fieldTypes);
            } else if (CaseClassConverter.PRODUCT_CLASS != null
                    && CaseClassConverter.PRODUCT_CLASS.isAssignableFrom(clazz)) {
                return new CaseClassConverter((TupleTypeInfoBase) compositeType, fieldTypes);
            } else if (compositeType instanceof PojoTypeInfo) {
                return new PojoConverter((PojoTypeInfo) compositeType, fieldTypes);
            } else {
                throw new IllegalStateException(
                        "Cannot find a converter for type "
                                + compositeType
                                + ". If the target should be a converter to scala.Product, then you might have a scala classpath issue.");
            }
        case RAW:
            if (logicalType instanceof RawType) {
                final RawType<?> rawType = (RawType<?>) logicalType;
                if (clazz == RawValueData.class) {
                    return RawValueDataConverter.INSTANCE;
                } else {
                    return new GenericConverter<>(rawType.getTypeSerializer());
                }
            }
            // legacy
            TypeInformation typeInfo =
                    logicalType instanceof LegacyTypeInformationType
                            ? ((LegacyTypeInformationType) logicalType).getTypeInformation()
                            : ((TypeInformationRawType) logicalType).getTypeInformation();
            // planner type info
            if (typeInfo instanceof StringDataTypeInfo) {
                return StringDataConverter.INSTANCE;
            } else if (typeInfo instanceof DecimalDataTypeInfo) {
                DecimalDataTypeInfo decimalType = (DecimalDataTypeInfo) typeInfo;
                return new DecimalDataConverter(decimalType.precision(), decimalType.scale());
            } else if (typeInfo instanceof BigDecimalTypeInfo) {
                BigDecimalTypeInfo decimalType = (BigDecimalTypeInfo) typeInfo;
                return new BigDecimalConverter(decimalType.precision(), decimalType.scale());
            } else if (typeInfo instanceof TimestampDataTypeInfo) {
                TimestampDataTypeInfo timestampDataTypeInfo = (TimestampDataTypeInfo) typeInfo;
                return new TimestampDataConverter(timestampDataTypeInfo.getPrecision());
            } else if (typeInfo instanceof LegacyLocalDateTimeTypeInfo) {
                LegacyLocalDateTimeTypeInfo dateTimeType = (LegacyLocalDateTimeTypeInfo) typeInfo;
                return new LocalDateTimeConverter(dateTimeType.getPrecision());
            } else if (typeInfo instanceof LegacyTimestampTypeInfo) {
                LegacyTimestampTypeInfo timestampType = (LegacyTimestampTypeInfo) typeInfo;
                return new TimestampConverter(timestampType.getPrecision());
            } else if (typeInfo instanceof LegacyInstantTypeInfo) {
                LegacyInstantTypeInfo instantTypeInfo = (LegacyInstantTypeInfo) typeInfo;
                return new InstantConverter(instantTypeInfo.getPrecision());
            }
            if (clazz == RawValueData.class) {
                return RawValueDataConverter.INSTANCE;
            }
            return new GenericConverter(typeInfo.createSerializer(new ExecutionConfig()));
        default:
            throw new RuntimeException("Not support dataType: " + dataType);
    }
}
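A minimal usage sketch, assuming a STRING column bridged to java.lang.String; the variable names and the round-trip at the end are illustrative and not part of the Flink source above:

// Resolve a converter for a STRING column and round-trip a value between the
// external format (String) and the internal format (StringData).
DataType stringType = DataTypes.STRING().bridgedTo(String.class);
@SuppressWarnings("unchecked")
DataFormatConverters.DataFormatConverter<StringData, String> converter =
        (DataFormatConverters.DataFormatConverter<StringData, String>)
                DataFormatConverters.getConverterForDataType(stringType);
StringData internal = converter.toInternal("hello"); // external -> internal
String external = converter.toExternal(internal);    // internal -> external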
Use of org.apache.flink.table.types.KeyValueDataType in project flink by apache.
The class LegacyTypeInfoDataTypeConverter, method toLegacyTypeInfo.
public static TypeInformation<?> toLegacyTypeInfo(DataType dataType) {
    // time indicators first as their hashCode/equals is shared with those of regular timestamps
    if (canConvertToTimeAttributeTypeInfo(dataType)) {
        return convertToTimeAttributeTypeInfo(dataType.getLogicalType());
    }
    // check in the map but relax the nullability constraint as every not null data type can be
    // stored in the corresponding nullable type information
    final TypeInformation<?> foundTypeInfo =
            dataTypeTypeInfoMap.get(
                    dataType.nullable()
                            .bridgedTo(primitiveToWrapper(dataType.getConversionClass())));
    if (foundTypeInfo != null) {
        return foundTypeInfo;
    }
    // we are relaxing the constraint for DECIMAL, CHAR, VARCHAR, TIMESTAMP_WITHOUT_TIME_ZONE to
    // support value literals in legacy planner
    LogicalType logicalType = dataType.getLogicalType();
    if (logicalType.is(DECIMAL)) {
        return Types.BIG_DEC;
    } else if (logicalType.is(CHAR)) {
        return Types.STRING;
    } else if (logicalType.is(VARCHAR)) {
        return Types.STRING;
    } else if (logicalType.is(TIMESTAMP_WITHOUT_TIME_ZONE)
            && dataType.getConversionClass() == Timestamp.class) {
        // relax the precision constraint as Timestamp can store the highest precision
        return Types.SQL_TIMESTAMP;
    } else if (logicalType.is(TIMESTAMP_WITHOUT_TIME_ZONE)
            && dataType.getConversionClass() == LocalDateTime.class) {
        // relax the precision constraint as LocalDateTime can store the highest precision
        return Types.LOCAL_DATE_TIME;
    } else if (logicalType.is(TIMESTAMP_WITH_LOCAL_TIME_ZONE)
            && dataType.getConversionClass() == Timestamp.class) {
        // convert proctime back
        return Types.SQL_TIMESTAMP;
    } else if (logicalType.is(TIME_WITHOUT_TIME_ZONE)
            && dataType.getConversionClass() == LocalTime.class) {
        // relax the precision constraint as LocalTime can store the highest precision
        return Types.LOCAL_TIME;
    } else if (canConvertToLegacyTypeInfo(dataType)) {
        return convertToLegacyTypeInfo(dataType);
    } else if (canConvertToRowTypeInfo(dataType)) {
        return convertToRowTypeInfo((FieldsDataType) dataType);
    } else if (canConvertToObjectArrayTypeInfo(dataType)) {
        // this could also match for basic array type info but this is covered by legacy type info
        return convertToObjectArrayTypeInfo((CollectionDataType) dataType);
    } else if (canConvertToMultisetTypeInfo(dataType)) {
        return convertToMultisetTypeInfo((CollectionDataType) dataType);
    } else if (canConvertToMapTypeInfo(dataType)) {
        return convertToMapTypeInfo((KeyValueDataType) dataType);
    } else if (canConvertToRawTypeInfo(dataType)) {
        // makes the raw type accessible in the legacy planner
        return convertToRawTypeInfo(dataType);
    }
    throw new TableException(
            String.format(
                    "Unsupported conversion from data type '%s' (conversion class: %s) to type information. Only data types "
                            + "that originated from type information fully support a reverse conversion.",
                    dataType, dataType.getConversionClass().getName()));
}
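A brief sketch of the relaxed-nullability lookup described in the comments above; both calls below are assumed to hit the same dataTypeTypeInfoMap entry for STRING:

// NOT NULL and nullable variants resolve to the same legacy type information,
// because the lookup normalizes the data type with nullable() first.
TypeInformation<?> nullableInfo =
        LegacyTypeInfoDataTypeConverter.toLegacyTypeInfo(DataTypes.STRING());
TypeInformation<?> notNullInfo =
        LegacyTypeInfoDataTypeConverter.toLegacyTypeInfo(DataTypes.STRING().notNull());
// Both are Types.STRING.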
Use of org.apache.flink.table.types.KeyValueDataType in project flink by apache.
The class ValuesOperationFactory, method convertMapToExpectedType.
private Optional<ResolvedExpression> convertMapToExpectedType(
        ResolvedExpression sourceExpression,
        KeyValueDataType targetDataType,
        ExpressionResolver.PostResolverFactory postResolverFactory) {
    DataType keyTargetDataType = targetDataType.getKeyDataType();
    DataType valueTargetDataType = targetDataType.getValueDataType();
    List<ResolvedExpression> resolvedChildren = sourceExpression.getResolvedChildren();
    ResolvedExpression[] castedChildren = new ResolvedExpression[resolvedChildren.size()];
    for (int i = 0; i < resolvedChildren.size(); i++) {
        Optional<ResolvedExpression> castedChild =
                convertToExpectedType(
                        resolvedChildren.get(i),
                        i % 2 == 0 ? keyTargetDataType : valueTargetDataType,
                        postResolverFactory);
        if (castedChild.isPresent()) {
            castedChildren[i] = castedChild.get();
        } else {
            return Optional.empty();
        }
    }
    return Optional.of(postResolverFactory.map(targetDataType, castedChildren));
}
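The even/odd index check relies on the map constructor flattening its resolved children as key0, value0, key1, value1, and so on. A minimal sketch of that alternation, with a hypothetical helper name:

// Hypothetical helper mirroring the loop's index logic: even child positions
// hold keys and must match the key type, odd positions hold values.
static DataType expectedChildType(int childIndex, KeyValueDataType mapType) {
    return childIndex % 2 == 0 ? mapType.getKeyDataType() : mapType.getValueDataType();
}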
Use of org.apache.flink.table.types.KeyValueDataType in project flink by apache.
The class DataTypePrecisionFixer, method visit.
@Override
public DataType visit(KeyValueDataType keyValueDataType) {
    DataType keyType = keyValueDataType.getKeyDataType();
    DataType valueType = keyValueDataType.getValueDataType();
    if (logicalType.getTypeRoot() == LogicalTypeRoot.MAP) {
        MapType mapType = (MapType) logicalType;
        DataType newKeyType = keyType.accept(new DataTypePrecisionFixer(mapType.getKeyType()));
        DataType newValueType =
                valueType.accept(new DataTypePrecisionFixer(mapType.getValueType()));
        return DataTypes.MAP(newKeyType, newValueType)
                .bridgedTo(keyValueDataType.getConversionClass());
    }
    throw new UnsupportedOperationException("Unsupported logical type : " + logicalType);
}
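A usage sketch, assuming the fixer is applied to a MAP data type whose DECIMAL value type should pick up the precision and scale carried by the resolved MapType; the concrete types below are illustrative:

// Push the precision of the resolved logical type into the data type.
LogicalType resolved = new MapType(VarCharType.STRING_TYPE, new DecimalType(10, 2));
DataType dataType = DataTypes.MAP(DataTypes.STRING(), DataTypes.DECIMAL(38, 18));
DataType fixed = dataType.accept(new DataTypePrecisionFixer(resolved));
// fixed is MAP<STRING, DECIMAL(10, 2)>, keeping the original conversion classes.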
Use of org.apache.flink.table.types.KeyValueDataType in project flink by apache.
The class DataTypes, method MAP.
/**
* Data type of an associative array that maps keys (including {@code NULL}) to values
* (including {@code NULL}). A map cannot contain duplicate keys; each key can map to at most
* one value.
*
* <p>There is no restriction of key types; it is the responsibility of the user to ensure
* uniqueness. The map type is an extension to the SQL standard.
*
* @see MapType
*/
public static DataType MAP(DataType keyDataType, DataType valueDataType) {
    Preconditions.checkNotNull(keyDataType, "Key data type must not be null.");
    Preconditions.checkNotNull(valueDataType, "Value data type must not be null.");
    return new KeyValueDataType(
            new MapType(keyDataType.getLogicalType(), valueDataType.getLogicalType()),
            keyDataType,
            valueDataType);
}
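A short usage example; the variable names are illustrative:

// Declare a map from STRING keys to INT values and read both sides back.
DataType scores = DataTypes.MAP(DataTypes.STRING(), DataTypes.INT());
DataType keyType = ((KeyValueDataType) scores).getKeyDataType();     // STRING
DataType valueType = ((KeyValueDataType) scores).getValueDataType(); // INT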