Use of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project:
class LogicalTypeMerging, method findCommonNullableType.
@SuppressWarnings("ConstantConditions")
@Nullable
private static LogicalType findCommonNullableType(
        List<LogicalType> normalizedTypes, boolean hasRawType, boolean hasNullType) {
    // Finds the most general type that all given (normalized) types can be widened to,
    // or returns null if no such common type exists for this strategy (callers may then
    // fall back to a cast-based strategy).
    //
    // all RAW types must be equal
    if (hasRawType) {
        return findExactlySameType(normalizedTypes);
    }
    LogicalType resultType = null;
    for (LogicalType type : normalizedTypes) {
        final LogicalTypeRoot typeRoot = type.getTypeRoot();
        // NULL does not affect the result of this loop
        if (typeRoot == NULL) {
            continue;
        }
        // result type candidate: the first non-NULL type seeds the merge
        if (resultType == null) {
            resultType = type;
        }
        // find special patterns (e.g. interval/datetime combinations)
        final LogicalType patternType = findCommonTypePattern(resultType, type);
        if (patternType != null) {
            resultType = patternType;
            continue;
        }
        // for types of family CONSTRUCTED, delegate to the element-wise merge and
        // return immediately — the helper re-examines the whole list itself
        if (typeRoot == ARRAY) {
            return findCommonArrayType(normalizedTypes);
        } else if (typeRoot == MULTISET) {
            return findCommonMultisetType(normalizedTypes);
        } else if (typeRoot == MAP) {
            return findCommonMapType(normalizedTypes);
        } else if (typeRoot == ROW) {
            return findCommonRowType(normalizedTypes);
        }
        // this simplifies the following lines as we compare same interval families for example
        if (!areSimilarTypes(resultType, type)) {
            return null;
        }
        // for types of family CHARACTER_STRING or BINARY_STRING
        // FIX: use short-circuit '||' instead of the non-short-circuit bitwise '|'
        if (type.is(CHARACTER_STRING) || type.is(BINARY_STRING)) {
            final int length = combineLength(resultType, type);
            if (resultType.isAnyOf(VARCHAR, VARBINARY)) {
                // variable length types remain variable length types
                resultType = createStringType(resultType.getTypeRoot(), length);
            } else if (getLength(resultType) != getLength(type)) {
                // padding/modification of strings: differing fixed lengths force a
                // variable-length result
                if (resultType.is(CHAR)) {
                    resultType = createStringType(VARCHAR, length);
                } else if (resultType.is(BINARY)) {
                    resultType = createStringType(VARBINARY, length);
                }
            } else {
                // for same type with same length
                resultType = createStringType(typeRoot, length);
            }
        } else if (type.is(EXACT_NUMERIC)) {
            // for EXACT_NUMERIC types
            if (resultType.is(EXACT_NUMERIC)) {
                resultType = createCommonExactNumericType(resultType, type);
            } else if (resultType.is(APPROXIMATE_NUMERIC)) {
                // the result is already approximate
                if (typeRoot == DECIMAL) {
                    // in case of DECIMAL we enforce DOUBLE
                    resultType = new DoubleType();
                }
            } else {
                return null;
            }
        } else if (type.is(APPROXIMATE_NUMERIC)) {
            // for APPROXIMATE_NUMERIC types
            if (resultType.is(APPROXIMATE_NUMERIC)) {
                resultType = createCommonApproximateNumericType(resultType, type);
            } else if (resultType.is(EXACT_NUMERIC)) {
                // the result was exact so far
                if (typeRoot == DECIMAL) {
                    // in case of DECIMAL we enforce DOUBLE
                    resultType = new DoubleType();
                } else {
                    // enforce an approximate result
                    resultType = type;
                }
            } else {
                return null;
            }
        } else if (type.is(DATE)) {
            // for DATE
            if (resultType.is(DATE)) {
                // for enabling findCommonTypePattern
                resultType = new DateType();
            } else {
                return null;
            }
        } else if (type.is(TIME)) {
            // for TIME
            if (resultType.is(TIME)) {
                resultType = new TimeType(combinePrecision(resultType, type));
            } else {
                return null;
            }
        } else if (type.is(TIMESTAMP)) {
            // for TIMESTAMP
            if (resultType.is(TIMESTAMP)) {
                resultType = createCommonTimestampType(resultType, type);
            } else {
                return null;
            }
        } else if (typeRoot == INTERVAL_DAY_TIME) {
            // for day-time intervals; casts are safe because areSimilarTypes() held above
            resultType =
                    createCommonDayTimeIntervalType(
                            (DayTimeIntervalType) resultType, (DayTimeIntervalType) type);
        } else if (typeRoot == INTERVAL_YEAR_MONTH) {
            // for year-month intervals
            resultType =
                    createCommonYearMonthIntervalType(
                            (YearMonthIntervalType) resultType, (YearMonthIntervalType) type);
        } else {
            // other types are handled by findCommonCastableType
            return null;
        }
    }
    // NULL type only
    if (resultType == null && hasNullType) {
        return new NullType();
    }
    return resultType;
}
Use of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project:
class LogicalTypeDuplicator, method visit.
@Override
public LogicalType visit(StructuredType structuredType) {
    // Produce a deep copy of the structured type: start from a builder pre-filled with
    // the identity of the original, then transfer every remaining property.
    final StructuredType.Builder copy = instantiateStructuredBuilder(structuredType);
    copy.attributes(duplicateStructuredAttributes(structuredType));
    copy.setNullable(structuredType.isNullable());
    copy.setFinal(structuredType.isFinal());
    copy.setInstantiable(structuredType.isInstantiable());
    copy.comparison(structuredType.getComparison());
    // A super type, if present, is duplicated recursively; it must remain structured.
    structuredType.getSuperType().ifPresent(parent -> {
        final LogicalType duplicated = parent.accept(this);
        if (duplicated instanceof StructuredType) {
            copy.superType((StructuredType) duplicated);
        } else {
            throw new TableException("Unexpected super type. Structured type expected but was: " + duplicated);
        }
    });
    structuredType.getDescription().ifPresent(copy::description);
    return copy.build();
}
Use of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project:
class LegacyTypeInfoDataTypeConverter, method toLegacyTypeInfo.
/**
 * Converts a {@link DataType} back to legacy {@link TypeInformation}.
 *
 * <p>NOTE(review): the branch order below is semantically significant — time attributes are
 * tested before the map lookup, exact matches before the relaxations, and legacy conversion
 * before the array/multiset/map conversions. Do not reorder.
 *
 * @param dataType the data type to convert
 * @return the corresponding legacy type information (never null)
 * @throws TableException if no reverse conversion exists for the given data type
 */
public static TypeInformation<?> toLegacyTypeInfo(DataType dataType) {
// time indicators first as their hashCode/equals is shared with those of regular timestamps
if (canConvertToTimeAttributeTypeInfo(dataType)) {
return convertToTimeAttributeTypeInfo(dataType.getLogicalType());
}
// check in the map but relax the nullability constraint as every not null data type can be
// stored in the corresponding nullable type information
final TypeInformation<?> foundTypeInfo = dataTypeTypeInfoMap.get(dataType.nullable().bridgedTo(primitiveToWrapper(dataType.getConversionClass())));
if (foundTypeInfo != null) {
return foundTypeInfo;
}
// we are relaxing the constraint for DECIMAL, CHAR, VARCHAR, TIMESTAMP_WITHOUT_TIME_ZONE to
// support value literals in legacy planner
LogicalType logicalType = dataType.getLogicalType();
if (logicalType.is(DECIMAL)) {
return Types.BIG_DEC;
} else if (logicalType.is(CHAR)) {
// CHAR and VARCHAR both collapse to the generic STRING type information
return Types.STRING;
} else if (logicalType.is(VARCHAR)) {
return Types.STRING;
} else // relax the precision constraint as Timestamp can store the highest precision
if (logicalType.is(TIMESTAMP_WITHOUT_TIME_ZONE) && dataType.getConversionClass() == Timestamp.class) {
return Types.SQL_TIMESTAMP;
} else // relax the precision constraint as LocalDateTime can store the highest precision
if (logicalType.is(TIMESTAMP_WITHOUT_TIME_ZONE) && dataType.getConversionClass() == LocalDateTime.class) {
return Types.LOCAL_DATE_TIME;
} else // convert proctime back
if (logicalType.is(TIMESTAMP_WITH_LOCAL_TIME_ZONE) && dataType.getConversionClass() == Timestamp.class) {
return Types.SQL_TIMESTAMP;
} else // relax the precision constraint as LocalTime can store the highest precision
if (logicalType.is(TIME_WITHOUT_TIME_ZONE) && dataType.getConversionClass() == LocalTime.class) {
return Types.LOCAL_TIME;
} else if (canConvertToLegacyTypeInfo(dataType)) {
return convertToLegacyTypeInfo(dataType);
} else if (canConvertToRowTypeInfo(dataType)) {
return convertToRowTypeInfo((FieldsDataType) dataType);
} else // this could also match for basic array type info but this is covered by legacy type info
if (canConvertToObjectArrayTypeInfo(dataType)) {
return convertToObjectArrayTypeInfo((CollectionDataType) dataType);
} else if (canConvertToMultisetTypeInfo(dataType)) {
return convertToMultisetTypeInfo((CollectionDataType) dataType);
} else if (canConvertToMapTypeInfo(dataType)) {
return convertToMapTypeInfo((KeyValueDataType) dataType);
} else // makes the raw type accessible in the legacy planner
if (canConvertToRawTypeInfo(dataType)) {
return convertToRawTypeInfo(dataType);
}
// nothing matched: the data type did not originate from type information
throw new TableException(String.format("Unsupported conversion from data type '%s' (conversion class: %s) to type information. Only data types " + "that originated from type information fully support a reverse conversion.", dataType, dataType.getConversionClass().getName()));
}
Use of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project:
class ComparableTypeStrategy, method inferInputTypes.
@Override
public Optional<List<DataType>> inferInputTypes(CallContext callContext, boolean throwOnFailure) {
    // Validates that all argument types support the required comparison: a single
    // argument must be comparable with itself, multiple arguments pairwise with
    // their neighbors. Returns the unchanged argument types on success.
    final List<DataType> args = callContext.getArgumentDataTypes();
    if (args.size() == 1) {
        final LogicalType onlyType = args.get(0).getLogicalType();
        if (areComparable(onlyType, onlyType)) {
            return Optional.of(args);
        }
        if (throwOnFailure) {
            throw callContext.newValidationError("Type '%s' should support %s comparison with itself.", onlyType, comparisonToString());
        }
        return Optional.empty();
    }
    // pairwise check of each adjacent argument pair
    for (int idx = 1; idx < args.size(); idx++) {
        final LogicalType left = args.get(idx - 1).getLogicalType();
        final LogicalType right = args.get(idx).getLogicalType();
        if (areComparable(left, right)) {
            continue;
        }
        if (throwOnFailure) {
            throw callContext.newValidationError("All types in a comparison should support %s comparison with each other. Can not compare %s with %s", comparisonToString(), left, right);
        }
        return Optional.empty();
    }
    return Optional.of(args);
}
Use of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project:
class DecimalScale0TypeStrategy, method inferType.
@Override
public Optional<DataType> inferType(CallContext callContext) {
    // Forces the first argument's DECIMAL type to scale 0, keeping precision and
    // nullability. Non-DECIMAL arguments yield an empty result; legacy types pass
    // through unchanged.
    final DataType firstArg = callContext.getArgumentDataTypes().get(0);
    final LogicalType firstType = firstArg.getLogicalType();
    // a hack to make legacy types possible until we drop them
    if (firstType instanceof LegacyTypeInformationType) {
        return Optional.of(firstArg);
    }
    if (!firstType.is(LogicalTypeRoot.DECIMAL)) {
        return Optional.empty();
    }
    // already at scale 0 — nothing to adjust
    if (hasScale(firstType, 0)) {
        return Optional.of(firstArg);
    }
    final LogicalType scaleZero = new DecimalType(firstType.isNullable(), getPrecision(firstType), 0);
    return Optional.of(fromLogicalToDataType(scaleZero));
}
Aggregations