
Example 1 with RowField

Use of org.apache.flink.table.types.logical.RowType.RowField in project flink by apache.

From the class DynamicSinkUtils, method createConsumedType.

/**
 * Returns the {@link DataType} that a sink should consume as the output from the runtime.
 *
 * <p>The format looks as follows: {@code PHYSICAL COLUMNS + PERSISTED METADATA COLUMNS}
 */
private static RowType createConsumedType(ResolvedSchema schema, DynamicTableSink sink) {
    final Map<String, DataType> metadataMap = extractMetadataMap(sink);
    final Stream<RowField> physicalFields =
            schema.getColumns().stream()
                    .filter(Column::isPhysical)
                    .map(c -> new RowField(c.getName(), c.getDataType().getLogicalType()));
    final Stream<RowField> metadataFields =
            createRequiredMetadataKeys(schema, sink).stream()
                    .map(k -> new RowField(k, metadataMap.get(k).getLogicalType()));
    final List<RowField> rowFields =
            Stream.concat(physicalFields, metadataFields).collect(Collectors.toList());
    return new RowType(false, rowFields);
}
Also used : RowField(org.apache.flink.table.types.logical.RowType.RowField) DataType(org.apache.flink.table.types.DataType) RelDataType(org.apache.calcite.rel.type.RelDataType) RowType(org.apache.flink.table.types.logical.RowType)
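To see the resulting shape in isolation, here is a minimal sketch that builds a consumed-style RowType directly from RowField instances. The column names and types are invented for illustration and are not taken from any real table; only the non-nullable top-level row mirrors createConsumedType above.

import java.util.Arrays;
import java.util.List;

import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.RowType.RowField;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.table.types.logical.VarCharType;

public class ConsumedTypeSketch {

    public static void main(String[] args) {
        // Hypothetical physical columns followed by one persisted metadata column,
        // mirroring the PHYSICAL COLUMNS + PERSISTED METADATA COLUMNS layout described above.
        final List<RowField> rowFields = Arrays.asList(
                new RowField("id", new IntType(false)),
                new RowField("name", new VarCharType(VarCharType.MAX_LENGTH)),
                new RowField("timestamp", new TimestampType(3)));

        // The consumed type is a non-nullable top-level row, as in createConsumedType.
        final RowType consumedType = new RowType(false, rowFields);
        System.out.println(consumedType.asSummaryString());
    }
}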

Example 2 with RowField

Use of org.apache.flink.table.types.logical.RowType.RowField in project flink by apache.

From the class DynamicSinkUtils, method validateSchemaAndApplyImplicitCast.

/**
 * Checks if the given query can be written into the given sink's table schema.
 *
 * <p>It checks whether field types are compatible (types should be equal including precisions).
 * If types are not compatible, but can be implicitly cast, a cast projection will be applied.
 * Otherwise, an exception will be thrown.
 */
public static RelNode validateSchemaAndApplyImplicitCast(
        RelNode query,
        ResolvedSchema sinkSchema,
        String tableDebugName,
        DataTypeFactory dataTypeFactory,
        FlinkTypeFactory typeFactory) {
    final RowType queryType = FlinkTypeFactory.toLogicalRowType(query.getRowType());
    final List<RowField> queryFields = queryType.getFields();
    final RowType sinkType = (RowType) fixSinkDataType(dataTypeFactory, sinkSchema.toSinkRowDataType()).getLogicalType();
    final List<RowField> sinkFields = sinkType.getFields();
    if (queryFields.size() != sinkFields.size()) {
        throw createSchemaMismatchException("Different number of columns.", tableDebugName, queryFields, sinkFields);
    }
    boolean requiresCasting = false;
    for (int i = 0; i < sinkFields.size(); i++) {
        final LogicalType queryColumnType = queryFields.get(i).getType();
        final LogicalType sinkColumnType = sinkFields.get(i).getType();
        if (!supportsImplicitCast(queryColumnType, sinkColumnType)) {
            throw createSchemaMismatchException(
                    String.format(
                            "Incompatible types for sink column '%s' at position %s.",
                            sinkFields.get(i).getName(), i),
                    tableDebugName,
                    queryFields,
                    sinkFields);
        }
        if (!supportsAvoidingCast(queryColumnType, sinkColumnType)) {
            requiresCasting = true;
        }
    }
    if (requiresCasting) {
        final RelDataType castRelDataType = typeFactory.buildRelNodeRowType(sinkType);
        return RelOptUtil.createCastRel(query, castRelDataType, true);
    }
    return query;
}
Also used : RowField(org.apache.flink.table.types.logical.RowType.RowField) RowType(org.apache.flink.table.types.logical.RowType) LogicalType(org.apache.flink.table.types.logical.LogicalType) RelDataType(org.apache.calcite.rel.type.RelDataType) RelHint(org.apache.calcite.rel.hint.RelHint)
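The per-column checks in the loop above rely on two predicates. The sketch below probes them for a hypothetical INT query column written into a BIGINT sink column, assuming the predicates resolve to the static methods in org.apache.flink.table.types.logical.utils.LogicalTypeCasts.

import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;

import static org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsAvoidingCast;
import static org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsImplicitCast;

public class ImplicitCastSketch {

    public static void main(String[] args) {
        // A hypothetical query column of type INT written into a BIGINT sink column.
        final LogicalType queryColumnType = new IntType();
        final LogicalType sinkColumnType = new BigIntType();

        // INT widens implicitly to BIGINT, so the validation above would not throw ...
        System.out.println(supportsImplicitCast(queryColumnType, sinkColumnType));
        // ... but the cast cannot be avoided, so a cast projection would be applied.
        System.out.println(supportsAvoidingCast(queryColumnType, sinkColumnType));
    }
}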

Example 3 with RowField

Use of org.apache.flink.table.types.logical.RowType.RowField in project flink by apache.

From the class LogicalTypeUtils, method renameRowFields.

/**
 * Renames the fields of the given {@link RowType}.
 */
public static RowType renameRowFields(RowType rowType, List<String> newFieldNames) {
    Preconditions.checkArgument(
            rowType.getFieldCount() == newFieldNames.size(),
            "Row length and new names must match.");
    final List<RowField> newFields =
            IntStream.range(0, rowType.getFieldCount())
                    .mapToObj(pos -> {
                        final RowField oldField = rowType.getFields().get(pos);
                        return new RowField(
                                newFieldNames.get(pos),
                                oldField.getType(),
                                oldField.getDescription().orElse(null));
                    })
                    .collect(Collectors.toList());
    return new RowType(rowType.isNullable(), newFields);
}
Also used : RowField(org.apache.flink.table.types.logical.RowType.RowField) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) IntStream(java.util.stream.IntStream) RowData(org.apache.flink.table.data.RowData) TimestampData(org.apache.flink.table.data.TimestampData) MapData(org.apache.flink.table.data.MapData) StructuredType(org.apache.flink.table.types.logical.StructuredType) DecimalData(org.apache.flink.table.data.DecimalData) RowType(org.apache.flink.table.types.logical.RowType) Preconditions(org.apache.flink.util.Preconditions) Collectors(java.util.stream.Collectors) StringData(org.apache.flink.table.data.StringData) TimestampType(org.apache.flink.table.types.logical.TimestampType) ArrayData(org.apache.flink.table.data.ArrayData) List(java.util.List) DistinctType(org.apache.flink.table.types.logical.DistinctType) LogicalType(org.apache.flink.table.types.logical.LogicalType) RawValueData(org.apache.flink.table.data.RawValueData) RowField(org.apache.flink.table.types.logical.RowType.RowField) Internal(org.apache.flink.annotation.Internal) ZonedTimestampType(org.apache.flink.table.types.logical.ZonedTimestampType) RowType(org.apache.flink.table.types.logical.RowType)
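A short usage sketch, assuming the method is called via org.apache.flink.table.types.logical.utils.LogicalTypeUtils; the field names here are invented for the example.

import java.util.Arrays;

import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.table.types.logical.utils.LogicalTypeUtils;

public class RenameRowFieldsSketch {

    public static void main(String[] args) {
        // RowType.of(...) assigns the default field names f0, f1, ...
        final RowType original = RowType.of(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));

        // Rename both fields; types, nullability, and descriptions are carried over unchanged.
        final RowType renamed = LogicalTypeUtils.renameRowFields(original, Arrays.asList("id", "name"));
        System.out.println(renamed.asSummaryString());
    }
}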

Example 4 with RowField

Use of org.apache.flink.table.types.logical.RowType.RowField in project flink by apache.

From the class DataTypes, method ROW.

/**
 * Data type of a sequence of fields. A field consists of a field name, field type, and an
 * optional description. The most specific type of a row of a table is a row type. In this case,
 * each column of the row corresponds to the field of the row type that has the same ordinal
 * position as the column.
 *
 * <p>Compared to the SQL standard, an optional field description simplifies the handling of
 * complex structures.
 *
 * <p>Use {@link #FIELD(String, DataType)} or {@link #FIELD(String, DataType, String)} to
 * construct fields.
 *
 * @see RowType
 */
public static DataType ROW(Field... fields) {
    final List<RowField> logicalFields =
            Stream.of(fields)
                    .map(f -> Preconditions.checkNotNull(f, "Field definition must not be null."))
                    .map(f -> new RowField(f.name, f.dataType.getLogicalType(), f.description))
                    .collect(Collectors.toList());
    final List<DataType> fieldDataTypes =
            Stream.of(fields).map(f -> f.dataType).collect(Collectors.toList());
    return new FieldsDataType(new RowType(logicalFields), fieldDataTypes);
}
Also used : RowField(org.apache.flink.table.types.logical.RowType.RowField) DataType(org.apache.flink.table.types.DataType) AtomicDataType(org.apache.flink.table.types.AtomicDataType) Arrays(java.util.Arrays) BiFunction(java.util.function.BiFunction) StructuredType(org.apache.flink.table.types.logical.StructuredType) MapType(org.apache.flink.table.types.logical.MapType) CharType(org.apache.flink.table.types.logical.CharType) RawType(org.apache.flink.table.types.logical.RawType) DecimalType(org.apache.flink.table.types.logical.DecimalType) BooleanType(org.apache.flink.table.types.logical.BooleanType) Map(java.util.Map) ExtractionUtils.validateStructuredClass(org.apache.flink.table.types.extraction.ExtractionUtils.validateStructuredClass) TimeType(org.apache.flink.table.types.logical.TimeType) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) StructuredAttribute(org.apache.flink.table.types.logical.StructuredType.StructuredAttribute) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) DayTimeIntervalType(org.apache.flink.table.types.logical.DayTimeIntervalType) Preconditions(org.apache.flink.util.Preconditions) Collectors(java.util.stream.Collectors) NullType(org.apache.flink.table.types.logical.NullType) Objects(java.util.Objects) List(java.util.List) Stream(java.util.stream.Stream) CollectionDataType(org.apache.flink.table.types.CollectionDataType) LogicalType(org.apache.flink.table.types.logical.LogicalType) Optional(java.util.Optional) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) DataTypeFactory(org.apache.flink.table.catalog.DataTypeFactory) IntStream(java.util.stream.IntStream) BinaryType(org.apache.flink.table.types.logical.BinaryType) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) TinyIntType(org.apache.flink.table.types.logical.TinyIntType) IntType(org.apache.flink.table.types.logical.IntType) PublicEvolving(org.apache.flink.annotation.PublicEvolving) HashMap(java.util.HashMap) FloatType(org.apache.flink.table.types.logical.FloatType) RowType(org.apache.flink.table.types.logical.RowType) TimestampType(org.apache.flink.table.types.logical.TimestampType) DoubleType(org.apache.flink.table.types.logical.DoubleType) FieldsDataType(org.apache.flink.table.types.FieldsDataType) YearMonthIntervalType(org.apache.flink.table.types.logical.YearMonthIntervalType) TypeInfoDataTypeConverter(org.apache.flink.table.types.utils.TypeInfoDataTypeConverter) Nullable(javax.annotation.Nullable) AbstractDataType(org.apache.flink.table.types.AbstractDataType) MultisetType(org.apache.flink.table.types.logical.MultisetType) BigIntType(org.apache.flink.table.types.logical.BigIntType) DateType(org.apache.flink.table.types.logical.DateType) VarCharType(org.apache.flink.table.types.logical.VarCharType) ArrayType(org.apache.flink.table.types.logical.ArrayType) SmallIntType(org.apache.flink.table.types.logical.SmallIntType) UnresolvedDataType(org.apache.flink.table.types.UnresolvedDataType) DataTypeHint(org.apache.flink.table.annotation.DataTypeHint) DayTimeResolution(org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution) YearMonthResolution(org.apache.flink.table.types.logical.YearMonthIntervalType.YearMonthResolution) VarBinaryType(org.apache.flink.table.types.logical.VarBinaryType) ZonedTimestampType(org.apache.flink.table.types.logical.ZonedTimestampType)
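Calling the method is simpler than the construction above suggests. A minimal sketch with invented field names, the second field carrying the optional description:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class RowDataTypeSketch {

    public static void main(String[] args) {
        // A two-field row; the second field carries the optional description.
        final DataType row = DataTypes.ROW(
                DataTypes.FIELD("id", DataTypes.INT()),
                DataTypes.FIELD("name", DataTypes.STRING(), "display name of the user"));
        System.out.println(row);
    }
}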

Example 5 with RowField

Use of org.apache.flink.table.types.logical.RowType.RowField in project flink by apache.

From the class LogicalTypeJsonDeserializer, method deserializeRow.

private static LogicalType deserializeRow(JsonNode logicalTypeNode, SerdeContext serdeContext) {
    final ArrayNode fieldNodes = (ArrayNode) logicalTypeNode.get(FIELD_NAME_FIELDS);
    final List<RowField> fields = new ArrayList<>();
    for (JsonNode fieldNode : fieldNodes) {
        final String fieldName = fieldNode.get(FIELD_NAME_FIELD_NAME).asText();
        final LogicalType fieldType = deserialize(fieldNode.get(FIELD_NAME_FIELD_TYPE), serdeContext);
        final String fieldDescription;
        if (fieldNode.has(FIELD_NAME_FIELD_DESCRIPTION)) {
            fieldDescription = fieldNode.get(FIELD_NAME_FIELD_DESCRIPTION).asText();
        } else {
            fieldDescription = null;
        }
        fields.add(new RowField(fieldName, fieldType, fieldDescription));
    }
    return new RowType(fields);
}
Also used : RowField(org.apache.flink.table.types.logical.RowType.RowField) ArrayList(java.util.ArrayList) LogicalType(org.apache.flink.table.types.logical.LogicalType) RowType(org.apache.flink.table.types.logical.RowType) JsonNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode) ArrayNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode)
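The deserializer above produces what could also be built by hand: a nullable RowType whose fields optionally carry a description. A sketch under that assumption, with invented field names (it does not show the JSON key names, which are defined by the FIELD_NAME_* constants):

import java.util.Arrays;

import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.RowType.RowField;
import org.apache.flink.table.types.logical.VarCharType;

public class DeserializedRowSketch {

    public static void main(String[] args) {
        // new RowType(fields) defaults to a nullable row, matching the return value above.
        final RowType rowType = new RowType(Arrays.asList(
                new RowField("id", new IntType()),
                new RowField("name", new VarCharType(VarCharType.MAX_LENGTH), "an optional description")));
        System.out.println(rowType.asSummaryString());
    }
}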

Aggregations

RowType (org.apache.flink.table.types.logical.RowType): 7 usages
RowField (org.apache.flink.table.types.logical.RowType.RowField): 7 usages
LogicalType (org.apache.flink.table.types.logical.LogicalType): 5 usages
List (java.util.List): 3 usages
Collectors (java.util.stream.Collectors): 3 usages
IntStream (java.util.stream.IntStream): 3 usages
RelDataType (org.apache.calcite.rel.type.RelDataType): 3 usages
DataType (org.apache.flink.table.types.DataType): 3 usages
LocalZonedTimestampType (org.apache.flink.table.types.logical.LocalZonedTimestampType): 3 usages
StructuredType (org.apache.flink.table.types.logical.StructuredType): 3 usages
TimestampType (org.apache.flink.table.types.logical.TimestampType): 3 usages
ZonedTimestampType (org.apache.flink.table.types.logical.ZonedTimestampType): 3 usages
Preconditions (org.apache.flink.util.Preconditions): 3 usages
Internal (org.apache.flink.annotation.Internal): 2 usages
ArrayData (org.apache.flink.table.data.ArrayData): 2 usages
DecimalData (org.apache.flink.table.data.DecimalData): 2 usages
MapData (org.apache.flink.table.data.MapData): 2 usages
RawValueData (org.apache.flink.table.data.RawValueData): 2 usages
RowData (org.apache.flink.table.data.RowData): 2 usages
StringData (org.apache.flink.table.data.StringData): 2 usages