Usage of org.apache.flink.table.types.logical.RowType.RowField in the Apache Flink project — class DynamicSinkUtils, method createConsumedType.
/**
* Returns the {@link DataType} that a sink should consume as the output from the runtime.
*
* <p>The format looks as follows: {@code PHYSICAL COLUMNS + PERSISTED METADATA COLUMNS}
*/
/**
 * Returns the {@link DataType} that a sink should consume as the output from the runtime.
 *
 * <p>The format looks as follows: {@code PHYSICAL COLUMNS + PERSISTED METADATA COLUMNS}
 */
private static RowType createConsumedType(ResolvedSchema schema, DynamicTableSink sink) {
    final Map<String, DataType> metadataMap = extractMetadataMap(sink);

    // Physical columns come first, in schema order.
    final Stream<RowField> physicalFields =
            schema.getColumns().stream()
                    .filter(Column::isPhysical)
                    .map(
                            column ->
                                    new RowField(
                                            column.getName(),
                                            column.getDataType().getLogicalType()));

    // Followed by the metadata columns that the sink persists.
    final Stream<RowField> metadataFields =
            createRequiredMetadataKeys(schema, sink).stream()
                    .map(key -> new RowField(key, metadataMap.get(key).getLogicalType()));

    return new RowType(
            false, Stream.concat(physicalFields, metadataFields).collect(Collectors.toList()));
}
Usage of org.apache.flink.table.types.logical.RowType.RowField in the Apache Flink project — class DynamicSinkUtils, method validateSchemaAndApplyImplicitCast.
/**
* Checks if the given query can be written into the given sink's table schema.
*
* <p>It checks whether field types are compatible (types should be equal including precisions).
* If types are not compatible, but can be implicitly cast, a cast projection will be applied.
* Otherwise, an exception will be thrown.
*/
/**
 * Checks whether the given query can be written into the given sink's table schema.
 *
 * <p>Field types must be compatible (equal including precisions). Where they differ but an
 * implicit cast exists, a cast projection is applied; otherwise an exception is thrown.
 */
public static RelNode validateSchemaAndApplyImplicitCast(
        RelNode query,
        ResolvedSchema sinkSchema,
        String tableDebugName,
        DataTypeFactory dataTypeFactory,
        FlinkTypeFactory typeFactory) {
    final RowType queryType = FlinkTypeFactory.toLogicalRowType(query.getRowType());
    final RowType sinkType =
            (RowType)
                    fixSinkDataType(dataTypeFactory, sinkSchema.toSinkRowDataType())
                            .getLogicalType();

    final List<RowField> queryFields = queryType.getFields();
    final List<RowField> sinkFields = sinkType.getFields();

    if (queryFields.size() != sinkFields.size()) {
        throw createSchemaMismatchException(
                "Different number of columns.", tableDebugName, queryFields, sinkFields);
    }

    boolean requiresCasting = false;
    for (int pos = 0; pos < sinkFields.size(); pos++) {
        final LogicalType fromType = queryFields.get(pos).getType();
        final LogicalType toType = sinkFields.get(pos).getType();
        if (!supportsImplicitCast(fromType, toType)) {
            // Not even an implicit cast exists — the query cannot be written into this sink.
            throw createSchemaMismatchException(
                    String.format(
                            "Incompatible types for sink column '%s' at position %s.",
                            sinkFields.get(pos).getName(), pos),
                    tableDebugName,
                    queryFields,
                    sinkFields);
        }
        if (!supportsAvoidingCast(fromType, toType)) {
            requiresCasting = true;
        }
    }

    if (!requiresCasting) {
        return query;
    }
    // Insert a projection that casts the query output to the sink's row type.
    return RelOptUtil.createCastRel(query, typeFactory.buildRelNodeRowType(sinkType), true);
}
Usage of org.apache.flink.table.types.logical.RowType.RowField in the Apache Flink project — class LogicalTypeUtils, method renameRowFields.
/**
* Renames the fields of the given {@link RowType}.
*/
/**
 * Renames the fields of the given {@link RowType}, keeping types, descriptions, and
 * nullability unchanged.
 */
public static RowType renameRowFields(RowType rowType, List<String> newFieldNames) {
    Preconditions.checkArgument(
            rowType.getFieldCount() == newFieldNames.size(),
            "Row length and new names must match.");
    final List<RowField> renamedFields =
            IntStream.range(0, rowType.getFieldCount())
                    .mapToObj(
                            idx -> {
                                final RowField field = rowType.getFields().get(idx);
                                return new RowField(
                                        newFieldNames.get(idx),
                                        field.getType(),
                                        field.getDescription().orElse(null));
                            })
                    .collect(Collectors.toList());
    return new RowType(rowType.isNullable(), renamedFields);
}
Usage of org.apache.flink.table.types.logical.RowType.RowField in the Apache Flink project — class DataTypes, method ROW.
/**
* Data type of a sequence of fields. A field consists of a field name, field type, and an
* optional description. The most specific type of a row of a table is a row type. In this case,
* each column of the row corresponds to the field of the row type that has the same ordinal
* position as the column.
*
* <p>Compared to the SQL standard, an optional field description simplifies the handling with
* complex structures.
*
* <p>Use {@link #FIELD(String, DataType)} or {@link #FIELD(String, DataType, String)} to
* construct fields.
*
* @see RowType
*/
/**
 * Data type of a sequence of fields. A field consists of a field name, field type, and an
 * optional description. The most specific type of a row of a table is a row type. In this case,
 * each column of the row corresponds to the field of the row type that has the same ordinal
 * position as the column.
 *
 * <p>Compared to the SQL standard, an optional field description simplifies the handling with
 * complex structures.
 *
 * <p>Use {@link #FIELD(String, DataType)} or {@link #FIELD(String, DataType, String)} to
 * construct fields.
 *
 * @see RowType
 */
public static DataType ROW(Field... fields) {
    // Validate and convert in a single pass over the field definitions.
    final List<RowField> logicalFields =
            Stream.of(fields)
                    .map(
                            field -> {
                                Preconditions.checkNotNull(
                                        field, "Field definition must not be null.");
                                return new RowField(
                                        field.name,
                                        field.dataType.getLogicalType(),
                                        field.description);
                            })
                    .collect(Collectors.toList());
    final List<DataType> fieldDataTypes =
            Stream.of(fields).map(field -> field.dataType).collect(Collectors.toList());
    return new FieldsDataType(new RowType(logicalFields), fieldDataTypes);
}
Usage of org.apache.flink.table.types.logical.RowType.RowField in the Apache Flink project — class LogicalTypeJsonDeserializer, method deserializeRow.
/** Reconstructs a {@link RowType} from its serialized JSON representation. */
private static LogicalType deserializeRow(JsonNode logicalTypeNode, SerdeContext serdeContext) {
    final ArrayNode fieldNodes = (ArrayNode) logicalTypeNode.get(FIELD_NAME_FIELDS);
    final List<RowField> rowFields = new ArrayList<>();
    for (JsonNode fieldNode : fieldNodes) {
        final String name = fieldNode.get(FIELD_NAME_FIELD_NAME).asText();
        final LogicalType type =
                deserialize(fieldNode.get(FIELD_NAME_FIELD_TYPE), serdeContext);
        // The description is optional and may be absent from the serialized form.
        final String description =
                fieldNode.has(FIELD_NAME_FIELD_DESCRIPTION)
                        ? fieldNode.get(FIELD_NAME_FIELD_DESCRIPTION).asText()
                        : null;
        rowFields.add(new RowField(name, type, description));
    }
    return new RowType(rowFields);
}
Aggregations