Search in sources:

Example 1 with UnresolvedColumn

Use of org.apache.flink.table.api.Schema.UnresolvedColumn in the Apache Flink project.

From the class SchemaTranslator, method createProducingResult.

/**
 * Converts the given {@link DataType} into the final {@link ProducingResult}.
 *
 * <p>Three kinds of use cases are served:
 *
 * <ul>
 *   <li>1. Derive physical columns from the input schema.
 *   <li>2. Derive physical columns from the input schema but enrich with metadata column and
 *       primary key.
 *   <li>3. Entirely use declared schema.
 * </ul>
 */
public static ProducingResult createProducingResult(ResolvedSchema inputSchema, @Nullable Schema declaredSchema) {
    if (declaredSchema == null) {
        // Use case 1: no declared schema, so everything is derived from the input.
        // Going through the data type erases time attributes.
        final DataType rowDataType = inputSchema.toSourceRowDataType();
        final Schema derivedSchema = Schema.newBuilder().fromRowDataType(rowDataType).build();
        return new ProducingResult(null, derivedSchema, null);
    }
    final List<UnresolvedColumn> columns = declaredSchema.getColumns();
    final boolean declaresPhysicalColumns = columns.stream().anyMatch(SchemaTranslator::isPhysical);
    if (declaresPhysicalColumns) {
        // Use case 3: the declared schema fully defines the result.
        return new ProducingResult(null, declaredSchema, null);
    }
    // Use case 2: no physical columns declared; derive them from the input and layer the
    // declared parts (metadata rowtime, primary key) on top.
    // Going through the data type erases time attributes.
    final DataType rowDataType = inputSchema.toSourceRowDataType();
    final DataType patchedDataType = patchDataTypeWithoutMetadataRowtime(rowDataType, columns);
    final Schema mergedSchema =
            Schema.newBuilder().fromRowDataType(patchedDataType).fromSchema(declaredSchema).build();
    return new ProducingResult(null, mergedSchema, null);
}
Also used: UnresolvedColumn (org.apache.flink.table.api.Schema.UnresolvedColumn), Schema (org.apache.flink.table.api.Schema), DataType (org.apache.flink.table.types.DataType), AbstractDataType (org.apache.flink.table.types.AbstractDataType)

Example 2 with UnresolvedColumn

Use of org.apache.flink.table.api.Schema.UnresolvedColumn in the Apache Flink project.

From the class SchemaTranslator, method createConsumingResult.

/**
 * Converts the given {@link DataType} and an optional declared {@link Schema} (possibly
 * incomplete) into the final {@link ConsumingResult}.
 *
 * <p>This method serves three types of use cases:
 *
 * <ul>
 *   <li>1. Derive physical columns from the input data type.
 *   <li>2. Derive physical columns but merge them with declared computed columns and other
 *       schema information.
 *   <li>3. Derive and enrich physical columns and merge other schema information (only if
 *       {@code mergePhysicalSchema} is set to {@code true}).
 * </ul>
 */
public static ConsumingResult createConsumingResult(DataTypeFactory dataTypeFactory, DataType inputDataType, @Nullable Schema declaredSchema, boolean mergePhysicalSchema) {
    final LogicalType inputType = inputDataType.getLogicalType();
    // we don't allow modifying the number of columns during enrichment, therefore we preserve
    // whether the original type was qualified as a top-level record or not
    final boolean isTopLevelRecord = LogicalTypeChecks.isCompositeType(inputType);
    // the schema will be entirely derived from the input
    if (declaredSchema == null) {
        // use case 1: build the schema purely from the input data type's fields
        final Schema.Builder builder = Schema.newBuilder();
        addPhysicalSourceDataTypeFields(builder, inputDataType, null);
        return new ConsumingResult(inputDataType, isTopLevelRecord, builder.build(), null);
    }
    final List<UnresolvedColumn> declaredColumns = declaredSchema.getColumns();
    final UnresolvedPrimaryKey declaredPrimaryKey = declaredSchema.getPrimaryKey().orElse(null);
    // use case 2: the declared schema contains no physical columns;
    // thus, it only enriches the non-physical column parts
    if (declaredColumns.stream().noneMatch(SchemaTranslator::isPhysical)) {
        final Schema.Builder builder = Schema.newBuilder();
        addPhysicalSourceDataTypeFields(builder, inputDataType, declaredPrimaryKey);
        builder.fromSchema(declaredSchema);
        return new ConsumingResult(inputDataType, isTopLevelRecord, builder.build(), null);
    }
    // physical columns are declared but merging was not requested:
    // take the declared schema as-is, keeping the unmodified input data type
    if (!mergePhysicalSchema) {
        return new ConsumingResult(inputDataType, isTopLevelRecord, declaredSchema, null);
    }
    // use case 3: the declared schema enriches the physical data type and the derived schema,
    // it possibly projects the result
    final DataType patchedDataType = patchDataTypeFromDeclaredSchema(dataTypeFactory, inputDataType, declaredColumns);
    final Schema patchedSchema = createPatchedSchema(isTopLevelRecord, patchedDataType, declaredSchema);
    final List<String> projections = extractProjections(patchedSchema, declaredSchema);
    return new ConsumingResult(patchedDataType, isTopLevelRecord, patchedSchema, projections);
}
Also used: UnresolvedPrimaryKey (org.apache.flink.table.api.Schema.UnresolvedPrimaryKey), UnresolvedColumn (org.apache.flink.table.api.Schema.UnresolvedColumn), Schema (org.apache.flink.table.api.Schema), LogicalType (org.apache.flink.table.types.logical.LogicalType), DataType (org.apache.flink.table.types.DataType), AbstractDataType (org.apache.flink.table.types.AbstractDataType)

Aggregations

Schema (org.apache.flink.table.api.Schema)2 UnresolvedColumn (org.apache.flink.table.api.Schema.UnresolvedColumn)2 AbstractDataType (org.apache.flink.table.types.AbstractDataType)2 DataType (org.apache.flink.table.types.DataType)2 UnresolvedPrimaryKey (org.apache.flink.table.api.Schema.UnresolvedPrimaryKey)1 LogicalType (org.apache.flink.table.types.logical.LogicalType)1