
Example 1 with WatermarkSpec

Use of org.apache.flink.table.api.WatermarkSpec in the Apache Flink project.

From the class DescriptorProperties, method putTableSchema:

/**
 * Adds a table schema under the given key.
 */
public void putTableSchema(String key, TableSchema schema) {
    checkNotNull(key);
    checkNotNull(schema);
    final String[] fieldNames = schema.getFieldNames();
    final DataType[] fieldTypes = schema.getFieldDataTypes();
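    // For each column, record the computed-column expression; non-computed columns map to null.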
    final String[] fieldExpressions = schema.getTableColumns().stream().map(column -> {
        if (column instanceof ComputedColumn) {
            return ((ComputedColumn) column).getExpression();
        }
        return null;
    }).toArray(String[]::new);
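    // For metadata columns, record the referenced metadata key (alias if set, otherwise the column name); others map to null.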
    final String[] fieldMetadata = schema.getTableColumns().stream().map(column -> {
        if (column instanceof MetadataColumn) {
            return ((MetadataColumn) column).getMetadataAlias().orElse(column.getName());
        }
        return null;
    }).toArray(String[]::new);
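    // For metadata columns, record whether the column is virtual; others map to null.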
    final String[] fieldVirtual = schema.getTableColumns().stream().map(column -> {
        if (column instanceof MetadataColumn) {
            return Boolean.toString(((MetadataColumn) column).isVirtual());
        }
        return null;
    }).toArray(String[]::new);
    final List<List<String>> values = new ArrayList<>();
    for (int i = 0; i < schema.getFieldCount(); i++) {
        values.add(Arrays.asList(fieldNames[i], fieldTypes[i].getLogicalType().asSerializableString(), fieldExpressions[i], fieldMetadata[i], fieldVirtual[i]));
    }
    putIndexedOptionalProperties(key, Arrays.asList(NAME, DATA_TYPE, EXPR, METADATA, VIRTUAL), values);
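    // Serialize each watermark spec as (rowtime attribute, watermark expression, expression output type).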
    if (!schema.getWatermarkSpecs().isEmpty()) {
        final List<List<String>> watermarkValues = new ArrayList<>();
        for (WatermarkSpec spec : schema.getWatermarkSpecs()) {
            watermarkValues.add(Arrays.asList(spec.getRowtimeAttribute(), spec.getWatermarkExpr(), spec.getWatermarkExprOutputType().getLogicalType().asSerializableString()));
        }
        putIndexedFixedProperties(key + '.' + WATERMARK, Arrays.asList(WATERMARK_ROWTIME, WATERMARK_STRATEGY_EXPR, WATERMARK_STRATEGY_DATA_TYPE), watermarkValues);
    }
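    // Serialize the primary key constraint, if one is defined.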
    schema.getPrimaryKey().ifPresent(pk -> {
        putString(key + '.' + PRIMARY_KEY_NAME, pk.getName());
        putString(key + '.' + PRIMARY_KEY_COLUMNS, String.join(",", pk.getColumns()));
    });
}
Also used : DynamicTableFactory(org.apache.flink.table.factories.DynamicTableFactory) IntStream(java.util.stream.IntStream) DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) EncodingUtils(org.apache.flink.table.utils.EncodingUtils) MetadataColumn(org.apache.flink.table.api.TableColumn.MetadataColumn) HashMap(java.util.HashMap) RowTypeInfo(org.apache.flink.api.java.typeutils.RowTypeInfo) Function(java.util.function.Function) Supplier(java.util.function.Supplier) MemorySize(org.apache.flink.configuration.MemorySize) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) BigDecimal(java.math.BigDecimal) InstantiationUtil(org.apache.flink.util.InstantiationUtil) Matcher(java.util.regex.Matcher) TableColumn(org.apache.flink.table.api.TableColumn) Duration(java.time.Duration) Map(java.util.Map) ConfigOption(org.apache.flink.configuration.ConfigOption) Preconditions.checkNotNull(org.apache.flink.util.Preconditions.checkNotNull) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) ComputedColumn(org.apache.flink.table.api.TableColumn.ComputedColumn) CatalogPropertiesUtil(org.apache.flink.table.catalog.CatalogPropertiesUtil) TableException(org.apache.flink.table.api.TableException) WatermarkSpec(org.apache.flink.table.api.WatermarkSpec) Set(java.util.Set) TableSchema(org.apache.flink.table.api.TableSchema) Preconditions(org.apache.flink.util.Preconditions) Collectors(java.util.stream.Collectors) TypeStringUtils(org.apache.flink.table.utils.TypeStringUtils) Objects(java.util.Objects) Consumer(java.util.function.Consumer) List(java.util.List) TimeUtils(org.apache.flink.util.TimeUtils) Stream(java.util.stream.Stream) LogicalType(org.apache.flink.table.types.logical.LogicalType) ValidationException(org.apache.flink.table.api.ValidationException) LogicalTypeParser(org.apache.flink.table.types.logical.utils.LogicalTypeParser) Optional(java.util.Optional) Internal(org.apache.flink.annotation.Internal) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) Pattern(java.util.regex.Pattern) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot) Collections(java.util.Collections)
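
A minimal sketch of how this serialization might be driven. The schema fields and the property key "schema" below are illustrative assumptions, and it assumes the (deprecated) DescriptorProperties class from org.apache.flink.table.descriptors with its usual asMap() view:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;

public class PutTableSchemaSketch {
    public static void main(String[] args) {
        // Hypothetical schema: one physical column plus a rowtime column with a watermark.
        TableSchema schema = TableSchema.builder()
                .field("user_id", DataTypes.BIGINT())
                .field("ts", DataTypes.TIMESTAMP(3))
                .watermark("ts", "ts - INTERVAL '5' SECOND", DataTypes.TIMESTAMP(3))
                .build();

        // Flatten the schema (columns, watermark spec, primary key) into string properties.
        DescriptorProperties properties = new DescriptorProperties(true);
        properties.putTableSchema("schema", schema);

        // The watermark spec should show up under indexed keys along the lines of
        // schema.watermark.0.rowtime / .strategy.expr / .strategy.data-type.
        properties.asMap().forEach((k, v) -> System.out.println(k + " = " + v));
    }
}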

Example 2 with WatermarkSpec

Use of org.apache.flink.table.api.WatermarkSpec in the Apache Flink project.

From the class TestValuesTableFactory, method validateAndExtractRowtimeIndex:

private static int validateAndExtractRowtimeIndex(CatalogTable sinkTable, boolean dropLateEvent, boolean isInsertOnly) {
    if (!dropLateEvent) {
        return -1;
    } else if (!isInsertOnly) {
        throw new ValidationException("Option 'sink.drop-late-event' only works for insert-only sink now.");
    }
    TableSchema schema = sinkTable.getSchema();
    List<WatermarkSpec> watermarkSpecs = schema.getWatermarkSpecs();
    if (watermarkSpecs.isEmpty()) {
        throw new ValidationException("Please define the watermark in the schema that is used to indicate the rowtime column. " + "The sink function will compare the rowtime and the current watermark to determine whether the event is late.");
    }
    String rowtimeName = watermarkSpecs.get(0).getRowtimeAttribute();
    return Arrays.asList(schema.getFieldNames()).indexOf(rowtimeName);
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) WatermarkSpec(org.apache.flink.table.api.WatermarkSpec)
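
The rowtime lookup itself only needs the schema. A small sketch of the same logic outside the factory, using a made-up example schema:

import java.util.Arrays;
import java.util.List;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.WatermarkSpec;

public class RowtimeIndexSketch {
    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder()
                .field("id", DataTypes.STRING())
                .field("event_time", DataTypes.TIMESTAMP(3))
                .watermark("event_time", "event_time - INTERVAL '1' SECOND", DataTypes.TIMESTAMP(3))
                .build();

        // As in validateAndExtractRowtimeIndex: the first watermark spec names the
        // rowtime attribute, and its position among the field names is the index.
        List<WatermarkSpec> specs = schema.getWatermarkSpecs();
        String rowtimeName = specs.get(0).getRowtimeAttribute();
        int rowtimeIndex = Arrays.asList(schema.getFieldNames()).indexOf(rowtimeName);
        System.out.println(rowtimeName + " -> index " + rowtimeIndex); // event_time -> index 1
    }
}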

Example 3 with WatermarkSpec

Use of org.apache.flink.table.api.WatermarkSpec in the Apache Flink project.

From the class TableSchemaUtils, method builderWithGivenSchema:

/**
 * Creates a builder with the given table schema.
 *
 * @param oriSchema Original schema
 * @return the builder with all the information from the given schema
 */
public static TableSchema.Builder builderWithGivenSchema(TableSchema oriSchema) {
    TableSchema.Builder builder = builderWithGivenColumns(oriSchema.getTableColumns());
    // Copy watermark specification.
    for (WatermarkSpec wms : oriSchema.getWatermarkSpecs()) {
        builder.watermark(wms.getRowtimeAttribute(), wms.getWatermarkExpr(), wms.getWatermarkExprOutputType());
    }
    // Copy primary key constraint.
    oriSchema.getPrimaryKey().ifPresent(pk -> builder.primaryKey(pk.getName(), pk.getColumns().toArray(new String[0])));
    return builder;
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) WatermarkSpec(org.apache.flink.table.api.WatermarkSpec)
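
A hedged usage sketch, assuming TableSchemaUtils (org.apache.flink.table.utils) is on the classpath and using a made-up schema with an explicitly named primary key:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.utils.TableSchemaUtils;

public class CopySchemaSketch {
    public static void main(String[] args) {
        TableSchema original = TableSchema.builder()
                .field("id", DataTypes.BIGINT().notNull())
                .field("ts", DataTypes.TIMESTAMP(3))
                .watermark("ts", "ts - INTERVAL '5' SECOND", DataTypes.TIMESTAMP(3))
                .primaryKey("pk_id", new String[] { "id" })
                .build();

        // Rebuild an equivalent schema: columns, the watermark spec, and the
        // primary key constraint are all carried over by builderWithGivenSchema.
        TableSchema copy = TableSchemaUtils.builderWithGivenSchema(original).build();
        System.out.println(copy);
    }
}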

Example 4 with WatermarkSpec

Use of org.apache.flink.table.api.WatermarkSpec in the Apache Flink project.

From the class TableSchemaUtils, method dropConstraint:

/**
 * Creates a new schema but drops the constraint with the given name.
 */
public static TableSchema dropConstraint(TableSchema oriSchema, String constraintName) {
    // Validate that the constraint to drop actually exists.
    Optional<UniqueConstraint> uniqueConstraintOpt = oriSchema.getPrimaryKey();
    if (!uniqueConstraintOpt.isPresent() || !uniqueConstraintOpt.get().getName().equals(constraintName)) {
        throw new ValidationException(String.format("Constraint %s to drop does not exist", constraintName));
    }
    TableSchema.Builder builder = builderWithGivenColumns(oriSchema.getTableColumns());
    // Copy watermark specification.
    for (WatermarkSpec wms : oriSchema.getWatermarkSpecs()) {
        builder.watermark(wms.getRowtimeAttribute(), wms.getWatermarkExpr(), wms.getWatermarkExprOutputType());
    }
    return builder.build();
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) WatermarkSpec(org.apache.flink.table.api.WatermarkSpec) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint)
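
A sketch of dropping that constraint, under the same assumptions and example schema as the previous sketch:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.utils.TableSchemaUtils;

public class DropConstraintSketch {
    public static void main(String[] args) {
        TableSchema withPk = TableSchema.builder()
                .field("id", DataTypes.BIGINT().notNull())
                .field("ts", DataTypes.TIMESTAMP(3))
                .watermark("ts", "ts - INTERVAL '5' SECOND", DataTypes.TIMESTAMP(3))
                .primaryKey("pk_id", new String[] { "id" })
                .build();

        // Removes the named primary key; columns and the watermark spec are kept.
        TableSchema withoutPk = TableSchemaUtils.dropConstraint(withPk, "pk_id");
        System.out.println(withoutPk.getPrimaryKey().isPresent()); // false
        System.out.println(withoutPk.getWatermarkSpecs().size());  // 1
    }
}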

Aggregations

TableSchema (org.apache.flink.table.api.TableSchema): 4
WatermarkSpec (org.apache.flink.table.api.WatermarkSpec): 4
ValidationException (org.apache.flink.table.api.ValidationException): 3
BigDecimal (java.math.BigDecimal): 1
Duration (java.time.Duration): 1
ArrayList (java.util.ArrayList): 1
Arrays (java.util.Arrays): 1
Collections (java.util.Collections): 1
HashMap (java.util.HashMap): 1
HashSet (java.util.HashSet): 1
List (java.util.List): 1
Map (java.util.Map): 1
Objects (java.util.Objects): 1
Optional (java.util.Optional): 1
Set (java.util.Set): 1
Consumer (java.util.function.Consumer): 1
Function (java.util.function.Function): 1
Supplier (java.util.function.Supplier): 1
Matcher (java.util.regex.Matcher): 1
Pattern (java.util.regex.Pattern): 1