Example 31 with TableSchema

use of org.apache.flink.table.api.TableSchema in project flink by apache.

the class MergeTableLikeUtilTest method mergeConstraintsFromDerivedTable.

@Test
public void mergeConstraintsFromDerivedTable() {
    TableSchema sourceSchema = TableSchema.builder().add(TableColumn.physical("one", DataTypes.INT().notNull())).add(TableColumn.physical("two", DataTypes.STRING().notNull())).add(TableColumn.physical("three", DataTypes.FLOAT())).build();
    TableSchema mergedSchema = util.mergeTables(getDefaultMergingStrategies(), sourceSchema, Collections.emptyList(), Collections.emptyList(), primaryKey("one", "two"));
    TableSchema expectedSchema = TableSchema.builder().add(TableColumn.physical("one", DataTypes.INT().notNull())).add(TableColumn.physical("two", DataTypes.STRING().notNull())).add(TableColumn.physical("three", DataTypes.FLOAT())).primaryKey("PK_3531879", new String[] { "one", "two" }).build();
    assertThat(mergedSchema, equalTo(expectedSchema));
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) Test(org.junit.Test)
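
Note that when the PRIMARY KEY is contributed by the derived table, the merge utility assigns a generated constraint name (PK_3531879 above). For comparison, a minimal sketch of declaring the same constraint directly on a schema builder with a self-chosen name (PK_one_two is a hypothetical name; primary-key columns must be declared NOT NULL):

TableSchema schema =
        TableSchema.builder()
                .add(TableColumn.physical("one", DataTypes.INT().notNull()))
                .add(TableColumn.physical("two", DataTypes.STRING().notNull()))
                // explicit constraint name instead of a generated one
                .primaryKey("PK_one_two", new String[] {"one", "two"})
                .build();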

Example 32 with TableSchema

use of org.apache.flink.table.api.TableSchema in project flink by apache.

the class MergeTableLikeUtilTest method mergeOverwritingWatermarksDuplicate.

@Test
public void mergeOverwritingWatermarksDuplicate() {
    TableSchema sourceSchema = TableSchema.builder().add(TableColumn.physical("one", DataTypes.INT())).add(TableColumn.physical("timestamp", DataTypes.TIMESTAMP())).watermark("timestamp", "timestamp - INTERVAL '5' SECOND", DataTypes.TIMESTAMP()).build();
    List<SqlWatermark> derivedWatermarkSpecs = Collections.singletonList(new SqlWatermark(SqlParserPos.ZERO, identifier("timestamp"), boundedStrategy("timestamp", "10")));
    Map<FeatureOption, MergingStrategy> mergingStrategies = getDefaultMergingStrategies();
    mergingStrategies.put(FeatureOption.WATERMARKS, MergingStrategy.OVERWRITING);
    TableSchema mergedSchema = util.mergeTables(mergingStrategies, sourceSchema, Collections.emptyList(), derivedWatermarkSpecs, null);
    TableSchema expectedSchema = TableSchema.builder().add(TableColumn.physical("one", DataTypes.INT())).add(TableColumn.physical("timestamp", DataTypes.TIMESTAMP())).watermark("timestamp", "`timestamp` - INTERVAL '10' SECOND", DataTypes.TIMESTAMP()).build();
    assertThat(mergedSchema, equalTo(expectedSchema));
}
Also used : FeatureOption(org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption) TableSchema(org.apache.flink.table.api.TableSchema) SqlWatermark(org.apache.flink.sql.parser.ddl.SqlWatermark) MergingStrategy(org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy) Test(org.junit.Test)
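
With MergingStrategy.OVERWRITING, the watermark declared on the derived table completely replaces the one inherited from the source schema, as the expected schema above shows. For reference, a minimal sketch of declaring a bounded-out-of-orderness watermark directly on a schema (the column name ts and the 10-second bound are arbitrary choices for illustration):

TableSchema withWatermark =
        TableSchema.builder()
                .add(TableColumn.physical("ts", DataTypes.TIMESTAMP(3)))
                // watermark expression lags the rowtime column by 10 seconds
                .watermark("ts", "`ts` - INTERVAL '10' SECOND", DataTypes.TIMESTAMP(3))
                .build();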

Example 33 with TableSchema

use of org.apache.flink.table.api.TableSchema in project flink by apache.

the class MergeTableLikeUtilTest method mergeConstraintsFromBaseTable.

@Test
public void mergeConstraintsFromBaseTable() {
    TableSchema sourceSchema = TableSchema.builder().add(TableColumn.physical("one", DataTypes.INT().notNull())).add(TableColumn.physical("two", DataTypes.STRING().notNull())).add(TableColumn.physical("three", DataTypes.FLOAT())).primaryKey("constraint-42", new String[] { "one", "two" }).build();
    TableSchema mergedSchema = util.mergeTables(getDefaultMergingStrategies(), sourceSchema, Collections.emptyList(), Collections.emptyList(), null);
    TableSchema expectedSchema = TableSchema.builder().add(TableColumn.physical("one", DataTypes.INT().notNull())).add(TableColumn.physical("two", DataTypes.STRING().notNull())).add(TableColumn.physical("three", DataTypes.FLOAT())).primaryKey("constraint-42", new String[] { "one", "two" }).build();
    assertThat(mergedSchema, equalTo(expectedSchema));
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) Test(org.junit.Test)
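
Here the default merging strategy carries the base table's constraint over unchanged, including its name. A minimal sketch of inspecting such a constraint through the public TableSchema API, using the merged schema from the test above (the printed values are what that test would yield):

mergedSchema.getPrimaryKey().ifPresent(pk -> {
    System.out.println(pk.getName());     // constraint-42
    System.out.println(pk.getColumns());  // [one, two]
});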

Example 34 with TableSchema

use of org.apache.flink.table.api.TableSchema in project flink by apache.

the class SchemaValidator method deriveFieldMapping.

/**
 * Finds a table source field mapping.
 *
 * @param properties The properties describing a schema.
 * @param inputType The input type that a connector and/or format produces. This parameter can
 *     be used to resolve a rowtime field against an input field.
 */
public static Map<String, String> deriveFieldMapping(DescriptorProperties properties, Optional<TypeInformation<?>> inputType) {
    Map<String, String> mapping = new HashMap<>();
    TableSchema schema = properties.getTableSchema(SCHEMA);
    List<String> columnNames = new ArrayList<>();
    inputType.ifPresent(t -> columnNames.addAll(Arrays.asList(((CompositeType<?>) t).getFieldNames())));
    // add all source fields first because rowtime might reference one of them
    columnNames.forEach(name -> mapping.put(name, name));
    // then add all schema fields so that each maps to itself (implicit mapping)
    Arrays.stream(schema.getFieldNames()).forEach(name -> mapping.put(name, name));
    Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME);
    for (int i = 0; i < names.size(); i++) {
        String name = properties.getString(SCHEMA + "." + i + "." + SCHEMA_NAME);
        Optional<String> source = properties.getOptionalString(SCHEMA + "." + i + "." + SCHEMA_FROM);
        if (source.isPresent()) {
            // add explicit mapping
            mapping.put(name, source.get());
        } else {
            // implicit mapping or time
            boolean isProctime = properties.getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME).orElse(false);
            boolean isRowtime = properties.containsKey(SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_TYPE);
            boolean isGeneratedColumn = properties.containsKey(SCHEMA + "." + i + "." + EXPR);
            // proctime, rowtime, and computed columns are not physical fields,
            // so remove them from the mapping
            if (isProctime || isRowtime || isGeneratedColumn) {
                mapping.remove(name);
            } else if (!columnNames.contains(name)) {
                // remaining fields must be derivable from the source
                throw new ValidationException(
                        format(
                                "Could not map the schema field '%s' to a field from source. "
                                        + "Please specify the source field from which it can be derived.",
                                name));
            }
        }
    }
    return mapping;
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList)
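
A minimal usage sketch for deriveFieldMapping, assuming the legacy descriptor API (the SCHEMA constant comes from org.apache.flink.table.descriptors.Schema; the field names are arbitrary). Note that a plain physical field must be resolvable against the input type, otherwise the ValidationException above is thrown:

DescriptorProperties properties = new DescriptorProperties(true);
properties.putTableSchema(
        SCHEMA,
        TableSchema.builder()
                .field("id", DataTypes.INT())
                .field("name", DataTypes.STRING())
                .build());
// the input type a connector/format would produce
TypeInformation<?> inputType =
        Types.ROW_NAMED(new String[] {"id", "name"}, Types.INT, Types.STRING);
// every schema field maps implicitly to the source field of the same name: {id=id, name=name}
Map<String, String> mapping =
        SchemaValidator.deriveFieldMapping(properties, Optional.of(inputType));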

Example 35 with TableSchema

use of org.apache.flink.table.api.TableSchema in project flink by apache.

the class CatalogTableImpTest method testFromProperties.

@Test
public void testFromProperties() {
    TableSchema schema = createTableSchema();
    Map<String, String> prop = createProperties();
    CatalogTable table = new CatalogTableImpl(schema, createPartitionKeys(), prop, TEST);
    CatalogTableImpl tableFromProperties = CatalogTableImpl.fromProperties(table.toProperties());
    assertEquals(tableFromProperties.getOptions(), table.getOptions());
    assertEquals(tableFromProperties.getPartitionKeys(), table.getPartitionKeys());
    assertEquals(tableFromProperties.getSchema(), table.getSchema());
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) Test(org.junit.Test)
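
The helpers createTableSchema(), createPartitionKeys(), createProperties(), and the TEST comment constant are defined elsewhere in the test class. A minimal self-contained sketch of the same round trip, with hypothetical stand-ins for those helpers:

TableSchema schema =
        TableSchema.builder()
                .field("id", DataTypes.INT())
                .field("region", DataTypes.STRING())
                .build();
CatalogTable table =
        new CatalogTableImpl(
                schema,
                Collections.singletonList("region"),                  // partition keys
                Collections.singletonMap("connector", "filesystem"),  // options
                "a test comment");
// serialize to a flat string map and rebuild the table from it
CatalogTableImpl restored = CatalogTableImpl.fromProperties(table.toProperties());
assertEquals(table.getSchema(), restored.getSchema());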

Aggregations

TableSchema (org.apache.flink.table.api.TableSchema): 86 uses
Test (org.junit.Test): 54 uses
HashMap (java.util.HashMap): 26 uses
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 21 uses
SqlNode (org.apache.calcite.sql.SqlNode): 19 uses
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19 uses
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 18 uses
DataType (org.apache.flink.table.types.DataType): 16 uses
ValidationException (org.apache.flink.table.api.ValidationException): 14 uses
TableColumn (org.apache.flink.table.api.TableColumn): 10 uses
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 10 uses
ArrayList (java.util.ArrayList): 9 uses
List (java.util.List): 9 uses
Map (java.util.Map): 9 uses
FeatureOption (org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption): 9 uses
MergingStrategy (org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy): 9 uses
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 8 uses
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 8 uses
Arrays (java.util.Arrays): 7 uses
Configuration (org.apache.flink.configuration.Configuration): 7 uses