
Example 16 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class TableFormatFactoryBaseTest, method testSchemaDerivation:

@Test
public void testSchemaDerivation() {
    final Map<String, String> properties = new HashMap<>();
    properties.put("schema.0.name", "otherField");
    properties.put("schema.0.type", "VARCHAR");
    properties.put("schema.0.from", "csvField");
    properties.put("schema.1.name", "abcField");
    properties.put("schema.1.type", "VARCHAR");
    properties.put("schema.2.name", "p");
    properties.put("schema.2.type", "TIMESTAMP");
    properties.put("schema.2.proctime", "true");
    properties.put("schema.3.name", "r");
    properties.put("schema.3.type", "TIMESTAMP");
    properties.put("schema.3.rowtime.timestamps.type", "from-source");
    properties.put("schema.3.rowtime.watermarks.type", "from-source");
    final TableSchema actualSchema = TableFormatFactoryBase.deriveSchema(properties);
    final TableSchema expectedSchema =
            TableSchema.builder()
                    .field("csvField", Types.STRING) // aliased from "otherField"
                    .field("abcField", Types.STRING)
                    .build();
    assertEquals(expectedSchema, actualSchema);
}
Also used: TableSchema (org.apache.flink.table.api.TableSchema), HashMap (java.util.HashMap), Test (org.junit.Test)
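
The "schema.#.from" property aliases a table column to a differently named field coming from the format. A minimal sketch (not from the Flink sources; it reuses the classes exercised by the test above) that makes the aliasing visible:

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.factories.TableFormatFactoryBase;

public class DeriveSchemaAliasSketch {
    public static void main(String[] args) {
        final Map<String, String> properties = new HashMap<>();
        // The table exposes "otherField", but the format delivers "csvField".
        properties.put("schema.0.name", "otherField");
        properties.put("schema.0.type", "VARCHAR");
        properties.put("schema.0.from", "csvField");
        final TableSchema derived = TableFormatFactoryBase.deriveSchema(properties);
        // Prints [csvField]: the derived format schema keeps the format-side name.
        System.out.println(Arrays.toString(derived.getFieldNames()));
    }
}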

Example 17 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class CatalogTableImpl, method fromProperties:

/**
 * Constructs a {@link CatalogTableImpl} from complete properties that contain the table schema.
 */
public static CatalogTableImpl fromProperties(Map<String, String> properties) {
    DescriptorProperties descriptorProperties = new DescriptorProperties(false);
    descriptorProperties.putProperties(properties);
    TableSchema tableSchema = descriptorProperties.getTableSchema(SCHEMA);
    List<String> partitionKeys = descriptorProperties.getPartitionKeys();
    return new CatalogTableImpl(tableSchema, partitionKeys, removeRedundant(properties, tableSchema, partitionKeys), "");
}
Also used: TableSchema (org.apache.flink.table.api.TableSchema), DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties)
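
A usage sketch for the method above (the "schema.#.data-type" keys are assumed from the DescriptorProperties layout; the connector option is a hypothetical placeholder):

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.catalog.CatalogTableImpl;

public class FromPropertiesSketch {
    public static void main(String[] args) {
        final Map<String, String> props = new HashMap<>();
        props.put("schema.0.name", "id");
        props.put("schema.0.data-type", "INT");
        props.put("schema.1.name", "name");
        props.put("schema.1.data-type", "VARCHAR(2147483647)");
        props.put("connector", "datagen"); // hypothetical connector option
        final CatalogTableImpl table = CatalogTableImpl.fromProperties(props);
        // The schema round-trips: two physical columns, no partition keys.
        System.out.println(table.getSchema());
    }
}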

Example 18 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class TableFormatFactoryBase, method deriveSchema:

// --------------------------------------------------------------------------------------------
/**
 * Derives the table schema that can be used as a format schema, i.e. without time
 * attributes and generated columns.
 */
public static TableSchema deriveSchema(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = new DescriptorProperties();
    descriptorProperties.putProperties(properties);
    final TableSchema.Builder builder = TableSchema.builder();
    final TableSchema tableSchema = descriptorProperties.getTableSchema(SCHEMA);
    for (int i = 0; i < tableSchema.getFieldCount(); i++) {
        final TableColumn tableColumn = tableSchema.getTableColumns().get(i);
        final String fieldName = tableColumn.getName();
        final DataType dataType = tableColumn.getType();
        if (!tableColumn.isPhysical()) {
            // skip non-physical columns
            continue;
        }
        final boolean isProctime = descriptorProperties.getOptionalBoolean(SCHEMA + '.' + i + '.' + SCHEMA_PROCTIME).orElse(false);
        final String timestampKey = SCHEMA + '.' + i + '.' + ROWTIME_TIMESTAMPS_TYPE;
        final boolean isRowtime = descriptorProperties.containsKey(timestampKey);
        if (!isProctime && !isRowtime) {
            // check for aliasing
            final String aliasName = descriptorProperties.getOptionalString(SCHEMA + '.' + i + '.' + SCHEMA_FROM).orElse(fieldName);
            builder.field(aliasName, dataType);
        } else if (isRowtime
                && descriptorProperties.isValue(
                        timestampKey, ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD)) {
            // only use the rowtime attribute if it references a physical field
            final String aliasName =
                    descriptorProperties.getString(SCHEMA + '.' + i + '.' + ROWTIME_TIMESTAMPS_FROM);
            builder.field(aliasName, dataType);
        }
    }
    return builder.build();
}
Also used: TableSchema (org.apache.flink.table.api.TableSchema), DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties), DataType (org.apache.flink.table.types.DataType), TableColumn (org.apache.flink.table.api.TableColumn)
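
The last branch above is easy to miss: a rowtime column survives schema derivation only when its timestamp source is "from-field", in which case the referenced physical field is emitted under the "rowtime.timestamps.from" name. A minimal sketch (not from the sources) contrasting this with the "from-source" case of Example 16:

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.factories.TableFormatFactoryBase;

public class RowtimeFromFieldSketch {
    public static void main(String[] args) {
        final Map<String, String> properties = new HashMap<>();
        properties.put("schema.0.name", "ts");
        properties.put("schema.0.type", "TIMESTAMP");
        properties.put("schema.0.rowtime.timestamps.type", "from-field");
        properties.put("schema.0.rowtime.timestamps.from", "eventTime");
        // "from-field" references a physical format field, so the column is
        // kept and emitted under the referenced name. With "from-source"
        // (Example 16), it would be dropped from the derived schema instead.
        final TableSchema derived = TableFormatFactoryBase.deriveSchema(properties);
        System.out.println(Arrays.toString(derived.getFieldNames())); // [eventTime]
    }
}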

Example 19 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class CatalogSchemaTable, method getRowType:

@Override
public RelDataType getRowType(RelDataTypeFactory typeFactory) {
    final FlinkTypeFactory flinkTypeFactory = (FlinkTypeFactory) typeFactory;
    final ResolvedSchema schema = contextResolvedTable.getResolvedSchema();
    // The following block is a workaround to support tables defined via
    // TableEnvironment.connect() whose actual table sources implement
    // DefinedProctimeAttribute/DefinedRowtimeAttributes. It should be removed
    // once DefinedProctimeAttribute/DefinedRowtimeAttributes are removed.
    Optional<TableSource<?>> sourceOpt = findAndCreateTableSource();
    if (isStreamingMode
            && sourceOpt.isPresent()
            && schema.getColumns().stream().allMatch(Column::isPhysical)
            && schema.getWatermarkSpecs().isEmpty()) {
        TableSchema tableSchema = TableSchema.fromResolvedSchema(schema);
        TableSource<?> source = sourceOpt.get();
        if (TableSourceValidation.hasProctimeAttribute(source)
                || TableSourceValidation.hasRowtimeAttribute(source)) {
            // If the table is defined by TableEnvironment.connect() and uses the
            // legacy proctime/rowtime descriptors, the TableSchema should fall
            // back to ConnectorCatalogTable#calculateSourceSchema.
            tableSchema = ConnectorCatalogTable.calculateSourceSchema(source, false);
        }
        return TableSourceUtil.getSourceRowType(flinkTypeFactory, tableSchema, scala.Option.empty(), true);
    }
    final List<String> fieldNames = schema.getColumnNames();
    final List<LogicalType> fieldTypes =
            schema.getColumnDataTypes().stream()
                    .map(DataType::getLogicalType)
                    .map(PlannerTypeUtils::removeLegacyTypes)
                    .collect(Collectors.toList());
    return flinkTypeFactory.buildRelNodeRowType(fieldNames, fieldTypes);
}
Also used: TableSource (org.apache.flink.table.sources.TableSource), StreamTableSource (org.apache.flink.table.sources.StreamTableSource), FlinkTypeFactory (org.apache.flink.table.planner.calcite.FlinkTypeFactory), TableSchema (org.apache.flink.table.api.TableSchema), Column (org.apache.flink.table.catalog.Column), LogicalType (org.apache.flink.table.types.logical.LogicalType), DataType (org.apache.flink.table.types.DataType), RelDataType (org.apache.calcite.rel.type.RelDataType), ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema)

Example 20 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class SqlToOperationConverterTest, method testCreateTableWithMetadataColumn:

@Test
public void testCreateTableWithMetadataColumn() {
    final String sql =
            "CREATE TABLE tbl1 (\n"
                    + "  a INT,\n"
                    + "  b STRING,\n"
                    + "  c INT METADATA,\n"
                    + "  d INT METADATA FROM 'other.key',\n"
                    + "  e INT METADATA VIRTUAL\n"
                    + ")\n"
                    + "  WITH (\n"
                    + "    'connector' = 'kafka',\n"
                    + "    'kafka.topic' = 'log.test'\n"
                    + ")\n";
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT));
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    final CreateTableOperation op = (CreateTableOperation) operation;
    final TableSchema actualSchema = op.getCatalogTable().getSchema();
    final TableSchema expectedSchema =
            TableSchema.builder()
                    .add(TableColumn.physical("a", DataTypes.INT()))
                    .add(TableColumn.physical("b", DataTypes.STRING()))
                    .add(TableColumn.metadata("c", DataTypes.INT()))
                    .add(TableColumn.metadata("d", DataTypes.INT(), "other.key"))
                    .add(TableColumn.metadata("e", DataTypes.INT(), true))
                    .build();
    assertThat(actualSchema).isEqualTo(expectedSchema);
}
Also used: TableSchema (org.apache.flink.table.api.TableSchema), FlinkPlannerImpl (org.apache.flink.table.planner.calcite.FlinkPlannerImpl), OperationMatchers.isCreateTableOperation (org.apache.flink.table.planner.utils.OperationMatchers.isCreateTableOperation), DropDatabaseOperation (org.apache.flink.table.operations.ddl.DropDatabaseOperation), SinkModifyOperation (org.apache.flink.table.operations.SinkModifyOperation), AlterTableOptionsOperation (org.apache.flink.table.operations.ddl.AlterTableOptionsOperation), AlterTableDropConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation), UseCatalogOperation (org.apache.flink.table.operations.UseCatalogOperation), UseDatabaseOperation (org.apache.flink.table.operations.UseDatabaseOperation), CreateViewOperation (org.apache.flink.table.operations.ddl.CreateViewOperation), ShowJarsOperation (org.apache.flink.table.operations.command.ShowJarsOperation), AlterDatabaseOperation (org.apache.flink.table.operations.ddl.AlterDatabaseOperation), QueryOperation (org.apache.flink.table.operations.QueryOperation), EndStatementSetOperation (org.apache.flink.table.operations.EndStatementSetOperation), UseModulesOperation (org.apache.flink.table.operations.UseModulesOperation), ShowFunctionsOperation (org.apache.flink.table.operations.ShowFunctionsOperation), CreateDatabaseOperation (org.apache.flink.table.operations.ddl.CreateDatabaseOperation), SetOperation (org.apache.flink.table.operations.command.SetOperation), LoadModuleOperation (org.apache.flink.table.operations.LoadModuleOperation), Operation (org.apache.flink.table.operations.Operation), ShowModulesOperation (org.apache.flink.table.operations.ShowModulesOperation), SourceQueryOperation (org.apache.flink.table.operations.SourceQueryOperation), UnloadModuleOperation (org.apache.flink.table.operations.UnloadModuleOperation), CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation), RemoveJarOperation (org.apache.flink.table.operations.command.RemoveJarOperation), BeginStatementSetOperation (org.apache.flink.table.operations.BeginStatementSetOperation), AddJarOperation (org.apache.flink.table.operations.command.AddJarOperation), AlterTableAddConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation), ExplainOperation (org.apache.flink.table.operations.ExplainOperation), ResetOperation (org.apache.flink.table.operations.command.ResetOperation), StatementSetOperation (org.apache.flink.table.operations.StatementSetOperation), AlterTableRenameOperation (org.apache.flink.table.operations.ddl.AlterTableRenameOperation), Test (org.junit.Test)
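
TableSchema and TableColumn are deprecated in recent Flink releases. For comparison, a hedged sketch of the same column list declared with the newer org.apache.flink.table.api.Schema builder (assumes Flink 1.13 or later):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;

public class MetadataColumnsSketch {
    public static void main(String[] args) {
        final Schema schema =
                Schema.newBuilder()
                        .column("a", DataTypes.INT())
                        .column("b", DataTypes.STRING())
                        .columnByMetadata("c", DataTypes.INT())
                        // reads the metadata key 'other.key' into column "d"
                        .columnByMetadata("d", DataTypes.INT(), "other.key")
                        // virtual: column "e" is excluded when writing to a sink
                        .columnByMetadata("e", DataTypes.INT(), true)
                        .build();
        System.out.println(schema);
    }
}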

Aggregations

Types co-occurring with TableSchema in the indexed examples, with usage counts:

TableSchema (org.apache.flink.table.api.TableSchema): 86
Test (org.junit.Test): 54
HashMap (java.util.HashMap): 26
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 21
SqlNode (org.apache.calcite.sql.SqlNode): 19
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 18
DataType (org.apache.flink.table.types.DataType): 16
ValidationException (org.apache.flink.table.api.ValidationException): 14
TableColumn (org.apache.flink.table.api.TableColumn): 10
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 10
ArrayList (java.util.ArrayList): 9
List (java.util.List): 9
Map (java.util.Map): 9
FeatureOption (org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption): 9
MergingStrategy (org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy): 9
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 8
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 8
Arrays (java.util.Arrays): 7
Configuration (org.apache.flink.configuration.Configuration): 7