Example 1 with ComputedColumn

Use of org.apache.flink.table.api.TableColumn.ComputedColumn in project flink by apache.

From the class DescriptorProperties, the method putTableSchema:

/**
 * Adds a table schema under the given key.
 */
public void putTableSchema(String key, TableSchema schema) {
    checkNotNull(key);
    checkNotNull(schema);
    final String[] fieldNames = schema.getFieldNames();
    final DataType[] fieldTypes = schema.getFieldDataTypes();
    // Computed columns are serialized via their SQL expression; all other columns map to null.
    final String[] fieldExpressions = schema.getTableColumns().stream().map(column -> {
        if (column instanceof ComputedColumn) {
            return ((ComputedColumn) column).getExpression();
        }
        return null;
    }).toArray(String[]::new);
    // Metadata columns are serialized via their metadata key, falling back to the column name.
    final String[] fieldMetadata = schema.getTableColumns().stream().map(column -> {
        if (column instanceof MetadataColumn) {
            return ((MetadataColumn) column).getMetadataAlias().orElse(column.getName());
        }
        return null;
    }).toArray(String[]::new);
    // Metadata columns also record whether they are virtual (i.e. not persisted).
    final String[] fieldVirtual = schema.getTableColumns().stream().map(column -> {
        if (column instanceof MetadataColumn) {
            return Boolean.toString(((MetadataColumn) column).isVirtual());
        }
        return null;
    }).toArray(String[]::new);
    // Zip the parallel arrays into one row of values per column.
    final List<List<String>> values = new ArrayList<>();
    for (int i = 0; i < schema.getFieldCount(); i++) {
        values.add(Arrays.asList(fieldNames[i], fieldTypes[i].getLogicalType().asSerializableString(), fieldExpressions[i], fieldMetadata[i], fieldVirtual[i]));
    }
    putIndexedOptionalProperties(key, Arrays.asList(NAME, DATA_TYPE, EXPR, METADATA, VIRTUAL), values);
    if (!schema.getWatermarkSpecs().isEmpty()) {
        final List<List<String>> watermarkValues = new ArrayList<>();
        for (WatermarkSpec spec : schema.getWatermarkSpecs()) {
            watermarkValues.add(Arrays.asList(spec.getRowtimeAttribute(), spec.getWatermarkExpr(), spec.getWatermarkExprOutputType().getLogicalType().asSerializableString()));
        }
        putIndexedFixedProperties(key + '.' + WATERMARK, Arrays.asList(WATERMARK_ROWTIME, WATERMARK_STRATEGY_EXPR, WATERMARK_STRATEGY_DATA_TYPE), watermarkValues);
    }
    schema.getPrimaryKey().ifPresent(pk -> {
        putString(key + '.' + PRIMARY_KEY_NAME, pk.getName());
        putString(key + '.' + PRIMARY_KEY_COLUMNS, String.join(",", pk.getColumns()));
    });
}
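
For context, here is a minimal, hedged sketch of how this serialization is typically exercised. The property key "schema", the class name SchemaRoundTrip, and the printed layout are illustrative assumptions based on the indexed key.<index>.<property> convention used by putIndexedOptionalProperties; verify the exact keys against your Flink version.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;

public class SchemaRoundTrip {
    public static void main(String[] args) {
        // A legacy TableSchema with one physical and one computed column. The
        // three-argument field() overload declares a computed column with the
        // given SQL expression.
        TableSchema schema = TableSchema.builder()
                .field("id", DataTypes.INT())
                .field("cost", DataTypes.INT(), "id * 2")
                .build();

        DescriptorProperties properties = new DescriptorProperties(true);
        properties.putTableSchema("schema", schema);

        // Illustrative flat layout (assumed key names):
        //   schema.0.name = id     schema.0.data-type = INT
        //   schema.1.name = cost   schema.1.data-type = INT
        //   schema.1.expr = id * 2
        properties.asMap().forEach((k, v) -> System.out.println(k + " = " + v));

        // getTableSchema reassembles an equivalent TableSchema from the flat properties.
        TableSchema restored = properties.getTableSchema("schema");
        System.out.println(restored);
    }
}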

Example 2 with ComputedColumn

Use of org.apache.flink.table.api.TableColumn.ComputedColumn in project flink by apache.

From the class SqlToOperationConverterTest, the method testCreateTableWithComputedColumn:

@Test
public void testCreateTableWithComputedColumn() {
    final String sql = "CREATE TABLE tbl1 (\n" + "  a int,\n" + "  b varchar, \n" + "  c as a - 1, \n" + "  d as b || '$$', \n" + "  e as my_udf1(a)," + "  f as `default`.my_udf2(a) + 1," + "  g as builtin.`default`.my_udf3(a) || '##'\n" + ")\n" + "  with (\n" + "    'connector' = 'kafka', \n" + "    'kafka.topic' = 'log.test'\n" + ")\n";
    // Register the scalar functions referenced by the computed columns in the built-in catalog.
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf1"), Func0$.MODULE$);
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf2"), Func1$.MODULE$);
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf3"), Func8$.MODULE$);
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT));
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    assertThat(catalogTable.getSchema().getFieldNames()).isEqualTo(new String[] { "a", "b", "c", "d", "e", "f", "g" });
    assertThat(catalogTable.getSchema().getFieldDataTypes()).isEqualTo(new DataType[] { DataTypes.INT(), DataTypes.STRING(), DataTypes.INT(), DataTypes.STRING(), DataTypes.INT().notNull(), DataTypes.INT(), DataTypes.STRING() });
    // The planner resolves computed-column expressions into fully qualified, back-quoted SQL strings.
    String[] columnExpressions = catalogTable.getSchema().getTableColumns().stream().filter(ComputedColumn.class::isInstance).map(ComputedColumn.class::cast).map(ComputedColumn::getExpression).toArray(String[]::new);
    String[] expected = new String[] { "`a` - 1", "`b` || '$$'", "`builtin`.`default`.`my_udf1`(`a`)", "`builtin`.`default`.`my_udf2`(`a`) + 1", "`builtin`.`default`.`my_udf3`(`a`) || '##'" };
    assertThat(columnExpressions).isEqualTo(expected);
}
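
The filter/cast/map chain in the assertion above is the usual pattern for extracting just the computed columns from a legacy TableSchema. As a standalone sketch (the helper name computedExpressions is our own):

import java.util.List;
import java.util.stream.Collectors;

import org.apache.flink.table.api.TableColumn.ComputedColumn;
import org.apache.flink.table.api.TableSchema;

// Returns the SQL expressions of all computed columns, in declaration order.
static List<String> computedExpressions(TableSchema schema) {
    return schema.getTableColumns().stream()
            .filter(ComputedColumn.class::isInstance)
            .map(ComputedColumn.class::cast)
            .map(ComputedColumn::getExpression)
            .collect(Collectors.toList());
}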

Example 3 with ComputedColumn

Use of org.apache.flink.table.api.TableColumn.ComputedColumn in project flink by apache.

From the class TableSchema, the method toSchema:

/**
 * Helps to migrate to the new {@link Schema} class.
 */
public Schema toSchema() {
    final Schema.Builder builder = Schema.newBuilder();
    columns.forEach(column -> {
        if (column instanceof PhysicalColumn) {
            final PhysicalColumn c = (PhysicalColumn) column;
            builder.column(c.getName(), c.getType());
        } else if (column instanceof MetadataColumn) {
            final MetadataColumn c = (MetadataColumn) column;
            builder.columnByMetadata(c.getName(), c.getType(), c.getMetadataAlias().orElse(null), c.isVirtual());
        } else if (column instanceof ComputedColumn) {
            final ComputedColumn c = (ComputedColumn) column;
            builder.columnByExpression(c.getName(), c.getExpression());
        } else {
            throw new IllegalArgumentException("Unsupported column type: " + column);
        }
    });
    // Carry over watermark specs and, if present, the primary key.
    watermarkSpecs.forEach(spec -> builder.watermark(spec.getRowtimeAttribute(), spec.getWatermarkExpr()));
    if (primaryKey != null) {
        builder.primaryKeyNamed(primaryKey.getName(), primaryKey.getColumns());
    }
    return builder.build();
}
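
A minimal migration sketch, assuming a legacy TableSchema with a physical and a computed column (the names and the helper migrateExample are illustrative):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.TableSchema;

static Schema migrateExample() {
    TableSchema legacy = TableSchema.builder()
            .field("id", DataTypes.INT())
            // Computed column: toSchema() passes only the expression to
            // columnByExpression(); the declared type is dropped here and
            // derived again when the new Schema is resolved.
            .field("doubled", DataTypes.INT(), "id * 2")
            .build();
    return legacy.toSchema();
}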