Use of org.apache.flink.table.api.TableColumn.ComputedColumn in project flink by apache.
The class DescriptorProperties, method putTableSchema.
/**
* Adds a table schema under the given key.
*/
public void putTableSchema(String key, TableSchema schema) {
    checkNotNull(key);
    checkNotNull(schema);
    final String[] fieldNames = schema.getFieldNames();
    final DataType[] fieldTypes = schema.getFieldDataTypes();
    final String[] fieldExpressions = schema.getTableColumns().stream()
            .map(column -> {
                if (column instanceof ComputedColumn) {
                    return ((ComputedColumn) column).getExpression();
                }
                return null;
            })
            .toArray(String[]::new);
    final String[] fieldMetadata = schema.getTableColumns().stream()
            .map(column -> {
                if (column instanceof MetadataColumn) {
                    return ((MetadataColumn) column).getMetadataAlias().orElse(column.getName());
                }
                return null;
            })
            .toArray(String[]::new);
    final String[] fieldVirtual = schema.getTableColumns().stream()
            .map(column -> {
                if (column instanceof MetadataColumn) {
                    return Boolean.toString(((MetadataColumn) column).isVirtual());
                }
                return null;
            })
            .toArray(String[]::new);
    final List<List<String>> values = new ArrayList<>();
    for (int i = 0; i < schema.getFieldCount(); i++) {
        values.add(
                Arrays.asList(
                        fieldNames[i],
                        fieldTypes[i].getLogicalType().asSerializableString(),
                        fieldExpressions[i],
                        fieldMetadata[i],
                        fieldVirtual[i]));
    }
    putIndexedOptionalProperties(
            key, Arrays.asList(NAME, DATA_TYPE, EXPR, METADATA, VIRTUAL), values);
    if (!schema.getWatermarkSpecs().isEmpty()) {
        final List<List<String>> watermarkValues = new ArrayList<>();
        for (WatermarkSpec spec : schema.getWatermarkSpecs()) {
            watermarkValues.add(
                    Arrays.asList(
                            spec.getRowtimeAttribute(),
                            spec.getWatermarkExpr(),
                            spec.getWatermarkExprOutputType()
                                    .getLogicalType()
                                    .asSerializableString()));
        }
        putIndexedFixedProperties(
                key + '.' + WATERMARK,
                Arrays.asList(WATERMARK_ROWTIME, WATERMARK_STRATEGY_EXPR, WATERMARK_STRATEGY_DATA_TYPE),
                watermarkValues);
    }
    schema.getPrimaryKey().ifPresent(pk -> {
        putString(key + '.' + PRIMARY_KEY_NAME, pk.getName());
        putString(key + '.' + PRIMARY_KEY_COLUMNS, String.join(",", pk.getColumns()));
    });
}
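For context, here is a minimal, hypothetical usage sketch (not part of the Flink source above): it builds a TableSchema containing a computed column, serializes it with putTableSchema, and reads it back. The class name PutTableSchemaExample and the "schema" key are illustrative; the indexed key names in the comments follow the NAME/DATA_TYPE/EXPR constants the method uses.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;

public class PutTableSchemaExample {
    public static void main(String[] args) {
        // A schema with one physical column and one computed column.
        final TableSchema schema = TableSchema.builder()
                .field("a", DataTypes.INT())             // physical column
                .field("c", DataTypes.INT(), "`a` + 1")  // computed column with an expression
                .build();

        final DescriptorProperties properties = new DescriptorProperties(true);
        properties.putTableSchema("schema", schema);

        // The schema is now flattened into indexed string keys, e.g.
        // schema.0.name=a, schema.0.data-type=INT, schema.1.expr=`a` + 1, ...
        properties.asMap().forEach((k, v) -> System.out.println(k + "=" + v));

        // The round trip restores an equivalent TableSchema.
        final TableSchema restored = properties.getTableSchema("schema");
        System.out.println(restored);
    }
}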
Use of org.apache.flink.table.api.TableColumn.ComputedColumn in project flink by apache.
The class SqlToOperationConverterTest, method testCreateTableWithComputedColumn.
@Test
public void testCreateTableWithComputedColumn() {
    final String sql =
            "CREATE TABLE tbl1 (\n"
                    + " a int,\n"
                    + " b varchar, \n"
                    + " c as a - 1, \n"
                    + " d as b || '$$', \n"
                    + " e as my_udf1(a),"
                    + " f as `default`.my_udf2(a) + 1,"
                    + " g as builtin.`default`.my_udf3(a) || '##'\n"
                    + ")\n"
                    + " with (\n"
                    + " 'connector' = 'kafka', \n"
                    + " 'kafka.topic' = 'log.test'\n"
                    + ")\n";
    functionCatalog.registerTempCatalogScalarFunction(
            ObjectIdentifier.of("builtin", "default", "my_udf1"), Func0$.MODULE$);
    functionCatalog.registerTempCatalogScalarFunction(
            ObjectIdentifier.of("builtin", "default", "my_udf2"), Func1$.MODULE$);
    functionCatalog.registerTempCatalogScalarFunction(
            ObjectIdentifier.of("builtin", "default", "my_udf3"), Func8$.MODULE$);
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT));
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    assertThat(catalogTable.getSchema().getFieldNames())
            .isEqualTo(new String[] { "a", "b", "c", "d", "e", "f", "g" });
    assertThat(catalogTable.getSchema().getFieldDataTypes())
            .isEqualTo(
                    new DataType[] {
                        DataTypes.INT(),
                        DataTypes.STRING(),
                        DataTypes.INT(),
                        DataTypes.STRING(),
                        DataTypes.INT().notNull(),
                        DataTypes.INT(),
                        DataTypes.STRING()
                    });
    String[] columnExpressions = catalogTable.getSchema().getTableColumns().stream()
            .filter(ComputedColumn.class::isInstance)
            .map(ComputedColumn.class::cast)
            .map(ComputedColumn::getExpression)
            .toArray(String[]::new);
    String[] expected =
            new String[] {
                "`a` - 1",
                "`b` || '$$'",
                "`builtin`.`default`.`my_udf1`(`a`)",
                "`builtin`.`default`.`my_udf2`(`a`) + 1",
                "`builtin`.`default`.`my_udf3`(`a`) || '##'"
            };
    assertThat(columnExpressions).isEqualTo(expected);
}
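The test above drives the planner internals (FlinkPlannerImpl, functionCatalog) directly, which requires the test harness. A self-contained sketch of the same DDL behavior, assuming only a TableEnvironment and swapping the kafka connector for datagen so it runs without a broker, could look like this; the class name is illustrative.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class ComputedColumnDdlExample {
    public static void main(String[] args) {
        final TableEnvironment tEnv = TableEnvironment.create(
                EnvironmentSettings.newInstance().inStreamingMode().build());

        // 'datagen' stands in for the kafka connector used in the test.
        tEnv.executeSql(
                "CREATE TABLE tbl1 (\n"
                        + "  a INT,\n"
                        + "  b STRING,\n"
                        + "  c AS a - 1,\n"      // computed columns, as in the test above
                        + "  d AS b || '$$'\n"
                        + ") WITH (\n"
                        + "  'connector' = 'datagen'\n"
                        + ")");

        // DESCRIBE shows the computed columns together with their stored expressions.
        tEnv.executeSql("DESCRIBE tbl1").print();
    }
}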
Use of org.apache.flink.table.api.TableColumn.ComputedColumn in project flink by apache.
The class TableSchema, method toSchema.
/**
* Helps to migrate to the new {@link Schema} class.
*/
public Schema toSchema() {
    final Schema.Builder builder = Schema.newBuilder();
    columns.forEach(column -> {
        if (column instanceof PhysicalColumn) {
            final PhysicalColumn c = (PhysicalColumn) column;
            builder.column(c.getName(), c.getType());
        } else if (column instanceof MetadataColumn) {
            final MetadataColumn c = (MetadataColumn) column;
            builder.columnByMetadata(
                    c.getName(), c.getType(), c.getMetadataAlias().orElse(null), c.isVirtual());
        } else if (column instanceof ComputedColumn) {
            final ComputedColumn c = (ComputedColumn) column;
            builder.columnByExpression(c.getName(), c.getExpression());
        } else {
            throw new IllegalArgumentException("Unsupported column type: " + column);
        }
    });
    watermarkSpecs.forEach(
            spec -> builder.watermark(spec.getRowtimeAttribute(), spec.getWatermarkExpr()));
    if (primaryKey != null) {
        builder.primaryKeyNamed(primaryKey.getName(), primaryKey.getColumns());
    }
    return builder.build();
}
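A small, hypothetical migration sketch (class name and columns are illustrative): build a legacy TableSchema with physical and computed columns, a watermark, and a primary key, then convert it with toSchema() so each legacy TableColumn is mapped to its Schema.Builder counterpart.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.TableSchema;

public class ToSchemaMigrationExample {
    public static void main(String[] args) {
        final TableSchema legacy = TableSchema.builder()
                .field("id", DataTypes.BIGINT().notNull())             // physical column
                .field("ts", DataTypes.TIMESTAMP(3))                   // physical column
                .field("id_plus_one", DataTypes.BIGINT(), "`id` + 1")  // computed column
                .watermark("ts", "`ts` - INTERVAL '5' SECOND", DataTypes.TIMESTAMP(3))
                .primaryKey("id")
                .build();

        // Columns, watermark, and primary key all carry over to the new Schema class.
        final Schema schema = legacy.toSchema();
        System.out.println(schema);
    }
}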