Use of org.apache.flink.table.api.Schema in project flink by apache.
From the class DataStreamJavaITCase, the method createTableFromElements:
private void createTableFromElements(
        StreamTableEnvironment tableEnv,
        String name,
        ChangelogMode changelogMode,
        Schema schema,
        List<TypeInformation<?>> fieldTypeInfo,
        Row... elements) {
    final String[] fieldNames =
            schema.getColumns().stream()
                    .map(Schema.UnresolvedColumn::getName)
                    .toArray(String[]::new);
    final TypeInformation<?>[] fieldTypes = fieldTypeInfo.toArray(new TypeInformation[0]);
    final DataStream<Row> dataStream =
            env.fromElements(elements).returns(Types.ROW_NAMED(fieldNames, fieldTypes));
    final Table table = tableEnv.fromChangelogStream(dataStream, schema, changelogMode);
    tableEnv.createTemporaryView(name, table);
}
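For orientation, a minimal sketch of how such a helper could be called. The view name, columns, and rows below are illustrative (not taken from the Flink test), and a StreamTableEnvironment named tableEnv is assumed to be in scope:

// Hypothetical call site: build a Schema, supply TypeInformation that lines up
// with its columns, and register the rows as an insert-only changelog view.
Schema schema =
        Schema.newBuilder()
                .column("id", DataTypes.INT())
                .column("name", DataTypes.STRING())
                .build();
createTableFromElements(
        tableEnv,
        "people",                               // illustrative view name
        ChangelogMode.insertOnly(),             // plain append-only changelog
        schema,
        Arrays.asList(Types.INT, Types.STRING), // one entry per schema column
        Row.of(1, "alice"),
        Row.of(2, "bob"));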
Use of org.apache.flink.table.api.Schema in project flink by apache.
From the class SqlToOperationConverterTest, the method testCreateTableWithFullDataTypes:
// TODO: tweak the tests when FLINK-13604 is fixed.
@Test
public void testCreateTableWithFullDataTypes() {
    final List<TestItem> testItems =
            Arrays.asList(
                    createTestItem("CHAR", DataTypes.CHAR(1)),
                    createTestItem("CHAR NOT NULL", DataTypes.CHAR(1).notNull()),
                    createTestItem("CHAR NULL", DataTypes.CHAR(1)),
                    createTestItem("CHAR(33)", DataTypes.CHAR(33)),
                    createTestItem("VARCHAR", DataTypes.STRING()),
                    createTestItem("VARCHAR(33)", DataTypes.VARCHAR(33)),
                    createTestItem("STRING", DataTypes.STRING()),
                    createTestItem("BOOLEAN", DataTypes.BOOLEAN()),
                    createTestItem("BINARY", DataTypes.BINARY(1)),
                    createTestItem("BINARY(33)", DataTypes.BINARY(33)),
                    createTestItem("VARBINARY", DataTypes.BYTES()),
                    createTestItem("VARBINARY(33)", DataTypes.VARBINARY(33)),
                    createTestItem("BYTES", DataTypes.BYTES()),
                    createTestItem("DECIMAL", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DEC", DataTypes.DECIMAL(10, 0)),
                    createTestItem("NUMERIC", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DECIMAL(10)", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DEC(10)", DataTypes.DECIMAL(10, 0)),
                    createTestItem("NUMERIC(10)", DataTypes.DECIMAL(10, 0)),
                    createTestItem("DECIMAL(10, 3)", DataTypes.DECIMAL(10, 3)),
                    createTestItem("DEC(10, 3)", DataTypes.DECIMAL(10, 3)),
                    createTestItem("NUMERIC(10, 3)", DataTypes.DECIMAL(10, 3)),
                    createTestItem("TINYINT", DataTypes.TINYINT()),
                    createTestItem("SMALLINT", DataTypes.SMALLINT()),
                    createTestItem("INTEGER", DataTypes.INT()),
                    createTestItem("INT", DataTypes.INT()),
                    createTestItem("BIGINT", DataTypes.BIGINT()),
                    createTestItem("FLOAT", DataTypes.FLOAT()),
                    createTestItem("DOUBLE", DataTypes.DOUBLE()),
                    createTestItem("DOUBLE PRECISION", DataTypes.DOUBLE()),
                    createTestItem("DATE", DataTypes.DATE()),
                    createTestItem("TIME", DataTypes.TIME()),
                    createTestItem("TIME WITHOUT TIME ZONE", DataTypes.TIME()),
                    // Expect to be TIME(3).
                    createTestItem("TIME(3)", DataTypes.TIME()),
                    // Expect to be TIME(3).
                    createTestItem("TIME(3) WITHOUT TIME ZONE", DataTypes.TIME()),
                    createTestItem("TIMESTAMP", DataTypes.TIMESTAMP(6)),
                    createTestItem("TIMESTAMP WITHOUT TIME ZONE", DataTypes.TIMESTAMP(6)),
                    createTestItem("TIMESTAMP(3)", DataTypes.TIMESTAMP(3)),
                    createTestItem("TIMESTAMP(3) WITHOUT TIME ZONE", DataTypes.TIMESTAMP(3)),
                    createTestItem(
                            "TIMESTAMP WITH LOCAL TIME ZONE",
                            DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6)),
                    createTestItem(
                            "TIMESTAMP(3) WITH LOCAL TIME ZONE",
                            DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)),
                    createTestItem(
                            "ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>",
                            DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))),
                    createTestItem("ARRAY<INT NOT NULL>", DataTypes.ARRAY(DataTypes.INT().notNull())),
                    createTestItem("INT ARRAY", DataTypes.ARRAY(DataTypes.INT())),
                    createTestItem("INT NOT NULL ARRAY", DataTypes.ARRAY(DataTypes.INT().notNull())),
                    createTestItem("INT ARRAY NOT NULL", DataTypes.ARRAY(DataTypes.INT()).notNull()),
                    createTestItem("MULTISET<INT NOT NULL>", DataTypes.MULTISET(DataTypes.INT().notNull())),
                    createTestItem("INT MULTISET", DataTypes.MULTISET(DataTypes.INT())),
                    createTestItem("INT NOT NULL MULTISET", DataTypes.MULTISET(DataTypes.INT().notNull())),
                    createTestItem("INT MULTISET NOT NULL", DataTypes.MULTISET(DataTypes.INT()).notNull()),
                    createTestItem("MAP<BIGINT, BOOLEAN>", DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BOOLEAN())),
                    // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
                    createTestItem(
                            "ROW<f0 INT NOT NULL, f1 BOOLEAN>",
                            DataTypes.ROW(
                                    DataTypes.FIELD("f0", DataTypes.INT()),
                                    DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
                    // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
                    createTestItem(
                            "ROW(f0 INT NOT NULL, f1 BOOLEAN)",
                            DataTypes.ROW(
                                    DataTypes.FIELD("f0", DataTypes.INT()),
                                    DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
                    createTestItem("ROW<`f0` INT>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
                    createTestItem("ROW(`f0` INT)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
                    createTestItem("ROW<>", DataTypes.ROW()),
                    createTestItem("ROW()", DataTypes.ROW()),
                    // Expect to be ROW<`f0` INT NOT NULL '...', `f1` BOOLEAN '...'>.
                    createTestItem(
                            "ROW<f0 INT NOT NULL 'This is a comment.', f1 BOOLEAN 'This as well.'>",
                            DataTypes.ROW(
                                    DataTypes.FIELD("f0", DataTypes.INT()),
                                    DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
                    createTestItem(
                            "ARRAY<ROW<f0 INT, f1 BOOLEAN>>",
                            DataTypes.ARRAY(
                                    DataTypes.ROW(
                                            DataTypes.FIELD("f0", DataTypes.INT()),
                                            DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
                    createTestItem(
                            "ROW<f0 INT, f1 BOOLEAN> MULTISET",
                            DataTypes.MULTISET(
                                    DataTypes.ROW(
                                            DataTypes.FIELD("f0", DataTypes.INT()),
                                            DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
                    createTestItem(
                            "MULTISET<ROW<f0 INT, f1 BOOLEAN>>",
                            DataTypes.MULTISET(
                                    DataTypes.ROW(
                                            DataTypes.FIELD("f0", DataTypes.INT()),
                                            DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
                    createTestItem(
                            "ROW<f0 Row<f00 INT, f01 BOOLEAN>, f1 INT ARRAY, f2 BOOLEAN MULTISET>",
                            DataTypes.ROW(
                                    DataTypes.FIELD(
                                            "f0",
                                            DataTypes.ROW(
                                                    DataTypes.FIELD("f00", DataTypes.INT()),
                                                    DataTypes.FIELD("f01", DataTypes.BOOLEAN()))),
                                    DataTypes.FIELD("f1", DataTypes.ARRAY(DataTypes.INT())),
                                    DataTypes.FIELD("f2", DataTypes.MULTISET(DataTypes.BOOLEAN())))));
    StringBuilder buffer = new StringBuilder("create table t1(\n");
    for (int i = 0; i < testItems.size(); i++) {
        buffer.append("f").append(i).append(" ").append(testItems.get(i).testExpr);
        if (i == testItems.size() - 1) {
            buffer.append(")");
        } else {
            buffer.append(",\n");
        }
    }
    final String sql = buffer.toString();
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
    TableSchema schema = ((CreateTableOperation) operation).getCatalogTable().getSchema();
    Object[] expectedDataTypes = testItems.stream().map(item -> item.expectedType).toArray();
    assertThat(schema.getFieldDataTypes()).isEqualTo(expectedDataTypes);
}
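The createTestItem helper and the TestItem holder belong to the test class and are not part of this excerpt. Judging from the accesses above (testItems.get(i).testExpr, item.expectedType), they amount to a small value pair; a sketch of what they presumably look like, not the verbatim Flink source:

// Hypothetical reconstruction: pairs the SQL type expression under test with
// the DataType the SQL-to-operation conversion is expected to produce.
private static class TestItem {
    final String testExpr;
    final DataType expectedType;

    private TestItem(String testExpr, DataType expectedType) {
        this.testExpr = testExpr;
        this.expectedType = expectedType;
    }
}

private static TestItem createTestItem(String testExpr, DataType expectedType) {
    return new TestItem(testExpr, expectedType);
}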
Use of org.apache.flink.table.api.Schema in project flink by apache.
From the class CommonExecSinkITCase, the method testStreamRecordTimestampInserterDataStreamSinkProvider:
@Test
public void testStreamRecordTimestampInserterDataStreamSinkProvider()
        throws ExecutionException, InterruptedException {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final SharedReference<List<Long>> timestamps = sharedObjects.add(new ArrayList<>());
    final List<Row> rows =
            Arrays.asList(
                    Row.of(1, "foo", Instant.parse("2020-11-10T11:34:56.123Z")),
                    Row.of(2, "foo", Instant.parse("2020-11-10T12:34:56.789Z")),
                    Row.of(3, "foo", Instant.parse("2020-11-11T10:11:22.777Z")),
                    Row.of(4, "foo", Instant.parse("2020-11-11T10:11:23.888Z")));
    final SinkFunction<RowData> sinkFunction =
            new SinkFunction<RowData>() {
                @Override
                public void invoke(RowData value, Context context) {
                    addElement(timestamps, context.timestamp());
                }
            };
    final TableDescriptor sourceDescriptor =
            TableFactoryHarness.newBuilder()
                    .schema(schemaStreamRecordTimestampInserter(true))
                    .source(new TestSource(rows))
                    .sink(
                            new TableFactoryHarness.SinkBase() {
                                @Override
                                public DataStreamSinkProvider getSinkRuntimeProvider(
                                        DynamicTableSink.Context context) {
                                    return (providerContext, dataStream) ->
                                            dataStream.addSink(sinkFunction);
                                }
                            })
                    .build();
    tableEnv.createTable("T1", sourceDescriptor);
    final String sqlStmt = "INSERT INTO T1 SELECT * FROM T1";
    assertPlan(tableEnv, sqlStmt, true);
    tableEnv.executeSql(sqlStmt).await();
    Collections.sort(timestamps.get());
    assertTimestampResults(timestamps, rows);
}
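schemaStreamRecordTimestampInserter(true) is a helper of the test class that is not shown here. The timestamp inserter is only planned when the sink's schema carries a rowtime attribute, so the helper presumably declares a timestamp column plus a watermark; a sketch under that assumption (the column names and watermark expression are guesses, not the actual Flink source):

// Hypothetical sketch: three columns matching the Row.of(int, String, Instant)
// values above, with an optional watermark that turns `ts` into a rowtime
// attribute so the planner inserts the StreamRecordTimestampInserter.
private static Schema schemaStreamRecordTimestampInserter(boolean withWatermark) {
    Schema.Builder builder =
            Schema.newBuilder()
                    .column("a", "INT")
                    .column("b", "STRING")
                    .column("ts", "TIMESTAMP_LTZ(3)"); // matches the Instant values in `rows`
    if (withWatermark) {
        builder.watermark("ts", "SOURCE_WATERMARK()");
    }
    return builder.build();
}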
Use of org.apache.flink.table.api.Schema in project flink by apache.
From the class DynamicSinkUtils, the method pushMetadataProjection:
/**
* Creates a projection that reorders physical and metadata columns according to the consumed
* data type of the sink. It casts metadata columns into the expected data type.
*
* @see SupportsWritingMetadata
*/
private static void pushMetadataProjection(
        FlinkRelBuilder relBuilder,
        FlinkTypeFactory typeFactory,
        ResolvedSchema schema,
        DynamicTableSink sink) {
    final RexBuilder rexBuilder = relBuilder.getRexBuilder();
    final List<Column> columns = schema.getColumns();

    final List<Integer> physicalColumns = extractPhysicalColumns(schema);

    // Index the persisted metadata columns by their metadata key, falling back
    // to the column name if no explicit key is declared.
    final Map<String, Integer> keyToMetadataColumn =
            extractPersistedMetadataColumns(schema).stream()
                    .collect(
                            Collectors.toMap(
                                    pos -> {
                                        final MetadataColumn metadataColumn =
                                                (MetadataColumn) columns.get(pos);
                                        return metadataColumn
                                                .getMetadataKey()
                                                .orElse(metadataColumn.getName());
                                    },
                                    Function.identity()));

    // Order the metadata columns by the keys the sink actually consumes.
    final List<Integer> metadataColumns =
            createRequiredMetadataKeys(schema, sink).stream()
                    .map(keyToMetadataColumn::get)
                    .collect(Collectors.toList());

    // Physical columns keep their names; metadata columns are projected under
    // their metadata key.
    final List<String> fieldNames =
            Stream.concat(
                            physicalColumns.stream().map(columns::get).map(Column::getName),
                            metadataColumns.stream()
                                    .map(columns::get)
                                    .map(MetadataColumn.class::cast)
                                    .map(c -> c.getMetadataKey().orElse(c.getName())))
                    .collect(Collectors.toList());

    final Map<String, DataType> metadataMap = extractMetadataMap(sink);

    // Physical columns are passed through; metadata columns are cast to the
    // data type the sink expects for their key.
    final List<RexNode> fieldNodes =
            Stream.concat(
                            physicalColumns.stream()
                                    .map(
                                            pos -> {
                                                final int posAdjusted =
                                                        adjustByVirtualColumns(columns, pos);
                                                return relBuilder.field(posAdjusted);
                                            }),
                            metadataColumns.stream()
                                    .map(
                                            pos -> {
                                                final MetadataColumn metadataColumn =
                                                        (MetadataColumn) columns.get(pos);
                                                final String metadataKey =
                                                        metadataColumn
                                                                .getMetadataKey()
                                                                .orElse(metadataColumn.getName());
                                                final LogicalType expectedType =
                                                        metadataMap
                                                                .get(metadataKey)
                                                                .getLogicalType();
                                                final RelDataType expectedRelDataType =
                                                        typeFactory.createFieldTypeFromLogicalType(
                                                                expectedType);
                                                final int posAdjusted =
                                                        adjustByVirtualColumns(columns, pos);
                                                return rexBuilder.makeAbstractCast(
                                                        expectedRelDataType,
                                                        relBuilder.field(posAdjusted));
                                            }))
                    .collect(Collectors.toList());

    relBuilder.projectNamed(fieldNodes, fieldNames, true);
}
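The projection above serves sinks that implement SupportsWritingMetadata. As a reference point, a minimal such sink might look as follows; the class name, the "timestamp" key, and the discarding runtime provider are illustrative, while listWritableMetadata and applyWritableMetadata are the actual interface methods:

// Illustrative sink declaring one writable metadata key. The planner matches the
// schema's persisted metadata columns against the keys returned here, and
// pushMetadataProjection casts each metadata column to the declared data type.
public class MetadataSink implements DynamicTableSink, SupportsWritingMetadata {

    @Override
    public Map<String, DataType> listWritableMetadata() {
        return Collections.singletonMap("timestamp", DataTypes.TIMESTAMP_LTZ(3));
    }

    @Override
    public void applyWritableMetadata(List<String> metadataKeys, DataType consumedDataType) {
        // Receives the metadata keys that are actually written, in consumed order,
        // together with the final consumed data type of the sink.
    }

    @Override
    public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
        return ChangelogMode.insertOnly();
    }

    @Override
    public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
        // Placeholder runtime implementation that discards all rows.
        return SinkFunctionProvider.of(new DiscardingSink<>());
    }

    @Override
    public DynamicTableSink copy() {
        return new MetadataSink();
    }

    @Override
    public String asSummaryString() {
        return "MetadataSink";
    }
}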