
Example 56 with StreamTableEnvironment

Use of org.apache.flink.table.api.bridge.java.StreamTableEnvironment in project flink by apache.

The class DataStreamJavaITCase defines the method testFromAndToChangelogStreamRetract.

@Test
public void testFromAndToChangelogStreamRetract() throws Exception {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final List<Either<Row, Row>> inputOrOutput =
            Arrays.asList(
                    input(RowKind.INSERT, "bob", 0),
                    output(RowKind.INSERT, "bob", 0),
                    // --
                    input(RowKind.UPDATE_BEFORE, "bob", 0),
                    output(RowKind.DELETE, "bob", 0),
                    // --
                    input(RowKind.UPDATE_AFTER, "bob", 1),
                    output(RowKind.INSERT, "bob", 1),
                    // --
                    input(RowKind.INSERT, "alice", 1),
                    output(RowKind.INSERT, "alice", 1),
                    // --
                    input(RowKind.INSERT, "alice", 1),
                    output(RowKind.UPDATE_BEFORE, "alice", 1),
                    output(RowKind.UPDATE_AFTER, "alice", 2),
                    // --
                    input(RowKind.UPDATE_BEFORE, "alice", 1),
                    output(RowKind.UPDATE_BEFORE, "alice", 2),
                    output(RowKind.UPDATE_AFTER, "alice", 1),
                    // --
                    input(RowKind.UPDATE_AFTER, "alice", 2),
                    output(RowKind.UPDATE_BEFORE, "alice", 1),
                    output(RowKind.UPDATE_AFTER, "alice", 3),
                    // --
                    input(RowKind.UPDATE_BEFORE, "alice", 2),
                    output(RowKind.UPDATE_BEFORE, "alice", 3),
                    output(RowKind.UPDATE_AFTER, "alice", 1),
                    // --
                    input(RowKind.UPDATE_AFTER, "alice", 100),
                    output(RowKind.UPDATE_BEFORE, "alice", 1),
                    output(RowKind.UPDATE_AFTER, "alice", 101));
    final DataStream<Row> changelogStream = env.fromElements(getInput(inputOrOutput));
    tableEnv.createTemporaryView("t", tableEnv.fromChangelogStream(changelogStream));
    final Table result = tableEnv.sqlQuery("SELECT f0, SUM(f1) FROM t GROUP BY f0");
    testResult(result.execute(), getOutput(inputOrOutput));
    testResult(tableEnv.toChangelogStream(result), getOutput(inputOrOutput));
}
Also used : Table(org.apache.flink.table.api.Table) Either(org.apache.flink.types.Either) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)
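A note on the example above: input() and output() are test utilities of DataStreamJavaITCase and are not shown here. As a minimal, self-contained sketch of the same idea, each element of a changelog stream is a plain Row whose change kind is set explicitly, typically via Row.ofKind; the class name and the literal values below are illustrative only, not taken from the test.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;

public class ChangelogRowSketch {

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Retract encoding: an update is represented as UPDATE_BEFORE followed by UPDATE_AFTER.
        final DataStream<Row> changelog =
                env.fromElements(
                                Row.ofKind(RowKind.INSERT, "bob", 0),
                                Row.ofKind(RowKind.UPDATE_BEFORE, "bob", 0),
                                Row.ofKind(RowKind.UPDATE_AFTER, "bob", 1))
                        .returns(Types.ROW(Types.STRING, Types.INT));

        // Interpret the stream as a changelog table and convert it back to a changelog stream.
        final Table t = tableEnv.fromChangelogStream(changelog);
        tableEnv.toChangelogStream(t).print();
        env.execute();
    }
}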

Example 57 with StreamTableEnvironment

Use of org.apache.flink.table.api.bridge.java.StreamTableEnvironment in project flink by apache.

The class DataStreamJavaITCase defines the method testToDataStreamWithRow.

@Test
public void testToDataStreamWithRow() throws Exception {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final Row[] rows =
            new Row[] {
                Row.of(12, Row.of(false, "hello"), Collections.singletonMap("world", 2.0)),
                Row.of(null, Row.of(false, null), Collections.singletonMap("world", 1.0))
            };
    final Table table = tableEnv.fromValues((Object[]) rows);
    testResult(tableEnv.toDataStream(table), rows);
}
Also used : Table(org.apache.flink.table.api.Table) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)
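The testResult() helper above is a test utility not shown here. As a hedged sketch of what such a round trip can look like outside the test harness (class name and values below are illustrative), the converted DataStream<Row> can be materialized with executeAndCollect():

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

public class ToDataStreamSketch {

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // fromValues derives the schema (columns f0, f1, ...) from the given rows.
        final Table table = tableEnv.fromValues(Row.of(12, "hello"), Row.of(13, "world"));
        final DataStream<Row> stream = tableEnv.toDataStream(table);

        // executeAndCollect() submits the job and returns the produced rows for inspection.
        try (CloseableIterator<Row> rows = stream.executeAndCollect()) {
            rows.forEachRemaining(System.out::println);
        }
    }
}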

Example 58 with StreamTableEnvironment

Use of org.apache.flink.table.api.bridge.java.StreamTableEnvironment in project flink by apache.

The class CommonExecSinkITCase defines the method testStreamRecordTimestampInserterNotApplied.

@Test
public void testStreamRecordTimestampInserterNotApplied() {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final SharedReference<List<Long>> timestamps = sharedObjects.add(new ArrayList<>());
    final List<Row> rows =
            Arrays.asList(
                    Row.of(1, "foo", Instant.parse("2020-11-10T11:34:56.123Z")),
                    Row.of(2, "foo", Instant.parse("2020-11-10T12:34:56.789Z")),
                    Row.of(3, "foo", Instant.parse("2020-11-11T10:11:22.777Z")),
                    Row.of(4, "foo", Instant.parse("2020-11-11T10:11:23.888Z")));
    final TableDescriptor sourceDescriptor =
            TableFactoryHarness.newBuilder()
                    .schema(schemaStreamRecordTimestampInserter(false))
                    .source(new TestSource(rows))
                    .sink(buildRuntimeSinkProvider(new TestTimestampWriter(timestamps)))
                    .build();
    tableEnv.createTable("T1", sourceDescriptor);
    assertPlan(tableEnv, "INSERT INTO T1 SELECT * FROM T1", false);
}
Also used : List(java.util.List) ArrayList(java.util.ArrayList) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) Row(org.apache.flink.types.Row) TableDescriptor(org.apache.flink.table.api.TableDescriptor) Test(org.junit.Test)

Example 59 with StreamTableEnvironment

Use of org.apache.flink.table.api.bridge.java.StreamTableEnvironment in project flink by apache.

The class CommonExecSinkITCase defines the method testFromValuesWatermarkPropagation.

@Test
public void testFromValuesWatermarkPropagation() throws Exception {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final SharedReference<List<Long>> watermarks = sharedObjects.add(new ArrayList<>());
    final SinkFunction<RowData> sinkFunction = new SinkFunction<RowData>() {

        @Override
        public void writeWatermark(org.apache.flink.api.common.eventtime.Watermark watermark) {
            addElement(watermarks, watermark.getTimestamp());
        }
    };
    final TableDescriptor sinkDescriptor = TableFactoryHarness.newBuilder().sink(new TableFactoryHarness.SinkBase() {

        @Override
        public DataStreamSinkProvider getSinkRuntimeProvider(DynamicTableSink.Context context) {
            return (providerContext, dataStream) -> dataStream.addSink(sinkFunction);
        }
    }).build();
    final Table source = tableEnv.fromValues(DataTypes.ROW(DataTypes.FIELD("a", DataTypes.INT())), Row.of(1), Row.of(2), Row.of(3));
    source.executeInsert(sinkDescriptor).await();
    assertThat(watermarks.get().size()).isEqualTo(env.getParallelism());
    for (Long watermark : watermarks.get()) {
        assertThat(watermark).isEqualTo(Watermark.MAX_WATERMARK.getTimestamp());
    }
}
Also used : Table(org.apache.flink.table.api.Table) TableDescriptor(org.apache.flink.table.api.TableDescriptor) RowData(org.apache.flink.table.data.RowData) SinkFunction(org.apache.flink.streaming.api.functions.sink.SinkFunction) List(java.util.List) ArrayList(java.util.ArrayList) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)

Example 60 with StreamTableEnvironment

Use of org.apache.flink.table.api.bridge.java.StreamTableEnvironment in project flink by apache.

The class CommonExecSinkITCase defines the method testStreamRecordTimestampInserterDataStreamSinkProvider.

@Test
public void testStreamRecordTimestampInserterDataStreamSinkProvider() throws ExecutionException, InterruptedException {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final SharedReference<List<Long>> timestamps = sharedObjects.add(new ArrayList<>());
    final List<Row> rows =
            Arrays.asList(
                    Row.of(1, "foo", Instant.parse("2020-11-10T11:34:56.123Z")),
                    Row.of(2, "foo", Instant.parse("2020-11-10T12:34:56.789Z")),
                    Row.of(3, "foo", Instant.parse("2020-11-11T10:11:22.777Z")),
                    Row.of(4, "foo", Instant.parse("2020-11-11T10:11:23.888Z")));
    final SinkFunction<RowData> sinkFunction = new SinkFunction<RowData>() {

        @Override
        public void invoke(RowData value, Context context) {
            addElement(timestamps, context.timestamp());
        }
    };
    final TableDescriptor sourceDescriptor =
            TableFactoryHarness.newBuilder()
                    .schema(schemaStreamRecordTimestampInserter(true))
                    .source(new TestSource(rows))
                    .sink(
                            new TableFactoryHarness.SinkBase() {
                                @Override
                                public DataStreamSinkProvider getSinkRuntimeProvider(
                                        DynamicTableSink.Context context) {
                                    return (providerContext, dataStream) ->
                                            dataStream.addSink(sinkFunction);
                                }
                            })
                    .build();
    tableEnv.createTable("T1", sourceDescriptor);
    final String sqlStmt = "INSERT INTO T1 SELECT * FROM T1";
    assertPlan(tableEnv, sqlStmt, true);
    tableEnv.executeSql(sqlStmt).await();
    Collections.sort(timestamps.get());
    assertTimestampResults(timestamps, rows);
}
Also used : TableDescriptor(org.apache.flink.table.api.TableDescriptor) RowData(org.apache.flink.table.data.RowData) SinkFunction(org.apache.flink.streaming.api.functions.sink.SinkFunction) List(java.util.List) ArrayList(java.util.ArrayList) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)

Aggregations

StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment): 64 usages
Test (org.junit.Test): 53 usages
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 41 usages
Row (org.apache.flink.types.Row): 38 usages
Table (org.apache.flink.table.api.Table): 36 usages
ArrayList (java.util.ArrayList): 19 usages
TableResult (org.apache.flink.table.api.TableResult): 18 usages
List (java.util.List): 10 usages
TableDescriptor (org.apache.flink.table.api.TableDescriptor): 10 usages
Arrays (java.util.Arrays): 6 usages
Collections (java.util.Collections): 6 usages
AbstractTestBase (org.apache.flink.test.util.AbstractTestBase): 6 usages
IOException (java.io.IOException): 5 usages
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 5 usages
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema): 5 usages
Either (org.apache.flink.types.Either): 5 usages
LocalDateTime (java.time.LocalDateTime): 4 usages
ZoneId (java.time.ZoneId): 4 usages
TypeHint (org.apache.flink.api.common.typeinfo.TypeHint): 4 usages
TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation): 4 usages
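
Taken together, these counts reflect the recurring setup in the examples above: a StreamExecutionEnvironment bridged into a StreamTableEnvironment, tables built from Rows, and results converted back to streams. A minimal, self-contained sketch of that pattern follows; the class name and values are illustrative only, not taken from the tests.

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class StreamTableEnvironmentSketch {

    public static void main(String[] args) throws Exception {
        // Bridge a DataStream environment into the Table API.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Register some rows as a temporary view (default column names are f0, f1, ...).
        final Table input = tableEnv.fromValues(Row.of("bob", 1), Row.of("alice", 2), Row.of("bob", 3));
        tableEnv.createTemporaryView("t", input);

        // Run a SQL aggregation over the view.
        final Table result = tableEnv.sqlQuery("SELECT f0, SUM(f1) FROM t GROUP BY f0");

        // A grouped aggregation emits updates, so expose the result as a changelog stream.
        tableEnv.toChangelogStream(result).print();
        env.execute();
    }
}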