Usage of org.apache.flink.table.api.bridge.java.StreamTableEnvironment in the Apache Flink project:
class HBaseConnectorITCase, method testTableSink.
@Test
public void testTableSink() throws Exception {
    // Streams every column family from testTable1 into testTable2 via an
    // INSERT INTO job, then re-reads testTable2 with a batch scan and compares
    // the collected rows against the expected text representation.
    StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamEnv, streamSettings);

    // Register the pre-populated source table and the (initially empty) sink table.
    streamTableEnv.executeSql(createHBaseTableDDL(TEST_TABLE_1, false));
    String sinkDDL = createHBaseTableDDL(TEST_TABLE_2, false);
    streamTableEnv.executeSql(sinkDDL);

    String insertQuery =
            "INSERT INTO " + TEST_TABLE_2
                    + " SELECT rowkey, family1, family2, family3 FROM " + TEST_TABLE_1;
    // await() blocks until the streaming insert job finishes.
    streamTableEnv.executeSql(insertQuery).await();

    // Verify the sink contents with a separate batch environment.
    TableEnvironment batchEnv = TableEnvironment.create(batchSettings);
    batchEnv.executeSql(sinkDDL);
    Table scanned = batchEnv.sqlQuery(
            "SELECT "
                    + " h.rowkey, "
                    + " h.family1.col1, "
                    + " h.family2.col1, "
                    + " h.family2.col2, "
                    + " h.family3.col1, "
                    + " h.family3.col2, "
                    + " h.family3.col3 "
                    + "FROM " + TEST_TABLE_2 + " AS h");
    List<Row> actualRows = CollectionUtil.iteratorToList(scanned.execute().collect());

    String expected =
            "+I[1, 10, Hello-1, 100, 1.01, false, Welt-1]\n"
                    + "+I[2, 20, Hello-2, 200, 2.02, true, Welt-2]\n"
                    + "+I[3, 30, Hello-3, 300, 3.03, false, Welt-3]\n"
                    + "+I[4, 40, null, 400, 4.04, true, Welt-4]\n"
                    + "+I[5, 50, Hello-5, 500, 5.05, false, Welt-5]\n"
                    + "+I[6, 60, Hello-6, 600, 6.06, true, Welt-6]\n"
                    + "+I[7, 70, Hello-7, 700, 7.07, false, Welt-7]\n"
                    + "+I[8, 80, null, 800, 8.08, true, Welt-8]\n";
    TestBaseUtils.compareResultAsText(actualRows, expected);
}
Usage of org.apache.flink.table.api.bridge.java.StreamTableEnvironment in the Apache Flink project:
class HBaseConnectorITCase, method testTableSourceSinkWithDDL.
@Test
public void testTableSourceSinkWithDDL() throws Exception {
    // End-to-end test: streams all column families (including family4, which
    // carries timestamp/date/time/decimal columns) from testTable1 into the
    // empty testTable3, then scans testTable3 in batch mode and compares the
    // sorted row strings against the expected values.
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(execEnv, streamSettings);

    // register HBase table testTable1 which contains test data
    String table1DDL = createHBaseTableDDL(TEST_TABLE_1, true);
    tEnv.executeSql(table1DDL);

    // register HBase table which is empty
    String table3DDL = createHBaseTableDDL(TEST_TABLE_3, true);
    tEnv.executeSql(table3DDL);

    String insertStatement =
            "INSERT INTO " + TEST_TABLE_3
                    + " SELECT rowkey,"
                    + " family1,"
                    + " family2,"
                    + " family3,"
                    + " family4"
                    + " from " + TEST_TABLE_1;
    // await() blocks until the streaming insert job finishes.
    tEnv.executeSql(insertStatement).await();

    // start a batch scan job to verify contents in HBase table
    TableEnvironment batchEnv = TableEnvironment.create(batchSettings);
    batchEnv.executeSql(table3DDL);
    String query =
            "SELECT "
                    + " h.rowkey, "
                    + " h.family1.col1, "
                    + " h.family2.col1, "
                    + " h.family2.col2, "
                    + " h.family3.col1, "
                    + " h.family3.col2, "
                    + " h.family3.col3, "
                    + " h.family4.col1, "
                    + " h.family4.col2, "
                    + " h.family4.col3, "
                    + " h.family4.col4 "
                    + " FROM " + TEST_TABLE_3 + " AS h";
    // FIX: run the verification scan on batchEnv (where table3DDL was just
    // registered for exactly this purpose), not on the streaming tEnv. The
    // previous code queried tEnv, leaving batchEnv as dead code and
    // contradicting both the comment above and the sibling testTableSink.
    Iterator<Row> collected = batchEnv.executeSql(query).collect();
    List<String> result =
            CollectionUtil.iteratorToList(collected).stream()
                    .map(Row::toString)
                    .sorted()
                    .collect(Collectors.toList());

    List<String> expected = new ArrayList<>();
    expected.add("+I[1, 10, Hello-1, 100, 1.01, false, Welt-1, 2019-08-18T19:00, 2019-08-18, 19:00, 12345678.0001]");
    expected.add("+I[2, 20, Hello-2, 200, 2.02, true, Welt-2, 2019-08-18T19:01, 2019-08-18, 19:01, 12345678.0002]");
    expected.add("+I[3, 30, Hello-3, 300, 3.03, false, Welt-3, 2019-08-18T19:02, 2019-08-18, 19:02, 12345678.0003]");
    expected.add("+I[4, 40, null, 400, 4.04, true, Welt-4, 2019-08-18T19:03, 2019-08-18, 19:03, 12345678.0004]");
    expected.add("+I[5, 50, Hello-5, 500, 5.05, false, Welt-5, 2019-08-19T19:10, 2019-08-19, 19:10, 12345678.0005]");
    expected.add("+I[6, 60, Hello-6, 600, 6.06, true, Welt-6, 2019-08-19T19:20, 2019-08-19, 19:20, 12345678.0006]");
    expected.add("+I[7, 70, Hello-7, 700, 7.07, false, Welt-7, 2019-08-19T19:30, 2019-08-19, 19:30, 12345678.0007]");
    expected.add("+I[8, 80, null, 800, 8.08, true, Welt-8, 2019-08-19T19:40, 2019-08-19, 19:40, 12345678.0008]");
    assertEquals(expected, result);
}
Usage of org.apache.flink.table.api.bridge.java.StreamTableEnvironment in the Apache Flink project:
class DataStreamJavaITCase, method testMultiChangelogStreamUpsert.
@Test
public void testMultiChangelogStreamUpsert() throws Exception {
    // Left-joins an insert-only table (T1) with an upsert table (T2) and a
    // second insert-only table (T3, composite primary key), then converts the
    // join result into an upsert changelog stream and checks the materialized
    // rows.
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

    final Schema t1Schema =
            Schema.newBuilder()
                    .column("pk", "INT NOT NULL")
                    .column("x", "STRING NOT NULL")
                    .primaryKey("pk")
                    .build();
    createTableFromElements(
            tableEnv,
            "T1",
            ChangelogMode.insertOnly(),
            t1Schema,
            Arrays.asList(Types.INT, Types.STRING),
            Row.ofKind(RowKind.INSERT, 1, "1"),
            Row.ofKind(RowKind.INSERT, 2, "2"));

    final Schema t2Schema =
            Schema.newBuilder()
                    .column("pk", "INT NOT NULL")
                    .column("y", "STRING NOT NULL")
                    .column("some_value", "DOUBLE NOT NULL")
                    .primaryKey("pk")
                    .build();
    createTableFromElements(
            tableEnv,
            "T2",
            ChangelogMode.upsert(),
            t2Schema,
            Arrays.asList(Types.INT, Types.STRING, Types.DOUBLE),
            Row.ofKind(RowKind.INSERT, 1, "A", 1.0),
            Row.ofKind(RowKind.INSERT, 2, "B", 2.0),
            Row.ofKind(RowKind.UPDATE_AFTER, 1, "A", 1.1),
            Row.ofKind(RowKind.UPDATE_AFTER, 2, "B", 2.1));

    final Schema t3Schema =
            Schema.newBuilder()
                    .column("pk1", "STRING NOT NULL")
                    .column("pk2", "STRING NOT NULL")
                    .column("some_other_value", "DOUBLE NOT NULL")
                    .primaryKey("pk1", "pk2")
                    .build();
    createTableFromElements(
            tableEnv,
            "T3",
            ChangelogMode.insertOnly(),
            t3Schema,
            Arrays.asList(Types.STRING, Types.STRING, Types.DOUBLE),
            Row.ofKind(RowKind.INSERT, "1", "A", 10.0),
            Row.ofKind(RowKind.INSERT, "1", "B", 11.0));

    final Table resultTable =
            tableEnv.sqlQuery(
                    "SELECT\n"
                            + "T1.pk,\n"
                            + "T2.some_value * T3.some_other_value,\n"
                            + "T3.pk1,\n"
                            + "T3.pk2\n"
                            + "FROM T1\n"
                            + "LEFT JOIN T2 on T1.pk = T2.pk\n"
                            + "LEFT JOIN T3 ON T1.x = T3.pk1 AND T2.y = T3.pk2");

    final Schema sinkSchema =
            Schema.newBuilder()
                    .column("pk", "INT NOT NULL")
                    .column("some_calculated_value", "DOUBLE")
                    .column("pk1", "STRING")
                    .column("pk2", "STRING")
                    .primaryKey("pk")
                    .build();
    final DataStream<Row> resultStream =
            tableEnv.toChangelogStream(resultTable, sinkSchema, ChangelogMode.upsert());
    testMaterializedResult(
            resultStream, 0, Row.of(2, null, null, null), Row.of(1, 11.0, "1", "A"));
}
Usage of org.apache.flink.table.api.bridge.java.StreamTableEnvironment in the Apache Flink project:
class DataStreamJavaITCase, method testFromDataStreamWithRow.
@Test
public void testFromDataStreamWithRow() {
    // Converts a DataStream<Row> with named fields — an INT, a nested ROW,
    // and a MAP — into a Table and verifies that field names, field order,
    // and null values are preserved in both the schema and the data.
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

    final String[] fieldNames = {"b", "c", "a"};
    final TypeInformation<Row> rowTypeInfo =
            Types.ROW_NAMED(
                    fieldNames,
                    Types.INT,
                    Types.ROW(Types.BOOLEAN, Types.STRING),
                    Types.MAP(Types.STRING, Types.DOUBLE));

    // Second row exercises nulls at every nesting level.
    final Row[] testRows =
            new Row[] {
                Row.of(12, Row.of(false, "hello"), Collections.singletonMap("world", 2.0)),
                Row.of(null, Row.of(false, null), Collections.singletonMap("world", null))
            };

    final DataStream<Row> dataStream = env.fromCollection(Arrays.asList(testRows), rowTypeInfo);
    final TableResult result = tableEnv.fromDataStream(dataStream).execute();

    testSchema(
            result,
            Column.physical("b", INT()),
            Column.physical("c", ROW(FIELD("f0", BOOLEAN()), FIELD("f1", STRING()))),
            Column.physical("a", MAP(STRING(), DOUBLE())));
    testResult(result, testRows);
}
Usage of org.apache.flink.table.api.bridge.java.StreamTableEnvironment in the Apache Flink project:
class DataStreamJavaITCase, method testFromDataStreamAtomic.
@Test
public void testFromDataStreamAtomic() {
    // An atomic (non-composite) Integer element type is wrapped into a single
    // column named "f0" with type INT NOT NULL; the values pass through
    // unchanged.
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final DataStream<Integer> numbers = env.fromElements(1, 2, 3, 4, 5);

    final TableResult result = tableEnv.fromDataStream(numbers).execute();

    testSchema(result, Column.physical("f0", INT().notNull()));
    testResult(result, Row.of(1), Row.of(2), Row.of(3), Row.of(4), Row.of(5));
}
Aggregations