Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
From the class HBaseConnectorITCase, method testTableSourceReadAsByteArray:
@Test
public void testTableSourceReadAsByteArray() {
    TableEnvironment tEnv = TableEnvironment.create(batchSettings);
    tEnv.executeSql(
            "CREATE TABLE hTable ("
                    + " family2 ROW<col1 BYTES, col2 BYTES>,"
                    + " rowkey INT" // no primary key syntax
                    + ") WITH ("
                    + " 'connector' = 'hbase-2.2',"
                    + " 'table-name' = '" + TEST_TABLE_1 + "',"
                    + " 'zookeeper.quorum' = '" + getZookeeperQuorum() + "'"
                    + ")");
    tEnv.registerFunction("toUTF8", new ToUTF8());
    tEnv.registerFunction("toLong", new ToLong());
    Table table = tEnv.sqlQuery(
            "SELECT toUTF8(h.family2.col1), toLong(h.family2.col2) FROM hTable AS h");
    List<Row> results = CollectionUtil.iteratorToList(table.execute().collect());
    String expected =
            "+I[Hello-1, 100]\n" + "+I[Hello-2, 200]\n" + "+I[Hello-3, 300]\n"
                    + "+I[null, 400]\n" + "+I[Hello-5, 500]\n" + "+I[Hello-6, 600]\n"
                    + "+I[Hello-7, 700]\n" + "+I[null, 800]\n";
    TestBaseUtils.compareResultAsText(results, expected);
}
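The ToUTF8 and ToLong scalar functions registered above are helpers from the surrounding test class and are not shown in this snippet. A minimal sketch of what they plausibly look like, assuming they decode HBase-serialized values with org.apache.hadoop.hbase.util.Bytes:

import org.apache.flink.table.functions.ScalarFunction;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical reconstructions of the test helpers; the real ones may differ in detail.
public static class ToUTF8 extends ScalarFunction {
    public String eval(byte[] bytes) {
        // decode an HBase-encoded byte[] as a UTF-8 string (returns null for null input)
        return Bytes.toString(bytes);
    }
}

public static class ToLong extends ScalarFunction {
    public long eval(byte[] bytes) {
        // decode an HBase-encoded byte[] as a big-endian long
        return Bytes.toLong(bytes);
    }
}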
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
From the class HBaseConnectorITCase, method testTableSourceSinkWithDDL:
@Test
public void testTableSourceSinkWithDDL() throws Exception {
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(execEnv, streamSettings);
    // register HBase table testTable1, which contains test data
    String table1DDL = createHBaseTableDDL(TEST_TABLE_1, true);
    tEnv.executeSql(table1DDL);
    // register an empty HBase table
    String table3DDL = createHBaseTableDDL(TEST_TABLE_3, true);
    tEnv.executeSql(table3DDL);
    String insertStatement =
            "INSERT INTO " + TEST_TABLE_3
                    + " SELECT rowkey, family1, family2, family3, family4"
                    + " FROM " + TEST_TABLE_1;
    TableResult tableResult = tEnv.executeSql(insertStatement);
    // wait for the insert job to finish
    tableResult.await();
    assertEquals("Expected INSERT rowKind", RowKind.INSERT, tableResult.collect().next().getKind());
    // start a batch scan job to verify the contents of the HBase table
    TableEnvironment batchEnv = TableEnvironment.create(batchSettings);
    batchEnv.executeSql(table3DDL);
    List<String> expected = new ArrayList<>();
    expected.add("+I[1, 10, Hello-1, 100, 1.01, false, Welt-1, 2019-08-18T19:00, 2019-08-18, 19:00, 12345678.0001]");
    expected.add("+I[2, 20, Hello-2, 200, 2.02, true, Welt-2, 2019-08-18T19:01, 2019-08-18, 19:01, 12345678.0002]");
    expected.add("+I[3, 30, Hello-3, 300, 3.03, false, Welt-3, 2019-08-18T19:02, 2019-08-18, 19:02, 12345678.0003]");
    expected.add("+I[4, 40, null, 400, 4.04, true, Welt-4, 2019-08-18T19:03, 2019-08-18, 19:03, 12345678.0004]");
    expected.add("+I[5, 50, Hello-5, 500, 5.05, false, Welt-5, 2019-08-19T19:10, 2019-08-19, 19:10, 12345678.0005]");
    expected.add("+I[6, 60, Hello-6, 600, 6.06, true, Welt-6, 2019-08-19T19:20, 2019-08-19, 19:20, 12345678.0006]");
    expected.add("+I[7, 70, Hello-7, 700, 7.07, false, Welt-7, 2019-08-19T19:30, 2019-08-19, 19:30, 12345678.0007]");
    expected.add("+I[8, 80, null, 800, 8.08, true, Welt-8, 2019-08-19T19:40, 2019-08-19, 19:40, 12345678.0008]");
    Table countTable = batchEnv.sqlQuery("SELECT COUNT(h.rowkey) FROM " + TEST_TABLE_3 + " AS h");
    assertEquals(Long.valueOf(expected.size()), countTable.execute().collect().next().getField(0));
    String query =
            "SELECT h.rowkey,"
                    + " h.family1.col1,"
                    + " h.family2.col1, h.family2.col2,"
                    + " h.family3.col1, h.family3.col2, h.family3.col3,"
                    + " h.family4.col1, h.family4.col2, h.family4.col3, h.family4.col4"
                    + " FROM " + TEST_TABLE_3 + " AS h";
    TableResult tableResult3 = batchEnv.executeSql(query);
    List<String> result =
            Lists.newArrayList(tableResult3.collect()).stream()
                    .map(Row::toString)
                    .sorted()
                    .collect(Collectors.toList());
    assertEquals(expected, result);
}
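createHBaseTableDDL is a helper of the test class and is not shown here. Based on the columns queried and the expected rows above, it presumably generates DDL along these lines (the family4 types, the DECIMAL precision, and the connector version are inferred, and the helper's boolean flag is omitted):

// Hedged sketch of the DDL the helper plausibly emits; not the actual implementation.
private static String createHBaseTableDDLSketch(String tableName) {
    return "CREATE TABLE " + tableName + " ("
            + " rowkey INT,"
            + " family1 ROW<col1 INT>,"
            + " family2 ROW<col1 STRING, col2 BIGINT>,"
            + " family3 ROW<col1 DOUBLE, col2 BOOLEAN, col3 STRING>,"
            + " family4 ROW<col1 TIMESTAMP(3), col2 DATE, col3 TIME(0), col4 DECIMAL(12, 4)>,"
            + " PRIMARY KEY (rowkey) NOT ENFORCED"
            + ") WITH ("
            + " 'connector' = 'hbase-2.2',"
            + " 'table-name' = '" + tableName + "',"
            + " 'zookeeper.quorum' = '" + getZookeeperQuorum() + "'"
            + ")";
}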
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
From the class HBaseConnectorITCase, method testTableSourceFullScan:
// -------------------------------------------------------------------------------------
// HBaseTableSource tests
// -------------------------------------------------------------------------------------
@Test
public void testTableSourceFullScan() {
    TableEnvironment tEnv = TableEnvironment.create(batchSettings);
    tEnv.executeSql(
            "CREATE TABLE hTable ("
                    + " family1 ROW<col1 INT>,"
                    + " family2 ROW<col1 STRING, col2 BIGINT>,"
                    + " family3 ROW<col1 DOUBLE, col2 BOOLEAN, col3 STRING>,"
                    + " rowkey INT,"
                    + " PRIMARY KEY (rowkey) NOT ENFORCED"
                    + ") WITH ("
                    + " 'connector' = 'hbase-1.4',"
                    + " 'table-name' = '" + TEST_TABLE_1 + "',"
                    + " 'zookeeper.quorum' = '" + getZookeeperQuorum() + "'"
                    + ")");
    Table table = tEnv.sqlQuery(
            "SELECT h.family1.col1,"
                    + " h.family2.col1, h.family2.col2,"
                    + " h.family3.col1, h.family3.col2, h.family3.col3"
                    + " FROM hTable AS h");
    List<Row> results = CollectionUtil.iteratorToList(table.execute().collect());
    String expected =
            "+I[10, Hello-1, 100, 1.01, false, Welt-1]\n" + "+I[20, Hello-2, 200, 2.02, true, Welt-2]\n"
                    + "+I[30, Hello-3, 300, 3.03, false, Welt-3]\n" + "+I[40, null, 400, 4.04, true, Welt-4]\n"
                    + "+I[50, Hello-5, 500, 5.05, false, Welt-5]\n" + "+I[60, Hello-6, 600, 6.06, true, Welt-6]\n"
                    + "+I[70, Hello-7, 700, 7.07, false, Welt-7]\n" + "+I[80, null, 800, 8.08, true, Welt-8]\n";
    TestBaseUtils.compareResultAsText(results, expected);
}
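batchSettings and streamSettings are fields of the test base class that these snippets rely on but do not define. They are presumably built with the standard EnvironmentSettings builder:

// Assumed definitions; the actual fields in the test base may differ.
private static final EnvironmentSettings batchSettings =
        EnvironmentSettings.newInstance().inBatchMode().build();
private static final EnvironmentSettings streamSettings =
        EnvironmentSettings.newInstance().inStreamingMode().build();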
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
From the class HiveTableSourceITCase, method testParallelismSettingWithFileNum:
@Test
public void testParallelismSettingWithFileNum() throws IOException {
    // create three test files, each holding two rows
    File dir = Files.createTempDirectory("testParallelismSettingWithFileNum").toFile();
    dir.deleteOnExit();
    for (int i = 0; i < 3; i++) {
        File csv = new File(dir, "data" + i + ".csv");
        csv.createNewFile();
        FileUtils.writeFileUtf8(csv, "1|100\n2|200\n");
    }
    TableEnvironment tEnv = createTableEnv();
    tEnv.executeSql(
            "CREATE EXTERNAL TABLE source_db.test_parallelism_setting_with_file_num (a INT, b INT)"
                    + " ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LOCATION '" + dir.toString() + "'");
    Table table = tEnv.sqlQuery("select * from hive.source_db.test_parallelism_setting_with_file_num");
    // with three input files, the inferred source parallelism is 3
    testParallelismSettingTranslateAndAssert(3, table, tEnv);
    // capping the inferred parallelism at 2 takes effect on the same query
    tEnv.getConfig().getConfiguration()
            .setInteger(HiveOptions.TABLE_EXEC_HIVE_INFER_SOURCE_PARALLELISM_MAX, 2);
    testParallelismSettingTranslateAndAssert(2, table, tEnv);
}
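The assertions above rely on Hive source parallelism inference: with three input files the planner infers a source parallelism of 3, and the max option then caps the second translation at 2. For reference, both knobs live in org.apache.flink.connectors.hive.HiveOptions and can be set on the table config (the default values noted here are assumptions):

Configuration conf = tEnv.getConfig().getConfiguration();
// master switch for inferring source parallelism from input splits (assumed default: true)
conf.setBoolean(HiveOptions.TABLE_EXEC_HIVE_INFER_SOURCE_PARALLELISM, true);
// upper bound for the inferred parallelism (assumed default: 1000)
conf.setInteger(HiveOptions.TABLE_EXEC_HIVE_INFER_SOURCE_PARALLELISM_MAX, 2);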
Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.
From the class HiveTableSourceITCase, method createTableEnv:
private static TableEnvironment createTableEnv() {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    tableEnv.registerCatalog("hive", hiveCatalog);
    tableEnv.useCatalog("hive");
    return tableEnv;
}
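HiveTestUtils.createTableEnvInBatchMode is a test utility. A rough public-API equivalent, assuming a HiveCatalog backed by a local Hive conf directory (the catalog name matches the snippets; the database name and path are placeholders):

TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
// "/path/to/hive-conf" is a placeholder, not a value from the original tests
HiveCatalog hiveCatalog = new HiveCatalog("hive", "default", "/path/to/hive-conf");
tableEnv.registerCatalog("hive", hiveCatalog);
tableEnv.useCatalog("hive");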