Example usage of org.apache.flink.table.api.TableEnvironment in the Apache Flink project,
taken from class HiveModuleTest, method testConstantReturnValue:
@Test
public void testConstantReturnValue() {
    // With only the Hive module loaded, str_to_map must parse the literal
    // 'a:1,b:2,c:3' into a three-entry map, splitting entries on ',' and
    // key/value pairs on ':'.
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode();
    tableEnv.unloadModule("core");
    tableEnv.loadModule("hive", new HiveModule());
    String query = "select str_to_map('a:1,b:2,c:3',',',':')";
    List<Row> rows =
            CollectionUtil.iteratorToList(tableEnv.sqlQuery(query).execute().collect());
    assertEquals("[{a=1, b=2, c=3}]", rows.toString());
}
Example usage of org.apache.flink.table.api.TableEnvironment in the Apache Flink project,
taken from class HiveModuleTest, method testFunctionsNeedSessionState:
@Test
public void testFunctionsNeedSessionState() throws Exception {
    // Verifies Hive functions that depend on session state (current_timestamp,
    // current_date) can be planned and executed, and that mod/pmod resolve to
    // the expected implementations when both the hive and core modules are loaded.
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode();
    tableEnv.unloadModule("core");
    tableEnv.loadModule("hive", new HiveModule());
    tableEnv.loadModule("core", CoreModule.INSTANCE);
    // collect() returns a closeable result iterator; close it so the collect
    // job's resources are released instead of being leaked.
    try (AutoCloseable ignored =
            tableEnv.sqlQuery("select current_timestamp,current_date").execute().collect()) {
        // Values are time-dependent and intentionally not asserted; we only
        // check that the query plans and runs.
    }
    // mod keeps the sign of the dividend (-1), pmod returns the positive residue (1).
    List<Row> results =
            CollectionUtil.iteratorToList(
                    tableEnv.sqlQuery("select mod(-1,2),pmod(-1,2)").execute().collect());
    assertEquals("[-1,1]", results.toString());
}
Example usage of org.apache.flink.table.api.TableEnvironment in the Apache Flink project,
taken from class HiveModuleTest, method testCallUDFWithNoParam:
@Test
public void testCallUDFWithNoParam() {
    // Zero-argument calls to Hive's `array`() and `map`() should produce an
    // empty list and an empty map respectively.
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode();
    tableEnv.unloadModule("core");
    tableEnv.loadModule("hive", new HiveModule());
    tableEnv.loadModule("core", CoreModule.INSTANCE);
    String query = "select `array`(),`map`()";
    List<Row> rows =
            CollectionUtil.iteratorToList(tableEnv.sqlQuery(query).execute().collect());
    assertEquals("[[],{}]", rows.toString());
}
Example usage of org.apache.flink.table.api.TableEnvironment in the Apache Flink project,
taken from class UnsignedTypeConversionITCase, method testUnsignedType:
@Test
public void testUnsignedType() throws Exception {
    // Round-trips rows containing unsigned MySQL column types: writes via a
    // Flink JDBC sink, then compares what plain JDBC and a Flink JDBC source read back.
    try (Connection con = DriverManager.getConnection(MYSQL_CONTAINER.getJdbcUrl(), USER, PASSWORD)) {
        StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        TableEnvironment tableEnv = StreamTableEnvironment.create(sEnv);
        createMysqlTable(con);
        createFlinkTable(tableEnv);
        prepareData(tableEnv);
        // write data to db
        tableEnv.executeSql(format("insert into jdbc_sink select %s from data", join(",", COLUMNS))).await();
        // read data from db using jdbc connection and compare; the ResultSet is
        // part of the try-with-resources list so it is closed deterministically
        try (PreparedStatement ps =
                        con.prepareStatement(format("select %s from %s", join(",", COLUMNS), TABLE_NAME));
                ResultSet resultSet = ps.executeQuery()) {
            while (resultSet.next()) {
                for (int i = 0; i < ROW.length; i++) {
                    assertThat(resultSet.getObject(i + 1, ROW[i].getClass())).isEqualTo(ROW[i]);
                }
            }
        }
        // read data from db using flink and compare; close the result iterator so
        // the collect job's resources are released instead of being leaked
        String sql = format("select %s from jdbc_source", join(",", COLUMNS));
        try (CloseableIterator<Row> collected = tableEnv.executeSql(sql).collect()) {
            List<Row> result = CollectionUtil.iteratorToList(collected);
            assertThat(result).containsOnly(Row.ofKind(RowKind.INSERT, ROW));
        }
    }
}
Example usage of org.apache.flink.table.api.TableEnvironment in the Apache Flink project,
taken from class JdbcDynamicTableSinkITCase, method testReadingFromChangelogSource:
@Test
public void testReadingFromChangelogSource() throws Exception {
    // Feeds a full changelog (inserts, updates, deletes) through the values
    // connector into a keyed JDBC sink, then checks the final upserted state.
    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.newInstance().build());
    String dataId = TestValuesTableFactory.registerData(TestData.userChangelog());
    String sourceDdl =
            "CREATE TABLE user_logs (\n"
                    + " user_id STRING,\n"
                    + " user_name STRING,\n"
                    + " email STRING,\n"
                    + " balance DECIMAL(18,2),\n"
                    + " balance2 AS balance * 2\n"
                    + ") WITH (\n"
                    + " 'connector' = 'values',\n"
                    + " 'data-id' = '"
                    + dataId
                    + "',\n"
                    + " 'changelog-mode' = 'I,UA,UB,D'\n"
                    + ")";
    String sinkDdl =
            "CREATE TABLE user_sink (\n"
                    + " user_id STRING PRIMARY KEY NOT ENFORCED,\n"
                    + " user_name STRING,\n"
                    + " email STRING,\n"
                    + " balance DECIMAL(18,2),\n"
                    + " balance2 DECIMAL(18,2)\n"
                    + ") WITH (\n"
                    + " 'connector' = 'jdbc',"
                    + " 'url'='"
                    + DB_URL
                    + "',"
                    + " 'table-name' = '"
                    + USER_TABLE
                    + "',"
                    + " 'sink.buffer-flush.max-rows' = '2',"
                    + " 'sink.buffer-flush.interval' = '0'" // disable async flush
                    + ")";
    tEnv.executeSql(sourceDdl);
    tEnv.executeSql(sinkDdl);
    tEnv.executeSql("INSERT INTO user_sink SELECT * FROM user_logs").await();
    Row[] expected =
            new Row[] {
                Row.of("user1", "Tom", "tom123@gmail.com", new BigDecimal("8.10"), new BigDecimal("16.20")),
                Row.of("user3", "Bailey", "bailey@qq.com", new BigDecimal("9.99"), new BigDecimal("19.98")),
                Row.of("user4", "Tina", "tina@gmail.com", new BigDecimal("11.30"), new BigDecimal("22.60"))
            };
    String[] columns = new String[] {"user_id", "user_name", "email", "balance", "balance2"};
    check(expected, DB_URL, USER_TABLE, columns);
}
Aggregations