Example usage of org.apache.flink.table.api.internal.TableImpl in the Apache Flink project:
class HiveLookupJoinITCase, method testLookupJoinPartitionedTable.
@Test
public void testLookupJoinPartitionedTable() throws Exception {
    // Seed the partitioned Hive table using dynamic partitions, written
    // through a separate batch environment in HIVE dialect.
    TableEnvironment batchEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    batchEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    batchEnv.useCatalog(hiveCatalog.getName());
    String seedSql =
            "insert overwrite partition_table_1 values "
                    + "(1,'a',08,2019,'09','01'),"
                    + "(1,'a',10,2020,'09','31'),"
                    + "(2,'a',21,2020,'09','31'),"
                    + "(2,'b',22,2020,'09','31'),"
                    + "(3,'c',33,2020,'09','31'),"
                    + "(1,'a',101,2020,'08','01'),"
                    + "(2,'a',121,2020,'08','01'),"
                    + "(2,'b',122,2020,'08','01')";
    batchEnv.executeSql(seedSql).await();
    // Temporal (lookup) join of the probe stream against the partitioned table.
    String lookupSql =
            "select p.x, p.y, b.z, b.pt_year, b.pt_mon, b.pt_day from "
                    + " default_catalog.default_database.probe as p"
                    + " join partition_table_1 for system_time as of p.p as b on p.x=b.x and p.y=b.y";
    TableImpl joined = (TableImpl) tableEnv.sqlQuery(lookupSql);
    List<Row> rows = CollectionUtil.iteratorToList(joined.execute().collect());
    // Only rows from the latest partition (2020/09/31) should match.
    assertEquals(
            "[+I[1, a, 10, 2020, 09, 31], +I[2, b, 22, 2020, 09, 31], +I[3, c, 33, 2020, 09, 31]]",
            rows.toString());
}
Example usage of org.apache.flink.table.api.internal.TableImpl in the Apache Flink project:
class HiveLookupJoinITCase, method testLookupJoinBoundedTable.
@Test
public void testLookupJoinBoundedTable() throws Exception {
    // Fill the bounded build-side table using the default SQL dialect.
    tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
    String seedSql =
            "insert into bounded_table values (1,'a',10),(2,'a',21),(2,'b',22),(3,'c',33)";
    tableEnv.executeSql(seedSql).await();
    // Temporal (lookup) join of the probe stream against the bounded table.
    String lookupSql =
            "select p.x, p.y, b.z from "
                    + " default_catalog.default_database.probe as p "
                    + " join bounded_table for system_time as of p.p as b on p.x=b.x and p.y=b.y";
    TableImpl joined = (TableImpl) tableEnv.sqlQuery(lookupSql);
    List<Row> rows = CollectionUtil.iteratorToList(joined.execute().collect());
    // One matching row per distinct (x, y) probe key.
    assertEquals("[+I[1, a, 10], +I[2, b, 22], +I[3, c, 33]]", rows.toString());
}
Aggregations