Search in sources:

Example 6 with TableImpl

Use of org.apache.flink.table.api.internal.TableImpl in project flink by apache.

The class HiveLookupJoinITCase, method testLookupJoinPartitionedTable:

@Test
public void testLookupJoinPartitionedTable() throws Exception {
    // Seed the partitioned dimension table through a Hive-dialect batch
    // environment, relying on dynamic partition insertion.
    TableEnvironment hiveBatchEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    hiveBatchEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    hiveBatchEnv.useCatalog(hiveCatalog.getName());
    hiveBatchEnv
            .executeSql(
                    "insert overwrite partition_table_1 values "
                            + "(1,'a',08,2019,'09','01'),"
                            + "(1,'a',10,2020,'09','31'),"
                            + "(2,'a',21,2020,'09','31'),"
                            + "(2,'b',22,2020,'09','31'),"
                            + "(3,'c',33,2020,'09','31'),"
                            + "(1,'a',101,2020,'08','01'),"
                            + "(2,'a',121,2020,'08','01'),"
                            + "(2,'b',122,2020,'08','01')")
            .await();
    // Temporal (lookup) join of the default-catalog probe table against the
    // partitioned Hive table on (x, y); expected rows come from the latest
    // partition only.
    String lookupQuery =
            "select p.x, p.y, b.z, b.pt_year, b.pt_mon, b.pt_day from "
                    + " default_catalog.default_database.probe as p"
                    + " join partition_table_1 for system_time as of p.p as b on p.x=b.x and p.y=b.y";
    TableImpl joined = (TableImpl) tableEnv.sqlQuery(lookupQuery);
    List<Row> collected = CollectionUtil.iteratorToList(joined.execute().collect());
    assertEquals(
            "[+I[1, a, 10, 2020, 09, 31], +I[2, b, 22, 2020, 09, 31], +I[3, c, 33, 2020, 09, 31]]",
            collected.toString());
}
Also used : TableImpl(org.apache.flink.table.api.internal.TableImpl) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)

Example 7 with TableImpl

Use of org.apache.flink.table.api.internal.TableImpl in project flink by apache.

The class HiveLookupJoinITCase, method testLookupJoinBoundedTable:

@Test
public void testLookupJoinBoundedTable() throws Exception {
    // Populate the bounded dimension table using the default SQL dialect.
    tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
    tableEnv
            .executeSql("insert into bounded_table values (1,'a',10),(2,'a',21),(2,'b',22),(3,'c',33)")
            .await();
    // Lookup join the probe table against the bounded table on (x, y) and
    // verify the joined rows.
    String lookupQuery =
            "select p.x, p.y, b.z from "
                    + " default_catalog.default_database.probe as p "
                    + " join bounded_table for system_time as of p.p as b on p.x=b.x and p.y=b.y";
    TableImpl joined = (TableImpl) tableEnv.sqlQuery(lookupQuery);
    List<Row> rows = CollectionUtil.iteratorToList(joined.execute().collect());
    assertEquals("[+I[1, a, 10], +I[2, b, 22], +I[3, c, 33]]", rows.toString());
}
Also used : TableImpl(org.apache.flink.table.api.internal.TableImpl) Row(org.apache.flink.types.Row) Test(org.junit.Test)

Aggregations

TableImpl (org.apache.flink.table.api.internal.TableImpl)7 Row (org.apache.flink.types.Row)7 Test (org.junit.Test)7 TableEnvironment (org.apache.flink.table.api.TableEnvironment)6 ArrayList (java.util.ArrayList)1 CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable)1 ObjectPath (org.apache.flink.table.catalog.ObjectPath)1 FileSystem (org.apache.hadoop.fs.FileSystem)1 Path (org.apache.hadoop.fs.Path)1 HiveConf (org.apache.hadoop.hive.conf.HiveConf)1 Table (org.apache.hadoop.hive.metastore.api.Table)1