Example 91 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class TableEnvHiveConnectorITCase, method testLocationWithComma.

@Test
public void testLocationWithComma() throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    File location = tempFolder.newFolder(",tbl1,location,");
    try {
        // test table location
        tableEnv.executeSql(String.format("create table tbl1 (x int) location '%s'", location.getAbsolutePath()));
        tableEnv.executeSql("insert into tbl1 values (1),(2)").await();
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.executeSql("select * from tbl1").collect());
        assertEquals("[+I[1], +I[2]]", results.toString());
        // test partition location
        tableEnv.executeSql("create table tbl2 (x int) partitioned by (p string)");
        location = tempFolder.newFolder(",");
        tableEnv.executeSql(String.format("alter table tbl2 add partition (p='a') location '%s'", location.getAbsolutePath()));
        tableEnv.executeSql("insert into tbl2 partition (p='a') values (1),(2)").await();
        results = CollectionUtil.iteratorToList(tableEnv.executeSql("select * from tbl2").collect());
        assertEquals("[+I[1, a], +I[2, a]]", results.toString());
        tableEnv.executeSql("insert into tbl2 partition (p) values (3,'b ,')").await();
        results = CollectionUtil.iteratorToList(tableEnv.executeSql("select * from tbl2 where p='b ,'").collect());
        assertEquals("[+I[3, b ,]]", results.toString());
    } finally {
        if (location != null) {
            IOUtils.deleteFileQuietly(location.toPath());
        }
        tableEnv.executeSql("drop table if exists tbl1");
        tableEnv.executeSql("drop table if exists tbl2");
    }
}
Also used: TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) File(java.io.File) Test(org.junit.Test)
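
Every test in this class obtains its environment from a getTableEnvWithHiveCatalog() helper that the snippets do not show. A minimal sketch of what such a helper plausibly does, assuming the HiveTestUtils factory and hiveCatalog field that appear elsewhere on this page (the real helper may differ):

private TableEnvironment getTableEnvWithHiveCatalog() {
    // create a batch-mode environment speaking the Hive dialect (assumption:
    // mirrors the inline setup used by the HiveLookupJoinITCase examples below)
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tableEnv.useCatalog(hiveCatalog.getName());
    return tableEnv;
}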

Example 92 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class TableEnvHiveConnectorITCase, method testParquetNameMapping.

@Test
public void testParquetNameMapping() throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.t1 (x int,y int) stored as parquet");
        tableEnv.executeSql("insert into table db1.t1 values (1,10),(2,20)").await();
        Table hiveTable = hiveCatalog.getHiveTable(new ObjectPath("db1", "t1"));
        String location = hiveTable.getSd().getLocation();
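        // t2 reuses t1's files but declares the columns in reverse order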
        tableEnv.executeSql(String.format("create table db1.t2 (y int,x int) stored as parquet location '%s'", location));
        tableEnv.getConfig().getConfiguration().setBoolean(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER, true);
        assertEquals("[+I[1], +I[2]]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.t1").execute().collect()).toString());
        assertEquals("[+I[1], +I[2]]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.t2").execute().collect()).toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used: ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) Table(org.apache.hadoop.hive.metastore.api.Table) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)
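
The subtlety here is that db1.t2 declares its columns in the opposite order (y, x) while pointing at the files already written for db1.t1 (x, y). Because the Parquet reader maps columns by name rather than by position, x resolves to the same values in both tables; a positional mapping would have surfaced t1's y values instead. A companion assertion in the same spirit (an assumption, not part of the test) would check the other column:

// under name mapping, t2.y reads the parquet column named "y", i.e. 10 and 20
assertEquals(
        "[+I[10], +I[20]]",
        CollectionUtil.iteratorToList(
                tableEnv.sqlQuery("select y from db1.t2").execute().collect()).toString());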

Example 93 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class TableEnvHiveConnectorITCase, method testUpdatePartitionSD.

@Test
public void testUpdatePartitionSD() throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.dest (x int) partitioned by (p string) stored as rcfile");
        tableEnv.executeSql("insert overwrite db1.dest partition (p='1') select 1").await();
        tableEnv.executeSql("alter table db1.dest set fileformat sequencefile");
        tableEnv.executeSql("insert overwrite db1.dest partition (p='1') select 1").await();
        assertEquals("[+I[1, 1]]", CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.dest").execute().collect()).toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used: TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)
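
The assertion only proves the partition is still readable after the format switch; a more direct check of the updated storage descriptor could reuse the metastore accessors from example 92. A hedged sketch (not in the test; the expected input-format class name is an assumption based on Hive's SequenceFile binding):

// fetch the Hive metastore view of the table and inspect its storage descriptor
Table hiveTable = hiveCatalog.getHiveTable(new ObjectPath("db1", "dest"));
assertEquals(
        "org.apache.hadoop.mapred.SequenceFileInputFormat",
        hiveTable.getSd().getInputFormat());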

Example 94 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveLookupJoinITCase, method testLookupJoinPartitionedTableWithPartitionTime.

@Test
public void testLookupJoinPartitionedTableWithPartitionTime() throws Exception {
    // constructs test data using dynamic partition
    TableEnvironment batchEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    batchEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    batchEnv.useCatalog(hiveCatalog.getName());
    batchEnv.executeSql("insert overwrite partition_table_2 values " + "(1,'a',08,2020,'08','01')," + "(1,'a',10,2020,'08','31')," + "(2,'a',21,2019,'08','31')," + "(2,'b',22,2020,'08','31')," + "(3,'c',33,2017,'08','31')," + "(1,'a',101,2017,'09','01')," + "(2,'a',121,2019,'09','01')," + "(2,'b',122,2019,'09','01')").await();
    TableImpl flinkTable = (TableImpl) tableEnv.sqlQuery("select p.x, p.y, b.z, b.pt_year, b.pt_mon, b.pt_day from " + " default_catalog.default_database.probe as p" + " join partition_table_2 for system_time as of p.p as b on p.x=b.x and p.y=b.y");
    List<Row> results = CollectionUtil.iteratorToList(flinkTable.execute().collect());
    assertEquals("[+I[1, a, 10, 2020, 08, 31], +I[2, b, 22, 2020, 08, 31]]", results.toString());
}
Also used: TableImpl(org.apache.flink.table.api.internal.TableImpl) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)
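
The DDL for partition_table_2 is part of the test fixture and is not shown. A plausible sketch of how such a table could be declared for partition-time lookup joins, using Flink's Hive streaming-source options (the exact property values and column types are assumptions, not copied from the fixture):

// the lookup side tracks the latest partition, ordered by partition time
tableEnv.executeSql(
        "create table partition_table_2 (x int, y string, z int)"
                + " partitioned by (pt_year int, pt_mon string, pt_day string)"
                + " tblproperties ("
                + " 'streaming-source.enable' = 'true',"
                + " 'streaming-source.partition.include' = 'latest',"
                + " 'streaming-source.monitor-interval' = '1 h',"
                + " 'streaming-source.partition-order' = 'partition-time')");

Read this way, the expected result makes sense: only rows from the latest partition by partition time (2020-08-31) survive the join.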

Example 95 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveLookupJoinITCase, method testLookupJoinPartitionedTable.

@Test
public void testLookupJoinPartitionedTable() throws Exception {
    // constructs test data using dynamic partition
    TableEnvironment batchEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    batchEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    batchEnv.useCatalog(hiveCatalog.getName());
    batchEnv.executeSql("insert overwrite partition_table_1 values " + "(1,'a',08,2019,'09','01')," + "(1,'a',10,2020,'09','31')," + "(2,'a',21,2020,'09','31')," + "(2,'b',22,2020,'09','31')," + "(3,'c',33,2020,'09','31')," + "(1,'a',101,2020,'08','01')," + "(2,'a',121,2020,'08','01')," + "(2,'b',122,2020,'08','01')").await();
    TableImpl flinkTable = (TableImpl) tableEnv.sqlQuery("select p.x, p.y, b.z, b.pt_year, b.pt_mon, b.pt_day from " + " default_catalog.default_database.probe as p" + " join partition_table_1 for system_time as of p.p as b on p.x=b.x and p.y=b.y");
    List<Row> results = CollectionUtil.iteratorToList(flinkTable.execute().collect());
    assertEquals("[+I[1, a, 10, 2020, 09, 31], +I[2, b, 22, 2020, 09, 31], +I[3, c, 33, 2020, 09, 31]]", results.toString());
}
Also used: TableImpl(org.apache.flink.table.api.internal.TableImpl) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)
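
Both lookup-join tests probe against default_catalog.default_database.probe, whose definition also lives in the fixture. A sketch under stated assumptions: the probe needs a processing-time attribute p for the "for system_time as of p.p" clause, and the datagen connector here is only an illustration (the real fixture likely uses a bounded test source):

tableEnv.executeSql(
        // assumes the default (Flink) SQL dialect is active for this DDL
        "create table default_catalog.default_database.probe ("
                + " x int, y string, p as proctime())"
                + " with ('connector' = 'datagen')");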

Aggregations

TableEnvironment (org.apache.flink.table.api.TableEnvironment): 137 usages
Test (org.junit.Test): 95 usages
Row (org.apache.flink.types.Row): 58 usages
StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment): 38 usages
Table (org.apache.flink.table.api.Table): 27 usages
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19 usages
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 14 usages
ArrayList (java.util.ArrayList): 13 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12 usages
HashMap (java.util.HashMap): 11 usages
EnvironmentSettings (org.apache.flink.table.api.EnvironmentSettings): 10 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 10 usages
TableResult (org.apache.flink.table.api.TableResult): 8 usages
File (java.io.File): 7 usages
Constructor (java.lang.reflect.Constructor): 7 usages
TableImpl (org.apache.flink.table.api.internal.TableImpl): 7 usages
TableException (org.apache.flink.table.api.TableException): 5 usages
List (java.util.List): 4 usages
Configuration (org.apache.flink.configuration.Configuration): 4 usages
TableSchema (org.apache.flink.table.api.TableSchema): 4 usages