Search in sources:

Example 96 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in the Apache Flink project.

Class HiveRunnerITCase, method testWriteNullValues.

@Test
public void testWriteNullValues() throws Exception {
    TableEnvironment tEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    tEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tEnv.useCatalog(hiveCatalog.getName());
    tEnv.executeSql("create database db1");
    try {
        // Source table covering 17 data types.
        tEnv.executeSql(
                "create table db1.src"
                        + "(t tinyint,s smallint,i int,b bigint,f float,d double,de decimal(10,5),ts timestamp,dt date,"
                        + "str string,ch char(5),vch varchar(8),bl boolean,bin binary,arr array<int>,mp map<int,string>,strt struct<f1:int,f2:string>)");
        // A freshly allocated Object[17] is all-null, i.e. one NULL per column.
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "src")
                .addRow(new Object[17])
                .commit();
        hiveShell.execute("create table db1.dest like db1.src");
        tEnv.executeSql("insert into db1.dest select * from db1.src").await();
        List<String> results = hiveShell.executeQuery("select * from db1.dest");
        assertEquals(1, results.size());
        String[] cols = results.get(0).split("\t");
        assertEquals(17, cols.length);
        assertEquals("NULL", cols[0]);
        // All 17 columns should render identically ("NULL"), so the set collapses to one element.
        assertEquals(1, new HashSet<>(Arrays.asList(cols)).size());
    } finally {
        tEnv.executeSql("drop database db1 cascade");
    }
}
Also used : TableEnvironment(org.apache.flink.table.api.TableEnvironment) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 97 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in the Apache Flink project.

Class HiveRunnerITCase, method testDecimal.

@Test
public void testDecimal() throws Exception {
    TableEnvironment tEnv = getTableEnvWithHiveCatalog();
    tEnv.executeSql("create database db1");
    try {
        // Three tables with the same decimal schema: two sources and one sink.
        for (String table : new String[] { "src1", "src2", "dest" }) {
            tEnv.executeSql("create table db1." + table + " (x decimal(10,2))");
        }
        // populate src1 from Hive
        // TABLE keyword in INSERT INTO is mandatory prior to 1.1.0
        hiveShell.execute("insert into table db1.src1 values (1.0),(2.12),(5.123),(5.456),(123456789.12)");
        // Populate src2 with the same values through Flink.
        tEnv.executeSql("insert into db1.src2 values (1.0),(2.12),(5.123),(5.456),(123456789.12)").await();
        // Hive-written and Flink-written data must agree.
        verifyHiveQueryResult("select * from db1.src2", hiveShell.executeQuery("select * from db1.src1"));
        // Copy src1 into dest via Flink -- exercises reading the decimal type from Hive.
        tEnv.executeSql("insert into db1.dest select * from db1.src1").await();
        verifyHiveQueryResult("select * from db1.dest", hiveShell.executeQuery("select * from db1.src1"));
    } finally {
        tEnv.executeSql("drop database db1 cascade");
    }
}
Also used : TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)

Example 98 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in the Apache Flink project.

Class HiveRunnerITCase, method testDynamicPartition.

@Test
public void testDynamicPartition() throws Exception {
    TableEnvironment tEnv = getTableEnvWithHiveCatalog();
    tEnv.executeSql("create database db1");
    try {
        tEnv.executeSql("create table db1.src (x int, y string, z double)");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "src")
                .addRow(new Object[] { 1, "a", 1.1 })
                .addRow(new Object[] { 2, "a", 2.2 })
                .addRow(new Object[] { 3, "b", 3.3 })
                .commit();
        // Both partition columns (p1, p2) are resolved dynamically from the query result.
        tEnv.executeSql("create table db1.dest (x int) partitioned by (p1 string, p2 double)");
        tEnv.executeSql("insert into db1.dest select * from db1.src").await();
        // (a,1.1), (a,2.2) and (b,3.3) give three distinct partitions.
        assertEquals(3, hiveCatalog.listPartitions(new ObjectPath("db1", "dest")).size());
        verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\ta\t1.1", "2\ta\t2.2", "3\tb\t3.3"));
    } finally {
        tEnv.executeSql("drop database db1 cascade");
    }
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)

Example 99 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in the Apache Flink project.

Class HiveRunnerITCase, method testViews.

@Test
public void testViews() throws Exception {
    TableEnvironment tEnv = getTableEnvWithHiveCatalog();
    tEnv.executeSql("create database db1");
    try {
        tEnv.executeSql("create table db1.src (key int,val string)");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "src")
                .addRow(new Object[] { 1, "a" })
                .addRow(new Object[] { 1, "aa" })
                .addRow(new Object[] { 1, "aaa" })
                .addRow(new Object[] { 2, "b" })
                .addRow(new Object[] { 3, "c" })
                .addRow(new Object[] { 3, "ccc" })
                .commit();
        tEnv.executeSql("create table db1.keys (key int,name string)");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "keys")
                .addRow(new Object[] { 1, "key1" })
                .addRow(new Object[] { 2, "key2" })
                .addRow(new Object[] { 3, "key3" })
                .addRow(new Object[] { 4, "key4" })
                .commit();
        // Views are created through Hive, then queried through Flink below.
        hiveShell.execute("create view db1.v1 as select key as k,val as v from db1.src limit 2");
        hiveShell.execute("create view db1.v2 as select key,count(*) from db1.src group by key having count(*)>1 order by key");
        hiveShell.execute("create view db1.v3 as select k.key,k.name,count(*) from db1.src s join db1.keys k on s.key=k.key group by k.key,k.name order by k.key");
        // v1 is a LIMIT 2 projection of src, so the count is 2.
        List<Row> v1Rows = CollectionUtil.iteratorToList(tEnv.sqlQuery("select count(v) from db1.v1").execute().collect());
        assertEquals("[+I[2]]", v1Rows.toString());
        // v2 keeps only keys appearing more than once: key 1 (x3) and key 3 (x2).
        List<Row> v2Rows = CollectionUtil.iteratorToList(tEnv.sqlQuery("select * from db1.v2").execute().collect());
        assertEquals("[+I[1, 3], +I[3, 2]]", v2Rows.toString());
        // v3 joins src with keys and counts per key/name.
        List<Row> v3Rows = CollectionUtil.iteratorToList(tEnv.sqlQuery("select * from db1.v3").execute().collect());
        assertEquals("[+I[1, key1, 3], +I[2, key2, 1], +I[3, key3, 2]]", v3Rows.toString());
    } finally {
        tEnv.executeSql("drop database db1 cascade");
    }
}
Also used : TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)

Example 100 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in the Apache Flink project.

Class HiveRunnerITCase, method testPartialDynamicPartition.

@Test
public void testPartialDynamicPartition() throws Exception {
    TableEnvironment tEnv = getTableEnvWithHiveCatalog();
    tEnv.executeSql("create database db1");
    try {
        tEnv.executeSql("create table db1.src (x int, y string)");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "src")
                .addRow(new Object[] { 1, "a" })
                .addRow(new Object[] { 2, "b" })
                .commit();
        tEnv.executeSql("create table db1.dest (x int) partitioned by (p1 double, p2 string)");
        // p1 is pinned statically to 1.1 while p2 is derived dynamically from column y.
        tEnv.executeSql("insert into db1.dest partition (p1=1.1,p2) select x,y from db1.src").await();
        // y takes the values "a" and "b", producing two partitions under p1=1.1.
        assertEquals(2, hiveCatalog.listPartitions(new ObjectPath("db1", "dest")).size());
        verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\t1.1\ta", "2\t1.1\tb"));
    } finally {
        tEnv.executeSql("drop database db1 cascade");
    }
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)

Aggregations

TableEnvironment (org.apache.flink.table.api.TableEnvironment)137 Test (org.junit.Test)95 Row (org.apache.flink.types.Row)58 StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment)38 Table (org.apache.flink.table.api.Table)27 ObjectPath (org.apache.flink.table.catalog.ObjectPath)19 StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment)14 ArrayList (java.util.ArrayList)13 CatalogTable (org.apache.flink.table.catalog.CatalogTable)12 HashMap (java.util.HashMap)11 EnvironmentSettings (org.apache.flink.table.api.EnvironmentSettings)10 CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable)10 TableResult (org.apache.flink.table.api.TableResult)8 File (java.io.File)7 Constructor (java.lang.reflect.Constructor)7 TableImpl (org.apache.flink.table.api.internal.TableImpl)7 TableException (org.apache.flink.table.api.TableException)5 List (java.util.List)4 Configuration (org.apache.flink.configuration.Configuration)4 TableSchema (org.apache.flink.table.api.TableSchema)4