
Example 11 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From class HiveRunnerITCase, method testOrcSchemaEvol.

@Test
public void testOrcSchemaEvol() throws Exception {
    // ORC schema evolution is not supported until Hive 2.1.0 -- see
    // https://issues.apache.org/jira/browse/HIVE-11981 and
    // https://issues.apache.org/jira/browse/HIVE-13178
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_210_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x smallint,y int) stored as orc");
        hiveShell.execute("insert into table db1.src values (1,100),(2,200)");
        // fall back to the MapReduce-based record reader, which supports ORC schema evolution
        tableEnv.getConfig().getConfiguration()
                .setBoolean(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER, true);
        // widen smallint column x to int; existing rows must remain readable
        tableEnv.executeSql("alter table db1.src change x x int");
        assertEquals("[+I[1, 100], +I[2, 200]]",
                CollectionUtil.iteratorToList(
                        tableEnv.sqlQuery("select * from db1.src").execute().collect()).toString());
        // change int column y to string; existing rows must remain readable
        tableEnv.executeSql("alter table db1.src change y y string");
        assertEquals("[+I[1, 100], +I[2, 200]]",
                CollectionUtil.iteratorToList(
                        tableEnv.sqlQuery("select * from db1.src").execute().collect()).toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used: TableEnvironment (org.apache.flink.table.api.TableEnvironment), Test (org.junit.Test)
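
The helper getTableEnvWithHiveCatalog is referenced throughout but its body is not shown in these results. A minimal sketch of what a batch-mode variant could look like, built only from public Flink APIs; the hiveCatalog field and the exact settings are assumptions, not the test's actual code:

private TableEnvironment getTableEnvWithHiveCatalog() {
    // hypothetical sketch -- the real HiveRunnerITCase helper may differ
    TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
    // Hive dialect so DDL such as "stored as orc" parses
    tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
    // hiveCatalog is assumed to be a HiveCatalog created by the test harness
    tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tableEnv.useCatalog(hiveCatalog.getName());
    return tableEnv;
}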

Example 12 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From class HiveRunnerITCase, method testCompressTextTable.

private void testCompressTextTable(boolean batch) throws Exception {
    TableEnvironment tableEnv = batch ? getTableEnvWithHiveCatalog() : getStreamTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (x string,y string)");
        hiveShell.execute("create table db1.dest like db1.src");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "src")
                .addRow(new Object[] { "a", "b" })
                .addRow(new Object[] { "c", "d" })
                .commit();
        // enable output compression before writing to db1.dest
        hiveCatalog.getHiveConf().setBoolVar(HiveConf.ConfVars.COMPRESSRESULT, true);
        tableEnv.executeSql("insert into db1.dest select * from db1.src").await();
        List<String> expected = Arrays.asList("a\tb", "c\td");
        verifyHiveQueryResult("select * from db1.dest", expected);
        verifyFlinkQueryResult(tableEnv.sqlQuery("select * from db1.dest"), expected);
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used: TableEnvironment (org.apache.flink.table.api.TableEnvironment)
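
Since testCompressTextTable is private and parameterized on execution mode, the enclosing class presumably drives it from two @Test methods along these lines; the method names here are assumptions:

// hypothetical callers -- the actual test method names may differ
@Test
public void testBatchCompressTextTable() throws Exception {
    testCompressTextTable(true);
}

@Test
public void testStreamCompressTextTable() throws Exception {
    testCompressTextTable(false);
}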

Example 13 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From class HiveRunnerITCase, method getStreamTableEnvWithHiveCatalog.

private TableEnvironment getStreamTableEnvWithHiveCatalog() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInStreamingMode(env, SqlDialect.HIVE);
    tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tableEnv.useCatalog(hiveCatalog.getName());
    return tableEnv;
}
Also used: StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment), TableEnvironment (org.apache.flink.table.api.TableEnvironment)
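
HiveTestUtils.createTableEnvInStreamingMode is a test utility whose body is not shown. A plausible sketch of what it could do with public Flink APIs; this is an assumption, not the utility's actual code:

// hypothetical sketch of HiveTestUtils.createTableEnvInStreamingMode
static TableEnvironment createTableEnvInStreamingMode(
        StreamExecutionEnvironment env, SqlDialect dialect) {
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(
            env, EnvironmentSettings.newInstance().inStreamingMode().build());
    tableEnv.getConfig().setSqlDialect(dialect);
    return tableEnv;
}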

Example 14 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From class HiveRunnerITCase, method testTimestamp.

@Test
public void testTimestamp() throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.src (ts timestamp)");
        tableEnv.executeSql("create table db1.dest (ts timestamp)");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "src")
                .addRow(new Object[] { Timestamp.valueOf("2019-11-11 00:00:00") })
                .addRow(new Object[] { Timestamp.valueOf("2019-12-03 15:43:32.123456789") })
                .commit();
        // test read timestamp from hive
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.src").execute().collect());
        assertEquals(2, results.size());
        assertEquals(LocalDateTime.of(2019, 11, 11, 0, 0), results.get(0).getField(0));
        assertEquals(LocalDateTime.of(2019, 12, 3, 15, 43, 32, 123456789), results.get(1).getField(0));
        // test write timestamp to hive
        tableEnv.executeSql("insert into db1.dest select max(ts) from db1.src").await();
        verifyHiveQueryResult("select * from db1.dest", Collections.singletonList("2019-12-03 15:43:32.123456789"));
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used: TableEnvironment (org.apache.flink.table.api.TableEnvironment), Row (org.apache.flink.types.Row), Test (org.junit.Test)
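
The assertions rely on Hive's TIMESTAMP surfacing in Flink as java.time.LocalDateTime with nanosecond precision preserved. The round trip can be checked with the JDK alone; a standalone sketch, no Flink involved:

// java.sql.Timestamp carries nanosecond precision, matching Hive's TIMESTAMP
Timestamp hiveTs = Timestamp.valueOf("2019-12-03 15:43:32.123456789");
LocalDateTime asLocal = hiveTs.toLocalDateTime();
assertEquals(123456789, asLocal.getNano());
assertEquals(LocalDateTime.of(2019, 12, 3, 15, 43, 32, 123456789), asLocal);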

Example 15 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From class HiveRunnerITCase, method testInsertOverwrite.

@Test
public void testInsertOverwrite() throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        // non-partitioned
        tableEnv.executeSql("create table db1.dest (x int, y string)");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "dest")
                .addRow(new Object[] { 1, "a" })
                .addRow(new Object[] { 2, "b" })
                .commit();
        verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\ta", "2\tb"));
        tableEnv.executeSql("insert overwrite db1.dest values (3, 'c')").await();
        verifyHiveQueryResult("select * from db1.dest", Collections.singletonList("3\tc"));
        // static partition
        tableEnv.executeSql("create table db1.part(x int) partitioned by (y int)");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "part").addRow(new Object[] { 1 }).commit("y=1");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "part").addRow(new Object[] { 2 }).commit("y=2");
        tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("insert overwrite db1.part partition (y=1) select 100").await();
        verifyHiveQueryResult("select * from db1.part", Arrays.asList("100\t1", "2\t2"));
        // dynamic partition
        tableEnv = getTableEnvWithHiveCatalog();
        tableEnv.executeSql("insert overwrite db1.part values (200,2),(3,3)").await();
        // only overwrite dynamically matched partitions; other existing partitions remain intact
        verifyHiveQueryResult("select * from db1.part", Arrays.asList("100\t1", "200\t2", "3\t3"));
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used: TableEnvironment (org.apache.flink.table.api.TableEnvironment), Test (org.junit.Test)

Aggregations

TableEnvironment (org.apache.flink.table.api.TableEnvironment): 137 usages
Test (org.junit.Test): 95 usages
Row (org.apache.flink.types.Row): 58 usages
StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment): 38 usages
Table (org.apache.flink.table.api.Table): 27 usages
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19 usages
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 14 usages
ArrayList (java.util.ArrayList): 13 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12 usages
HashMap (java.util.HashMap): 11 usages
EnvironmentSettings (org.apache.flink.table.api.EnvironmentSettings): 10 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 10 usages
TableResult (org.apache.flink.table.api.TableResult): 8 usages
File (java.io.File): 7 usages
Constructor (java.lang.reflect.Constructor): 7 usages
TableImpl (org.apache.flink.table.api.internal.TableImpl): 7 usages
TableException (org.apache.flink.table.api.TableException): 5 usages
List (java.util.List): 4 usages
Configuration (org.apache.flink.configuration.Configuration): 4 usages
TableSchema (org.apache.flink.table.api.TableSchema): 4 usages