Example 76 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache: class HiveDialectQueryITCase, method getTableEnvWithHiveCatalog.

private static TableEnvironment getTableEnvWithHiveCatalog() {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tableEnv.useCatalog(hiveCatalog.getName());
    // reload modules so hive precedes core and Hive built-in functions take precedence
    HiveModule hiveModule = new HiveModule(hiveCatalog.getHiveVersion());
    CoreModule coreModule = CoreModule.INSTANCE;
    for (String loaded : tableEnv.listModules()) {
        tableEnv.unloadModule(loaded);
    }
    tableEnv.loadModule("hive", hiveModule);
    tableEnv.loadModule("core", coreModule);
    return tableEnv;
}
Also used: HiveModule (org.apache.flink.table.module.hive.HiveModule), TableEnvironment (org.apache.flink.table.api.TableEnvironment), CoreModule (org.apache.flink.table.module.CoreModule)
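
The unload-then-reload sequence above exists to put the hive module ahead of core, so Hive built-in functions win name clashes. As a hedged sketch (assuming the same hiveCatalog test fixture and HiveTestUtils helper as above), TableEnvironment.useModules can achieve the same ordering without unloading:

// Sketch only: reorder module resolution with useModules instead of
// unloading and reloading. Assumes the hiveCatalog fixture used above.
TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
tableEnv.loadModule("hive", new HiveModule(hiveCatalog.getHiveVersion()));
// Place "hive" before the default "core" module in the resolution order.
tableEnv.useModules("hive", "core");
// listModules() returns names in resolution order: [hive, core].
for (String name : tableEnv.listModules()) {
    System.out.println(name);
}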

Example 77 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache: class TableEnvHiveConnectorITCase, method testInsertPartitionWithValuesSource.

@Test
public void testInsertPartitionWithValuesSource() throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create table dest (x int) partitioned by (p1 int,p2 string)");
    tableEnv.executeSql("insert into dest partition (p1=1,p2) values(1, 'a')").await();
    List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from dest").execute().collect());
    assertEquals("[+I[1, 1, a]]", results.toString());
    tableEnv.executeSql("drop table if exists dest");
}
Also used: TableEnvironment (org.apache.flink.table.api.TableEnvironment), Row (org.apache.flink.types.Row), Test (org.junit.Test)
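
In the insert above, p1=1 is a static partition value while p2 is resolved dynamically from the VALUES row. A hedged sketch of the fully dynamic variant (same getTableEnvWithHiveCatalog fixture assumed; the dest2 name is illustrative), where every partition column is taken from the trailing value columns:

// Sketch only: fully dynamic partition insert; p1 and p2 come from the
// last two columns of the VALUES row.
TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
tableEnv.executeSql("create table dest2 (x int) partitioned by (p1 int,p2 string)");
tableEnv.executeSql("insert into dest2 values (1, 1, 'a')").await();
tableEnv.executeSql("drop table if exists dest2");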

Example 78 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache: class HiveCatalogITCase, method testNewTableFactory.

@Test
public void testNewTableFactory() throws Exception {
    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.newInstance().inBatchMode().build());
    tEnv.registerCatalog("myhive", hiveCatalog);
    tEnv.useCatalog("myhive");
    tEnv.getConfig().getConfiguration().set(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 1);
    String path = this.getClass().getResource("/csv/test.csv").getPath();
    PrintStream originalSystemOut = System.out;
    try {
        ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream();
        System.setOut(new PrintStream(arrayOutputStream));
        tEnv.executeSql("create table csv_table (name String, age Int) with (" + "'connector.type' = 'filesystem'," + "'connector.path' = 'file://" + path + "'," + "'format.type' = 'csv')");
        tEnv.executeSql("create table print_table (name String, age Int) with ('connector' = 'print')");
        tEnv.executeSql("insert into print_table select * from csv_table").await();
        // assert query result
        assertThat(arrayOutputStream.toString()).isEqualTo("+I[1, 1]\n+I[2, 2]\n+I[3, 3]\n");
    } finally {
        if (System.out != originalSystemOut) {
            System.out.close();
        }
        System.setOut(originalSystemOut);
        tEnv.executeSql("DROP TABLE csv_table");
        tEnv.executeSql("DROP TABLE print_table");
    }
}
Also used: PrintStream (java.io.PrintStream), TableEnvironment (org.apache.flink.table.api.TableEnvironment), ByteArrayOutputStream (java.io.ByteArrayOutputStream), Test (org.junit.Test)
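
Redirecting System.out is only needed because the print connector writes to stdout. A hedged sketch of an alternative (assuming the same csv_table DDL and an org.apache.flink.util.CloseableIterator import) that reads the rows back through collect() instead:

// Sketch only: fetch rows via TableResult.collect() rather than
// capturing stdout from the print connector.
try (CloseableIterator<Row> rows = tEnv.executeSql("select * from csv_table").collect()) {
    while (rows.hasNext()) {
        // Each Row renders like "+I[1, 1]", matching the assertion above.
        System.out.println(rows.next());
    }
}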

Example 79 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache: class HiveCatalogITCase, method testReadWriteCsv.

@Test
public void testReadWriteCsv() throws Exception {
    // similar to CatalogTableITCase::testReadWriteCsvUsingDDL but uses HiveCatalog
    TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
    tableEnv.getConfig().getConfiguration().setInteger(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 1);
    tableEnv.registerCatalog("myhive", hiveCatalog);
    tableEnv.useCatalog("myhive");
    String srcPath = this.getClass().getResource("/csv/test3.csv").getPath();
    tableEnv.executeSql("CREATE TABLE src (" + "price DECIMAL(10, 2),currency STRING,ts6 TIMESTAMP(6),ts AS CAST(ts6 AS TIMESTAMP(3)),WATERMARK FOR ts AS ts) " + String.format("WITH ('connector.type' = 'filesystem','connector.path' = 'file://%s','format.type' = 'csv')", srcPath));
    String sinkPath = new File(tempFolder.newFolder(), "csv-order-sink").toURI().toString();
    tableEnv.executeSql("CREATE TABLE sink (" + "window_end TIMESTAMP(3),max_ts TIMESTAMP(6),counter BIGINT,total_price DECIMAL(10, 2)) " + String.format("WITH ('connector.type' = 'filesystem','connector.path' = '%s','format.type' = 'csv')", sinkPath));
    tableEnv.executeSql("INSERT INTO sink " + "SELECT TUMBLE_END(ts, INTERVAL '5' SECOND),MAX(ts6),COUNT(*),MAX(price) FROM src " + "GROUP BY TUMBLE(ts, INTERVAL '5' SECOND)").await();
    String expected = "2019-12-12 00:00:05.0,2019-12-12 00:00:04.004001,3,50.00\n" + "2019-12-12 00:00:10.0,2019-12-12 00:00:06.006001,2,5.33\n";
    assertThat(FileUtils.readFileUtf8(new File(new URI(sinkPath)))).isEqualTo(expected);
}
Also used: TableEnvironment (org.apache.flink.table.api.TableEnvironment), File (java.io.File), URI (java.net.URI), Test (org.junit.Test)
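
The GROUP BY TUMBLE(...) form used above is the legacy group-window syntax. As a hedged sketch, Flink 1.13+ can express the same aggregation with the TUMBLE table-valued function (same src and sink tables assumed):

// Sketch only: equivalent tumbling-window aggregation using the
// windowing TVF syntax introduced in Flink 1.13.
tableEnv.executeSql(
        "INSERT INTO sink "
                + "SELECT window_end, MAX(ts6), COUNT(*), MAX(price) "
                + "FROM TABLE(TUMBLE(TABLE src, DESCRIPTOR(ts), INTERVAL '5' SECOND)) "
                + "GROUP BY window_start, window_end").await();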

Example 80 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache: class HiveCatalogITCase, method testCreateTableLike.

@Test
public void testCreateTableLike() throws Exception {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode();
    tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tableEnv.useCatalog(hiveCatalog.getName());
    tableEnv.executeSql("create table generic_table (x int) with ('connector'='COLLECTION')");
    tableEnv.useCatalog(EnvironmentSettings.DEFAULT_BUILTIN_CATALOG);
    tableEnv.executeSql(String.format("create table copy like `%s`.`default`.generic_table", hiveCatalog.getName()));
    Catalog builtInCat = tableEnv.getCatalog(EnvironmentSettings.DEFAULT_BUILTIN_CATALOG).get();
    CatalogBaseTable catalogTable = builtInCat.getTable(new ObjectPath(EnvironmentSettings.DEFAULT_BUILTIN_DATABASE, "copy"));
    assertThat(catalogTable.getOptions()).hasSize(1);
    assertThat(catalogTable.getOptions()).containsEntry(FactoryUtil.CONNECTOR.key(), "COLLECTION");
    assertThat(catalogTable.getSchema().getFieldCount()).isEqualTo(1);
    assertThat(catalogTable.getSchema().getFieldNames()).hasSameElementsAs(Collections.singletonList("x"));
    assertThat(catalogTable.getSchema().getFieldDataTypes()).hasSameElementsAs(Collections.singletonList(DataTypes.INT()));
}
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ObjectPath (org.apache.flink.table.catalog.ObjectPath), TableEnvironment (org.apache.flink.table.api.TableEnvironment), Catalog (org.apache.flink.table.catalog.Catalog), Test (org.junit.Test)
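
CREATE TABLE ... LIKE copies the schema and options of the source table by default (INCLUDING ALL). A hedged sketch (same fixture assumed; the copy2 name and datagen connector are illustrative) that keeps the schema but swaps the connector by excluding the source options:

// Sketch only: copy the schema of generic_table, drop its options, and
// supply a different connector instead.
tableEnv.executeSql(
        String.format(
                "create table copy2 with ('connector' = 'datagen') "
                        + "like `%s`.`default`.generic_table (excluding options)",
                hiveCatalog.getName()));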

Aggregations

TableEnvironment (org.apache.flink.table.api.TableEnvironment): 137
Test (org.junit.Test): 95
Row (org.apache.flink.types.Row): 58
StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment): 38
Table (org.apache.flink.table.api.Table): 27
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 14
ArrayList (java.util.ArrayList): 13
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12
HashMap (java.util.HashMap): 11
EnvironmentSettings (org.apache.flink.table.api.EnvironmentSettings): 10
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 10
TableResult (org.apache.flink.table.api.TableResult): 8
File (java.io.File): 7
Constructor (java.lang.reflect.Constructor): 7
TableImpl (org.apache.flink.table.api.internal.TableImpl): 7
TableException (org.apache.flink.table.api.TableException): 5
List (java.util.List): 4
Configuration (org.apache.flink.configuration.Configuration): 4
TableSchema (org.apache.flink.table.api.TableSchema): 4