Example 31 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveInputFormatPartitionReaderITCase, method testReadMultipleSplits.

@Test
public void testReadMultipleSplits() throws Exception {
    HiveCatalog hiveCatalog = HiveTestUtils.createHiveCatalog();
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tableEnv.useCatalog(hiveCatalog.getName());
    // ORC is skipped for Hive 2.0.x
    if (!HiveShimLoader.getHiveVersion().startsWith("2.0")) {
        testReadFormat(tableEnv, hiveCatalog, "orc");
    }
    testReadFormat(tableEnv, hiveCatalog, "parquet");
}
Also used : HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)
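The HiveTestUtils calls above are internal test helpers. A minimal sketch of the equivalent setup against the public API, assuming hiveCatalog is an already-constructed HiveCatalog instance and the Hive connector is on the classpath:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

// batch-mode TableEnvironment that accepts Hive-dialect SQL
TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
tEnv.registerCatalog("myhive", hiveCatalog);
tEnv.useCatalog("myhive");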

Example 32 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveCatalogITCase, method testCsvTableViaSQL.

@Test
public void testCsvTableViaSQL() {
    TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
    tableEnv.registerCatalog("myhive", hiveCatalog);
    tableEnv.useCatalog("myhive");
    String path = this.getClass().getResource("/csv/test.csv").getPath();
    tableEnv.executeSql("create table test2 (name String, age Int) with (\n" + "   'connector.type' = 'filesystem',\n" + "   'connector.path' = 'file://" + path + "',\n" + "   'format.type' = 'csv'\n" + ")");
    Table t = tableEnv.sqlQuery("SELECT * FROM myhive.`default`.test2");
    List<Row> result = CollectionUtil.iteratorToList(t.execute().collect());
    // assert query result
    assertThat(result).containsExactlyInAnyOrder(Row.of("1", 1), Row.of("2", 2), Row.of("3", 3));
    tableEnv.executeSql("ALTER TABLE test2 RENAME TO newtable");
    t = tableEnv.sqlQuery("SELECT * FROM myhive.`default`.newtable");
    result = CollectionUtil.iteratorToList(t.execute().collect());
    // assert query result
    assertThat(result).containsExactlyInAnyOrder(Row.of("1", 1), Row.of("2", 2), Row.of("3", 3));
    tableEnv.executeSql("DROP TABLE newtable");
}
Also used : CatalogTable(org.apache.flink.table.catalog.CatalogTable) Table(org.apache.flink.table.api.Table) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)
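The DDL here uses the legacy connector.type/connector.path/format.type property keys. A hedged sketch of the same table declared with the current filesystem connector options (the /tmp/test.csv path is a placeholder):

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
// same table expressed with the newer 'connector' option style
tableEnv.executeSql(
        "CREATE TABLE test2 (name STRING, age INT) WITH (\n"
                + "  'connector' = 'filesystem',\n"
                + "  'path' = 'file:///tmp/test.csv',\n"
                + "  'format' = 'csv'\n"
                + ")");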

Example 33 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveCatalogITCase, method prepareTable.

private TableEnvironment prepareTable(boolean isStreaming) {
    EnvironmentSettings settings;
    if (isStreaming) {
        settings = EnvironmentSettings.inStreamingMode();
    } else {
        settings = EnvironmentSettings.inBatchMode();
    }
    TableEnvironment tableEnv = TableEnvironment.create(settings);
    tableEnv.getConfig().getConfiguration().setInteger(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 1);
    tableEnv.registerCatalog("myhive", hiveCatalog);
    tableEnv.useCatalog("myhive");
    String srcPath = this.getClass().getResource("/csv/test3.csv").getPath();
    tableEnv.executeSql("CREATE TABLE proctime_src (" + "price DECIMAL(10, 2)," + "currency STRING," + "ts6 TIMESTAMP(6)," + "ts AS CAST(ts6 AS TIMESTAMP(3))," + "WATERMARK FOR ts AS ts," + "l_proctime AS PROCTIME( )) " + // test " " in proctime()
    String.format("WITH (" + "'connector.type' = 'filesystem'," + "'connector.path' = 'file://%s'," + "'format.type' = 'csv')", srcPath));
    return tableEnv;
}
Also used : EnvironmentSettings(org.apache.flink.table.api.EnvironmentSettings) TableEnvironment(org.apache.flink.table.api.TableEnvironment)
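prepareTable declares two computed columns: ts derives event time from ts6 and carries the watermark, while l_proctime materializes processing time per row. A small sketch of how a caller might exercise them, reusing prepareTable from this class:

TableEnvironment tableEnv = prepareTable(false);
// ts comes from CAST(ts6 AS TIMESTAMP(3)); l_proctime is evaluated
// at query time via PROCTIME()
tableEnv.executeSql("SELECT currency, ts, l_proctime FROM proctime_src").print();
tableEnv.executeSql("DROP TABLE proctime_src");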

Example 34 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveCatalogITCase, method testCreateAndGetManagedTable.

@Test
public void testCreateAndGetManagedTable() throws Exception {
    TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
    String catalog = "myhive";
    String database = "default";
    String table = "managed_table";
    ObjectIdentifier tableIdentifier = ObjectIdentifier.of(catalog, database, table);
    try {
        TestManagedTableFactory.MANAGED_TABLES.put(tableIdentifier, new AtomicReference<>());
        tableEnv.registerCatalog(catalog, hiveCatalog);
        tableEnv.useCatalog(catalog);
        final String sql =
                String.format(
                        "CREATE TABLE %s (\n"
                                + "  uuid varchar(40) not null,\n"
                                + "  price DECIMAL(10, 2),\n"
                                + "  currency STRING,\n"
                                + "  ts6 TIMESTAMP(6),\n"
                                + "  ts AS CAST(ts6 AS TIMESTAMP(3)),\n"
                                + "  WATERMARK FOR ts AS ts,\n"
                                + "  constraint ct1 PRIMARY KEY(uuid) NOT ENFORCED)\n",
                        table);
        tableEnv.executeSql(sql);
        Map<String, String> expectedOptions = new HashMap<>();
        expectedOptions.put(TestManagedTableFactory.ENRICHED_KEY, TestManagedTableFactory.ENRICHED_VALUE);
        assertThat(TestManagedTableFactory.MANAGED_TABLES.get(tableIdentifier).get()).containsExactlyInAnyOrderEntriesOf(expectedOptions);
        Map<String, String> expectedParameters = new HashMap<>();
        expectedOptions.forEach((k, v) -> expectedParameters.put(FLINK_PROPERTY_PREFIX + k, v));
        expectedParameters.put(FLINK_PROPERTY_PREFIX + CONNECTOR.key(), ManagedTableFactory.DEFAULT_IDENTIFIER);
        assertThat(hiveCatalog.getHiveTable(tableIdentifier.toObjectPath()).getParameters()).containsAllEntriesOf(expectedParameters);
        assertThat(hiveCatalog.getTable(tableIdentifier.toObjectPath()).getOptions()).containsExactlyEntriesOf(Collections.singletonMap(TestManagedTableFactory.ENRICHED_KEY, TestManagedTableFactory.ENRICHED_VALUE));
    } finally {
        tableEnv.executeSql(String.format("DROP TABLE %s", table));
        assertThat(TestManagedTableFactory.MANAGED_TABLES.get(tableIdentifier).get()).isNull();
    }
}
Also used : HashMap(java.util.HashMap) TableEnvironment(org.apache.flink.table.api.TableEnvironment) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) Test(org.junit.Test)
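The assertions above depend on the catalog/database/table coordinates resolving consistently. A short sketch of the ObjectIdentifier-to-ObjectPath round trip used in this test (names taken from the test itself):

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ObjectPath;

// ObjectIdentifier pins a table to catalog.database.table; toObjectPath()
// drops the catalog part, which is what the HiveCatalog accessors expect
ObjectIdentifier id = ObjectIdentifier.of("myhive", "default", "managed_table");
ObjectPath path = id.toObjectPath();
System.out.println(path.getFullName()); // default.managed_table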

Example 35 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveCatalogITCase, method testReadWriteCsvWithProctime.

private void testReadWriteCsvWithProctime(boolean isStreaming) {
    TableEnvironment tableEnv = prepareTable(isStreaming);
    List<Row> rows = CollectionUtil.iteratorToList(tableEnv.executeSql("SELECT * FROM proctime_src").collect());
    assertThat(rows).hasSize(5);
    tableEnv.executeSql("DROP TABLE proctime_src");
}
Also used : TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row)
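CollectionUtil.iteratorToList drains the iterator returned by collect(). Outside of tests, that iterator is a CloseableIterator and should be closed so the backing job's resources are released; a sketch, assuming an enclosing method that throws Exception:

import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

// try-with-resources closes the iterator and frees the result's resources
try (CloseableIterator<Row> it =
        tableEnv.executeSql("SELECT * FROM proctime_src").collect()) {
    while (it.hasNext()) {
        System.out.println(it.next());
    }
}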

Aggregations

TableEnvironment (org.apache.flink.table.api.TableEnvironment): 137
Test (org.junit.Test): 95
Row (org.apache.flink.types.Row): 58
StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment): 38
Table (org.apache.flink.table.api.Table): 27
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 14
ArrayList (java.util.ArrayList): 13
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12
HashMap (java.util.HashMap): 11
EnvironmentSettings (org.apache.flink.table.api.EnvironmentSettings): 10
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 10
TableResult (org.apache.flink.table.api.TableResult): 8
File (java.io.File): 7
Constructor (java.lang.reflect.Constructor): 7
TableImpl (org.apache.flink.table.api.internal.TableImpl): 7
TableException (org.apache.flink.table.api.TableException): 5
List (java.util.List): 4
Configuration (org.apache.flink.configuration.Configuration): 4
TableSchema (org.apache.flink.table.api.TableSchema): 4