Search in sources:

Example 66 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveCatalogITCase, method testViewSchema.

@Test
public void testViewSchema() throws Exception {
    // Batch environment using the default SQL dialect, bound to the Hive catalog.
    TableEnvironment env = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.DEFAULT);
    env.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    env.useCatalog(hiveCatalog.getName());
    env.executeSql("create database db1");
    try {
        env.useDatabase("db1");
        // Source table backed by the datagen connector, emitting exactly 10 rows.
        env.executeSql("create table src(x int,ts timestamp(3)) with ('connector'='datagen','number-of-rows'='10')");
        env.executeSql("create view v1 as select x,ts from src order by x limit 3");

        // v1 must inherit the column names and types of its defining query.
        CatalogView v1 = (CatalogView) hiveCatalog.getTable(new ObjectPath("db1", "v1"));
        Schema expectedV1 = Schema.newBuilder()
                .fromFields(
                        new String[] { "x", "ts" },
                        new AbstractDataType[] { DataTypes.INT(), DataTypes.TIMESTAMP(3) })
                .build();
        assertThat(v1.getUnresolvedSchema()).isEqualTo(expectedV1);
        List<Row> rows = CollectionUtil.iteratorToList(env.executeSql("select x from v1").collect());
        assertThat(rows).hasSize(3);

        // v2 renames the columns, attaches a comment, and casts ts to timestamp_ltz(3).
        env.executeSql("create view v2 (v2_x,v2_ts) comment 'v2 comment' as select x,cast(ts as timestamp_ltz(3)) from v1");
        CatalogView v2 = (CatalogView) hiveCatalog.getTable(new ObjectPath("db1", "v2"));
        Schema expectedV2 = Schema.newBuilder()
                .fromFields(
                        new String[] { "v2_x", "v2_ts" },
                        new AbstractDataType[] { DataTypes.INT(), DataTypes.TIMESTAMP_LTZ(3) })
                .build();
        assertThat(v2.getUnresolvedSchema()).isEqualTo(expectedV2);
        assertThat(v2.getComment()).isEqualTo("v2 comment");
        rows = CollectionUtil.iteratorToList(env.executeSql("select * from v2").collect());
        assertThat(rows).hasSize(3);
    } finally {
        // Always clean up, even when an assertion above fails.
        env.executeSql("drop database db1 cascade");
    }
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) Schema(org.apache.flink.table.api.Schema) TableSchema(org.apache.flink.table.api.TableSchema) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) CatalogView(org.apache.flink.table.catalog.CatalogView) Test(org.junit.Test)

Example 67 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveTestUtils, method createTableEnvWithHiveCatalog.

/**
 * Creates a batch-mode {@link TableEnvironment} with the given Hive catalog
 * registered and set as the current catalog.
 *
 * @param catalog the Hive catalog to register and activate
 * @return a batch-mode table environment using {@code catalog}
 */
public static TableEnvironment createTableEnvWithHiveCatalog(HiveCatalog catalog) {
    final TableEnvironment env = HiveTestUtils.createTableEnvInBatchMode();
    env.registerCatalog(catalog.getName(), catalog);
    env.useCatalog(catalog.getName());
    return env;
}
Also used : StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) TableEnvironment(org.apache.flink.table.api.TableEnvironment)

Example 68 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveTestUtils, method createTableEnvInBatchMode.

/**
 * Creates a batch-mode {@link TableEnvironment} with parallelism 1 and the
 * requested SQL dialect.
 *
 * @param dialect the SQL dialect to configure on the environment
 * @return a freshly created batch-mode table environment
 */
public static TableEnvironment createTableEnvInBatchMode(SqlDialect dialect) {
    final TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inBatchMode());
    // Parallelism 1 keeps tests lightweight and their output deterministic.
    env.getConfig().getConfiguration().setInteger(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM.key(), 1);
    env.getConfig().setSqlDialect(dialect);
    return env;
}
Also used : StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) TableEnvironment(org.apache.flink.table.api.TableEnvironment)

Example 69 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveModuleTest, method testNumberOfBuiltinFunctions.

@Test
public void testNumberOfBuiltinFunctions() {
    final String hiveVersion = HiveShimLoader.getHiveVersion();
    final HiveModule hiveModule = new HiveModule(hiveVersion);
    verifyNumBuiltInFunctions(hiveVersion, hiveModule);

    // Registering user functions must not alter the built-in function count.
    TableEnvironment env = HiveTestUtils.createTableEnvInBatchMode();
    env.executeSql("create function myudf as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
    env.executeSql("create function mygenericudf as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs'");
    env.executeSql("create function myudaf as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax'");
    env.executeSql("create function myudtf as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode'");
    verifyNumBuiltInFunctions(hiveVersion, hiveModule);

    // The module must not report any of the user-created functions as built-in.
    assertFalse(hiveModule.getFunctionDefinition("myudf").isPresent());
    assertFalse(hiveModule.getFunctionDefinition("mygenericudf").isPresent());
    assertFalse(hiveModule.getFunctionDefinition("myudaf").isPresent());
    assertFalse(hiveModule.getFunctionDefinition("myudtf").isPresent());
}
Also used : TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)

Example 70 with TableEnvironment

use of org.apache.flink.table.api.TableEnvironment in project flink by apache.

From the class HiveModuleTest, method testConstantArguments.

@Test
public void testConstantArguments() {
    TableEnvironment env = HiveTestUtils.createTableEnvInBatchMode();
    // Swap out the core module so function lookup resolves against HiveModule.
    env.unloadModule("core");
    env.loadModule("hive", new HiveModule());

    // Plain string constants.
    List<Row> rows = CollectionUtil.iteratorToList(env.sqlQuery("select concat('an', 'bn')").execute().collect());
    assertEquals("[anbn]", rows.toString());

    // Constant cast to varchar(5) is truncated before concatenation.
    rows = CollectionUtil.iteratorToList(env.sqlQuery("select concat('ab', cast('cdefghi' as varchar(5)))").execute().collect());
    assertEquals("[abcdefg]", rows.toString());

    // Decimal constant argument.
    rows = CollectionUtil.iteratorToList(env.sqlQuery("select concat('ab',cast(12.34 as decimal(10,5)))").execute().collect());
    assertEquals("[ab12.34]", rows.toString());

    // Date and timestamp constant arguments.
    rows = CollectionUtil.iteratorToList(env.sqlQuery("select concat(cast('2018-01-19' as date),cast('2019-12-27 17:58:23.385' as timestamp))").execute().collect());
    assertEquals("[2018-01-192019-12-27 17:58:23.385]", rows.toString());

    // TODO: null cannot be a constant argument at the moment. This test will make more sense
    // when that changes.
    rows = CollectionUtil.iteratorToList(env.sqlQuery("select concat('ab',cast(null as int))").execute().collect());
    assertEquals("[null]", rows.toString());
}
Also used : TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)

Aggregations

TableEnvironment (org.apache.flink.table.api.TableEnvironment)137 Test (org.junit.Test)95 Row (org.apache.flink.types.Row)58 StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment)38 Table (org.apache.flink.table.api.Table)27 ObjectPath (org.apache.flink.table.catalog.ObjectPath)19 StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment)14 ArrayList (java.util.ArrayList)13 CatalogTable (org.apache.flink.table.catalog.CatalogTable)12 HashMap (java.util.HashMap)11 EnvironmentSettings (org.apache.flink.table.api.EnvironmentSettings)10 CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable)10 TableResult (org.apache.flink.table.api.TableResult)8 File (java.io.File)7 Constructor (java.lang.reflect.Constructor)7 TableImpl (org.apache.flink.table.api.internal.TableImpl)7 TableException (org.apache.flink.table.api.TableException)5 List (java.util.List)4 Configuration (org.apache.flink.configuration.Configuration)4 TableSchema (org.apache.flink.table.api.TableSchema)4