
Example 86 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache, from class TableEnvHiveConnectorITCase, method getTableEnvWithHiveCatalog.

private TableEnvironment getTableEnvWithHiveCatalog() {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
    tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tableEnv.useCatalog(hiveCatalog.getName());
    return tableEnv;
}
Also used : TableEnvironment (org.apache.flink.table.api.TableEnvironment)
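For readers without the test harness, the helper presumably boils down to the public API below. This is a hedged sketch: the catalog name, default database, and hive-conf directory are placeholders, and the assumption is that HiveTestUtils.createTableEnvInBatchMode simply builds a batch-mode environment with the Hive SQL dialect.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

public class HiveCatalogSetup {
    public static void main(String[] args) {
        // Batch-mode TableEnvironment; the test utility presumably does the same internally.
        TableEnvironment tableEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inBatchMode().build());
        // Switch the parser to the Hive dialect so Hive DDL/DML is accepted.
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        // "myhive", "default", and the conf dir are placeholders for a real deployment.
        HiveCatalog hiveCatalog = new HiveCatalog("myhive", "default", "/opt/hive-conf");
        tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
        tableEnv.useCatalog(hiveCatalog.getName());
    }
}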

Example 87 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache, from class TableEnvHiveConnectorITCase, method testPKConstraint.

@Test
public void testPKConstraint() throws Exception {
    // PK constraints are supported since Hive 2.1.0, but they cannot be marked
    // RELY in 2.x versions, so only test against 3.x.
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        // test rely PK constraints
        tableEnv.executeSql("create table db1.tbl1 (x tinyint,y smallint,z int, primary key (x,z) disable novalidate rely)");
        CatalogBaseTable catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl1"));
        TableSchema tableSchema = catalogTable.getSchema();
        assertTrue(tableSchema.getPrimaryKey().isPresent());
        UniqueConstraint pk = tableSchema.getPrimaryKey().get();
        assertEquals(2, pk.getColumns().size());
        assertTrue(pk.getColumns().containsAll(Arrays.asList("x", "z")));
        // test norely PK constraints
        tableEnv.executeSql("create table db1.tbl2 (x tinyint,y smallint, primary key (x) disable norely)");
        catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl2"));
        tableSchema = catalogTable.getSchema();
        assertFalse(tableSchema.getPrimaryKey().isPresent());
        // test table w/o PK
        tableEnv.executeSql("create table db1.tbl3 (x tinyint)");
        catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl3"));
        tableSchema = catalogTable.getSchema();
        assertFalse(tableSchema.getPrimaryKey().isPresent());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ObjectPath (org.apache.flink.table.catalog.ObjectPath), TableSchema (org.apache.flink.table.api.TableSchema), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), TableEnvironment (org.apache.flink.table.api.TableEnvironment), Test (org.junit.Test)
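For comparison, Flink's default SQL dialect only accepts primary keys declared NOT ENFORCED, which is roughly the analogue of Hive's RELY NOVALIDATE. A minimal sketch, assuming a tableEnv wired up as in the helper above; the class and table names are illustrative.

import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class FlinkDialectPkSketch {
    // In the Flink dialect a primary key must be NOT ENFORCED: the planner may
    // rely on it for optimization, but no validation happens on write.
    static void createPkTable(TableEnvironment tableEnv) {
        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
        tableEnv.executeSql(
                "create table db1.tbl_flink ("
                        + "x tinyint not null, z int not null, "
                        + "primary key (x, z) not enforced)");
    }
}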

Example 88 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache, from class TableEnvHiveConnectorITCase, method testDynamicPartWithOrderBy.

@Test
public void testDynamicPartWithOrderBy() throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create table src(x int,y int)");
    tableEnv.executeSql("create table dest(x int) partitioned by (p int)");
    try {
        HiveTestUtils.createTextTableInserter(hiveCatalog, "default", "src").addRow(new Object[] { 2, 0 }).addRow(new Object[] { 1, 0 }).commit();
        // some Hive features rely on sorted results, e.g. bucketed tables
        tableEnv.executeSql("insert into dest partition(p) select * from src order by x").await();
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.executeSql("select * from dest").collect());
        assertEquals("[+I[1, 0], +I[2, 0]]", results.toString());
    } finally {
        tableEnv.executeSql("drop table src");
        tableEnv.executeSql("drop table dest");
    }
}
Also used : TableEnvironment (org.apache.flink.table.api.TableEnvironment), Row (org.apache.flink.types.Row), Test (org.junit.Test)
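The unqualified partition(p) clause makes this a dynamic-partition insert: the value of p is taken from the trailing column of the SELECT. A hedged sketch contrasting it with the static form, reusing the src/dest tables from the test above; the class name is illustrative.

import org.apache.flink.table.api.TableEnvironment;

public class PartitionInsertSketch {
    static void insertBothWays(TableEnvironment tableEnv) throws Exception {
        // Static partition: p is fixed up front, so the query supplies only x.
        tableEnv.executeSql("insert into dest partition(p=1) select x from src").await();
        // Dynamic partition: p is derived from the trailing select column,
        // so rows are routed to partitions by their y value.
        tableEnv.executeSql("insert into dest partition(p) select x, y from src").await();
    }
}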

Example 89 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache, from class TableEnvHiveConnectorITCase, method testUDTF.

@Test
public void testUDTF() throws Exception {
    // Without https://issues.apache.org/jira/browse/HIVE-11878, Hive registers the App
    // classloader as the classloader for the UDTF and closes the App classloader when the
    // session is torn down. After that, JUnit code and shutdown hooks that run after the
    // test can no longer load new classes, which crashes the forked JVM and fails the
    // test phase. Therefore such tests are disabled for older Hive versions.
    String hiveVersion = HiveShimLoader.getHiveVersion();
    Assume.assumeTrue(hiveVersion.compareTo("2.0.0") >= 0 || hiveVersion.compareTo("1.3.0") >= 0);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.simple (i int,a array<int>)");
        tableEnv.executeSql("create table db1.nested (a array<map<int, string>>)");
        tableEnv.executeSql("create function hiveudtf as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode'");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "simple").addRow(new Object[] { 3, Arrays.asList(1, 2, 3) }).commit();
        Map<Integer, String> map1 = new HashMap<>();
        map1.put(1, "a");
        map1.put(2, "b");
        Map<Integer, String> map2 = new HashMap<>();
        map2.put(3, "c");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "nested").addRow(new Object[] { Arrays.asList(map1, map2) }).commit();
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.simple, lateral table(hiveudtf(a)) as T(x)").execute().collect());
        assertEquals("[+I[1], +I[2], +I[3]]", results.toString());
        results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.nested, lateral table(hiveudtf(a)) as T(x)").execute().collect());
        assertEquals("[+I[{1=a, 2=b}], +I[{3=c}]]", results.toString());
        tableEnv.executeSql("create table db1.ts (a array<timestamp>)");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "ts").addRow(new Object[] { new Object[] { Timestamp.valueOf("2015-04-28 15:23:00"), Timestamp.valueOf("2016-06-03 17:05:52") } }).commit();
        results = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.ts, lateral table(hiveudtf(a)) as T(x)").execute().collect());
        assertEquals("[+I[2015-04-28T15:23], +I[2016-06-03T17:05:52]]", results.toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
        tableEnv.executeSql("drop function hiveudtf");
    }
}
Also used : HashMap (java.util.HashMap), TableEnvironment (org.apache.flink.table.api.TableEnvironment), Row (org.apache.flink.types.Row), Test (org.junit.Test)
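The same LATERAL TABLE join shape also works with a native Flink TableFunction instead of a Hive UDTF. The sketch below is illustrative only: ExplodeInts and explode_ints are hypothetical names, not part of the test.

import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.TableFunction;

public class ExplodeSketch {
    // Hypothetical native counterpart of GenericUDTFExplode for int arrays:
    // each eval() call may emit any number of rows via collect().
    public static class ExplodeInts extends TableFunction<Integer> {
        public void eval(Integer[] arr) {
            for (Integer i : arr) {
                collect(i);
            }
        }
    }

    static void useIt(TableEnvironment tableEnv) {
        tableEnv.createTemporarySystemFunction("explode_ints", ExplodeInts.class);
        // Same lateral join shape as with the Hive UDTF above.
        tableEnv.sqlQuery("select x from db1.simple, lateral table(explode_ints(a)) as T(x)")
                .execute()
                .print();
    }
}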

Example 90 with TableEnvironment

Use of org.apache.flink.table.api.TableEnvironment in project flink by apache, from class TableEnvHiveConnectorITCase, method testNotNullConstraints.

@Test
public void testNotNullConstraints() throws Exception {
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.tbl (x int,y bigint not null enable rely,z string not null enable norely)");
        CatalogBaseTable catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl"));
        TableSchema tableSchema = catalogTable.getSchema();
        assertTrue("By default columns should be nullable", tableSchema.getFieldDataTypes()[0].getLogicalType().isNullable());
        assertFalse("NOT NULL columns should be reflected in table schema", tableSchema.getFieldDataTypes()[1].getLogicalType().isNullable());
        assertTrue("NOT NULL NORELY columns should be considered nullable", tableSchema.getFieldDataTypes()[2].getLogicalType().isNullable());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ObjectPath (org.apache.flink.table.catalog.ObjectPath), TableSchema (org.apache.flink.table.api.TableSchema), TableEnvironment (org.apache.flink.table.api.TableEnvironment), Test (org.junit.Test)
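The nullable/not-null distinction these assertions rely on is visible directly in Flink's DataType API. A minimal standalone sketch; the class name is illustrative.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class NullabilitySketch {
    public static void main(String[] args) {
        // Types are nullable unless explicitly marked otherwise, matching
        // the behavior the test asserts for Hive-created columns.
        DataType nullableByDefault = DataTypes.INT();
        DataType notNull = DataTypes.BIGINT().notNull(); // like "bigint not null enable rely"
        System.out.println(nullableByDefault.getLogicalType().isNullable()); // true
        System.out.println(notNull.getLogicalType().isNullable()); // false
    }
}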

Aggregations

TableEnvironment (org.apache.flink.table.api.TableEnvironment): 137
Test (org.junit.Test): 95
Row (org.apache.flink.types.Row): 58
StreamTableEnvironment (org.apache.flink.table.api.bridge.java.StreamTableEnvironment): 38
Table (org.apache.flink.table.api.Table): 27
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 14
ArrayList (java.util.ArrayList): 13
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12
HashMap (java.util.HashMap): 11
EnvironmentSettings (org.apache.flink.table.api.EnvironmentSettings): 10
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 10
TableResult (org.apache.flink.table.api.TableResult): 8
File (java.io.File): 7
Constructor (java.lang.reflect.Constructor): 7
TableImpl (org.apache.flink.table.api.internal.TableImpl): 7
TableException (org.apache.flink.table.api.TableException): 5
List (java.util.List): 4
Configuration (org.apache.flink.configuration.Configuration): 4
TableSchema (org.apache.flink.table.api.TableSchema): 4