Search in sources:

Example 61 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.

From the class TableEnvHiveConnectorITCase, method testNonExistingPartitionFolder.

@Test
public void testNonExistingPartitionFolder() throws Exception {
    // Reading a partitioned table must tolerate partitions whose folders were
    // removed on the file system behind the metastore's back, as well as
    // partitions registered in the metastore with no data at all.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.part (x int) partitioned by (p int)");
        // Two partitions with actual data...
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "part").addRow(new Object[] { 1 }).commit("p=1");
        HiveTestUtils.createTextTableInserter(hiveCatalog, "db1", "part").addRow(new Object[] { 2 }).commit("p=2");
        // ...plus one registered in the metastore but never populated.
        tableEnv.executeSql("alter table db1.part add partition (p=3)");
        // Delete the folder backing p=2 directly on the file system.
        String tableLocation = hiveCatalog.getHiveTable(new ObjectPath("db1", "part")).getSd().getLocation();
        Path partitionDir = new Path(tableLocation, "p=2");
        FileSystem fileSystem = partitionDir.getFileSystem(hiveCatalog.getHiveConf());
        fileSystem.delete(partitionDir, true);
        // Only the intact partition p=1 should come back from the scan.
        List<Row> rows = CollectionUtil.iteratorToList(tableEnv.sqlQuery("select * from db1.part").execute().collect());
        assertEquals("[+I[1, 1]]", rows.toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) Path(org.apache.hadoop.fs.Path) ObjectPath(org.apache.flink.table.catalog.ObjectPath) FileSystem(org.apache.hadoop.fs.FileSystem) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) Test(org.junit.Test)

Example 62 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.

From the class TableEnvHiveConnectorITCase, method testPKConstraint.

@Test
public void testPKConstraint() throws Exception {
    // While PK constraints are supported since Hive 2.1.0, the constraints cannot be
    // RELY in 2.x versions, so only run against 3.x.
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        // A RELY primary key must surface in the Flink table schema.
        tableEnv.executeSql("create table db1.tbl1 (x tinyint,y smallint,z int, primary key (x,z) disable novalidate rely)");
        CatalogBaseTable table = hiveCatalog.getTable(new ObjectPath("db1", "tbl1"));
        TableSchema schema = table.getSchema();
        assertTrue(schema.getPrimaryKey().isPresent());
        UniqueConstraint primaryKey = schema.getPrimaryKey().get();
        assertEquals(2, primaryKey.getColumns().size());
        assertTrue(primaryKey.getColumns().containsAll(Arrays.asList("x", "z")));
        // A NORELY primary key must NOT surface in the schema.
        tableEnv.executeSql("create table db1.tbl2 (x tinyint,y smallint, primary key (x) disable norely)");
        table = hiveCatalog.getTable(new ObjectPath("db1", "tbl2"));
        schema = table.getSchema();
        assertFalse(schema.getPrimaryKey().isPresent());
        // A table without any PK reports no primary key either.
        tableEnv.executeSql("create table db1.tbl3 (x tinyint)");
        table = hiveCatalog.getTable(new ObjectPath("db1", "tbl3"));
        schema = table.getSchema();
        assertFalse(schema.getPrimaryKey().isPresent());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableSchema(org.apache.flink.table.api.TableSchema) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)

Example 63 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.

From the class TableEnvHiveConnectorITCase, method testNotNullConstraints.

@Test
public void testNotNullConstraints() throws Exception {
    // NOT NULL constraint metadata is only reliable on Hive 3.1+.
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        // Column y: NOT NULL RELY; column z: NOT NULL NORELY; column x: plain.
        tableEnv.executeSql("create table db1.tbl (x int,y bigint not null enable rely,z string not null enable norely)");
        CatalogBaseTable table = hiveCatalog.getTable(new ObjectPath("db1", "tbl"));
        TableSchema schema = table.getSchema();
        // Only the RELY constraint should make the field non-nullable in Flink.
        assertTrue("By default columns should be nullable", schema.getFieldDataTypes()[0].getLogicalType().isNullable());
        assertFalse("NOT NULL columns should be reflected in table schema", schema.getFieldDataTypes()[1].getLogicalType().isNullable());
        assertTrue("NOT NULL NORELY columns should be considered nullable", schema.getFieldDataTypes()[2].getLogicalType().isNullable());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableSchema(org.apache.flink.table.api.TableSchema) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)

Example 64 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.

From the class TableEnvHiveConnectorITCase, method testParquetNameMapping.

@Test
public void testParquetNameMapping() throws Exception {
    // Two parquet tables share the same underlying files but declare their
    // columns in a different order; both must resolve columns by name rather
    // than by position.
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.t1 (x int,y int) stored as parquet");
        tableEnv.executeSql("insert into table db1.t1 values (1,10),(2,20)").await();
        // Point t2 at t1's data files, with the column order reversed.
        String dataLocation = hiveCatalog.getHiveTable(new ObjectPath("db1", "t1")).getSd().getLocation();
        tableEnv.executeSql(String.format("create table db1.t2 (y int,x int) stored as parquet location '%s'", dataLocation));
        // Exercise the fallback mapred reader code path.
        tableEnv.getConfig().getConfiguration().setBoolean(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER, true);
        String expected = "[+I[1], +I[2]]";
        assertEquals(expected, CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.t1").execute().collect()).toString());
        assertEquals(expected, CollectionUtil.iteratorToList(tableEnv.sqlQuery("select x from db1.t2").execute().collect()).toString());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) Table(org.apache.hadoop.hive.metastore.api.Table) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)

Example 65 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.

From the class HiveDialectITCase, method testCreateTableWithConstraints.

@Test
public void testCreateTableWithConstraints() throws Exception {
    // PK/NOT NULL constraint metadata is only reliable on Hive 3.1+.
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    // Column x: PK (implicitly NOT NULL); column y: NOT NULL RELY;
    // column z: NOT NULL NORELY (must stay nullable in the Flink schema).
    tableEnv.executeSql("create table tbl (x int,y int not null disable novalidate rely,z int not null disable novalidate norely," + "constraint pk_name primary key (x) disable rely)");
    try {
        CatalogTable catalogTable = (CatalogTable) hiveCatalog.getTable(new ObjectPath("default", "tbl"));
        TableSchema tableSchema = catalogTable.getSchema();
        assertTrue("PK not present", tableSchema.getPrimaryKey().isPresent());
        assertEquals("pk_name", tableSchema.getPrimaryKey().get().getName());
        assertFalse("PK cannot be null", tableSchema.getFieldDataTypes()[0].getLogicalType().isNullable());
        assertFalse("RELY NOT NULL should be reflected in schema", tableSchema.getFieldDataTypes()[1].getLogicalType().isNullable());
        assertTrue("NORELY NOT NULL shouldn't be reflected in schema", tableSchema.getFieldDataTypes()[2].getLogicalType().isNullable());
    } finally {
        // Clean up the table created in the default database so later tests
        // start from a clean state, matching the cleanup done by sibling tests.
        tableEnv.executeSql("drop table tbl");
    }
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableSchema(org.apache.flink.table.api.TableSchema) CatalogTable(org.apache.flink.table.catalog.CatalogTable) Test(org.junit.Test)

Aggregations

ObjectPath (org.apache.flink.table.catalog.ObjectPath)81 Test (org.junit.Test)52 CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable)32 CatalogTable (org.apache.flink.table.catalog.CatalogTable)29 HashMap (java.util.HashMap)21 CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl)20 TableSchema (org.apache.flink.table.api.TableSchema)19 TableEnvironment (org.apache.flink.table.api.TableEnvironment)17 CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec)12 Table (org.apache.hadoop.hive.metastore.api.Table)12 Configuration (org.apache.flink.configuration.Configuration)11 SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable)11 TableNotExistException (org.apache.flink.table.catalog.exceptions.TableNotExistException)9 ArrayList (java.util.ArrayList)8 Map (java.util.Map)8 GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog)8 LinkedHashMap (java.util.LinkedHashMap)7 Catalog (org.apache.flink.table.catalog.Catalog)7 ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable)6 ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)6