
Example 21 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.

From class HiveCatalogTest, method testRetrieveFlinkProperties.

@Test
public void testRetrieveFlinkProperties() throws Exception {
    ObjectPath hiveObjectPath = new ObjectPath(HiveCatalog.DEFAULT_DB, "testRetrieveProperties");
    Map<String, String> options = getLegacyFileSystemConnectorOptions("/test_path");
    options.put(CONNECTOR.key(), "jdbc");
    options.put("url", "jdbc:clickhouse://host:port/testUrl1");
    options.put("flink.url", "jdbc:clickhouse://host:port/testUrl2");
    hiveCatalog.createTable(hiveObjectPath, new CatalogTableImpl(schema, options, null), false);
    CatalogBaseTable hiveTable = hiveCatalog.getTable(hiveObjectPath);
    // Both the plain and the "flink."-prefixed option keys must survive the
    // round trip through the Hive metastore unchanged.
    assertThat(hiveTable.getOptions()).containsEntry("url", "jdbc:clickhouse://host:port/testUrl1");
    assertThat(hiveTable.getOptions()).containsEntry("flink.url", "jdbc:clickhouse://host:port/testUrl2");
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) Test(org.junit.Test)
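The point of the assertions above is that HiveCatalog persists Flink options as Hive table parameters under a "flink." prefix and strips that prefix again on read, so even an option literally named "flink.url" survives the round trip. Below is a minimal, self-contained sketch of that round trip; the prefix constant and helper methods are assumptions modeled on this behavior, not HiveCatalog's actual internals.

// FlinkPropertyPrefixSketch.java -- illustrative only.
import java.util.HashMap;
import java.util.Map;

public class FlinkPropertyPrefixSketch {

    private static final String FLINK_PROPERTY_PREFIX = "flink.";

    // Prefix every Flink option before storing it as a Hive table parameter.
    static Map<String, String> toHiveParameters(Map<String, String> flinkOptions) {
        Map<String, String> params = new HashMap<>();
        flinkOptions.forEach((k, v) -> params.put(FLINK_PROPERTY_PREFIX + k, v));
        return params;
    }

    // Strip the prefix again when reading the table back.
    static Map<String, String> toFlinkOptions(Map<String, String> hiveParameters) {
        Map<String, String> options = new HashMap<>();
        hiveParameters.forEach((k, v) -> {
            if (k.startsWith(FLINK_PROPERTY_PREFIX)) {
                options.put(k.substring(FLINK_PROPERTY_PREFIX.length()), v);
            }
        });
        return options;
    }

    public static void main(String[] args) {
        Map<String, String> options = new HashMap<>();
        options.put("connector", "jdbc");
        options.put("url", "jdbc:clickhouse://host:port/testUrl1");
        // Stored as "flink.flink.url", read back as "flink.url".
        options.put("flink.url", "jdbc:clickhouse://host:port/testUrl2");
        System.out.println(toFlinkOptions(toHiveParameters(options)).equals(options)); // true
    }
}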

Example 22 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.

From class HiveCatalogTest, method testCreateAndGetFlinkManagedTable.

@Test
public void testCreateAndGetFlinkManagedTable() throws Exception {
    CatalogTable table = new CatalogTableImpl(schema, Collections.emptyMap(), "Flink managed table");
    hiveCatalog.createTable(tablePath, table, false);
    Table hiveTable = hiveCatalog.getHiveTable(tablePath);
    assertThat(hiveTable.getParameters()).containsEntry(FLINK_PROPERTY_PREFIX + CONNECTOR.key(), ManagedTableFactory.DEFAULT_IDENTIFIER);
    CatalogBaseTable retrievedTable = hiveCatalog.instantiateCatalogTable(hiveTable);
    // The managed-table marker is internal bookkeeping only; it must not
    // surface as an option on the retrieved table.
    assertThat(retrievedTable.getOptions()).isEmpty();
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.metastore.api.Table) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) Test(org.junit.Test)
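The test shows that a table created without any connector option is marked in the metastore as a Flink managed table (the prefixed connector parameter carries ManagedTableFactory.DEFAULT_IDENTIFIER) and that the marker is stripped again on read. A hedged sketch of the marker check follows; both constants, including the literal value "default", are assumptions modeled on the assertion above, not Flink's actual code.

// ManagedTableMarkerSketch.java -- illustrative only.
import java.util.Collections;
import java.util.Map;

public class ManagedTableMarkerSketch {

    private static final String FLINK_PROPERTY_PREFIX = "flink.";
    private static final String CONNECTOR_KEY = "connector";
    // Assumption: ManagedTableFactory.DEFAULT_IDENTIFIER resolves to "default".
    private static final String MANAGED_IDENTIFIER = "default";

    static boolean isFlinkManagedTable(Map<String, String> hiveParameters) {
        return MANAGED_IDENTIFIER.equals(
                hiveParameters.get(FLINK_PROPERTY_PREFIX + CONNECTOR_KEY));
    }

    public static void main(String[] args) {
        Map<String, String> params =
                Collections.singletonMap("flink.connector", "default");
        System.out.println(isFlinkManagedTable(params)); // true
    }
}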

Example 23 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.

From class TableEnvHiveConnectorITCase, method testNotNullConstraints.

@Test
public void testNotNullConstraints() throws Exception {
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.executeSql("create table db1.tbl (x int,y bigint not null enable rely,z string not null enable norely)");
        CatalogBaseTable catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl"));
        TableSchema tableSchema = catalogTable.getSchema();
        assertTrue("By default columns should be nullable", tableSchema.getFieldDataTypes()[0].getLogicalType().isNullable());
        assertFalse("NOT NULL columns should be reflected in table schema", tableSchema.getFieldDataTypes()[1].getLogicalType().isNullable());
        assertTrue("NOT NULL NORELY columns should be considered nullable", tableSchema.getFieldDataTypes()[2].getLogicalType().isNullable());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableSchema(org.apache.flink.table.api.TableSchema) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)
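The assertions boil down to how Hive constraints map onto Flink type nullability: only NOT NULL ... ENABLE RELY becomes a non-nullable Flink type, while NORELY constraints are ignored. The same mapping can be reproduced directly with Flink's DataTypes API, without a Hive round trip; a minimal sketch:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class NullabilitySketch {
    public static void main(String[] args) {
        DataType x = DataTypes.INT();              // no constraint: nullable by default
        DataType y = DataTypes.BIGINT().notNull(); // NOT NULL ENABLE RELY
        DataType z = DataTypes.STRING();           // NOT NULL ENABLE NORELY stays nullable
        System.out.println(x.getLogicalType().isNullable()); // true
        System.out.println(y.getLogicalType().isNullable()); // false
        System.out.println(z.getLogicalType().isNullable()); // true
    }
}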

Example 24 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.

From class TableEnvHiveConnectorITCase, method testPKConstraint.

@Test
public void testPKConstraint() throws Exception {
    // While PK constraints are supported since Hive 2.1.0, the constraints cannot be RELY in
    // 2.x versions.
    // So let's only test for 3.x.
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        // test rely PK constraints
        tableEnv.executeSql("create table db1.tbl1 (x tinyint,y smallint,z int, primary key (x,z) disable novalidate rely)");
        CatalogBaseTable catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl1"));
        TableSchema tableSchema = catalogTable.getSchema();
        assertTrue(tableSchema.getPrimaryKey().isPresent());
        UniqueConstraint pk = tableSchema.getPrimaryKey().get();
        assertEquals(2, pk.getColumns().size());
        assertTrue(pk.getColumns().containsAll(Arrays.asList("x", "z")));
        // test norely PK constraints
        tableEnv.executeSql("create table db1.tbl2 (x tinyint,y smallint, primary key (x) disable norely)");
        catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl2"));
        tableSchema = catalogTable.getSchema();
        assertFalse(tableSchema.getPrimaryKey().isPresent());
        // test table w/o PK
        tableEnv.executeSql("create table db1.tbl3 (x tinyint)");
        catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl3"));
        tableSchema = catalogTable.getSchema();
        assertFalse(tableSchema.getPrimaryKey().isPresent());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableSchema(org.apache.flink.table.api.TableSchema) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)
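On the Flink side, a RELY primary key surfaces as a UniqueConstraint on the schema, while NORELY keys are dropped entirely. A minimal sketch building the equivalent schema directly with the legacy TableSchema builder used by these tests; note the primary key columns are declared NOT NULL, matching how the RELY constraint arrives from Hive:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;

public class PrimaryKeySketch {
    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder()
                .field("x", DataTypes.TINYINT().notNull())
                .field("y", DataTypes.SMALLINT())
                .field("z", DataTypes.INT().notNull())
                .primaryKey("x", "z")
                .build();
        // Mirrors the assertions above: a two-column primary key on (x, z).
        schema.getPrimaryKey().ifPresent(pk -> System.out.println(pk.getColumns()));
    }
}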

Example 25 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.

From class TableEnvironmentImpl, method registerTableSourceInternal.

@Override
public void registerTableSourceInternal(String name, TableSource<?> tableSource) {
    validateTableSource(tableSource);
    ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
    Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);
    if (table.isPresent()) {
        if (table.get() instanceof ConnectorCatalogTable<?, ?>) {
            ConnectorCatalogTable<?, ?> sourceSinkTable = (ConnectorCatalogTable<?, ?>) table.get();
            if (sourceSinkTable.getTableSource().isPresent()) {
                throw new ValidationException(String.format("Table '%s' already exists. Please choose a different name.", name));
            } else {
                // wrapper contains only sink (not source)
                ConnectorCatalogTable sourceAndSink = ConnectorCatalogTable.sourceAndSink(tableSource, sourceSinkTable.getTableSink().get(), !IS_STREAM_TABLE);
                catalogManager.dropTemporaryTable(objectIdentifier, false);
                catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false);
            }
        } else {
            throw new ValidationException(String.format("Table '%s' already exists. Please choose a different name.", name));
        }
    } else {
        ConnectorCatalogTable source = ConnectorCatalogTable.source(tableSource, !IS_STREAM_TABLE);
        catalogManager.createTemporaryTable(source, objectIdentifier, false);
    }
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ValidationException(org.apache.flink.table.api.ValidationException) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
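The branching above implements a merge: registering a source under a name that already holds a sink-only ConnectorCatalogTable combines the two into one entry, while registering over an existing source fails. A plain-Java sketch of that merge logic, with strings standing in for the actual source and sink objects; the class and method names are illustrative, not Flink's API:

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class SourceSinkRegistrySketch {

    // A table entry may hold a source, a sink, or both.
    static final class Entry {
        final Optional<String> source;
        final Optional<String> sink;
        Entry(Optional<String> source, Optional<String> sink) {
            this.source = source;
            this.sink = sink;
        }
    }

    private final Map<String, Entry> tables = new HashMap<>();

    void registerSink(String name, String sink) {
        tables.put(name, new Entry(Optional.empty(), Optional.of(sink)));
    }

    void registerSource(String name, String source) {
        Entry existing = tables.get(name);
        if (existing == null) {
            tables.put(name, new Entry(Optional.of(source), Optional.empty()));
        } else if (existing.source.isPresent()) {
            throw new IllegalStateException(String.format(
                    "Table '%s' already exists. Please choose a different name.", name));
        } else {
            // Sink-only entry: merge the new source into it.
            tables.put(name, new Entry(Optional.of(source), existing.sink));
        }
    }

    public static void main(String[] args) {
        SourceSinkRegistrySketch registry = new SourceSinkRegistrySketch();
        registry.registerSink("t", "mySink");
        registry.registerSource("t", "mySource"); // merges with the existing sink
        try {
            registry.registerSource("t", "another");
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage()); // already has a source: rejected
        }
    }
}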

Aggregations

CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 103
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 51
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 46
Test (org.junit.Test): 42
ValidationException (org.apache.flink.table.api.ValidationException): 33
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 30
CatalogView (org.apache.flink.table.catalog.CatalogView): 27
TableSchema (org.apache.flink.table.api.TableSchema): 24
Table (org.apache.hadoop.hive.metastore.api.Table): 21
HashMap (java.util.HashMap): 18
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 18
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 15
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 15
Map (java.util.Map): 13
LinkedHashMap (java.util.LinkedHashMap): 12
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 12
AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation): 12
DropTableOperation (org.apache.flink.table.operations.ddl.DropTableOperation): 12
ArrayList (java.util.ArrayList): 9
CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 9