
Example 46 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in project flink by apache.

In class HiveCatalogUdfITCase, method testFlinkUdf.

@Test
public void testFlinkUdf() throws Exception {
    final TableSchema schema = TableSchema.builder().field("name", DataTypes.STRING()).field("age", DataTypes.INT()).build();
    final Map<String, String> sourceOptions = new HashMap<>();
    sourceOptions.put("connector.type", "filesystem");
    sourceOptions.put("connector.path", getClass().getResource("/csv/test.csv").getPath());
    sourceOptions.put("format.type", "csv");
    CatalogTable source = new CatalogTableImpl(schema, sourceOptions, "Comment.");
    hiveCatalog.createTable(new ObjectPath(HiveCatalog.DEFAULT_DB, sourceTableName), source, false);
    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "myudf"), new CatalogFunctionImpl(TestHiveSimpleUDF.class.getCanonicalName()), false);
    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "mygenericudf"), new CatalogFunctionImpl(TestHiveGenericUDF.class.getCanonicalName()), false);
    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "myudtf"), new CatalogFunctionImpl(TestHiveUDTF.class.getCanonicalName()), false);
    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "myudaf"), new CatalogFunctionImpl(GenericUDAFSum.class.getCanonicalName()), false);
    testUdf(true);
    testUdf(false);
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableSchema(org.apache.flink.table.api.TableSchema) HashMap(java.util.HashMap) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) CatalogTable(org.apache.flink.table.catalog.CatalogTable) CatalogFunctionImpl(org.apache.flink.table.catalog.CatalogFunctionImpl) Test(org.junit.Test)
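Every example in this series builds an ObjectPath from a database name and an object name. As a quick reference, here is a minimal standalone sketch (not taken from the Flink tests) of the ObjectPath accessors these examples rely on; fromString is assumed to accept a dot-separated "db.object" name.

import org.apache.flink.table.catalog.ObjectPath;

public class ObjectPathSketch {

    public static void main(String[] args) {
        // Build a path from a database name and an object (table/view/function) name.
        ObjectPath path = new ObjectPath("mydb", "mytable");
        // Accessors used throughout the catalog API.
        System.out.println(path.getDatabaseName()); // mydb
        System.out.println(path.getObjectName()); // mytable
        System.out.println(path.getFullName()); // mydb.mytable
        // Parse a dot-separated full name back into an ObjectPath.
        ObjectPath parsed = ObjectPath.fromString("mydb.mytable");
        System.out.println(path.equals(parsed)); // true
    }
}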

Example 47 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in project flink by apache.

In class HiveCatalogITCase, method testViewSchema.

@Test
public void testViewSchema() throws Exception {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.DEFAULT);
    tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tableEnv.useCatalog(hiveCatalog.getName());
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.useDatabase("db1");
        tableEnv.executeSql("create table src(x int,ts timestamp(3)) with ('connector'='datagen','number-of-rows'='10')");
        tableEnv.executeSql("create view v1 as select x,ts from src order by x limit 3");
        CatalogView catalogView = (CatalogView) hiveCatalog.getTable(new ObjectPath("db1", "v1"));
        Schema viewSchema = catalogView.getUnresolvedSchema();
        assertThat(viewSchema).isEqualTo(Schema.newBuilder().fromFields(new String[] { "x", "ts" }, new AbstractDataType[] { DataTypes.INT(), DataTypes.TIMESTAMP(3) }).build());
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.executeSql("select x from v1").collect());
        assertThat(results).hasSize(3);
        tableEnv.executeSql("create view v2 (v2_x,v2_ts) comment 'v2 comment' as select x,cast(ts as timestamp_ltz(3)) from v1");
        catalogView = (CatalogView) hiveCatalog.getTable(new ObjectPath("db1", "v2"));
        assertThat(catalogView.getUnresolvedSchema()).isEqualTo(Schema.newBuilder().fromFields(new String[] { "v2_x", "v2_ts" }, new AbstractDataType[] { DataTypes.INT(), DataTypes.TIMESTAMP_LTZ(3) }).build());
        assertThat(catalogView.getComment()).isEqualTo("v2 comment");
        results = CollectionUtil.iteratorToList(tableEnv.executeSql("select * from v2").collect());
        assertThat(results).hasSize(3);
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) Schema(org.apache.flink.table.api.Schema) TableSchema(org.apache.flink.table.api.TableSchema) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) CatalogView(org.apache.flink.table.catalog.CatalogView) Test(org.junit.Test)
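The view lookup above calls getTable directly because the test has just created v1 and v2. When the object may be missing, the Catalog interface also exposes tableExists and listTables; the helper below is an illustrative sketch (class and method names are invented here), not part of the Flink test.

import java.util.List;

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.ObjectPath;

public class CatalogLookupSketch {

    // Return the table or view behind db.name, or null if it is not registered.
    static CatalogBaseTable lookup(Catalog catalog, String db, String name) throws Exception {
        ObjectPath path = new ObjectPath(db, name);
        return catalog.tableExists(path) ? catalog.getTable(path) : null;
    }

    // Tables and views registered in a database, by name.
    static List<String> listObjects(Catalog catalog, String db) throws Exception {
        return catalog.listTables(db);
    }
}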

Example 48 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in project flink by apache.

In class HiveCatalogTest, method testRetrieveFlinkProperties.

@Test
public void testRetrieveFlinkProperties() throws Exception {
    ObjectPath hiveObjectPath = new ObjectPath(HiveCatalog.DEFAULT_DB, "testRetrieveProperties");
    Map<String, String> options = getLegacyFileSystemConnectorOptions("/test_path");
    options.put(CONNECTOR.key(), "jdbc");
    options.put("url", "jdbc:clickhouse://host:port/testUrl1");
    options.put("flink.url", "jdbc:clickhouse://host:port/testUrl2");
    hiveCatalog.createTable(hiveObjectPath, new CatalogTableImpl(schema, options, null), false);
    CatalogBaseTable hiveTable = hiveCatalog.getTable(hiveObjectPath);
    assertThat(hiveTable.getOptions()).containsEntry("url", "jdbc:clickhouse://host:port/testUrl1");
    assertThat(hiveTable.getOptions()).containsEntry("flink.url", "jdbc:clickhouse://host:port/testUrl2");
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) Test(org.junit.Test)
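The same create-then-read-back check can be tried without a Hive metastore by pointing at a GenericInMemoryCatalog. The sketch below is illustrative only (catalog name, table name, and options are made up) and simply shows that the options map survives the round trip.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogTableImpl;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.ObjectPath;

public class OptionsRoundTripSketch {

    public static void main(String[] args) throws Exception {
        // In-memory catalog; it is created with a "default" database already present.
        GenericInMemoryCatalog catalog = new GenericInMemoryCatalog("mem");
        catalog.open();

        Map<String, String> options = new HashMap<>();
        options.put("connector", "jdbc");
        options.put("url", "jdbc:clickhouse://host:port/testUrl1");

        ObjectPath path = new ObjectPath("default", "t1");
        catalog.createTable(path, new CatalogTableImpl(TableSchema.builder().build(), options, null), false);

        // The options come back exactly as they were stored.
        System.out.println(catalog.getTable(path).getOptions());
        catalog.close();
    }
}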

Example 49 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in project flink by apache.

In class HiveCatalogTest, method testGetNoSchemaGenericTable.

@Test
public void testGetNoSchemaGenericTable() throws Exception {
    ObjectPath hiveObjectPath = new ObjectPath(HiveCatalog.DEFAULT_DB, "testGetNoSchemaGenericTable");
    Map<String, String> properties = new HashMap<>();
    properties.put(CONNECTOR.key(), "jdbc");
    hiveCatalog.createTable(hiveObjectPath, new CatalogTableImpl(TableSchema.builder().build(), properties, null), false);
    CatalogBaseTable catalogTable = hiveCatalog.getTable(hiveObjectPath);
    assertThat(catalogTable.getSchema()).isEqualTo(TableSchema.builder().build());
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HashMap(java.util.HashMap) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) Test(org.junit.Test)

Example 50 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in project flink by apache.

In class PostgresCatalogTest, method testGetTables_TableNotExistException_NoDb.

@Test
public void testGetTables_TableNotExistException_NoDb() throws TableNotExistException {
    exception.expect(TableNotExistException.class);
    catalog.getTable(new ObjectPath("nonexistdb", PostgresTablePath.toFlinkTableName(TEST_SCHEMA, "anytable")));
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) Test(org.junit.Test)
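Outside an expected-exception test, the same missing-table case is usually handled with a try/catch around getTable. A minimal sketch (the helper name is invented):

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.exceptions.TableNotExistException;

public class MissingTableSketch {

    // Return null instead of propagating the checked exception when the path does not resolve.
    static CatalogBaseTable tryGet(Catalog catalog, ObjectPath path) {
        try {
            return catalog.getTable(path);
        } catch (TableNotExistException e) {
            return null;
        }
    }
}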

Aggregations

ObjectPath (org.apache.flink.table.catalog.ObjectPath): 81 usages
Test (org.junit.Test): 52 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 32 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 29 usages
HashMap (java.util.HashMap): 21 usages
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 20 usages
TableSchema (org.apache.flink.table.api.TableSchema): 19 usages
TableEnvironment (org.apache.flink.table.api.TableEnvironment): 17 usages
CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec): 12 usages
Table (org.apache.hadoop.hive.metastore.api.Table): 12 usages
Configuration (org.apache.flink.configuration.Configuration): 11 usages
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 11 usages
TableNotExistException (org.apache.flink.table.catalog.exceptions.TableNotExistException): 9 usages
ArrayList (java.util.ArrayList): 8 usages
Map (java.util.Map): 8 usages
GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog): 8 usages
LinkedHashMap (java.util.LinkedHashMap): 7 usages
Catalog (org.apache.flink.table.catalog.Catalog): 7 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 6 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 6 usages