Search in sources :

Example 1 with Catalog

use of org.apache.flink.table.catalog.Catalog in project flink by apache.

In the class HiveParser, the method parse:

/**
 * Parses the statement with Hive's parser when the session's current catalog is a
 * {@link HiveCatalog}; otherwise delegates to Flink's default planner.
 *
 * @param statement the SQL statement to parse
 * @return the operations produced from the statement
 */
@Override
public List<Operation> parse(String statement) {
    CatalogManager catalogManager = getCatalogManager();
    Catalog currentCatalog =
            catalogManager.getCatalog(catalogManager.getCurrentCatalog()).orElse(null);
    if (!(currentCatalog instanceof HiveCatalog)) {
        LOG.warn("Current catalog is not HiveCatalog. Falling back to Flink's planner.");
        return super.parse(statement);
    }
    // Cast once up front instead of repeating the cast throughout the method.
    HiveCatalog hiveCatalog = (HiveCatalog) currentCatalog;
    // Copy the catalog's HiveConf so the per-parse overrides below do not mutate it.
    HiveConf hiveConf = new HiveConf(hiveCatalog.getHiveConf());
    hiveConf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
    hiveConf.set("hive.allow.udf.load.on.demand", "false");
    hiveConf.setVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE, "mr");
    HiveShim hiveShim = HiveShimLoader.loadHiveShim(hiveCatalog.getHiveVersion());
    try {
        // creates SessionState
        startSessionState(hiveConf, catalogManager);
        // We override Hive's grouping function. Refer to the implementation for more details.
        hiveShim.registerTemporaryFunction("grouping", HiveGenericUDFGrouping.class);
        return processCmd(statement, hiveConf, hiveShim, hiveCatalog);
    } finally {
        // Always tear down the per-parse SessionState, even when parsing fails.
        clearSessionState();
    }
}
Also used : HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) HiveConf(org.apache.hadoop.hive.conf.HiveConf) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) CatalogManager(org.apache.flink.table.catalog.CatalogManager) HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) Catalog(org.apache.flink.table.catalog.Catalog)

Example 2 with Catalog

use of org.apache.flink.table.catalog.Catalog in project flink by apache.

In the class HiveCatalogITCase, the method testTableWithPrimaryKey:

@Test
public void testTableWithPrimaryKey() {
    final TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
    env.getConfig().getConfiguration().setInteger(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 1);
    env.registerCatalog("catalog1", hiveCatalog);
    env.useCatalog("catalog1");
    // DDL for a source table with a computed column, a watermark and a NOT ENFORCED primary key.
    final String ddl =
            "CREATE TABLE pk_src (\n"
                    + "  uuid varchar(40) not null,\n"
                    + "  price DECIMAL(10, 2),\n"
                    + "  currency STRING,\n"
                    + "  ts6 TIMESTAMP(6),\n"
                    + "  ts AS CAST(ts6 AS TIMESTAMP(3)),\n"
                    + "  WATERMARK FOR ts AS ts,\n"
                    + "  constraint ct1 PRIMARY KEY(uuid) NOT ENFORCED)\n"
                    + "  WITH (\n"
                    + "    'connector.type' = 'filesystem',"
                    + "    'connector.path' = 'file://fakePath',"
                    + "    'format.type' = 'csv')";
    env.executeSql(ddl);
    // Read the schema back through the current catalog; null signals "not found".
    TableSchema schema =
            env.getCatalog(env.getCurrentCatalog())
                    .map(cat -> {
                        final ObjectPath path =
                                ObjectPath.fromString(cat.getDefaultDatabase() + '.' + "pk_src");
                        try {
                            return cat.getTable(path).getSchema();
                        } catch (TableNotExistException e) {
                            return null;
                        }
                    })
                    .orElse(null);
    assertThat(schema).isNotNull();
    // The primary-key constraint declared in the DDL must survive the catalog round trip.
    assertThat(schema.getPrimaryKey())
            .hasValue(UniqueConstraint.primaryKey("ct1", Collections.singletonList("uuid")));
    env.executeSql("DROP TABLE pk_src");
}
Also used : Arrays(java.util.Arrays) Schema(org.apache.flink.table.api.Schema) FileUtils(org.apache.flink.util.FileUtils) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) CatalogTable(org.apache.flink.table.catalog.CatalogTable) FLINK_PROPERTY_PREFIX(org.apache.flink.table.catalog.CatalogPropertiesUtil.FLINK_PROPERTY_PREFIX) Future(java.util.concurrent.Future) Map(java.util.Map) URI(java.net.URI) Path(java.nio.file.Path) TableEnvironment(org.apache.flink.table.api.TableEnvironment) AfterClass(org.junit.AfterClass) Expressions.$(org.apache.flink.table.api.Expressions.$) TableSchema(org.apache.flink.table.api.TableSchema) Table(org.apache.flink.table.api.Table) TestCollectionTableFactory(org.apache.flink.table.planner.factories.utils.TestCollectionTableFactory) Executors(java.util.concurrent.Executors) List(java.util.List) FactoryUtil(org.apache.flink.table.factories.FactoryUtil) ManagedTableFactory(org.apache.flink.table.factories.ManagedTableFactory) Row(org.apache.flink.types.Row) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) BeforeClass(org.junit.BeforeClass) ByteArrayOutputStream(java.io.ByteArrayOutputStream) TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM(org.apache.flink.table.api.config.ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM) HashMap(java.util.HashMap) Callable(java.util.concurrent.Callable) ObjectPath(org.apache.flink.table.catalog.ObjectPath) AtomicReference(java.util.concurrent.atomic.AtomicReference) ArrayList(java.util.ArrayList) CatalogView(org.apache.flink.table.catalog.CatalogView) Catalog(org.apache.flink.table.catalog.Catalog) TestManagedTableFactory(org.apache.flink.table.factories.TestManagedTableFactory) ExecutorService(java.util.concurrent.ExecutorService) AbstractDataType(org.apache.flink.table.types.AbstractDataType) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) PrintStream(java.io.PrintStream) 
TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) Files(java.nio.file.Files) Configuration(org.apache.flink.configuration.Configuration) DataTypes(org.apache.flink.table.api.DataTypes) Test(org.junit.Test) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) CollectionUtil(org.apache.flink.util.CollectionUtil) File(java.io.File) TimeUnit(java.util.concurrent.TimeUnit) CONNECTOR(org.apache.flink.table.factories.FactoryUtil.CONNECTOR) Rule(org.junit.Rule) CoreOptions(org.apache.flink.configuration.CoreOptions) Paths(java.nio.file.Paths) SqlDialect(org.apache.flink.table.api.SqlDialect) EnvironmentSettings(org.apache.flink.table.api.EnvironmentSettings) BufferedReader(java.io.BufferedReader) FileReader(java.io.FileReader) Comparator(java.util.Comparator) Collections(java.util.Collections) TemporaryFolder(org.junit.rules.TemporaryFolder) ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableSchema(org.apache.flink.table.api.TableSchema) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Test(org.junit.Test)

Example 3 with Catalog

use of org.apache.flink.table.catalog.Catalog in project flink by apache.

In the class HiveCatalogFactoryTest, the method testCreateHiveCatalogWithIllegalHadoopConfDir:

/**
 * Creating a HiveCatalog that points at an illegal hadoop conf dir (here: an empty temp
 * folder) must be rejected with a {@link ValidationException}.
 */
@Test
public void testCreateHiveCatalogWithIllegalHadoopConfDir() throws IOException {
    final String catalogName = "mycatalog";
    // A freshly created, empty folder — presumably illegal because it contains no
    // hadoop configuration files. TODO(review): confirm against the factory's validation.
    final String hadoopConfDir = tempFolder.newFolder().getAbsolutePath();
    // Building the options cannot throw ValidationException, so keep it out of the try.
    final Map<String, String> options = new HashMap<>();
    options.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
    options.put(HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(), CONF_DIR.getPath());
    options.put(HiveCatalogFactoryOptions.HADOOP_CONF_DIR.key(), hadoopConfDir);
    try {
        FactoryUtil.createCatalog(
                catalogName, options, null, Thread.currentThread().getContextClassLoader());
        Assert.fail("Expected a ValidationException for an illegal hadoop conf dir");
    } catch (ValidationException expected) {
        // Expected: the factory must reject the illegal hadoop conf dir.
    }
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) HashMap(java.util.HashMap) HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) Catalog(org.apache.flink.table.catalog.Catalog) Test(org.junit.Test)

Example 4 with Catalog

use of org.apache.flink.table.catalog.Catalog in project flink by apache.

In the class HiveParserDDLSemanticAnalyzer, the method getDatabase:

/**
 * Looks up a database in the session's current catalog.
 *
 * @param databaseName name of the database to fetch
 * @return the resolved {@link CatalogDatabase}
 * @throws ValidationException if the current catalog is unregistered or the database does
 *     not exist in it
 */
private CatalogDatabase getDatabase(String databaseName) {
    // orElseThrow instead of an unchecked Optional.get(): fail with a clear message rather
    // than a bare NoSuchElementException if the current catalog is somehow not registered.
    Catalog catalog =
            catalogManager
                    .getCatalog(catalogManager.getCurrentCatalog())
                    .orElseThrow(
                            () ->
                                    new ValidationException(
                                            String.format(
                                                    "Current catalog %s does not exist",
                                                    catalogManager.getCurrentCatalog())));
    try {
        return catalog.getDatabase(databaseName);
    } catch (DatabaseNotExistException e) {
        // Message text kept byte-identical; callers/tests may match on it.
        throw new ValidationException(String.format("Database %s not exists", databaseName), e);
    }
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) ValidationException(org.apache.flink.table.api.ValidationException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) Catalog(org.apache.flink.table.catalog.Catalog) HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog)

Example 5 with Catalog

use of org.apache.flink.table.catalog.Catalog in project flink by apache.

In the class JdbcCatalogFactoryTest, the method test:

@Test
public void test() {
    // Factory options mirroring the catalog under test.
    final Map<String, String> props = new HashMap<>();
    props.put(CommonCatalogOptions.CATALOG_TYPE.key(), JdbcCatalogFactoryOptions.IDENTIFIER);
    props.put(JdbcCatalogFactoryOptions.DEFAULT_DATABASE.key(), PostgresCatalog.DEFAULT_DATABASE);
    props.put(JdbcCatalogFactoryOptions.USERNAME.key(), TEST_USERNAME);
    props.put(JdbcCatalogFactoryOptions.PASSWORD.key(), TEST_PWD);
    props.put(JdbcCatalogFactoryOptions.BASE_URL.key(), baseUrl);
    final Catalog created =
            FactoryUtil.createCatalog(
                    TEST_CATALOG_NAME, props, null, Thread.currentThread().getContextClassLoader());
    // Cast once; the factory is expected to hand back a JdbcCatalog wrapping Postgres.
    final JdbcCatalog jdbcCatalog = (JdbcCatalog) created;
    checkEquals(catalog, jdbcCatalog);
    assertTrue(jdbcCatalog.getInternal() instanceof PostgresCatalog);
}
Also used : JdbcCatalog(org.apache.flink.connector.jdbc.catalog.JdbcCatalog) HashMap(java.util.HashMap) PostgresCatalog(org.apache.flink.connector.jdbc.catalog.PostgresCatalog) JdbcCatalog(org.apache.flink.connector.jdbc.catalog.JdbcCatalog) PostgresCatalog(org.apache.flink.connector.jdbc.catalog.PostgresCatalog) Catalog(org.apache.flink.table.catalog.Catalog) Test(org.junit.Test)

Aggregations

Catalog (org.apache.flink.table.catalog.Catalog)23 HashMap (java.util.HashMap)10 ValidationException (org.apache.flink.table.api.ValidationException)10 Test (org.junit.Test)9 TableNotExistException (org.apache.flink.table.catalog.exceptions.TableNotExistException)8 GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog)7 ObjectPath (org.apache.flink.table.catalog.ObjectPath)7 TableException (org.apache.flink.table.api.TableException)6 FunctionCatalog (org.apache.flink.table.catalog.FunctionCatalog)6 CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException)6 DatabaseNotExistException (org.apache.flink.table.catalog.exceptions.DatabaseNotExistException)6 HiveCatalog (org.apache.flink.table.catalog.hive.HiveCatalog)6 CatalogTable (org.apache.flink.table.catalog.CatalogTable)5 IOException (java.io.IOException)4 ArrayList (java.util.ArrayList)4 List (java.util.List)4 ExecutionException (java.util.concurrent.ExecutionException)4 SqlParserException (org.apache.flink.table.api.SqlParserException)4 DatabaseAlreadyExistException (org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException)4 DatabaseNotEmptyException (org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException)4