Search in sources:

Example 11 with Catalog

Use of org.apache.flink.table.catalog.Catalog in project Flink by Apache.

From the class SqlToOperationConverterTest, method prepareTable.

/**
 * Registers an in-memory catalog "cat1" with database "db1", makes them current,
 * and creates table "tb1" there with a three-column schema.
 *
 * @param managedTable when {@code true}, the "connector" option is omitted so the
 *     table is treated as managed
 * @param hasPartition when {@code true}, the table is partitioned by columns b and c
 * @param hasConstraint when {@code true}, a primary key named "ct1" on (a, b) is added
 * @throws Exception if catalog or table creation fails
 */
private void prepareTable(boolean managedTable, boolean hasPartition, boolean hasConstraint) throws Exception {
    // Fresh in-memory catalog plus a database to hold the test table.
    final Catalog catalog = new GenericInMemoryCatalog("default", "default");
    catalogManager.registerCatalog("cat1", catalog);
    catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);

    final Schema.Builder schemaBuilder =
            Schema.newBuilder()
                    .column("a", DataTypes.STRING().notNull())
                    .column("b", DataTypes.BIGINT().notNull())
                    .column("c", DataTypes.BIGINT());
    final Schema schema =
            hasConstraint
                    ? schemaBuilder.primaryKeyNamed("ct1", "a", "b").build()
                    : schemaBuilder.build();

    final Map<String, String> tableOptions = new HashMap<>();
    tableOptions.put("k", "v");
    // A managed table deliberately has no "connector" option.
    if (!managedTable) {
        tableOptions.put("connector", "dummy");
    }

    final CatalogTable catalogTable =
            CatalogTable.of(
                    schema,
                    "tb1",
                    hasPartition ? Arrays.asList("b", "c") : Collections.emptyList(),
                    Collections.unmodifiableMap(tableOptions));

    catalogManager.setCurrentCatalog("cat1");
    catalogManager.setCurrentDatabase("db1");
    final ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1");
    catalogManager.createTable(catalogTable, tableIdentifier, true);
}
Also used : HashMap(java.util.HashMap) TableSchema(org.apache.flink.table.api.TableSchema) OperationMatchers.withSchema(org.apache.flink.table.planner.utils.OperationMatchers.withSchema) CatalogManagerCalciteSchema(org.apache.flink.table.planner.catalog.CatalogManagerCalciteSchema) Schema(org.apache.flink.table.api.Schema) CalciteSchemaBuilder.asRootSchema(org.apache.calcite.jdbc.CalciteSchemaBuilder.asRootSchema) CatalogTable(org.apache.flink.table.catalog.CatalogTable) Catalog(org.apache.flink.table.catalog.Catalog) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)

Example 12 with Catalog

Use of org.apache.flink.table.catalog.Catalog in project Flink by Apache.

From the class CatalogITCase, method testGetTablesFromGivenCatalogDatabase.

/**
 * Verifies that listTables() uses the current catalog/database while
 * listTables(catalog, database) can target any registered catalog explicitly.
 */
@Test
public void testGetTablesFromGivenCatalogDatabase() throws Exception {
    // Two independent in-memory catalogs; c2 (default db "d2") becomes the default.
    final Catalog firstCatalog = new GenericInMemoryCatalog("c1", "default");
    final Catalog secondCatalog = new GenericInMemoryCatalog("c2", "d2");
    final CatalogManager catalogManager =
            CatalogManagerMocks.preparedCatalogManager().defaultCatalog("c2", secondCatalog).build();
    catalogManager.registerCatalog("c1", firstCatalog);

    // A minimal table definition reused for both catalogs.
    final CatalogTable catalogTable =
            CatalogTable.of(Schema.newBuilder().build(), null, new ArrayList<>(), new HashMap<>());
    firstCatalog.createDatabase("d1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
    firstCatalog.createTable(new ObjectPath("d1", "t1"), catalogTable, true);
    secondCatalog.createTable(
            new ObjectPath(catalogManager.getCurrentDatabase(), "t2"), catalogTable, true);

    // Current context points at c2/d2, so the plain listing only sees t2 ...
    assertThat(catalogManager.getCurrentCatalog()).isEqualTo("c2");
    assertThat(catalogManager.getCurrentDatabase()).isEqualTo("d2");
    assertThat(catalogManager.listTables()).containsExactlyInAnyOrder("t2");
    // ... while the explicit overload reaches into c1/d1.
    assertThat(catalogManager.listTables("c1", "d1")).containsExactlyInAnyOrder("t1");
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) HashMap(java.util.HashMap) CatalogTable(org.apache.flink.table.catalog.CatalogTable) Catalog(org.apache.flink.table.catalog.Catalog) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) CatalogManager(org.apache.flink.table.catalog.CatalogManager) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl) Test(org.junit.Test)

Example 13 with Catalog

Use of org.apache.flink.table.catalog.Catalog in project Flink by Apache.

From the class HiveCatalogFactoryTest, method testCreateMultipleHiveCatalog.

/**
 * Creates two Hive catalogs concurrently, each pointing at its own conf directory,
 * and verifies that neither catalog picks up configuration from the other.
 */
@Test
public void testCreateMultipleHiveCatalog() throws Exception {
    // Resolve both conf directories on the test thread's classpath.
    final ClassLoader testClassLoader = Thread.currentThread().getContextClassLoader();

    final Map<String, String> props1 = new HashMap<>();
    props1.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
    props1.put(
            HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(),
            testClassLoader.getResource("test-multi-hive-conf1").getPath());

    final Map<String, String> props2 = new HashMap<>();
    props2.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
    props2.put(
            HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(),
            testClassLoader.getResource("test-multi-hive-conf2").getPath());

    // NOTE: the classloader lookup stays inside each callable so it is resolved
    // on the worker thread, exactly as in the original factory usage.
    Callable<Catalog> callable1 =
            () ->
                    FactoryUtil.createCatalog(
                            "cat1", props1, null, Thread.currentThread().getContextClassLoader());
    Callable<Catalog> callable2 =
            () ->
                    FactoryUtil.createCatalog(
                            "cat2", props2, null, Thread.currentThread().getContextClassLoader());

    // Build both catalogs in parallel to exercise concurrent factory creation.
    ExecutorService executorService = Executors.newFixedThreadPool(2);
    Future<Catalog> future1 = executorService.submit(callable1);
    Future<Catalog> future2 = executorService.submit(callable2);
    executorService.shutdown();

    HiveCatalog catalog1 = (HiveCatalog) future1.get();
    HiveCatalog catalog2 = (HiveCatalog) future2.get();

    // Each catalog must see only its own conf directory's properties.
    assertEquals("val1", catalog1.getHiveConf().get("key"));
    assertNotNull(catalog1.getHiveConf().get("conf1", null));
    assertNull(catalog1.getHiveConf().get("conf2", null));

    assertEquals("val2", catalog2.getHiveConf().get("key"));
    assertNotNull(catalog2.getHiveConf().get("conf2", null));
    assertNull(catalog2.getHiveConf().get("conf1", null));
}
Also used : HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) HashMap(java.util.HashMap) ExecutorService(java.util.concurrent.ExecutorService) HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) Catalog(org.apache.flink.table.catalog.Catalog) Test(org.junit.Test)

Example 14 with Catalog

Use of org.apache.flink.table.catalog.Catalog in project Flink by Apache.

From the class HiveCatalogFactoryTest, method testCreateHiveCatalog.

/**
 * Verifies that the catalog factory builds a HiveCatalog from a conf directory
 * and that the resulting catalog matches one constructed directly by the test utils.
 */
@Test
public void testCreateHiveCatalog() {
    final String catalogName = "mycatalog";
    final HiveCatalog expectedCatalog = HiveTestUtils.createHiveCatalog(catalogName, null);

    // Factory options: hive catalog type plus the shared test conf directory.
    final Map<String, String> options = new HashMap<>();
    options.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
    options.put(HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(), CONF_DIR.getPath());

    final Catalog actualCatalog =
            FactoryUtil.createCatalog(
                    catalogName, options, null, Thread.currentThread().getContextClassLoader());
    final HiveCatalog actualHiveCatalog = (HiveCatalog) actualCatalog;

    // The metastore URI must come from the conf directory supplied via options.
    assertEquals(
            "dummy-hms",
            actualHiveCatalog.getHiveConf().getVar(HiveConf.ConfVars.METASTOREURIS));
    checkEquals(expectedCatalog, actualHiveCatalog);
}
Also used : HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) HashMap(java.util.HashMap) HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) Catalog(org.apache.flink.table.catalog.Catalog) Test(org.junit.Test)

Example 15 with Catalog

Use of org.apache.flink.table.catalog.Catalog in project Flink by Apache.

From the class HiveCatalogFactoryTest, method testCreateHiveCatalogWithHadoopConfDir.

/**
 * Verifies that a HiveCatalog created with an explicit Hadoop conf directory
 * picks up properties from files in that directory (here, mapred-site.xml).
 *
 * @throws IOException if writing the temporary Hadoop configuration fails
 */
@Test
public void testCreateHiveCatalogWithHadoopConfDir() throws IOException {
    final String catalogName = "mycatalog";

    // Write a one-entry mapred-site.xml into a temporary Hadoop conf directory.
    final String hadoopConfDir = tempFolder.newFolder().getAbsolutePath();
    final String mapredKey = "mapred.site.config.key";
    final String mapredVal = "mapred.site.config.val";
    writeProperty(new File(hadoopConfDir, "mapred-site.xml"), mapredKey, mapredVal);

    final HiveCatalog expectedCatalog =
            HiveTestUtils.createHiveCatalog(catalogName, CONF_DIR.getPath(), hadoopConfDir, null);

    // Factory options include both the Hive and the Hadoop conf directories.
    final Map<String, String> options = new HashMap<>();
    options.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
    options.put(HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(), CONF_DIR.getPath());
    options.put(HiveCatalogFactoryOptions.HADOOP_CONF_DIR.key(), hadoopConfDir);

    final Catalog actualCatalog =
            FactoryUtil.createCatalog(
                    catalogName, options, null, Thread.currentThread().getContextClassLoader());
    final HiveCatalog actualHiveCatalog = (HiveCatalog) actualCatalog;

    checkEquals(expectedCatalog, actualHiveCatalog);
    // The property written to mapred-site.xml must surface in the catalog's HiveConf.
    assertEquals(mapredVal, actualHiveCatalog.getHiveConf().get(mapredKey));
}
Also used : HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) HashMap(java.util.HashMap) File(java.io.File) HiveCatalog(org.apache.flink.table.catalog.hive.HiveCatalog) Catalog(org.apache.flink.table.catalog.Catalog) Test(org.junit.Test)

Aggregations

Catalog (org.apache.flink.table.catalog.Catalog)23 HashMap (java.util.HashMap)10 ValidationException (org.apache.flink.table.api.ValidationException)10 Test (org.junit.Test)9 TableNotExistException (org.apache.flink.table.catalog.exceptions.TableNotExistException)8 GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog)7 ObjectPath (org.apache.flink.table.catalog.ObjectPath)7 TableException (org.apache.flink.table.api.TableException)6 FunctionCatalog (org.apache.flink.table.catalog.FunctionCatalog)6 CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException)6 DatabaseNotExistException (org.apache.flink.table.catalog.exceptions.DatabaseNotExistException)6 HiveCatalog (org.apache.flink.table.catalog.hive.HiveCatalog)6 CatalogTable (org.apache.flink.table.catalog.CatalogTable)5 IOException (java.io.IOException)4 ArrayList (java.util.ArrayList)4 List (java.util.List)4 ExecutionException (java.util.concurrent.ExecutionException)4 SqlParserException (org.apache.flink.table.api.SqlParserException)4 DatabaseAlreadyExistException (org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException)4 DatabaseNotEmptyException (org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException)4