Search in sources :

Example 6 with CatalogDatabaseImpl

use of org.apache.flink.table.catalog.CatalogDatabaseImpl in project flink by apache.

From the class SqlToOperationConverter, method convertAlterDatabase:

/**
 * Converts an ALTER DATABASE statement into an {@link AlterDatabaseOperation}.
 *
 * <p>The resulting operation carries the database's existing properties with the
 * properties from the statement overlaid on top, so keys not mentioned in the
 * statement are preserved.
 *
 * @param sqlAlterDatabase the parsed ALTER DATABASE statement node
 * @return the alter-database operation for the resolved catalog and database
 * @throws ValidationException if the identifier has more than two parts, the catalog
 *     is unknown, or the database does not exist
 */
private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
    String[] fullDatabaseName = sqlAlterDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("ALTER DATABASE identifier format error");
    }
    // A single-part identifier names a database inside the current catalog.
    String catalogName =
            (fullDatabaseName.length == 1)
                    ? catalogManager.getCurrentCatalog()
                    : fullDatabaseName[0];
    String databaseName =
            (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
    Catalog catalog =
            catalogManager
                    .getCatalog(catalogName)
                    .orElseThrow(
                            () ->
                                    new ValidationException(
                                            String.format(
                                                    "Catalog %s does not exist", catalogName)));
    final Map<String, String> properties;
    final CatalogDatabase originCatalogDatabase;
    try {
        originCatalogDatabase = catalog.getDatabase(databaseName);
        // Start from the current properties so unspecified keys survive the ALTER.
        properties = new HashMap<>(originCatalogDatabase.getProperties());
    } catch (DatabaseNotExistException e) {
        throw new ValidationException(
                String.format("Database %s does not exist", databaseName), e);
    }
    // Overlay the key/value pairs supplied by the statement's WITH (...) clause.
    sqlAlterDatabase
            .getPropertyList()
            .getList()
            .forEach(
                    p ->
                            properties.put(
                                    ((SqlTableOption) p).getKeyString(),
                                    ((SqlTableOption) p).getValueString()));
    CatalogDatabase catalogDatabase =
            new CatalogDatabaseImpl(properties, originCatalogDatabase.getComment());
    return new AlterDatabaseOperation(catalogName, databaseName, catalogDatabase);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) SqlTableOption(org.apache.flink.sql.parser.ddl.SqlTableOption) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) ValidationException(org.apache.flink.table.api.ValidationException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) SqlShowCurrentCatalog(org.apache.flink.sql.parser.dql.SqlShowCurrentCatalog) Catalog(org.apache.flink.table.catalog.Catalog) SqlUseCatalog(org.apache.flink.sql.parser.ddl.SqlUseCatalog) SqlDropCatalog(org.apache.flink.sql.parser.ddl.SqlDropCatalog) SqlCreateCatalog(org.apache.flink.sql.parser.ddl.SqlCreateCatalog) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Example 7 with CatalogDatabaseImpl

use of org.apache.flink.table.catalog.CatalogDatabaseImpl in project flink by apache.

From the class SqlToOperationConverterTest, method prepareTable:

/**
 * Registers catalog "cat1" with database "db1", creates table "tb1" in it, and makes
 * cat1/db1 the current catalog and database for subsequent assertions.
 *
 * @param managedTable when false, a "connector" option is added so the table is not managed
 * @param hasPartition when true, the table is partitioned on columns b and c
 * @param hasConstraint when true, a primary key named "ct1" is declared on (a, b)
 */
private void prepareTable(boolean managedTable, boolean hasPartition, boolean hasConstraint) throws Exception {
    Catalog catalog = new GenericInMemoryCatalog("default", "default");
    catalogManager.registerCatalog("cat1", catalog);
    catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
    Schema.Builder schemaBuilder =
            Schema.newBuilder()
                    .column("a", DataTypes.STRING().notNull())
                    .column("b", DataTypes.BIGINT().notNull())
                    .column("c", DataTypes.BIGINT());
    Schema schema;
    if (hasConstraint) {
        schema = schemaBuilder.primaryKeyNamed("ct1", "a", "b").build();
    } else {
        schema = schemaBuilder.build();
    }
    Map<String, String> options = new HashMap<>();
    options.put("k", "v");
    if (!managedTable) {
        // Presence of a connector option means the table is not a managed table.
        options.put("connector", "dummy");
    }
    CatalogTable catalogTable =
            CatalogTable.of(
                    schema,
                    "tb1",
                    hasPartition ? Arrays.asList("b", "c") : Collections.emptyList(),
                    Collections.unmodifiableMap(options));
    catalogManager.setCurrentCatalog("cat1");
    catalogManager.setCurrentDatabase("db1");
    ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1");
    catalogManager.createTable(catalogTable, tableIdentifier, true);
}
Also used : HashMap(java.util.HashMap) TableSchema(org.apache.flink.table.api.TableSchema) OperationMatchers.withSchema(org.apache.flink.table.planner.utils.OperationMatchers.withSchema) CatalogManagerCalciteSchema(org.apache.flink.table.planner.catalog.CatalogManagerCalciteSchema) Schema(org.apache.flink.table.api.Schema) CalciteSchemaBuilder.asRootSchema(org.apache.calcite.jdbc.CalciteSchemaBuilder.asRootSchema) CatalogTable(org.apache.flink.table.catalog.CatalogTable) Catalog(org.apache.flink.table.catalog.Catalog) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)

Example 8 with CatalogDatabaseImpl

use of org.apache.flink.table.catalog.CatalogDatabaseImpl in project flink by apache.

From the class CatalogITCase, method testGetTablesFromGivenCatalogDatabase:

/**
 * Verifies that the no-arg {@code listTables()} resolves against the current
 * catalog/database, while {@code listTables(catalog, database)} lists tables of the
 * explicitly given pair.
 */
@Test
public void testGetTablesFromGivenCatalogDatabase() throws Exception {
    final Catalog firstCatalog = new GenericInMemoryCatalog("c1", "default");
    final Catalog secondCatalog = new GenericInMemoryCatalog("c2", "d2");
    final CatalogManager catalogManager =
            CatalogManagerMocks.preparedCatalogManager().defaultCatalog("c2", secondCatalog).build();
    catalogManager.registerCatalog("c1", firstCatalog);
    final CatalogTable sharedTable =
            CatalogTable.of(Schema.newBuilder().build(), null, new ArrayList<>(), new HashMap<>());
    // t1 lives in c1.d1; t2 lives in the current database of the default catalog (c2.d2).
    firstCatalog.createDatabase("d1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
    firstCatalog.createTable(new ObjectPath("d1", "t1"), sharedTable, true);
    secondCatalog.createTable(
            new ObjectPath(catalogManager.getCurrentDatabase(), "t2"), sharedTable, true);
    assertThat(catalogManager.getCurrentCatalog()).isEqualTo("c2");
    assertThat(catalogManager.getCurrentDatabase()).isEqualTo("d2");
    // Without arguments only the current catalog/database is consulted.
    assertThat(catalogManager.listTables()).containsExactlyInAnyOrder("t2");
    assertThat(catalogManager.listTables("c1", "d1")).containsExactlyInAnyOrder("t1");
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) HashMap(java.util.HashMap) CatalogTable(org.apache.flink.table.catalog.CatalogTable) Catalog(org.apache.flink.table.catalog.Catalog) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) CatalogManager(org.apache.flink.table.catalog.CatalogManager) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl) Test(org.junit.Test)

Example 9 with CatalogDatabaseImpl

use of org.apache.flink.table.catalog.CatalogDatabaseImpl in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseLocation:

/**
 * Converts ALTER DATABASE ... SET LOCATION into an {@link AlterDatabaseOperation} that
 * rewrites the database's location URI while keeping every other property and the comment.
 */
private Operation convertAlterDatabaseLocation(HiveParserASTNode ast) {
    String dbName =
            HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    String newLocation =
            HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
    CatalogDatabase existingDatabase = getDatabase(dbName);
    // Copy the current properties, then record the operation kind and the new location.
    Map<String, String> updatedProps = new HashMap<>(existingDatabase.getProperties());
    updatedProps.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_LOCATION.name());
    updatedProps.put(DATABASE_LOCATION_URI, newLocation);
    return new AlterDatabaseOperation(
            catalogManager.getCurrentCatalog(),
            dbName,
            new CatalogDatabaseImpl(updatedProps, existingDatabase.getComment()));
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Example 10 with CatalogDatabaseImpl

use of org.apache.flink.table.catalog.CatalogDatabaseImpl in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertCreateDatabase:

/**
 * Converts a CREATE DATABASE AST into a {@link CreateDatabaseOperation}.
 *
 * <p>Walks the optional child nodes (IF NOT EXISTS, COMMENT, DBPROPERTIES, LOCATION) and
 * folds the location, when present, into the database properties under
 * {@code DATABASE_LOCATION_URI}.
 */
private Operation convertCreateDatabase(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String dbComment = null;
    String dbLocation = null;
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        int tokenType = childNode.getToken().getType();
        if (tokenType == HiveASTParser.TOK_IFNOTEXISTS) {
            ifNotExists = true;
        } else if (tokenType == HiveASTParser.TOK_DATABASECOMMENT) {
            dbComment =
                    HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                            childNode.getChild(0).getText());
        } else if (tokenType == HiveASTParser.TOK_DATABASEPROPERTIES) {
            dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
        } else if (tokenType == HiveASTParser.TOK_DATABASELOCATION) {
            dbLocation =
                    HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                            childNode.getChild(0).getText());
        } else {
            throw new ValidationException("Unknown AST node for CREATE DATABASE: " + childNode);
        }
    }
    Map<String, String> props = new HashMap<>();
    if (dbProps != null) {
        props.putAll(dbProps);
    }
    if (dbLocation != null) {
        // The location is carried as an ordinary property on the Flink catalog side.
        props.put(DATABASE_LOCATION_URI, dbLocation);
    }
    return new CreateDatabaseOperation(
            catalogManager.getCurrentCatalog(),
            dbName,
            new CatalogDatabaseImpl(props, dbComment),
            ifNotExists);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Aggregations

CatalogDatabaseImpl (org.apache.flink.table.catalog.CatalogDatabaseImpl)12 HashMap (java.util.HashMap)11 CatalogDatabase (org.apache.flink.table.catalog.CatalogDatabase)7 AlterDatabaseOperation (org.apache.flink.table.operations.ddl.AlterDatabaseOperation)6 LinkedHashMap (java.util.LinkedHashMap)5 ValidationException (org.apache.flink.table.api.ValidationException)5 CatalogTable (org.apache.flink.table.catalog.CatalogTable)5 Catalog (org.apache.flink.table.catalog.Catalog)4 Test (org.junit.Test)4 TableSchema (org.apache.flink.table.api.TableSchema)3 GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog)3 ObjectPath (org.apache.flink.table.catalog.ObjectPath)3 ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable)3 Configuration (org.apache.flink.configuration.Configuration)2 SqlCreateCatalog (org.apache.flink.sql.parser.ddl.SqlCreateCatalog)2 SqlDropCatalog (org.apache.flink.sql.parser.ddl.SqlDropCatalog)2 SqlTableOption (org.apache.flink.sql.parser.ddl.SqlTableOption)2 SqlUseCatalog (org.apache.flink.sql.parser.ddl.SqlUseCatalog)2 SqlShowCurrentCatalog (org.apache.flink.sql.parser.dql.SqlShowCurrentCatalog)2 Schema (org.apache.flink.table.api.Schema)2