Use of org.apache.flink.table.catalog.CatalogDatabaseImpl in project flink by apache.
In class SqlToOperationConverter, method convertAlterDatabase:
/** Convert ALTER DATABASE statement. */
private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
    String[] fullDatabaseName = sqlAlterDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("alter database identifier format error");
    }
    String catalogName =
            (fullDatabaseName.length == 1)
                    ? catalogManager.getCurrentCatalog()
                    : fullDatabaseName[0];
    String databaseName =
            (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
    final Map<String, String> properties;
    CatalogDatabase originCatalogDatabase;
    Optional<Catalog> catalog = catalogManager.getCatalog(catalogName);
    if (catalog.isPresent()) {
        try {
            originCatalogDatabase = catalog.get().getDatabase(databaseName);
            properties = new HashMap<>(originCatalogDatabase.getProperties());
        } catch (DatabaseNotExistException e) {
            throw new ValidationException(
                    String.format("Database %s not exists", databaseName), e);
        }
    } else {
        throw new ValidationException(String.format("Catalog %s not exists", catalogName));
    }
    // overlay the properties from the SET clause onto the existing ones
    sqlAlterDatabase
            .getPropertyList()
            .getList()
            .forEach(
                    p ->
                            properties.put(
                                    ((SqlTableOption) p).getKeyString(),
                                    ((SqlTableOption) p).getValueString()));
    CatalogDatabase catalogDatabase =
            new CatalogDatabaseImpl(properties, originCatalogDatabase.getComment());
    return new AlterDatabaseOperation(catalogName, databaseName, catalogDatabase);
}
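The pattern used here — read the existing database, copy its properties, overlay the new options, and build a fresh CatalogDatabaseImpl that carries the original comment over — can be written as a standalone helper. A minimal sketch follows; the helper name mergeDatabaseOptions is hypothetical and not part of Flink.

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException;

// Hypothetical helper mirroring convertAlterDatabase: the merged properties go into a
// new CatalogDatabaseImpl while the original comment is preserved.
static CatalogDatabase mergeDatabaseOptions(
        Catalog catalog, String databaseName, Map<String, String> newOptions)
        throws DatabaseNotExistException {
    CatalogDatabase origin = catalog.getDatabase(databaseName);
    Map<String, String> merged = new HashMap<>(origin.getProperties());
    merged.putAll(newOptions);
    return new CatalogDatabaseImpl(merged, origin.getComment());
}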
Use of org.apache.flink.table.catalog.CatalogDatabaseImpl in project flink by apache.
In class SqlToOperationConverterTest, method prepareTable:
private void prepareTable(boolean managedTable, boolean hasPartition, boolean hasConstraint)
        throws Exception {
    Catalog catalog = new GenericInMemoryCatalog("default", "default");
    catalogManager.registerCatalog("cat1", catalog);
    catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
    Schema.Builder builder =
            Schema.newBuilder()
                    .column("a", DataTypes.STRING().notNull())
                    .column("b", DataTypes.BIGINT().notNull())
                    .column("c", DataTypes.BIGINT());
    Map<String, String> options = new HashMap<>();
    options.put("k", "v");
    if (!managedTable) {
        options.put("connector", "dummy");
    }
    CatalogTable catalogTable =
            CatalogTable.of(
                    hasConstraint ? builder.primaryKeyNamed("ct1", "a", "b").build() : builder.build(),
                    "tb1",
                    hasPartition ? Arrays.asList("b", "c") : Collections.emptyList(),
                    Collections.unmodifiableMap(options));
    catalogManager.setCurrentCatalog("cat1");
    catalogManager.setCurrentDatabase("db1");
    ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1");
    catalogManager.createTable(catalogTable, tableIdentifier, true);
}
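For reference, the database registered above can be read back through the same Catalog interface. A minimal standalone sketch, assuming only GenericInMemoryCatalog and the AssertJ assertThat already used in these tests; the test name readBackDatabase is made up.

@Test
public void readBackDatabase() throws Exception {
    Catalog catalog = new GenericInMemoryCatalog("default", "default");
    // ignoreIfExists = true: do not fail if "db1" has already been created.
    catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
    // The stored definition exposes the (empty) property map passed in above.
    assertThat(catalog.getDatabase("db1").getProperties()).isEmpty();
}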
Use of org.apache.flink.table.catalog.CatalogDatabaseImpl in project flink by apache.
In class CatalogITCase, method testGetTablesFromGivenCatalogDatabase:
@Test
public void testGetTablesFromGivenCatalogDatabase() throws Exception {
    final Catalog c1 = new GenericInMemoryCatalog("c1", "default");
    final Catalog c2 = new GenericInMemoryCatalog("c2", "d2");
    final CatalogManager catalogManager =
            CatalogManagerMocks.preparedCatalogManager().defaultCatalog("c2", c2).build();
    catalogManager.registerCatalog("c1", c1);
    final CatalogTable catalogTable =
            CatalogTable.of(Schema.newBuilder().build(), null, new ArrayList<>(), new HashMap<>());
    c1.createDatabase("d1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
    c1.createTable(new ObjectPath("d1", "t1"), catalogTable, true);
    c2.createTable(new ObjectPath(catalogManager.getCurrentDatabase(), "t2"), catalogTable, true);

    assertThat(catalogManager.getCurrentCatalog()).isEqualTo("c2");
    assertThat(catalogManager.getCurrentDatabase()).isEqualTo("d2");
    assertThat(catalogManager.listTables()).containsExactlyInAnyOrder("t2");
    assertThat(catalogManager.listTables("c1", "d1")).containsExactlyInAnyOrder("t1");
}
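The same listing can also be checked directly against the Catalog instance rather than through the CatalogManager; a one-line sketch using the objects created above:

// Catalog.listTables(databaseName) returns the table names registered in that database.
assertThat(c1.listTables("d1")).containsExactlyInAnyOrder("t1");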
Use of org.apache.flink.table.catalog.CatalogDatabaseImpl in project flink by apache.
In class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseLocation:
private Operation convertAlterDatabaseLocation(HiveParserASTNode ast) {
    String dbName =
            HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    String newLocation =
            HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_LOCATION.name());
    props.put(DATABASE_LOCATION_URI, newLocation);
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
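The AlterDatabaseOperation produced here ultimately amounts to a Catalog.alterDatabase call on the target catalog. A minimal sketch of that final step, with a hypothetical helper name and newDB built as above:

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException;

// Hypothetical helper: replace the stored database definition with the new one.
static void applyAlteredDatabase(Catalog catalog, String dbName, CatalogDatabase newDB)
        throws DatabaseNotExistException {
    // ignoreIfNotExists = false: surface an error if the database is missing.
    catalog.alterDatabase(dbName, newDB, false);
}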
Use of org.apache.flink.table.catalog.CatalogDatabaseImpl in project flink by apache.
In class HiveParserDDLSemanticAnalyzer, method convertCreateDatabase:
private Operation convertCreateDatabase(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String dbComment = null;
    String dbLocation = null;
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        switch (childNode.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_DATABASECOMMENT:
                dbComment =
                        HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                                childNode.getChild(0).getText());
                break;
            case HiveASTParser.TOK_DATABASEPROPERTIES:
                dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
                break;
            case HiveASTParser.TOK_DATABASELOCATION:
                dbLocation =
                        HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                                childNode.getChild(0).getText());
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE DATABASE: " + childNode);
        }
    }
    Map<String, String> props = new HashMap<>();
    if (dbProps != null) {
        props.putAll(dbProps);
    }
    if (dbLocation != null) {
        props.put(DATABASE_LOCATION_URI, dbLocation);
    }
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(props, dbComment);
    return new CreateDatabaseOperation(
            catalogManager.getCurrentCatalog(), dbName, catalogDatabase, ifNotExists);
}
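Conversely, the resulting CreateDatabaseOperation ultimately amounts to a Catalog.createDatabase call. The sketch below shows that call directly against an in-memory catalog; the catalog name, database name, comment, and property are made up, and a Hive location would go under the DATABASE_LOCATION_URI key as above.

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;

static void createDatabaseDirectly() throws Exception {
    Catalog catalog = new GenericInMemoryCatalog("cat", "default");
    Map<String, String> props = new HashMap<>();
    props.put("k1", "v1"); // arbitrary database property
    // ignoreIfExists = true mirrors IF NOT EXISTS in the DDL.
    catalog.createDatabase("mydb", new CatalogDatabaseImpl(props, "a made-up comment"), true);
}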