Use of org.apache.flink.table.catalog.CatalogDatabase in project flink by splunk.
Class HiveParserDDLSemanticAnalyzer, method convertCreateDatabase.
private Operation convertCreateDatabase(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String dbComment = null;
    String dbLocation = null;
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        switch (childNode.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_DATABASECOMMENT:
                dbComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(childNode.getChild(0).getText());
                break;
            case HiveASTParser.TOK_DATABASEPROPERTIES:
                dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
                break;
            case HiveASTParser.TOK_DATABASELOCATION:
                dbLocation = HiveParserBaseSemanticAnalyzer.unescapeSQLString(childNode.getChild(0).getText());
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE DATABASE: " + childNode);
        }
    }
    Map<String, String> props = new HashMap<>();
    if (dbProps != null) {
        props.putAll(dbProps);
    }
    if (dbLocation != null) {
        props.put(DATABASE_LOCATION_URI, dbLocation);
    }
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(props, dbComment);
    return new CreateDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, catalogDatabase, ifNotExists);
}
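For orientation, a minimal sketch of how the pieces produced above fit together: the property map and comment are wrapped in a CatalogDatabaseImpl, and the CreateDatabaseOperation merely carries that definition plus the target names and the IF NOT EXISTS flag until it is executed. All names and values below are illustrative, and the operation's accessor names are assumed from the Flink DDL operation classes rather than taken from this snippet.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.operations.ddl.CreateDatabaseOperation;

public class CreateDatabaseOperationSketch {
    public static void main(String[] args) {
        // Property map as the analyzer would assemble it; the key here is made up for the example.
        Map<String, String> props = new HashMap<>();
        props.put("owner", "alice");
        CatalogDatabase db = new CatalogDatabaseImpl(props, "demo database");

        // The operation only bundles catalog name, database name, the definition and the flag.
        CreateDatabaseOperation op = new CreateDatabaseOperation("myhive", "mydb", db, true);
        System.out.println(op.getDatabaseName());                  // mydb
        System.out.println(op.getCatalogDatabase().getComment());  // demo database
    }
}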
Use of org.apache.flink.table.catalog.CatalogDatabase in project flink by splunk.
Class HiveCatalog, method getDatabase.
// ------ databases ------
@Override
public CatalogDatabase getDatabase(String databaseName) throws DatabaseNotExistException, CatalogException {
    Database hiveDatabase = getHiveDatabase(databaseName);
    Map<String, String> properties = new HashMap<>(hiveDatabase.getParameters());
    properties.put(SqlCreateHiveDatabase.DATABASE_LOCATION_URI, hiveDatabase.getLocationUri());
    return new CatalogDatabaseImpl(properties, hiveDatabase.getDescription());
}
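A hedged usage sketch of the catalog side of this API, using Flink's GenericInMemoryCatalog instead of HiveCatalog so the example is self-contained; the database name, properties and comment are illustrative.

import java.util.Collections;

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;

public class GetDatabaseSketch {
    public static void main(String[] args) throws Exception {
        // In-memory catalog stands in for HiveCatalog to keep the sketch runnable without Hive.
        Catalog catalog = new GenericInMemoryCatalog("demo_catalog");
        catalog.open();

        catalog.createDatabase(
                "mydb",
                new CatalogDatabaseImpl(Collections.singletonMap("owner", "alice"), "demo db"),
                false);

        // getDatabase returns the stored properties and comment.
        CatalogDatabase db = catalog.getDatabase("mydb");
        System.out.println(db.getProperties()); // {owner=alice}
        System.out.println(db.getComment());    // demo db

        catalog.close();
    }
}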
Use of org.apache.flink.table.catalog.CatalogDatabase in project flink by splunk.
Class SqlToOperationConverter, method convertAlterDatabase.
/** Convert ALTER DATABASE statement. */
private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
    String[] fullDatabaseName = sqlAlterDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("alter database identifier format error");
    }
    String catalogName = (fullDatabaseName.length == 1) ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
    String databaseName = (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
    final Map<String, String> properties;
    CatalogDatabase originCatalogDatabase;
    Optional<Catalog> catalog = catalogManager.getCatalog(catalogName);
    if (catalog.isPresent()) {
        try {
            originCatalogDatabase = catalog.get().getDatabase(databaseName);
            properties = new HashMap<>(originCatalogDatabase.getProperties());
        } catch (DatabaseNotExistException e) {
            throw new ValidationException(String.format("Database %s not exists", databaseName), e);
        }
    } else {
        throw new ValidationException(String.format("Catalog %s not exists", catalogName));
    }
    // set with properties
    sqlAlterDatabase.getPropertyList().getList().forEach(
            p -> properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, originCatalogDatabase.getComment());
    return new AlterDatabaseOperation(catalogName, databaseName, catalogDatabase);
}
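The heart of this conversion is the property merge: the existing definition is copied, the SET properties from the statement overwrite or extend it, and the original comment is preserved. A minimal sketch of that merge in isolation; withUpdatedProperties is a hypothetical helper name, not part of the converter.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;

public class AlterDatabasePropertiesSketch {
    // Merge new SET properties into the existing database definition, keeping the original comment.
    static CatalogDatabase withUpdatedProperties(CatalogDatabase origin, Map<String, String> newProps) {
        Map<String, String> merged = new HashMap<>(origin.getProperties());
        merged.putAll(newProps); // keys from the ALTER statement win over existing ones
        return new CatalogDatabaseImpl(merged, origin.getComment());
    }
}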
Use of org.apache.flink.table.catalog.CatalogDatabase in project flink by splunk.
Class SqlToOperationConverter, method convertCreateDatabase.
/** Convert CREATE DATABASE statement. */
private Operation convertCreateDatabase(SqlCreateDatabase sqlCreateDatabase) {
    String[] fullDatabaseName = sqlCreateDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("create database identifier format error");
    }
    String catalogName = (fullDatabaseName.length == 1) ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
    String databaseName = (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
    boolean ignoreIfExists = sqlCreateDatabase.isIfNotExists();
    String databaseComment = sqlCreateDatabase.getComment()
            .map(comment -> comment.getNlsString().getValue())
            .orElse(null);
    // set with properties
    Map<String, String> properties = new HashMap<>();
    sqlCreateDatabase.getPropertyList().getList().forEach(
            p -> properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, databaseComment);
    return new CreateDatabaseOperation(catalogName, databaseName, catalogDatabase, ignoreIfExists);
}
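Seen end to end, this converter is what a CREATE DATABASE statement submitted through a TableEnvironment eventually reaches: the comment and WITH properties of the statement end up in the CatalogDatabaseImpl built above. A short, illustrative sketch; the session setup and all names are assumptions, not taken from the surrounding code.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class CreateDatabaseSqlSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());

        // No catalog prefix is given, so "db1" is created in the current (default in-memory) catalog.
        tEnv.executeSql(
                "CREATE DATABASE IF NOT EXISTS db1 COMMENT 'demo db' WITH ('owner' = 'alice')");
        tEnv.executeSql("SHOW DATABASES").print();
    }
}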
Use of org.apache.flink.table.catalog.CatalogDatabase in project hudi by apache.
Class TestHoodieCatalog, method testCreateAndDropDatabase.
@Test
public void testCreateAndDropDatabase() throws Exception {
    CatalogDatabase expected = new CatalogDatabaseImpl(Collections.emptyMap(), null);
    catalog.createDatabase("db1", expected, true);
    CatalogDatabase actual = catalog.getDatabase("db1");
    assertTrue(catalog.listDatabases().contains("db1"));
    assertEquals(expected.getProperties(), actual.getProperties());
    // create an existing database
    assertThrows(DatabaseAlreadyExistException.class, () -> catalog.createDatabase("db1", expected, false));
    // drop an existing database
    catalog.dropDatabase("db1", true);
    assertFalse(catalog.listDatabases().contains("db1"));
    // drop a non-existent database
    assertThrows(DatabaseNotExistException.class, () -> catalog.dropDatabase(NONE_EXIST_DATABASE, false));
}
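For completeness, the Catalog interface also exposes a three-argument dropDatabase overload with a cascade flag for databases that still contain objects; a small sketch under that assumption, again using the in-memory catalog so it runs standalone.

import java.util.Collections;

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;

public class DropDatabaseSketch {
    public static void main(String[] args) throws Exception {
        Catalog catalog = new GenericInMemoryCatalog("demo_catalog");
        catalog.open();

        catalog.createDatabase("db1", new CatalogDatabaseImpl(Collections.emptyMap(), null), false);

        // ignoreIfNotExists = true, cascade = true: also drops objects still contained in the database.
        catalog.dropDatabase("db1", true, true);

        catalog.close();
    }
}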