Use of org.apache.flink.sql.parser.ddl.SqlTableOption in project flink by apache: class SqlToOperationConverter, method convertCreateDatabase.
/** Convert CREATE DATABASE statement. */
private Operation convertCreateDatabase(SqlCreateDatabase sqlCreateDatabase) {
    String[] fullDatabaseName = sqlCreateDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("create database identifier format error");
    }
    String catalogName =
            (fullDatabaseName.length == 1)
                    ? catalogManager.getCurrentCatalog()
                    : fullDatabaseName[0];
    String databaseName =
            (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
    boolean ignoreIfExists = sqlCreateDatabase.isIfNotExists();
    String databaseComment =
            sqlCreateDatabase
                    .getComment()
                    .map(comment -> comment.getNlsString().getValue())
                    .orElse(null);
    // copy the WITH clause entries into the database properties
    Map<String, String> properties = new HashMap<>();
    sqlCreateDatabase
            .getPropertyList()
            .getList()
            .forEach(
                    p ->
                            properties.put(
                                    ((SqlTableOption) p).getKeyString(),
                                    ((SqlTableOption) p).getValueString()));
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, databaseComment);
    return new CreateDatabaseOperation(
            catalogName, databaseName, catalogDatabase, ignoreIfExists);
}
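For context, here is a minimal sketch of the kind of statement that reaches convertCreateDatabase, submitted through the public TableEnvironment API. The database name 'db1', the comment, and the 'k1'/'v1' option are illustrative values, not taken from the Flink sources. Each key/value pair in the WITH clause is parsed as a SqlTableOption and ends up in the properties map of the CatalogDatabase built above.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class CreateDatabaseExample {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // The WITH clause entries become the properties of the CatalogDatabase
        // produced by convertCreateDatabase.
        tEnv.executeSql(
                "CREATE DATABASE IF NOT EXISTS db1 COMMENT 'test db' WITH ('k1' = 'v1')");
    }
}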
Use of org.apache.flink.sql.parser.ddl.SqlTableOption in project flink by apache: class SqlToOperationConverter, method convertLoadModule.
/** Convert LOAD MODULE statement. */
private Operation convertLoadModule(SqlLoadModule sqlLoadModule) {
    String moduleName = sqlLoadModule.moduleName();
    Map<String, String> properties = new HashMap<>();
    for (SqlNode node : sqlLoadModule.getPropertyList().getList()) {
        SqlTableOption option = (SqlTableOption) node;
        properties.put(option.getKeyString(), option.getValueString());
    }
    return new LoadModuleOperation(moduleName, properties);
}
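A similar sketch for the LOAD MODULE path; the module name 'hive' and the 'hive-version' option follow the documented Flink SQL syntax, but the concrete values are illustrative. Each WITH entry is cast to SqlTableOption and copied into the properties map handed to LoadModuleOperation.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class LoadModuleExample {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Assumes the referenced module (here the Hive module) is on the classpath.
        tEnv.executeSql("LOAD MODULE hive WITH ('hive-version' = '2.3.6')");
    }
}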
Use of org.apache.flink.sql.parser.ddl.SqlTableOption in project flink by apache: class SqlToOperationConverter, method convertAlterDatabase.
/** Convert ALTER DATABASE statement. */
private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
    String[] fullDatabaseName = sqlAlterDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("alter database identifier format error");
    }
    String catalogName =
            (fullDatabaseName.length == 1)
                    ? catalogManager.getCurrentCatalog()
                    : fullDatabaseName[0];
    String databaseName =
            (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
    final Map<String, String> properties;
    CatalogDatabase originCatalogDatabase;
    Optional<Catalog> catalog = catalogManager.getCatalog(catalogName);
    if (catalog.isPresent()) {
        try {
            originCatalogDatabase = catalog.get().getDatabase(databaseName);
            properties = new HashMap<>(originCatalogDatabase.getProperties());
        } catch (DatabaseNotExistException e) {
            throw new ValidationException(
                    String.format("Database %s not exists", databaseName), e);
        }
    } else {
        throw new ValidationException(String.format("Catalog %s not exists", catalogName));
    }
    // merge the SET clause entries over the existing database properties
    sqlAlterDatabase
            .getPropertyList()
            .getList()
            .forEach(
                    p ->
                            properties.put(
                                    ((SqlTableOption) p).getKeyString(),
                                    ((SqlTableOption) p).getValueString()));
    CatalogDatabase catalogDatabase =
            new CatalogDatabaseImpl(properties, originCatalogDatabase.getComment());
    return new AlterDatabaseOperation(catalogName, databaseName, catalogDatabase);
}
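And a sketch for the ALTER DATABASE path; 'db1' and the 'k1' option key are again illustrative. The SET clause entries arrive as SqlTableOption nodes and are merged over the existing database properties, as the method above shows.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AlterDatabaseExample {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Assumes the database 'db1' already exists in the current catalog;
        // otherwise convertAlterDatabase raises a ValidationException.
        tEnv.executeSql("ALTER DATABASE db1 SET ('k1' = 'v2')");
    }
}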
Use of org.apache.flink.sql.parser.ddl.SqlTableOption in project flink by apache: class SqlAlterHiveTableSerDe, method appendPrefix.
private static SqlNodeList appendPrefix(SqlNodeList propList) {
    if (propList != null) {
        for (int i = 0; i < propList.size(); i++) {
            SqlTableOption tableOption = (SqlTableOption) propList.get(i);
            if (!tableOption.getKeyString().equals(ALTER_TABLE_OP)) {
                String key =
                        HiveTableRowFormat.SERDE_INFO_PROP_PREFIX + tableOption.getKeyString();
                tableOption =
                        HiveDDLUtils.toTableOption(
                                key, tableOption.getValue(), tableOption.getParserPosition());
                propList.set(i, tableOption);
            }
        }
    }
    return propList;
}
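A sketch of the kind of statement that exercises appendPrefix, assuming the Hive dialect is enabled and a HiveCatalog is registered as the current catalog (both require the flink-connector-hive dependency); the table name 'users' and the 'field.delim' property are illustrative. Each SERDEPROPERTIES entry is a SqlTableOption whose key gets the SERDE_INFO_PROP_PREFIX prepended by the method above.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class AlterSerDeExample {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inBatchMode());
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        // Assumes a Hive table named 'users' exists in the current (Hive) catalog.
        tEnv.executeSql("ALTER TABLE users SET SERDEPROPERTIES ('field.delim' = ',')");
    }
}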