Use of org.apache.hadoop.hive.metastore.api.Database in project brisk by riptano.
From the class MetaStorePersisterTest, method testFindMetaStoreEntities.
@Test
public void testFindMetaStoreEntities() throws Exception {
    setupClient();
    Database database = new Database();
    database.setName("dbname");
    database.setDescription("description");
    database.setLocationUri("uri");
    database.setParameters(new HashMap<String, String>());
    metaStorePersister.save(database.metaDataMap, database, database.getName());
    Table table = new Table();
    table.setDbName("dbname");
    table.setTableName("table_one");
    metaStorePersister.save(table.metaDataMap, table, table.getDbName());
    table.setTableName("table_two");
    metaStorePersister.save(table.metaDataMap, table, table.getDbName());
    table.setTableName("table_three");
    metaStorePersister.save(table.metaDataMap, table, table.getDbName());
    table.setTableName("other_table");
    metaStorePersister.save(table.metaDataMap, table, table.getDbName());
    // All four Table entities were saved under "dbname".
    List tables = metaStorePersister.find(table, "dbname");
    assertEquals(4, tables.size());
    // Only the three names starting with "table" match the prefix search.
    tables = metaStorePersister.find(table, "dbname", "table", 100);
    assertEquals(3, tables.size());
}
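The two assertions pin down the observable contract of find: without a prefix it returns every Table entity under the database, and with a prefix it returns only entities whose names start with that prefix (up to the given maximum). A minimal standalone sketch of that semantics, illustrative only; MetaStorePersister itself is Cassandra-backed and its internals are not shown here:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class PrefixFindSketch {

    // Hypothetical stand-in for the name filtering that
    // find(entity, dbName, prefix, max) implies.
    static List<String> findByPrefix(List<String> names, String prefix, int max) {
        return names.stream()
                .filter(n -> n.startsWith(prefix))
                .limit(max)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<String> names = Arrays.asList("table_one", "table_two", "table_three", "other_table");
        // Prints 3, matching the second assertion in the test above.
        System.out.println(findByPrefix(names, "table", 100).size());
    }
}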
Use of org.apache.hadoop.hive.metastore.api.Database in project brisk by riptano.
From the class MetaStorePersisterTest, method testEntityDeletion.
@Test
public void testEntityDeletion() throws Exception {
    setupClient();
    Database database = new Database();
    database.setName("dbname");
    database.setDescription("description");
    database.setLocationUri("uri");
    database.setParameters(new HashMap<String, String>());
    metaStorePersister.save(database.metaDataMap, database, database.getName());
    Table table = new Table();
    table.setDbName("dbname");
    table.setTableName("table_one");
    metaStorePersister.save(table.metaDataMap, table, table.getDbName());
    Database foundDb = new Database();
    foundDb.setName("dbname");
    metaStorePersister.load(foundDb, "dbname");
    assertEquals(database, foundDb);
    Table foundTable = new Table();
    foundTable.setDbName(table.getDbName());
    foundTable.setTableName(table.getTableName());
    metaStorePersister.load(foundTable, "dbname");
    assertEquals(table, foundTable);
    metaStorePersister.remove(foundTable, "dbname");
    metaStorePersister.remove(foundDb, "dbname");
    // Each load must fail independently; a single try block would let the
    // first NotFoundException skip the second load entirely.
    try {
        metaStorePersister.load(foundTable, "dbname");
        fail();
    } catch (NotFoundException e) {
        // win! \o/
    }
    try {
        metaStorePersister.load(foundDb, "dbname");
        fail();
    } catch (NotFoundException e) {
        // win! \o/
    }
}
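For a single expected failure, JUnit 4's expected attribute states the same assertion more compactly. A hypothetical companion test under the same fixture (the method name is illustrative; save, remove, and load are the calls shown above):

// Hypothetical: asserts that loading a removed table throws NotFoundException.
@Test(expected = NotFoundException.class)
public void testLoadAfterRemoveFails() throws Exception {
    setupClient();
    Table table = new Table();
    table.setDbName("dbname");
    table.setTableName("table_one");
    metaStorePersister.save(table.metaDataMap, table, table.getDbName());
    metaStorePersister.remove(table, "dbname");
    metaStorePersister.load(table, "dbname"); // should throw
}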
Use of org.apache.hadoop.hive.metastore.api.Database in project metacat by Netflix.
From the class HiveConvertersImpl, method metacatToHiveDatabase.
/**
 * {@inheritDoc}
 */
@Override
@SuppressWarnings("unchecked")
public Database metacatToHiveDatabase(final DatabaseDto dto) {
    final Database database = new Database();
    String name = "";
    String description = "";
    final QualifiedName databaseName = dto.getName();
    if (databaseName != null) {
        name = databaseName.getDatabaseName();
        // Hive requires a description, so reuse the database name for now.
        description = databaseName.getDatabaseName();
    }
    database.setName(name);
    database.setDescription(description);
    String dbUri = dto.getUri();
    if (Strings.isNullOrEmpty(dbUri)) {
        dbUri = "";
    }
    database.setLocationUri(dbUri);
    Map<String, String> metadata = dto.getMetadata();
    if (metadata == null) {
        metadata = Collections.EMPTY_MAP;
    }
    database.setParameters(metadata);
    return database;
}
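A usage sketch of the converter's null-safety, assuming a HiveConvertersImpl instance named converters; the DatabaseDto setter and QualifiedName.ofDatabase factory are assumed to mirror the getters the converter reads and should be treated as hypothetical:

// Hypothetical usage: fields the dto leaves null come back as safe defaults.
final DatabaseDto dto = new DatabaseDto();
dto.setName(QualifiedName.ofDatabase("prodhive", "reporting")); // assumed setter/factory
// uri and metadata intentionally left null

final Database hiveDb = converters.metacatToHiveDatabase(dto);
assert "reporting".equals(hiveDb.getName());
assert "".equals(hiveDb.getLocationUri()); // null uri -> empty string
assert hiveDb.getParameters().isEmpty();   // null metadata -> EMPTY_MAP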
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From the class DDLSemanticAnalyzer, method analyzeSwitchDatabase.
private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
    String dbName = unescapeIdentifier(ast.getChild(0).getText());
    Database database = getDatabase(dbName, true);
    // Switching databases only reads the metastore entry; no lock is needed.
    ReadEntity dbReadEntity = new ReadEntity(database);
    dbReadEntity.noLockNeeded();
    inputs.add(dbReadEntity);
    SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), switchDatabaseDesc)));
}
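This analyzer backs the USE statement; the database name is the only payload. A sketch using just the constructor shown above (the name "sales" is illustrative):

// USE sales;  -> database resolved, registered as a lock-free read entity,
// and the DDL task carries nothing more than this descriptor:
SwitchDatabaseDesc desc = new SwitchDatabaseDesc("sales");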
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From the class DDLSemanticAnalyzer, method analyzeCreateDatabase.
private void analyzeCreateDatabase(ASTNode ast) throws SemanticException {
    String dbName = unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String dbComment = null;
    String dbLocation = null;
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        ASTNode childNode = (ASTNode) ast.getChild(i);
        switch (childNode.getToken().getType()) {
        case HiveParser.TOK_IFNOTEXISTS:
            ifNotExists = true;
            break;
        case HiveParser.TOK_DATABASECOMMENT:
            dbComment = unescapeSQLString(childNode.getChild(0).getText());
            break;
        case HiveParser.TOK_DATABASEPROPERTIES:
            dbProps = DDLSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0));
            break;
        case HiveParser.TOK_DATABASELOCATION:
            dbLocation = unescapeSQLString(childNode.getChild(0).getText());
            addLocationToOutputs(dbLocation);
            break;
        default:
            throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
        }
    }
    CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(dbName, dbComment, dbLocation, ifNotExists);
    if (dbProps != null) {
        createDatabaseDesc.setDatabaseProperties(dbProps);
    }
    Database database = new Database(dbName, dbComment, dbLocation, dbProps);
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), createDatabaseDesc)));
}
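To make the switch concrete, here is how the clauses of a full CREATE DATABASE statement map onto the descriptor built above. The statement and values are illustrative; the constructor and setter are the ones shown in the snippet, and Collections.singletonMap is java.util.Collections:

// CREATE DATABASE IF NOT EXISTS sales   -> TOK_IFNOTEXISTS
//   COMMENT 'sales data'                -> TOK_DATABASECOMMENT
//   LOCATION '/warehouse/sales.db'      -> TOK_DATABASELOCATION
//   WITH DBPROPERTIES ('owner' = 'etl') -> TOK_DATABASEPROPERTIES
CreateDatabaseDesc desc = new CreateDatabaseDesc("sales", "sales data", "/warehouse/sales.db", true);
desc.setDatabaseProperties(Collections.singletonMap("owner", "etl"));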