Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From the class TestHiveMetaStoreTimeout, the method testResetTimeout:
@Test
public void testResetTimeout() throws Exception {
  HiveMetaStore.TEST_TIMEOUT_VALUE = 5 * 1000;
  String dbName = "db";
  // no timeout before reset
  client.dropDatabase(dbName, true, true);
  Database db = new Database();
  db.setName(dbName);
  try {
    client.createDatabase(db);
  } catch (MetaException e) {
    Assert.fail("should not throw timeout exception: " + e.getMessage());
  }
  client.dropDatabase(dbName, true, true);
  // reset
  client.setMetaConf(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.varname, "3s");
  // timeout after reset
  try {
    client.createDatabase(db);
    Assert.fail("should throw timeout exception.");
  } catch (MetaException e) {
    Assert.assertTrue("unexpected MetaException",
        e.getMessage().contains("Timeout when executing method: create_database"));
  }
  // restore
  client.dropDatabase(dbName, true, true);
  client.setMetaConf(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.varname, "10s");
}
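The test above relies on a pre-built client field and a server-side injected delay (HiveMetaStore.TEST_TIMEOUT_VALUE). A minimal sketch of the kind of client setup it assumes is shown below; the buildClient helper and the sketch_db name are illustrative, not part of the Hive test harness, and a reachable metastore configured through HiveConf defaults is assumed.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;

public class MetaStoreClientSketch {
  // Hypothetical helper: builds a client with the socket timeout set up front.
  static IMetaStoreClient buildClient(HiveConf conf) throws Exception {
    conf.setVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, "10s");
    return new HiveMetaStoreClient(conf);
  }

  public static void main(String[] args) throws Exception {
    IMetaStoreClient client = buildClient(new HiveConf());
    Database db = new Database();
    db.setName("sketch_db");
    client.createDatabase(db);
    // dropDatabase(name, deleteData, ignoreUnknownDb)
    client.dropDatabase("sketch_db", true, true);
    client.close();
  }
}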
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From the class DDLTask, the method switchDatabase:
/**
 * Switch to a different Database.
 * @param db the Hive object used to access the metastore
 * @param switchDb descriptor carrying the name of the database to switch to
 * @return Always returns 0
 * @throws HiveException if the database does not exist
 */
private int switchDatabase(Hive db, SwitchDatabaseDesc switchDb) throws HiveException {
  String dbName = switchDb.getDatabaseName();
  if (!db.databaseExists(dbName)) {
    throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
  }
  SessionState.get().setCurrentDatabase(dbName);
  // set database specific parameters
  Database database = db.getDatabase(dbName);
  assert (database != null);
  Map<String, String> dbParams = database.getParameters();
  if (dbParams != null) {
    for (HiveConf.ConfVars var : HiveConf.dbVars) {
      String newValue = dbParams.get(var.varname);
      if (newValue != null) {
        LOG.info("Changing " + var.varname + " from " + conf.getVar(var) + " to " + newValue);
        conf.setVar(var, newValue);
      }
    }
  }
  return 0;
}
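The interesting part of switchDatabase is the lookup it performs against the metastore before touching the session. A hedged standalone sketch of that lookup is shown below; it assumes a HiveConf pointing at a reachable metastore, and the "target_db" name is illustrative.

import java.util.Map;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class SwitchDatabaseSketch {
  public static void main(String[] args) throws HiveException {
    Hive db = Hive.get(new HiveConf());
    String dbName = "target_db";
    if (!db.databaseExists(dbName)) {
      throw new HiveException("Database does not exist: " + dbName);
    }
    Database database = db.getDatabase(dbName);
    // Database-level parameters are what the dbVars loop above copies
    // into the session configuration.
    Map<String, String> dbParams = database.getParameters();
    System.out.println(dbName + " parameters: " + dbParams);
  }
}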
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From the class DDLTask, the method showCreateDatabase:
private int showCreateDatabase(Hive db, DataOutputStream outStream, String databaseName) throws Exception {
  Database database = db.getDatabase(databaseName);
  StringBuilder createDb_str = new StringBuilder();
  createDb_str.append("CREATE DATABASE `").append(database.getName()).append("`\n");
  if (database.getDescription() != null) {
    createDb_str.append("COMMENT\n '");
    createDb_str.append(HiveStringUtils.escapeHiveCommand(database.getDescription())).append("'\n");
  }
  createDb_str.append("LOCATION\n '");
  createDb_str.append(database.getLocationUri()).append("'\n");
  String propertiesToString = propertiesToString(database.getParameters(), null);
  if (!propertiesToString.isEmpty()) {
    createDb_str.append("WITH DBPROPERTIES (\n");
    createDb_str.append(propertiesToString).append(")\n");
  }
  outStream.write(createDb_str.toString().getBytes("UTF-8"));
  return 0;
}
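For reference, the method above only reads the Thrift Database object; the sketch below populates the same fields through the generated setters so the emitted DDL has something to print. The database name, description, location, and property values are illustrative.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.metastore.api.Database;

public class ShowCreateDatabaseSketch {
  public static void main(String[] args) {
    Database database = new Database();
    database.setName("analytics");
    database.setDescription("reporting datasets");
    database.setLocationUri("hdfs:///warehouse/analytics.db");
    Map<String, String> params = new HashMap<>();
    params.put("owner_team", "bi");
    database.setParameters(params);

    // For a Database populated like this, showCreateDatabase would emit
    // DDL along the lines of:
    //   CREATE DATABASE `analytics`
    //   COMMENT 'reporting datasets'
    //   LOCATION 'hdfs:///warehouse/analytics.db'
    //   WITH DBPROPERTIES ('owner_team'='bi')
    System.out.println(database);
  }
}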
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From the class ImportSemanticAnalyzer, the method fixLocationInPartSpec:
/**
 * Helper method to set the location properly in partSpec.
 */
private static void fixLocationInPartSpec(FileSystem fs, ImportTableDesc tblDesc, Table table,
    Warehouse wh, ReplicationSpec replicationSpec, AddPartitionDesc.OnePartitionDesc partSpec,
    EximUtil.SemanticAnalyzerWrapperContext x) throws MetaException, HiveException, IOException {
  Path tgtPath = null;
  if (tblDesc.getLocation() == null) {
    if (table.getDataLocation() != null) {
      tgtPath = new Path(table.getDataLocation().toString(),
          Warehouse.makePartPath(partSpec.getPartSpec()));
    } else {
      Database parentDb = x.getHive().getDatabase(tblDesc.getDatabaseName());
      tgtPath = new Path(wh.getTablePath(parentDb, tblDesc.getTableName()),
          Warehouse.makePartPath(partSpec.getPartSpec()));
    }
  } else {
    tgtPath = new Path(tblDesc.getLocation(), Warehouse.makePartPath(partSpec.getPartSpec()));
  }
  FileSystem tgtFs = FileSystem.get(tgtPath.toUri(), x.getConf());
  checkTargetLocationEmpty(tgtFs, tgtPath, replicationSpec, x);
  partSpec.setLocation(tgtPath.toString());
}
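Every branch above appends the result of Warehouse.makePartPath to some base location. A hedged sketch of that call in isolation is shown below; the partition keys, values, and base path are illustrative.

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.MetaException;

public class PartPathSketch {
  public static void main(String[] args) throws MetaException {
    Map<String, String> partSpec = new LinkedHashMap<>();
    partSpec.put("ds", "2017-01-01");
    partSpec.put("country", "us");
    // makePartPath escapes the values and joins the spec into a relative
    // path such as ds=2017-01-01/country=us
    String relative = Warehouse.makePartPath(partSpec);
    Path tgtPath = new Path("hdfs:///warehouse/imported_table", relative);
    System.out.println(tgtPath);
  }
}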
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From the class DDLSemanticAnalyzer, the method analyzeSwitchDatabase:
private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
  String dbName = unescapeIdentifier(ast.getChild(0).getText());
  Database database = getDatabase(dbName, true);
  ReadEntity dbReadEntity = new ReadEntity(database);
  dbReadEntity.noLockNeeded();
  inputs.add(dbReadEntity);
  SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), switchDatabaseDesc), conf));
}