use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
In the class TestStorageBasedMetastoreAuthorizationDrops, the method testDropView.
/**
 * Dropping a view must not be blocked by storage-based authorization (SBA):
 * a view carries no storage location, so there is nothing for SBA to check.
 * @throws Exception if any step of the scenario fails
 */
@Test
public void testDropView() throws Exception {
    final String dbName = getTestDbName();
    final String tblName = getTestTableName();
    final String viewName = "view" + tblName;
    final String qualifiedTable = dbName + "." + tblName;
    final String qualifiedView = dbName + "." + viewName;

    setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");

    // Create the database and confirm it was registered in the metastore.
    CommandProcessorResponse resp = driver.run("create database " + dbName);
    Assert.assertEquals(0, resp.getResponseCode());
    Database db = msc.getDatabase(dbName);
    validateCreateDb(db, dbName);

    // Trailing 't' sets the sticky bit on the database directory.
    setPermissions(db.getLocationUri(), "-rwxrwxrwt");

    // Create a table and a view over it.
    resp = driver.run("create table " + qualifiedTable + "(i int)");
    Assert.assertEquals(0, resp.getResponseCode());
    resp = driver.run("create view " + qualifiedView + " as select * from " + qualifiedTable);
    Assert.assertEquals(0, resp.getResponseCode());

    // Both drops are expected to succeed despite the restrictive directory bits.
    resp = driver.run("drop view " + qualifiedView);
    Assert.assertEquals(0, resp.getResponseCode());
    resp = driver.run("drop table " + qualifiedTable);
    Assert.assertEquals(0, resp.getResponseCode());
}
use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
In the class TestStorageBasedMetastoreAuthorizationReads, the method readTableByOtherUser.
/**
 * Runs a set of read-only table commands as another user (via the injected
 * dummy authenticator) and checks each one against the expected outcome.
 * @param perm dir permission for table dir
 * @param isSuccess if command was successful
 * @throws Exception if test setup fails
 */
private void readTableByOtherUser(String perm, boolean isSuccess) throws Exception {
    final String dbName = getTestDbName();
    final String tblName = getTestTableName();
    final String qualifiedTable = dbName + "." + tblName;

    setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");

    // Create the database and confirm it was registered in the metastore.
    CommandProcessorResponse resp = driver.run("create database " + dbName);
    Assert.assertEquals(0, resp.getResponseCode());
    Database db = msc.getDatabase(dbName);
    validateCreateDb(db, dbName);
    setPermissions(db.getLocationUri(), "-rwxrwxrwx");

    // Create a partitioned table, then apply the permission under test to its dir.
    resp = driver.run("create table " + qualifiedTable + "(i int) partitioned by (`date` string)");
    Assert.assertEquals(0, resp.getResponseCode());
    Table tab = msc.getTable(dbName, tblName);
    setPermissions(tab.getSd().getLocation(), perm);

    // From here on, commands run as the impersonated (other) user.
    InjectableDummyAuthenticator.injectMode(true);
    testCmd(driver, "DESCRIBE " + qualifiedTable, isSuccess);
    testCmd(driver, "DESCRIBE EXTENDED " + qualifiedTable, isSuccess);
    testCmd(driver, "SHOW PARTITIONS " + qualifiedTable, isSuccess);
    testCmd(driver, "SHOW COLUMNS IN " + tblName + " IN " + dbName, isSuccess);
    // Switching databases does not read the table dir, so it always succeeds.
    testCmd(driver, "use " + dbName, true);
    testCmd(driver, "SHOW TABLE EXTENDED LIKE " + tblName, isSuccess);
}
use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
In the class TestStorageBasedMetastoreAuthorizationReads, the method readDbByOtherUser.
/**
 * Runs a set of read-only database commands as another user (via the injected
 * dummy authenticator) and checks each one against the expected outcome.
 * @param perm dir permission for database dir
 * @param isSuccess if command was successful
 * @throws Exception if test setup fails
 */
private void readDbByOtherUser(String perm, boolean isSuccess) throws Exception {
    final String dbName = getTestDbName();

    // Apply the permission under test to both the warehouse and the db dir.
    setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), perm);
    CommandProcessorResponse resp = driver.run("create database " + dbName);
    Assert.assertEquals(0, resp.getResponseCode());
    Database db = msc.getDatabase(dbName);
    validateCreateDb(db, dbName);
    setPermissions(db.getLocationUri(), perm);

    // From here on, commands run as the impersonated (other) user.
    InjectableDummyAuthenticator.injectMode(true);
    testCmd(driver, "DESCRIBE DATABASE " + dbName, isSuccess);
    testCmd(driver, "DESCRIBE DATABASE EXTENDED " + dbName, isSuccess);
    testCmd(driver, "SHOW TABLES IN " + dbName, isSuccess);
    // "use" result is intentionally not asserted; only the subsequent SHOW is.
    driver.run("use " + dbName);
    testCmd(driver, "SHOW TABLES ", isSuccess);
}
use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
In the class HBaseImport, the method copyIndexes.
/**
 * Copies index definitions into the target store by producing
 * {dbName, tableName, indexName} triples onto {@code indexNameQueue} for a
 * pool of {@code parallel} consumer threads ({@code IndexCopier}).
 * Enqueues indexes for every database already copied (in {@code dbs}) and for
 * any explicitly requested tables, then joins all copier threads.
 *
 * @throws MetaException if the metastore lookup fails
 * @throws InvalidObjectException if a copied object is invalid
 * @throws InterruptedException if interrupted while enqueueing or joining
 */
private void copyIndexes() throws MetaException, InvalidObjectException, InterruptedException {
    screen("Copying indexes");
    // Start the parallel threads that will copy the indexes
    Thread[] copiers = new Thread[parallel];
    writingToQueue = true;
    for (int i = 0; i < parallel; i++) {
        copiers[i] = new IndexCopier();
        copiers[i].start();
    }
    // Put indexes from the databases we copied into the queue
    for (Database db : dbs) {
        // Fixed typo in progress message: "Coyping" -> "Copying".
        screen("Copying indexes in database " + db.getName());
        for (String tableName : rdbmsStore.get().getAllTables(db.getName())) {
            // -1 max: fetch all indexes for the table.
            for (Index index : rdbmsStore.get().getIndexes(db.getName(), tableName, -1)) {
                indexNameQueue.put(new String[] { db.getName(), tableName, index.getIndexName() });
            }
        }
    }
    // Now put any specifically requested tables into the queue
    if (tablesToImport != null) {
        for (String compoundTableName : tablesToImport) {
            String[] tn = compoundTableName.split("\\.");
            if (tn.length != 2) {
                // Malformed names are reported and skipped rather than aborting the import.
                error(compoundTableName + " not in proper form. Must be in form dbname.tablename. " + "Ignoring this table and continuing.");
            } else {
                for (Index index : rdbmsStore.get().getIndexes(tn[0], tn[1], -1)) {
                    indexNameQueue.put(new String[] { tn[0], tn[1], index.getIndexName() });
                }
            }
        }
    }
    // Signal consumers that no more work is coming, then wait for them.
    writingToQueue = false;
    // Wait until we've finished adding all the tables
    for (Thread copier : copiers) copier.join();
}
use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
In the class HBaseImport, the method copyDbs.
/**
 * Copies database definitions from the RDBMS-backed store into the HBase
 * store, recording each copied {@link Database} in {@code dbs} so later
 * phases (table/index copy) know which databases were imported.
 *
 * @throws MetaException if the metastore lookup fails
 * @throws NoSuchObjectException if a listed database cannot be fetched
 * @throws InvalidObjectException if a database object cannot be created
 */
private void copyDbs() throws MetaException, NoSuchObjectException, InvalidObjectException {
    screen("Copying databases");
    // Either every database, or only the explicitly requested subset.
    List<String> names = doAll ? rdbmsStore.get().getAllDatabases() : dbsToImport;
    for (String name : names) {
        Database database = rdbmsStore.get().getDatabase(name);
        dbs.add(database);
        screen("Copying database " + name);
        hbaseStore.get().createDatabase(database);
    }
}
Aggregations