Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class TestHiveAuthorizationTaskFactory, method testShowGrantRoleOnTable.
/**
* SHOW GRANT ROLE ... ON TABLE ...
*/
@Test
public void testShowGrantRoleOnTable() throws Exception {
DDLWork work = analyze("SHOW GRANT ROLE " + ROLE + " ON TABLE " + TABLE);
ShowGrantDesc grantDesc = work.getShowGrantDesc();
Assert.assertNotNull("Show grant should not be null", grantDesc);
Assert.assertEquals(PrincipalType.ROLE, grantDesc.getPrincipalDesc().getType());
Assert.assertEquals(ROLE, grantDesc.getPrincipalDesc().getName());
Assert.assertTrue("Expected table", grantDesc.getHiveObj().getTable());
Assert.assertEquals(TABLE_QNAME, grantDesc.getHiveObj().getObject());
Assert.assertTrue("Expected table", grantDesc.getHiveObj().getTable());
}
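These tests drive statements through an analyze(String) helper that this page does not show. Below is a minimal sketch of its plausible shape, assuming a fixture that provides a QueryState field (queryState) and a Hive metastore handle (db), and that each statement produces exactly one root task carrying a DDLWork; constructor and parser signatures vary across Hive versions, so treat every line as an assumption rather than the real helper.

// Hypothetical sketch, not the verbatim Hive test helper.
// Assumes fixture fields `queryState` and `db` are set up elsewhere.
private DDLWork analyze(String command) throws Exception {
  ASTNode ast = new ParseDriver().parse(command);            // parse the SQL text
  DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(queryState, db);
  analyzer.analyze(ast, new Context(queryState.getConf()));  // populates root tasks
  Assert.assertEquals("Expected a single root task", 1, analyzer.getRootTasks().size());
  return (DDLWork) analyzer.getRootTasks().get(0).getWork();
}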
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class TestHiveAuthorizationTaskFactory, method testGrantUserTable.
/**
* GRANT ... ON TABLE ... TO USER ...
*/
@Test
public void testGrantUserTable() throws Exception {
DDLWork work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO USER " + USER);
GrantDesc grantDesc = work.getGrantDesc();
Assert.assertNotNull("Grant should not be null", grantDesc);
for (PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
Assert.assertEquals(PrincipalType.USER, principal.getType());
Assert.assertEquals(USER, principal.getName());
}
for (PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) {
Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege());
}
Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable());
Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject());
}
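ListSizeMatcher above is a small test utility that asserts a list's size before the loop iterates it, so a wrong-sized list fails the test instead of silently skipping the per-element assertions. The page does not show its source; here is a minimal sketch of the idea, with all details assumed.

import java.util.List;
import org.junit.Assert;

// Hypothetical reconstruction of the ListSizeMatcher helper: assert the
// size up front, then hand the list back for iteration.
final class ListSizeMatcher<E> {
  private final List<E> list;

  private ListSizeMatcher(List<E> list) {
    this.list = list;
  }

  static <E> ListSizeMatcher<E> inList(List<E> list) {
    return new ListSizeMatcher<E>(list);
  }

  List<E> ofSize(int expected) {
    Assert.assertEquals("Unexpected list size", expected, list.size());
    return list;
  }
}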
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class DDLSemanticAnalyzer, method analyzeShowIndexes.
private void analyzeShowIndexes(ASTNode ast) throws SemanticException {
  String tableName = getUnescapedName((ASTNode) ast.getChild(0));
  ShowIndexesDesc showIndexesDesc = new ShowIndexesDesc(tableName, ctx.getResFile());
  // An optional second child carries the FORMATTED keyword.
  if (ast.getChildCount() == 2) {
    int descOptions = ast.getChild(1).getType();
    showIndexesDesc.setFormatted(descOptions == HiveParser.KW_FORMATTED);
  }
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showIndexesDesc), conf));
  // The DDL task writes its output to ctx.getResFile(); the fetch task streams it back using the desc's schema.
  setFetchTask(createFetchTask(showIndexesDesc.getSchema()));
}
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class DDLSemanticAnalyzer, method analyzeAlterIndexRebuild.
private void analyzeAlterIndexRebuild(ASTNode ast) throws SemanticException {
  String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
  String indexName = unescapeIdentifier(ast.getChild(1).getText());
  HashMap<String, String> partSpec = null;
  Tree part = ast.getChild(2);
  if (part != null) {
    partSpec = getValidatedPartSpec(getTable(qualified), (ASTNode) part, conf, false);
  }
  List<Task<?>> indexBuilder = getIndexBuilderMapRed(qualified, indexName, partSpec);
  rootTasks.addAll(indexBuilder);
  // Handle updating index timestamps.
  AlterIndexDesc alterIdxDesc = new AlterIndexDesc(AlterIndexTypes.UPDATETIMESTAMP);
  alterIdxDesc.setIndexName(indexName);
  alterIdxDesc.setBaseTableName(getDotName(qualified));
  alterIdxDesc.setSpec(partSpec);
  Task<?> tsTask = TaskFactory.get(new DDLWork(alterIdxDesc), conf);
  // The timestamp update must wait for every index-builder task to finish.
  for (Task<?> t : indexBuilder) {
    t.addDependentTask(tsTask);
  }
}
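The closing loop is the key pattern here: the timestamp-update task is added as a dependent of every index-builder task, so it runs exactly once, after all rebuilds complete. Below is a self-contained toy illustration of that wiring; ToyTask and DependencyWiringDemo are invented for the demo and are not Hive classes.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Minimal stand-in for a task that tracks its dependents.
final class ToyTask {
  final String name;
  final List<ToyTask> dependents = new ArrayList<>();

  ToyTask(String name) {
    this.name = name;
  }

  void addDependentTask(ToyTask t) {
    dependents.add(t); // t may start only after this task finishes
  }
}

class DependencyWiringDemo {
  public static void main(String[] args) {
    List<ToyTask> indexBuilders =
        Arrays.asList(new ToyTask("rebuild-partition-1"), new ToyTask("rebuild-partition-2"));
    ToyTask tsTask = new ToyTask("update-index-timestamp");
    // Same shape as the loop in analyzeAlterIndexRebuild: the timestamp
    // update depends on every rebuild task.
    for (ToyTask t : indexBuilders) {
      t.addDependentTask(tsTask);
    }
    for (ToyTask t : indexBuilders) {
      System.out.println(t.name + " -> " + t.dependents.get(0).name);
    }
  }
}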
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class DDLSemanticAnalyzer, method analyzeDropIndex.
private void analyzeDropIndex(ASTNode ast) throws SemanticException {
  String indexName = unescapeIdentifier(ast.getChild(0).getText());
  String tableName = getUnescapedName((ASTNode) ast.getChild(1));
  boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
  // We want to signal an error if the index doesn't exist and we're
  // configured not to ignore this.
  boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
  Table tbl = getTable(tableName, false);
  if (throwException && tbl == null) {
    throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
  }
  try {
    // Probe the metastore; a missing index surfaces as a NoSuchObjectException cause.
    db.getIndex(tableName, indexName);
  } catch (HiveException e) {
    if (!(e.getCause() instanceof NoSuchObjectException)) {
      throw new SemanticException(ErrorMsg.CANNOT_DROP_INDEX.getMsg("dropping index"), e);
    }
    if (throwException) {
      throw new SemanticException(ErrorMsg.INVALID_INDEX.getMsg(indexName));
    }
  }
  if (tbl != null) {
    inputs.add(new ReadEntity(tbl));
  }
  DropIndexDesc dropIdxDesc = new DropIndexDesc(indexName, tableName, throwException);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropIdxDesc), conf));
}
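The throwException flag condenses the drop policy into one predicate: complain about a missing index only when the statement lacked IF EXISTS and hive.exec.drop.ignorenonexistent (ConfVars.DROPIGNORESNONEXISTENT) is off. Below is a self-contained restatement of just that predicate; shouldThrowOnMissing and the demo class are invented names, not Hive code.

// Toy restatement of the drop-index error policy above.
class DropErrorPolicyDemo {
  // Throw only when the user did not write IF EXISTS and the config does
  // not tell us to ignore non-existent objects.
  static boolean shouldThrowOnMissing(boolean ifExists, boolean dropIgnoresNonExistent) {
    return !ifExists && !dropIgnoresNonExistent;
  }

  public static void main(String[] args) {
    System.out.println(shouldThrowOnMissing(false, false)); // true: strict mode, no IF EXISTS
    System.out.println(shouldThrowOnMissing(true, false));  // false: IF EXISTS suppresses the error
    System.out.println(shouldThrowOnMissing(false, true));  // false: config ignores missing objects
  }
}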