Use of org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc in project hive by apache: class HiveAuthorizationTaskFactoryImpl, method createShowGrantTask.
/**
 * Builds the DDL task for a SHOW GRANT statement.
 *
 * <p>The AST may carry an optional leading principal child followed by an
 * optional privilege-object child (ALL resources, or a specific
 * object/column spec). Either or both may be absent.
 *
 * @param ast        parsed SHOW GRANT node
 * @param resultFile location the task writes its result to
 * @param inputs     read entities accumulated for the query plan
 * @param outputs    write entities accumulated for the query plan
 * @return the DDL task wrapping a {@link ShowGrantDesc}
 * @throws SemanticException if the privilege object cannot be parsed
 */
@Override
public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  PrincipalDesc principal = null;
  ASTNode objectNode = null;
  if (ast.getChildCount() > 0) {
    objectNode = (ASTNode) ast.getChild(0);
    principal = AuthorizationParseUtils.getPrincipalDesc(objectNode);
    if (principal != null) {
      // First child was the principal; the privilege object (if any) is next.
      objectNode = (ASTNode) ast.getChild(1);
    }
  }
  PrivilegeObjectDesc privObject = null;
  if (objectNode != null) {
    switch (objectNode.getType()) {
      case HiveParser.TOK_RESOURCE_ALL:
        // Bare "ALL": an empty descriptor stands for every resource.
        privObject = new PrivilegeObjectDesc();
        break;
      case HiveParser.TOK_PRIV_OBJECT_COL:
        privObject = parsePrivObject(objectNode);
        break;
      default:
        break;
    }
  }
  ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principal, privObject);
  return TaskFactory.get(new DDLWork(inputs, outputs, showGrant), conf);
}
Use of org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc in project hive by apache: class HiveAuthorizationTaskFactoryImpl, method createRevokeTask.
/**
 * Builds the DDL task for a REVOKE statement.
 *
 * <p>AST children: 0 = privilege list, 1 = principal list,
 * 2 (optional) = privilege object the privileges are revoked on.
 *
 * @param ast     parsed REVOKE node
 * @param inputs  read entities accumulated for the query plan
 * @param outputs write entities accumulated for the query plan
 * @return the DDL task wrapping a {@link RevokeDesc}
 * @throws SemanticException if any child node cannot be analyzed
 */
@Override
public Task<? extends Serializable> createRevokeTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  final List<PrivilegeDesc> privileges = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
  final List<PrincipalDesc> principals = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
  PrivilegeObjectDesc revokeObj = null;
  boolean grantOptionFor = false;
  if (ast.getChildCount() > 2) {
    revokeObj = analyzePrivilegeObject((ASTNode) ast.getChild(2), outputs);
    // "REVOKE GRANT OPTION FOR ..." removes only the grant option, not the privilege itself.
    grantOptionFor = ast.getFirstChildWithType(HiveParser.TOK_GRANT_OPTION_FOR) != null;
  }
  RevokeDesc revokeDesc = new RevokeDesc(privileges, principals, revokeObj, grantOptionFor);
  return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc), conf);
}
Use of org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc in project hive by apache: class HiveAuthorizationTaskFactoryImpl, method analyzePrivilegeObject.
/**
 * Parses the privilege target and, when it is a table (or one of its
 * partitions), registers the affected entity in {@code outputs} with
 * {@code DDL_NO_LOCK} so the plan records the write.
 *
 * @param ast     privilege-object node
 * @param outputs write-entity set to add the table or partition to
 * @return the parsed privilege-object descriptor
 * @throws SemanticException if the object, table, or partition cannot be resolved
 */
private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast, HashSet<WriteEntity> outputs) throws SemanticException {
  final PrivilegeObjectDesc parsed = parsePrivObject(ast);
  if (parsed.getTable()) {
    Table table = getTable(parsed.getObject());
    if (parsed.getPartSpec() == null) {
      outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK));
    } else {
      Partition partition = getPartition(table, parsed.getPartSpec());
      outputs.add(new WriteEntity(partition, WriteEntity.WriteType.DDL_NO_LOCK));
    }
  }
  return parsed;
}
Use of org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc in project hive by apache: class HiveAuthorizationTaskFactoryImpl, method parsePrivObject.
/**
 * Converts a privilege-object AST node into a {@link PrivilegeObjectDesc}.
 *
 * <p>The first child carries the object type (table, database, URI, server);
 * further children may carry a partition spec and/or a column list.
 *
 * @param ast privilege-object node
 * @return descriptor holding the object name, table flag, and optional
 *         partition spec / columns
 * @throws SemanticException for unsupported URI/SERVER objects or an
 *         unparsable table name
 */
protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException {
  final PrivilegeObjectDesc subject = new PrivilegeObjectDesc();
  final ASTNode typeNode = (ASTNode) ast.getChild(0);
  final ASTNode nameNode = (ASTNode) typeNode.getChild(0);
  final int tokenType = typeNode.getType();
  if (tokenType == HiveParser.TOK_URI_TYPE || tokenType == HiveParser.TOK_SERVER_TYPE) {
    throw new SemanticException("Hive authorization does not support the URI or SERVER objects");
  }
  if (tokenType == HiveParser.TOK_TABLE_TYPE) {
    subject.setTable(true);
    subject.setObject(BaseSemanticAnalyzer.getDotName(BaseSemanticAnalyzer.getQualifiedTableName(nameNode)));
  } else {
    // Non-table object (e.g. a database): a single unqualified identifier.
    subject.setTable(false);
    subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(nameNode.getText()));
  }
  // Optional trailing children: partition spec and/or column name list.
  for (int i = 1; i < typeNode.getChildCount(); i++) {
    ASTNode extra = (ASTNode) typeNode.getChild(i);
    if (extra.getType() == HiveParser.TOK_PARTSPEC) {
      subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(extra));
    } else if (extra.getType() == HiveParser.TOK_TABCOLNAME) {
      subject.setColumns(BaseSemanticAnalyzer.getColumnNames(extra));
    }
  }
  return subject;
}
Use of org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc in project hive by apache: class HiveAuthorizationTaskFactoryImpl, method createGrantTask.
/**
 * Builds the DDL task for a GRANT statement.
 *
 * <p>AST children: 0 = privilege list, 1 = principal list; any further
 * children are, in either order, the WITH GRANT OPTION flag and/or the
 * privilege object being granted on.
 *
 * @param ast     parsed GRANT node
 * @param inputs  read entities accumulated for the query plan
 * @param outputs write entities accumulated for the query plan
 * @return the DDL task wrapping a {@link GrantDesc}
 * @throws SemanticException if any child node cannot be analyzed
 */
@Override
public Task<? extends Serializable> createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  final List<PrivilegeDesc> privileges = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
  final List<PrincipalDesc> principals = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
  boolean withGrantOption = false;
  PrivilegeObjectDesc grantObj = null;
  // Loop body is skipped entirely when there are no optional children.
  for (int i = 2; i < ast.getChildCount(); i++) {
    ASTNode child = (ASTNode) ast.getChild(i);
    if (child.getType() == HiveParser.TOK_GRANT_WITH_OPTION) {
      withGrantOption = true;
    } else if (child.getType() == HiveParser.TOK_PRIV_OBJECT) {
      grantObj = analyzePrivilegeObject(child, outputs);
    }
  }
  // The grantor is the current session user, always of type USER.
  String grantor = SessionState.getUserFromAuthenticator();
  GrantDesc grantDesc = new GrantDesc(grantObj, privileges, principals, grantor, PrincipalType.USER, withGrantOption);
  return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc), conf);
}
Aggregations