Use of org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method createRevokeTask.
@Override
public Task<?> createRevokeTask(ASTNode ast, Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException {
  // child 0 is the privilege list, child 1 the principal list
  List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
  List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
  PrivilegeObjectDesc hiveObj = null;
  boolean grantOption = false;
  if (ast.getChildCount() > 2) {
    ASTNode astChild = (ASTNode) ast.getChild(2);
    hiveObj = analyzePrivilegeObject(astChild, outputs);
    // REVOKE GRANT OPTION FOR ... revokes only the grant option, not the privilege itself
    if (null != ast.getFirstChildWithType(HiveParser.TOK_GRANT_OPTION_FOR)) {
      grantOption = true;
    }
  }
  RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj, grantOption);
  return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc));
}
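For context, the AST handed to createRevokeTask comes from a HiveQL REVOKE statement. The sketch below hand-builds the descriptor the method would produce for REVOKE SELECT ON TABLE db1.t1 FROM USER user1. The PrivilegeObjectDesc and RevokeDesc constructor shapes are taken from the snippet above; the PrivilegeDesc and PrincipalDesc constructors and the static Privilege.SELECT instance are assumptions about the surrounding Hive API, not something this snippet confirms.

// Hypothetical sketch for: REVOKE SELECT ON TABLE db1.t1 FROM USER user1;
// isTable=true, object="db1.t1", no partition spec, no column list
PrivilegeObjectDesc hiveObj = new PrivilegeObjectDesc(true, "db1.t1", null, null);
// assumed constructor shapes for the privilege and principal descriptors
List<PrivilegeDesc> privileges = Arrays.asList(new PrivilegeDesc(Privilege.SELECT, null));
List<PrincipalDesc> principals = Arrays.asList(new PrincipalDesc("user1", PrincipalType.USER));
// no GRANT OPTION FOR clause, so grantOption stays false
RevokeDesc revokeDesc = new RevokeDesc(privileges, principals, hiveObj, false);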
Use of org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method createShowGrantTask.
@Override
public Task<?> createShowGrantTask(ASTNode ast, Path resultFile, Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException {
  PrincipalDesc principalDesc = null;
  PrivilegeObjectDesc privHiveObj = null;
  ASTNode param = null;
  if (ast.getChildCount() > 0) {
    param = (ASTNode) ast.getChild(0);
    principalDesc = AuthorizationParseUtils.getPrincipalDesc(param);
    if (principalDesc != null) {
      // the first child was a principal, so the optional privilege object is the next child
      param = (ASTNode) ast.getChild(1);
    }
  }
  if (param != null) {
    if (param.getType() == HiveParser.TOK_RESOURCE_ALL) {
      privHiveObj = new PrivilegeObjectDesc(true, null, null, null);
    } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
      privHiveObj = parsePrivObject(param);
    }
  }
  ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj);
  return TaskFactory.get(new DDLWork(inputs, outputs, showGrant));
}
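The two optional children mirror the SHOW GRANT variants. A rough mapping, plus a sketch of the descriptor built for the table form (statement shapes follow the Hive grammar; constructors beyond those visible above are assumptions):

// SHOW GRANT;                        -> no children, principal and object both null
// SHOW GRANT USER user1;             -> child 0 is a principal
// SHOW GRANT USER user1 ON TABLE t1; -> child 1 is TOK_PRIV_OBJECT_COL
// SHOW GRANT USER user1 ON ALL;      -> child 1 is TOK_RESOURCE_ALL
PrincipalDesc principal = new PrincipalDesc("user1", PrincipalType.USER);  // assumed constructor
PrivilegeObjectDesc object = new PrivilegeObjectDesc(true, "t1", null, null);
ShowGrantDesc showGrant = new ShowGrantDesc("/tmp/show_grant_result", principal, object);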
Use of org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method analyzePrivilegeObject.
private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast, Set<WriteEntity> outputs) throws SemanticException {
  PrivilegeObjectDesc subject = parsePrivObject(ast);
  if (subject.getTable()) {
    Table tbl = getTable(subject.getObject());
    if (subject.getPartSpec() != null) {
      // a partition-level grant/revoke registers only that partition as an output
      Partition part = getPartition(tbl, subject.getPartSpec());
      outputs.add(new WriteEntity(part, WriteEntity.WriteType.DDL_NO_LOCK));
    } else {
      outputs.add(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
    }
  }
  return subject;
}
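The registration itself is the interesting side effect: downstream authorization checks and hooks see the grant/revoke target through the outputs set. A minimal sketch of what gets added for a partition-level statement, assuming tbl and part were already resolved from the metastore (session setup elided):

// e.g. GRANT ... ON TABLE db1.t1 PARTITION (ds='2024-01-01') ...
Set<WriteEntity> outputs = new HashSet<>();
outputs.add(new WriteEntity(part, WriteEntity.WriteType.DDL_NO_LOCK));
// DDL_NO_LOCK marks this as a metadata-only change that needs no table lock;
// the entity still reaches authorization via the DDLWork inputs/outputs.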
Use of org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method parsePrivObject.
protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException {
  boolean isTable;
  String object = null;
  Map<String, String> partSpec = null;
  List<String> columns = null;
  ASTNode child = (ASTNode) ast.getChild(0);
  ASTNode gchild = (ASTNode) child.getChild(0);
  if (child.getType() == HiveParser.TOK_TABLE_TYPE) {
    isTable = true;
    object = BaseSemanticAnalyzer.getQualifiedTableName(gchild).getNotEmptyDbTable();
  } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) {
    throw new SemanticException("Hive authorization does not support the URI or SERVER objects");
  } else {
    // database (or other non-table) object: just unescape the identifier
    isTable = false;
    object = BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText());
  }
  // pick up the optional partition spec and column list, if present
  for (int i = 1; i < child.getChildCount(); i++) {
    gchild = (ASTNode) child.getChild(i);
    if (gchild.getType() == HiveParser.TOK_PARTSPEC) {
      partSpec = BaseSemanticAnalyzer.getPartSpec(gchild);
    } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) {
      columns = BaseSemanticAnalyzer.getColumnNames(gchild);
    }
  }
  return new PrivilegeObjectDesc(isTable, object, partSpec, columns);
}
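Concretely, for a privilege object written as TABLE db1.t1 PARTITION (ds='2024-01-01') (col1, col2), the walk above produces the equivalent of the following (a sketch; the literal values stand in for what the BaseSemanticAnalyzer helpers extract):

// equivalent of the descriptor parsePrivObject returns for
//   TABLE db1.t1 PARTITION (ds='2024-01-01') (col1, col2)
Map<String, String> partSpec = new HashMap<>();
partSpec.put("ds", "2024-01-01");
List<String> columns = Arrays.asList("col1", "col2");
PrivilegeObjectDesc desc = new PrivilegeObjectDesc(true, "db1.t1", partSpec, columns);
// for DATABASE db1, isTable would be false and partSpec/columns would stay null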
Use of org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method createGrantTask.
@Override
public Task<?> createGrantTask(ASTNode ast, Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException {
  List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
  List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
  boolean grantOption = false;
  PrivilegeObjectDesc privilegeObj = null;
  if (ast.getChildCount() > 2) {
    // the remaining children are optional, so pick them out by token type
    for (int i = 2; i < ast.getChildCount(); i++) {
      ASTNode astChild = (ASTNode) ast.getChild(i);
      if (astChild.getType() == HiveParser.TOK_GRANT_WITH_OPTION) {
        grantOption = true;
      } else if (astChild.getType() == HiveParser.TOK_PRIV_OBJECT) {
        privilegeObj = analyzePrivilegeObject(astChild, outputs);
      }
    }
  }
  // the grantor is the current session user
  String userName = SessionState.getUserFromAuthenticator();
  GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc, principalDesc, userName, PrincipalType.USER, grantOption);
  return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc));
}
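A sketch of the descriptor built for GRANT SELECT ON TABLE db1.t1 TO USER user1 WITH GRANT OPTION, reusing the assumed PrivilegeDesc/PrincipalDesc constructor shapes from the revoke sketch above; the grantor name comes from the current session:

// Hypothetical sketch for:
//   GRANT SELECT ON TABLE db1.t1 TO USER user1 WITH GRANT OPTION;
PrivilegeObjectDesc privilegeObj = new PrivilegeObjectDesc(true, "db1.t1", null, null);
List<PrivilegeDesc> privileges = Arrays.asList(new PrivilegeDesc(Privilege.SELECT, null));  // assumed
List<PrincipalDesc> principals = Arrays.asList(new PrincipalDesc("user1", PrincipalType.USER));  // assumed
GrantDesc grantDesc = new GrantDesc(privilegeObj, privileges, principals,
    SessionState.getUserFromAuthenticator(), PrincipalType.USER, /* grantOption */ true);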