Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project hive by apache.
The class SemanticAnalyzer, method walkASTMarkTABREF.
private void walkASTMarkTABREF(ASTNode ast, Set<String> cteAlias) throws SemanticException {
  Queue<Node> queue = new LinkedList<>();
  queue.add(ast);
  Map<HivePrivilegeObject, MaskAndFilterInfo> basicInfos = new LinkedHashMap<>();
  // Breadth-first walk of the AST, collecting one privilege object per table reference.
  while (!queue.isEmpty()) {
    ASTNode astNode = (ASTNode) queue.poll();
    if (astNode.getToken().getType() == HiveParser.TOK_TABREF) {
      int aliasIndex = 0;
      StringBuffer additionalTabInfo = new StringBuffer();
      // Children after the table name are either sampling/table-properties clauses or the alias.
      for (int index = 1; index < astNode.getChildCount(); index++) {
        ASTNode ct = (ASTNode) astNode.getChild(index);
        if (ct.getToken().getType() == HiveParser.TOK_TABLEBUCKETSAMPLE
            || ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE
            || ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) {
          additionalTabInfo.append(ctx.getTokenRewriteStream().toString(ct.getTokenStartIndex(), ct.getTokenStopIndex()));
        } else {
          aliasIndex = index;
        }
      }
      ASTNode tableTree = (ASTNode) (astNode.getChild(0));
      String tabIdName = getUnescapedName(tableTree);
      String alias;
      if (aliasIndex != 0) {
        alias = unescapeIdentifier(astNode.getChild(aliasIndex).getText());
      } else {
        alias = getUnescapedUnqualifiedTableName(tableTree);
      }
      // select * from TAB2 [no masking]
      if (cteAlias.contains(tabIdName)) {
        continue;
      }
      String replacementText = null;
      Table table = null;
      try {
        table = getTableObjectByName(tabIdName);
      } catch (HiveException e) {
        // Table may not be found when materialization of CTE is on.
        LOG.info("Table " + tabIdName + " is not found in walkASTMarkTABREF.");
        continue;
      }
      List<String> colNames = new ArrayList<>();
      List<String> colTypes = new ArrayList<>();
      for (FieldSchema col : table.getAllCols()) {
        colNames.add(col.getName());
        colTypes.add(col.getType());
      }
      basicInfos.put(new HivePrivilegeObject(table.getDbName(), table.getTableName(), colNames),
          new MaskAndFilterInfo(colTypes, additionalTabInfo.toString(), alias, astNode, table.isView()));
    }
    if (astNode.getChildCount() > 0 && !ignoredTokens.contains(astNode.getToken().getType())) {
      for (Node child : astNode.getChildren()) {
        queue.offer(child);
      }
    }
  }
  List<HivePrivilegeObject> basicPrivObjs = new ArrayList<>();
  basicPrivObjs.addAll(basicInfos.keySet());
  // Ask the masking/filtering hook which of the referenced tables actually need a rewrite.
  List<HivePrivilegeObject> needRewritePrivObjs = tableMask.applyRowFilterAndColumnMasking(basicPrivObjs);
  if (needRewritePrivObjs != null && !needRewritePrivObjs.isEmpty()) {
    for (HivePrivilegeObject privObj : needRewritePrivObjs) {
      MaskAndFilterInfo info = basicInfos.get(privObj);
      String replacementText = tableMask.create(privObj, info);
      if (replacementText != null) {
        // We don't support masking/filtering against ACID query at the moment
        if (ctx.getIsUpdateDeleteMerge()) {
          throw new SemanticException(ErrorMsg.MASKING_FILTERING_ON_ACID_NOT_SUPPORTED, privObj.getDbname(), privObj.getObjectName());
        }
        tableMask.setNeedsRewrite(true);
        tableMask.addTranslation(info.astNode, replacementText);
      }
    }
  }
}
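For orientation, here is a minimal stand-alone sketch (not part of the Hive source) of the pattern the method relies on: a column-level HivePrivilegeObject is built for each referenced table and handed to TableMask.applyRowFilterAndColumnMasking, which returns the subset that actually needs a rewrite. The database, table, and column names below are hypothetical, and a configured TableMask instance is assumed to be supplied by the caller.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.TableMask;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;

public class ColumnMaskingSketch {
  // Builds one column-level privilege object for a hypothetical table and asks the
  // masking hook whether that table needs a row filter or column mask.
  static List<HivePrivilegeObject> tablesNeedingRewrite(TableMask tableMask) throws SemanticException {
    // Hypothetical db/table/columns; in SemanticAnalyzer these come from the TOK_TABREF walk above.
    List<String> colNames = Arrays.asList("id", "name", "ssn");
    HivePrivilegeObject privObj = new HivePrivilegeObject("default", "employees", colNames);
    List<HivePrivilegeObject> basicPrivObjs = new ArrayList<>();
    basicPrivObjs.add(privObj);
    // Objects in the returned list are the ones tableMask.create(...) would build replacement SQL for.
    return tableMask.applyRowFilterAndColumnMasking(basicPrivObjs);
  }
}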
Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project drill by apache.
The class HiveAuthorizationHelper, method authorizeReadTable.
/**
 * Check authorization for "READ TABLE" on the given db.table. A {@link HiveAccessControlException} is thrown
 * for illegal access.
 * @param dbName name of the database containing the table
 * @param tableName name of the table to read
 */
public void authorizeReadTable(final String dbName, final String tableName) throws HiveAccessControlException {
  if (!authzEnabled) {
    return;
  }
  HivePrivilegeObject toRead = new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tableName);
  authorize(HiveOperationType.QUERY, ImmutableList.of(toRead), Collections.<HivePrivilegeObject>emptyList(), "READ TABLE");
}
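A hedged usage sketch, not taken from the Drill source: how a caller might guard a table read with authorizeReadTable, assuming an already-initialized HiveAuthorizationHelper (the class shown above) named authorizer. The database and table names are illustrative.

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;

class ReadAuthorizationSketch {
  // Plans a read of default.orders only if the current user passes the READ TABLE check.
  void readIfAuthorized(HiveAuthorizationHelper authorizer) {
    try {
      authorizer.authorizeReadTable("default", "orders");
      // ... proceed with planning the scan of default.orders ...
    } catch (HiveAccessControlException e) {
      // Surface the denial to the user instead of planning the scan.
      throw new RuntimeException("Not authorized to read table default.orders", e);
    }
  }
}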
Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project hive by apache.
The class SemanticAnalyzer, method walkASTMarkTABREF (static overload).
private static void walkASTMarkTABREF(TableMask tableMask, ASTNode ast, Set<String> cteAlias, Context ctx, Hive db,
    Map<String, Table> tabNameToTabObject, Set<Integer> ignoredTokens) throws SemanticException {
  Queue<Node> queue = new LinkedList<>();
  queue.add(ast);
  Map<HivePrivilegeObject, MaskAndFilterInfo> basicInfos = new LinkedHashMap<>();
  while (!queue.isEmpty()) {
    ASTNode astNode = (ASTNode) queue.poll();
    if (astNode.getToken().getType() == HiveParser.TOK_TABREF) {
      int aliasIndex = 0;
      StringBuilder additionalTabInfo = new StringBuilder();
      for (int index = 1; index < astNode.getChildCount(); index++) {
        ASTNode ct = (ASTNode) astNode.getChild(index);
        if (ct.getToken().getType() == HiveParser.TOK_TABLEBUCKETSAMPLE
            || ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE
            || ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) {
          additionalTabInfo.append(ctx.getTokenRewriteStream().toString(ct.getTokenStartIndex(), ct.getTokenStopIndex()));
        } else {
          aliasIndex = index;
        }
      }
      ASTNode tableTree = (ASTNode) (astNode.getChild(0));
      String tabIdName = getUnescapedName(tableTree);
      String alias;
      if (aliasIndex != 0) {
        alias = unescapeIdentifier(astNode.getChild(aliasIndex).getText());
      } else {
        alias = getUnescapedUnqualifiedTableName(tableTree);
      }
      // select * from TAB2 [no masking]
      if (cteAlias.contains(tabIdName)) {
        continue;
      }
      String replacementText = null;
      Table table = null;
      try {
        if (!tabNameToTabObject.containsKey(tabIdName)) {
          table = db.getTable(tabIdName, true);
          tabNameToTabObject.put(tabIdName, table);
        } else {
          table = tabNameToTabObject.get(tabIdName);
        }
      } catch (HiveException e) {
        // Table may not be found when materialization of CTE is on.
        STATIC_LOG.debug("Table " + tabIdName + " is not found in walkASTMarkTABREF.");
        continue;
      }
      List<String> colNames = new ArrayList<>();
      List<String> colTypes = new ArrayList<>();
      for (FieldSchema col : table.getAllCols()) {
        colNames.add(col.getName());
        colTypes.add(col.getType());
      }
      basicInfos.put(new HivePrivilegeObject(table.getDbName(), table.getTableName(), colNames),
          new MaskAndFilterInfo(colTypes, additionalTabInfo.toString(), alias, astNode, table.isView()));
    }
    if (astNode.getChildCount() > 0 && !ignoredTokens.contains(astNode.getToken().getType())) {
      for (Node child : astNode.getChildren()) {
        queue.offer(child);
      }
    }
  }
  List<HivePrivilegeObject> basicPrivObjs = new ArrayList<>();
  basicPrivObjs.addAll(basicInfos.keySet());
  List<HivePrivilegeObject> needRewritePrivObjs = tableMask.applyRowFilterAndColumnMasking(basicPrivObjs);
  if (needRewritePrivObjs != null && !needRewritePrivObjs.isEmpty()) {
    for (HivePrivilegeObject privObj : needRewritePrivObjs) {
      MaskAndFilterInfo info = basicInfos.get(privObj);
      String replacementText = tableMask.create(privObj, info);
      if (replacementText != null) {
        // We don't support masking/filtering against ACID query at the moment
        if (ctx.getIsUpdateDeleteMerge()) {
          throw new SemanticException(ErrorMsg.MASKING_FILTERING_ON_ACID_NOT_SUPPORTED, privObj.getDbname(), privObj.getObjectName());
        }
        tableMask.setNeedsRewrite(true);
        tableMask.addTranslation(info.astNode, replacementText);
      }
    }
  }
}
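Compared with the earlier instance method, this static variant mainly adds memoization of metastore lookups through the caller-supplied tabNameToTabObject map. Below is a minimal sketch of that cache pattern in isolation; the class and method names are hypothetical, while db.getTable(tabIdName, true) mirrors the call in the snippet above.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

class TableLookupCacheSketch {
  private final Map<String, Table> tabNameToTabObject = new HashMap<>();

  // Returns the Table for tabIdName, hitting the metastore only on the first request.
  Table getTableCached(Hive db, String tabIdName) throws HiveException {
    Table table = tabNameToTabObject.get(tabIdName);
    if (table == null) {
      // Same call as in the snippet above; the boolean flag mirrors that call.
      table = db.getTable(tabIdName, true);
      tabNameToTabObject.put(tabIdName, table);
    }
    return table;
  }
}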
Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project drill by axbaretto.
The class HiveAuthorizationHelper, method authorizeShowTables.
/**
 * Check authorization for the "SHOW TABLES" command in the given Hive db. A {@link HiveAccessControlException}
 * is thrown for illegal access.
 * @param dbName name of the database whose tables are to be listed
 */
public void authorizeShowTables(final String dbName) throws HiveAccessControlException {
  if (!authzEnabled) {
    return;
  }
  final HivePrivilegeObject toRead = new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, dbName, null);
  authorize(HiveOperationType.SHOWTABLES, ImmutableList.of(toRead), Collections.<HivePrivilegeObject>emptyList(), "SHOW TABLES");
}
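A hedged sketch, not Drill's actual code, of how such a check could be used when enumerating schemas: databases the user may not list are simply left out of the result. The method name and surrounding control flow are illustrative, and the HiveAuthorizationHelper shown above is assumed to be available.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;

class ShowTablesFilterSketch {
  // Keeps only the databases for which the current user passes the SHOW TABLES check;
  // assumes a pre-fetched list of db names.
  List<String> listableDatabases(HiveAuthorizationHelper authorizer, List<String> allDbNames) {
    List<String> visible = new ArrayList<>();
    for (String dbName : allDbNames) {
      try {
        authorizer.authorizeShowTables(dbName);
        visible.add(dbName);
      } catch (HiveAccessControlException e) {
        // The user may not list tables in this db; omit it.
      }
    }
    return visible;
  }
}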
Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in project hive by apache.
The class TestHDFSPermissionPolicyProvider, method testPolicyProvider.
@Test
public void testPolicyProvider() throws Exception {
  HDFSPermissionPolicyProvider policyProvider = new HDFSPermissionPolicyProvider(conf);
  FileSystem fs = FileSystem.get(conf);
  fs.setOwner(new Path(defaultTbl1Loc), "user1", "group1");
  fs.setOwner(new Path(defaultTbl2Loc), "user1", "group1");
  fs.setOwner(new Path(db1Loc), "user1", "group1");
  fs.setOwner(new Path(db1Tbl1Loc), "user1", "group1");
  // r--r--r--
  fs.setPermission(new Path(defaultTbl1Loc), new FsPermission("444"));
  HiveResourceACLs acls = policyProvider.getResourceACLs(
      new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl1"));
  assertEquals(acls.getUserPermissions().size(), 1);
  assertTrue(acls.getUserPermissions().keySet().contains("user1"));
  assertEquals(acls.getGroupPermissions().size(), 2);
  assertTrue(acls.getGroupPermissions().keySet().contains("group1"));
  assertTrue(acls.getGroupPermissions().keySet().contains("public"));
  // r--r-----
  fs.setPermission(new Path(defaultTbl1Loc), new FsPermission("440"));
  acls = policyProvider.getResourceACLs(
      new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl1"));
  assertEquals(acls.getUserPermissions().size(), 1);
  assertEquals(acls.getUserPermissions().keySet().iterator().next(), "user1");
  assertEquals(acls.getGroupPermissions().size(), 1);
  assertTrue(acls.getGroupPermissions().keySet().contains("group1"));
  // r-----r--
  fs.setPermission(new Path(defaultTbl1Loc), new FsPermission("404"));
  acls = policyProvider.getResourceACLs(
      new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl1"));
  assertEquals(acls.getUserPermissions().size(), 1);
  assertTrue(acls.getUserPermissions().keySet().contains("user1"));
  assertEquals(acls.getGroupPermissions().size(), 1);
  assertTrue(acls.getGroupPermissions().keySet().contains("public"));
  // r--------
  fs.setPermission(new Path(defaultTbl1Loc), new FsPermission("400"));
  acls = policyProvider.getResourceACLs(
      new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl1"));
  assertEquals(acls.getUserPermissions().size(), 1);
  assertTrue(acls.getUserPermissions().keySet().contains("user1"));
  assertEquals(acls.getGroupPermissions().size(), 0);
  // ------r--
  fs.setPermission(new Path(defaultTbl1Loc), new FsPermission("004"));
  // rwxrwxrwx
  fs.setPermission(new Path(defaultTbl2Loc), new FsPermission("777"));
  acls = policyProvider.getResourceACLs(
      new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl1"));
  assertEquals(acls.getUserPermissions().size(), 0);
  assertEquals(acls.getGroupPermissions().size(), 1);
  assertTrue(acls.getGroupPermissions().keySet().contains("public"));
  acls = policyProvider.getResourceACLs(
      new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl2"));
  assertEquals(acls.getUserPermissions().size(), 1);
  assertTrue(acls.getUserPermissions().keySet().contains("user1"));
  assertEquals(acls.getGroupPermissions().size(), 2);
  assertTrue(acls.getGroupPermissions().keySet().contains("group1"));
  assertTrue(acls.getGroupPermissions().keySet().contains("public"));
  // r--------
  fs.setPermission(new Path(db1Loc), new FsPermission("400"));
  fs.delete(new Path(db1Tbl1Loc), true);
  acls = policyProvider.getResourceACLs(
      new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, "db1", null));
  assertEquals(acls.getUserPermissions().size(), 1);
  assertTrue(acls.getUserPermissions().keySet().contains("user1"));
  assertEquals(acls.getGroupPermissions().size(), 0);
  acls = policyProvider.getResourceACLs(
      new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "db1", "tbl1"));
  assertEquals(acls.getUserPermissions().size(), 1);
  assertTrue(acls.getUserPermissions().keySet().contains("user1"));
  assertEquals(acls.getGroupPermissions().size(), 0);
}
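The assertions above follow directly from how the octal permission strings decode into user, group, and other read bits, with the other-read bit showing up in the test as the synthetic "public" group entry. A small stand-alone sketch of that decoding with Hadoop's FsPermission, independent of HDFSPermissionPolicyProvider:

import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

public class PermissionDecodeSketch {
  public static void main(String[] args) {
    // "440" = r--r-----: owner and group can read, others cannot, which is why the
    // "440" case above expects one user entry and one group entry and no "public" group.
    FsPermission perm = new FsPermission("440");
    System.out.println("owner read:  " + perm.getUserAction().implies(FsAction.READ));   // true
    System.out.println("group read:  " + perm.getGroupAction().implies(FsAction.READ));  // true
    System.out.println("others read: " + perm.getOtherAction().implies(FsAction.READ))); // false
  }
}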