Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Hive project: class ReadTableEvent, method getInputHObjs.
/**
 * Builds the list of input privilege objects for a table-read event:
 * a single HivePrivilegeObject wrapping the table being read.
 *
 * @return a one-element list with the privilege object for the read table
 */
private List<HivePrivilegeObject> getInputHObjs() {
  LOG.debug("==> ReadTableEvent.getInputHObjs()");
  List<HivePrivilegeObject> ret = new ArrayList<>();
  PreReadTableEvent preReadTableEvent = (PreReadTableEvent) preEventContext;
  // The Table object already carries its database name; the previously
  // extracted dbName local was unused and has been removed.
  Table table = preReadTableEvent.getTable();
  ret.add(getHivePrivilegeObject(table));
  // Parameterized logging: avoids building the string when debug is disabled.
  LOG.debug("<== ReadTableEvent.getInputHObjs(): ret={}", ret);
  return ret;
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Hive project: class TableFilterContext, method getInputHObjs.
/**
 * Builds the input privilege objects used to filter tables: one
 * TABLE_OR_VIEW privilege object per table. When full Table objects are
 * available, owner information is included; otherwise only the database
 * and table names are used.
 *
 * @return the list of privilege objects, one per table to be filtered
 */
private List<HivePrivilegeObject> getInputHObjs() {
  // Fixed copy-paste bug: both debug messages previously said
  // "getOutputHObjs()" even though this is getInputHObjs().
  LOG.debug("==> TableFilterContext.getInputHObjs()");
  List<HivePrivilegeObject> ret = new ArrayList<>();
  if (tables != null) {
    // Full Table metadata available: include owner name/type so the
    // authorizer can apply ownership-based policies.
    for (Table table : tables) {
      HivePrivilegeObjectType type = HivePrivilegeObjectType.TABLE_OR_VIEW;
      HivePrivObjectActionType objectActionType = HivePrivilegeObject.HivePrivObjectActionType.OTHER;
      HivePrivilegeObject hivePrivilegeObject = new HivePrivilegeObject(type, table.getDbName(),
          table.getTableName(), null, null, objectActionType, null, null, table.getOwner(),
          table.getOwnerType());
      ret.add(hivePrivilegeObject);
    }
  } else {
    // Only names available: build objects from the context's dbName and
    // the provided table names (no owner information).
    for (String tableName : tableNames) {
      HivePrivilegeObjectType type = HivePrivilegeObjectType.TABLE_OR_VIEW;
      HivePrivObjectActionType objectActionType = HivePrivilegeObject.HivePrivObjectActionType.OTHER;
      HivePrivilegeObject hivePrivilegeObject =
          new HivePrivilegeObject(type, dbName, tableName, null, null, objectActionType, null, null);
      ret.add(hivePrivilegeObject);
    }
  }
  // Parameterized logging: avoids building the string when debug is disabled.
  LOG.debug("<== TableFilterContext.getInputHObjs(): ret={}", ret);
  return ret;
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Hive project: class DropTableEvent, method getInputHObjs.
/**
 * Builds the list of input privilege objects for a drop-table event: a
 * single HivePrivilegeObject wrapping the table being dropped. Also
 * updates COMMAND_STR with a command string describing the drop.
 *
 * @return a one-element list with the privilege object for the dropped table
 */
private List<HivePrivilegeObject> getInputHObjs() {
  LOG.debug("==> DropTableEvent.getInputHObjs()");
  final PreDropTableEvent dropEvent = (PreDropTableEvent) preEventContext;
  final Table droppedTable = dropEvent.getTable();
  final List<HivePrivilegeObject> privilegeObjects = new ArrayList<>();
  privilegeObjects.add(getHivePrivilegeObject(droppedTable));
  // Record a human-readable command string for auditing of this drop.
  COMMAND_STR = buildCommandString(COMMAND_STR, droppedTable);
  LOG.debug("<== DropTableEvent.getInputHObjs(): ret={}", privilegeObjects);
  return privilegeObjects;
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Hive project: class ReadDatabaseEvent, method getInputHObjs.
/**
 * Builds the list of input privilege objects for a database-read event:
 * a single HivePrivilegeObject wrapping the database, or an empty list
 * when the event carries no database. Also updates COMMAND_STR with a
 * command string describing the read.
 *
 * @return at most one privilege object for the database being read
 */
private List<HivePrivilegeObject> getInputHObjs() {
  LOG.debug("==> ReadDatabaseEvent.getInputHObjs()");
  List<HivePrivilegeObject> ret = new ArrayList<>();
  PreReadDatabaseEvent preReadDatabaseEvent = (PreReadDatabaseEvent) preEventContext;
  Database database = preReadDatabaseEvent.getDatabase();
  if (database != null) {
    ret.add(getHivePrivilegeObject(database));
    COMMAND_STR = buildCommandString(COMMAND_STR, database);
  }
  // Exit log moved outside the null check so the "==>"/"<==" trace pair is
  // always balanced, even when the event has no database.
  LOG.debug("<== ReadDatabaseEvent.getInputHObjs(): ret={}", ret);
  return ret;
}
Example usage of org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject in the Apache Hive project: class SemanticAnalyzer, method walkASTMarkTABREF.
/**
 * Walks the query AST breadth-first, collecting every TOK_TABREF node and
 * asking the authorization plugin (via {@code tableMask}) which referenced
 * tables require row filtering or column masking; matching TABREF nodes get
 * a rewrite translation registered on {@code tableMask}.
 *
 * Materialized views are handled specially: their source tables are checked
 * instead, and any masking/filtering policy on a source table is an error
 * (rewriting through an MV is not supported).
 *
 * @param tableMask policy holder; receives translations for nodes to rewrite
 * @param ast       root of the (sub)tree to scan
 * @param cteAlias  names of CTE aliases — these are not real tables and are skipped
 * @param ctx       parse context, used to recover original token text
 * @throws SemanticException on lookup failure, on masking/filtering applied to
 *                           MV source tables, or when the query is an ACID
 *                           update/delete/merge (masking unsupported there)
 */
private void walkASTMarkTABREF(TableMask tableMask, ASTNode ast, Set<String> cteAlias, Context ctx) throws SemanticException {
// Phase 1: BFS over the AST, gathering one HivePrivilegeObject per referenced
// table, keyed in insertion order so results are deterministic.
Queue<Node> queue = new LinkedList<>();
queue.add(ast);
Map<HivePrivilegeObject, MaskAndFilterInfo> basicInfos = new LinkedHashMap<>();
while (!queue.isEmpty()) {
ASTNode astNode = (ASTNode) queue.poll();
if (astNode.getToken().getType() == HiveParser.TOK_TABREF) {
// A TABREF's first child is the table name; remaining children are either
// sampling/properties clauses (captured verbatim as "additional tab info"
// for the rewritten text) or the table alias.
int aliasIndex = 0;
StringBuilder additionalTabInfo = new StringBuilder();
for (int index = 1; index < astNode.getChildCount(); index++) {
ASTNode ct = (ASTNode) astNode.getChild(index);
if (ct.getToken().getType() == HiveParser.TOK_TABLEBUCKETSAMPLE || ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE || ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) {
// Preserve the original source text of the clause exactly.
additionalTabInfo.append(ctx.getTokenRewriteStream().toString(ct.getTokenStartIndex(), ct.getTokenStopIndex()));
} else {
aliasIndex = index;
}
}
ASTNode tableTree = (ASTNode) (astNode.getChild(0));
String tabIdName = getUnescapedName(tableTree);
String alias;
if (aliasIndex != 0) {
alias = unescapeIdentifier(astNode.getChild(aliasIndex).getText());
} else {
// No explicit alias: use the unqualified table name as the alias.
alias = getUnescapedUnqualifiedTableName(tableTree);
}
// CTE aliases (e.g. "select * from TAB2" where TAB2 is a CTE) are not
// real tables — no masking applies; skip them.
if (cteAlias.contains(tabIdName)) {
continue;
}
Table table = null;
try {
table = getTableObjectByName(tabIdName, false);
} catch (HiveException e) {
// This should not happen: the second argument asks the lookup to
// swallow not-found and return null instead of throwing.
throw new SemanticException("Got exception though getTableObjectByName method should ignore it");
}
if (table == null) {
// Table may not be found when materialization of CTE is on.
STATIC_LOG.debug("Table " + tabIdName + " is not found in walkASTMarkTABREF.");
continue;
}
if (table.isMaterializedView()) {
// Materialized view: do not apply any policies to the MV itself.
// Instead register each source table with a null MaskAndFilterInfo —
// phase 2 treats a null info as "policy on MV source" and errors out.
for (SourceTable sourceTable : table.getMVMetadata().getSourceTables()) {
String qualifiedTableName = TableName.getDbTable(sourceTable.getTable().getDbName(), sourceTable.getTable().getTableName());
try {
table = getTableObjectByName(qualifiedTableName, true);
} catch (HiveException e) {
// This should not happen.
throw new SemanticException("Table " + qualifiedTableName + " not found when trying to obtain it to check masking/filtering policies");
}
List<String> colNames = new ArrayList<>();
extractColumnInfos(table, colNames, new ArrayList<>());
basicInfos.put(new HivePrivilegeObject(table.getDbName(), table.getTableName(), colNames), null);
}
} else {
// Regular table/view: determine the referenced columns. If CBO
// succeeded we can narrow to the columns actually accessed;
// otherwise fall back to all columns of the table.
List<String> colNames;
List<String> colTypes;
if (this.ctx.isCboSucceeded() && this.columnAccessInfo != null && (colNames = this.columnAccessInfo.getTableToColumnAllAccessMap().get(table.getCompleteName())) != null) {
Map<String, String> colNameToType = table.getAllCols().stream().collect(Collectors.toMap(FieldSchema::getName, FieldSchema::getType));
colTypes = colNames.stream().map(colNameToType::get).collect(Collectors.toList());
} else {
colNames = new ArrayList<>();
colTypes = new ArrayList<>();
extractColumnInfos(table, colNames, colTypes);
}
basicInfos.put(new HivePrivilegeObject(table.getDbName(), table.getTableName(), colNames), new MaskAndFilterInfo(colTypes, additionalTabInfo.toString(), alias, astNode, table.isView(), table.isNonNative()));
}
}
// Continue the BFS into children, except under tokens known not to
// contain table references (IGNORED_TOKENS).
if (astNode.getChildCount() > 0 && !IGNORED_TOKENS.contains(astNode.getToken().getType())) {
for (Node child : astNode.getChildren()) {
queue.offer(child);
}
}
}
// Phase 2: ask the plugin which of the collected objects need rewriting,
// then register a replacement text for each affected TABREF node.
List<HivePrivilegeObject> basicPrivObjs = new ArrayList<>(basicInfos.keySet());
List<HivePrivilegeObject> needRewritePrivObjs = tableMask.applyRowFilterAndColumnMasking(basicPrivObjs);
if (needRewritePrivObjs != null && !needRewritePrivObjs.isEmpty()) {
for (HivePrivilegeObject privObj : needRewritePrivObjs) {
MaskAndFilterInfo info = basicInfos.get(privObj);
// First we check whether entity actually needs masking or filtering
if (tableMask.needsMaskingOrFiltering(privObj)) {
if (info == null) {
// A null info means this object was an MV source table (see phase 1):
// masking/filtering on MV sources is not supported.
throw new SemanticException(ErrorMsg.MASKING_FILTERING_ON_MATERIALIZED_VIEWS_SOURCES, privObj.getDbname(), privObj.getObjectName());
} else {
String replacementText = tableMask.create(privObj, info);
// We don't support masking/filtering against ACID query at the moment
if (ctx.getIsUpdateDeleteMerge()) {
throw new SemanticException(ErrorMsg.MASKING_FILTERING_ON_ACID_NOT_SUPPORTED, privObj.getDbname(), privObj.getObjectName());
}
tableMask.setNeedsRewrite(true);
tableMask.addTranslation(info.astNode, replacementText);
}
}
}
}
}
Aggregations