Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

In class Driver, method getHivePrivObjects:

private static List<HivePrivilegeObject> getHivePrivObjects(Set<? extends Entity> privObjects, Map<String, List<String>> tableName2Cols) {
  List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
  if (privObjects == null) {
    return hivePrivobjs;
  }
  for (Entity privObject : privObjects) {
    HivePrivilegeObjectType privObjType = AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
    if (privObject.isDummy()) {
      // do not authorize dummy readEntity or writeEntity
      continue;
    }
    if (privObject instanceof ReadEntity && !((ReadEntity) privObject).isDirect()) {
      // See description of the isDirect in ReadEntity
      continue;
    }
    if (privObject instanceof WriteEntity && ((WriteEntity) privObject).isTempURI()) {
      // do not authorize temporary uris
      continue;
    }
    // support for authorization on partitions needs to be added
    String dbname = null;
    String objName = null;
    List<String> partKeys = null;
    List<String> columns = null;
    String className = null;
    switch (privObject.getType()) {
      case DATABASE:
        dbname = privObject.getDatabase().getName();
        break;
      case TABLE:
        dbname = privObject.getTable().getDbName();
        objName = privObject.getTable().getTableName();
        columns = tableName2Cols == null ? null : tableName2Cols.get(Table.getCompleteName(dbname, objName));
        break;
      case DFS_DIR:
      case LOCAL_DIR:
        objName = privObject.getD().toString();
        break;
      case FUNCTION:
        if (privObject.getDatabase() != null) {
          dbname = privObject.getDatabase().getName();
        }
        objName = privObject.getFunctionName();
        className = privObject.getClassName();
        break;
      case DUMMYPARTITION:
      case PARTITION:
        // not currently handled
        continue;
      case SERVICE_NAME:
        objName = privObject.getServiceName();
        break;
      default:
        throw new AssertionError("Unexpected object type");
    }
    HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
    HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, objName, partKeys, columns, actionType, null, className);
    hivePrivobjs.add(hPrivObject);
  }
  return hivePrivobjs;
}
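
The isDirect() check above is what keeps view-expanded inputs out of authorization. The following is a minimal sketch of that distinction, assuming the public ReadEntity constructors; the table and database names are hypothetical:

import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.metadata.Table;

public class ReadEntityDirectnessSketch {
  public static void main(String[] args) {
    Table view = new Table("demo_db", "sales_view");  // hypothetical view read by the query
    Table base = new Table("demo_db", "sales_raw");   // hypothetical table reached only through the view

    // The view itself is a direct input of the query.
    ReadEntity directInput = new ReadEntity(view);

    // The underlying table is registered with isDirect = false, so the
    // getHivePrivObjects() loop above skips it.
    ReadEntity indirectInput = new ReadEntity(base, directInput, false);

    System.out.println(directInput.isDirect());   // true
    System.out.println(indirectInput.isDirect()); // false
  }
}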

In class QueryResultsCache, method entryMatches:

private boolean entryMatches(LookupInfo lookupInfo, CacheEntry entry) {
  QueryInfo queryInfo = entry.getQueryInfo();
  for (ReadEntity readEntity : queryInfo.getInputs()) {
    // Check that the tables used do not resolve to temp tables.
    if (readEntity.getType() == Type.TABLE) {
      Table tableUsed = readEntity.getTable();
      Map<String, Table> tempTables = SessionHiveMetaStoreClient.getTempTablesForDatabase(tableUsed.getDbName());
      if (tempTables != null && tempTables.containsKey(tableUsed.getTableName())) {
        LOG.info("{} resolves to a temporary table in the current session. This query cannot use the cache.", tableUsed.getTableName());
        return false;
      }
    }
  }
  return true;
}
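
The same traversal over a query's ReadEntity inputs is useful outside the cache, for example when a hook needs the set of tables a query read directly. The following is a small sketch using only the accessors shown above; the class and method names are ours:

import java.util.Set;
import java.util.TreeSet;

import org.apache.hadoop.hive.ql.hooks.Entity.Type;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;

public class InputTableNames {

  // Returns "db.table" for every table-typed input the query read directly,
  // skipping directories, dummy entities, and view-expanded tables.
  static Set<String> directTableInputs(Set<ReadEntity> inputs) {
    Set<String> names = new TreeSet<>();
    for (ReadEntity input : inputs) {
      if (!input.isDummy() && input.getType() == Type.TABLE && input.isDirect()) {
        names.add(input.getTable().getDbName() + "." + input.getTable().getTableName());
      }
    }
    return names;
  }
}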

In class DDLSemanticAnalyzer, method analyzeCacheMetadata:

private void analyzeCacheMetadata(ASTNode ast) throws SemanticException {
  Table tbl = AnalyzeCommandUtils.getTable(ast, this);
  Map<String, String> partSpec = null;
  CacheMetadataDesc desc;
  // In 2 cases out of 3, we could pass the path and type directly to metastore...
  if (AnalyzeCommandUtils.isPartitionLevelStats(ast)) {
    partSpec = AnalyzeCommandUtils.getPartKeyValuePairsFromAST(tbl, ast, conf);
    Partition part = getPartition(tbl, partSpec, true);
    desc = new CacheMetadataDesc(tbl.getDbName(), tbl.getTableName(), part.getName());
    inputs.add(new ReadEntity(part));
  } else {
    // Should we get all partitions for a partitioned table?
    desc = new CacheMetadataDesc(tbl.getDbName(), tbl.getTableName(), tbl.isPartitioned());
    inputs.add(new ReadEntity(tbl));
  }
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
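
The ReadEntity added to inputs here is what later surfaces to authorization (see getHivePrivObjects above) and to pre/post-execution hooks. Below is a minimal sketch of a hook consuming those inputs, assuming the standard ExecuteWithHookContext interface; the class name and output format are ours:

import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;

// Hypothetical hook that prints every input entity registered during semantic
// analysis, including the table or partition ReadEntity added in analyzeCacheMetadata().
public class LogInputsHook implements ExecuteWithHookContext {
  @Override
  public void run(HookContext hookContext) throws Exception {
    for (ReadEntity input : hookContext.getInputs()) {
      System.out.println(input.getType() + " -> " + input.getName());
    }
  }
}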

In class DDLSemanticAnalyzer, method analyzeDescDatabase:

/**
* Describe database.
*
* @param ast
* @throws SemanticException
*/
private void analyzeDescDatabase(ASTNode ast) throws SemanticException {
  boolean isExtended;
  String dbName;
  if (ast.getChildCount() == 1) {
    dbName = stripQuotes(ast.getChild(0).getText());
    isExtended = false;
  } else if (ast.getChildCount() == 2) {
    dbName = stripQuotes(ast.getChild(0).getText());
    isExtended = true;
  } else {
    throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE");
  }
  DescDatabaseDesc descDbDesc = new DescDatabaseDesc(ctx.getResFile(), dbName, isExtended);
  inputs.add(new ReadEntity(getDatabase(dbName)));
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descDbDesc)));
  setFetchTask(createFetchTask(descDbDesc.getSchema()));
}
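
For database-level statements such as DESCRIBE DATABASE, the input is a database-scoped ReadEntity, which the first example above maps to a DATABASE-typed privilege object. A minimal sketch, assuming the ReadEntity(Database) constructor used here; the database name is hypothetical:

import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;

public class DatabaseInputSketch {
  public static void main(String[] args) {
    Database db = new Database();  // hypothetical metastore database descriptor
    db.setName("demo_db");

    ReadEntity input = new ReadEntity(db);
    // The entity type decides which branch getHivePrivObjects() takes; DATABASE here.
    System.out.println(input.getType());
  }
}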

In class DDLSemanticAnalyzer, method analyzeShowTables:

private void analyzeShowTables(ASTNode ast) throws SemanticException {
  ShowTablesDesc showTblsDesc;
  String dbName = SessionState.get().getCurrentDatabase();
  String tableNames = null;
  if (ast.getChildCount() > 3) {
    throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(ast.toStringTree()));
  }
  switch (ast.getChildCount()) {
    case 1:
      // Uses a pattern
      tableNames = unescapeSQLString(ast.getChild(0).getText());
      showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName, tableNames);
      break;
    case 2:
      // Specifies a DB
      assert (ast.getChild(0).getType() == HiveParser.TOK_FROM);
      dbName = unescapeIdentifier(ast.getChild(1).getText());
      validateDatabase(dbName);
      showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName);
      break;
    case 3:
      // Uses a pattern and specifies a DB
      assert (ast.getChild(0).getType() == HiveParser.TOK_FROM);
      dbName = unescapeIdentifier(ast.getChild(1).getText());
      tableNames = unescapeSQLString(ast.getChild(2).getText());
      validateDatabase(dbName);
      showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName, tableNames);
      break;
    default:
      // No pattern or DB
      showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName);
      break;
  }
  inputs.add(new ReadEntity(getDatabase(dbName)));
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showTblsDesc)));
  setFetchTask(createFetchTask(showTblsDesc.getSchema()));
}
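
As the switch above suggests, the child counts correspond to the statement forms: no children for a plain SHOW TABLES, one child for SHOW TABLES 'pattern', two children (TOK_FROM plus an identifier) for SHOW TABLES FROM db, and three for SHOW TABLES FROM db 'pattern'. In every form the target database ends up in inputs as a ReadEntity, so the listing is authorized at the database level.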