Example 81 with ReadEntity

Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

From the class Driver, method getHivePrivObjects:

private static List<HivePrivilegeObject> getHivePrivObjects(Set<? extends Entity> privObjects, Map<String, List<String>> tableName2Cols) {
    List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
    if (privObjects == null) {
        return hivePrivobjs;
    }
    for (Entity privObject : privObjects) {
        HivePrivilegeObjectType privObjType = AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
        if (privObject.isDummy()) {
            // do not authorize dummy readEntity or writeEntity
            continue;
        }
        if (privObject instanceof ReadEntity && !((ReadEntity) privObject).isDirect()) {
            // See the description of isDirect in ReadEntity
            continue;
        }
        if (privObject instanceof WriteEntity && ((WriteEntity) privObject).isTempURI()) {
            // do not authorize temporary URIs
            continue;
        }
        // support for authorization on partitions needs to be added
        String dbname = null;
        String objName = null;
        List<String> partKeys = null;
        List<String> columns = null;
        String className = null;
        switch(privObject.getType()) {
            case DATABASE:
                dbname = privObject.getDatabase().getName();
                break;
            case TABLE:
                dbname = privObject.getTable().getDbName();
                objName = privObject.getTable().getTableName();
                columns = tableName2Cols == null ? null : tableName2Cols.get(Table.getCompleteName(dbname, objName));
                break;
            case DFS_DIR:
            case LOCAL_DIR:
                objName = privObject.getD().toString();
                break;
            case FUNCTION:
                if (privObject.getDatabase() != null) {
                    dbname = privObject.getDatabase().getName();
                }
                objName = privObject.getFunctionName();
                className = privObject.getClassName();
                break;
            case DUMMYPARTITION:
            case PARTITION:
                // not currently handled
                continue;
            case SERVICE_NAME:
                objName = privObject.getServiceName();
                break;
            default:
                throw new AssertionError("Unexpected object type");
        }
        HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
        HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, objName, partKeys, columns, actionType, null, className);
        hivePrivobjs.add(hPrivObject);
    }
    return hivePrivobjs;
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) Entity(org.apache.hadoop.hive.ql.hooks.Entity) HivePrivObjectActionType(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType) ArrayList(java.util.ArrayList) HivePrivilegeObjectType(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject)
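
The three guard clauses are the core of the method: dummy entities, indirect reads (for example, the base tables a view expands to), and temporary staging URIs are all dropped before any HivePrivilegeObject is built. A minimal sketch isolating the same filter, assuming only the Entity, ReadEntity, and WriteEntity methods shown above; AuthorizableEntityFilter is a hypothetical name:

import java.util.Set;
import java.util.stream.Collectors;
import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

// Hypothetical helper: isolates the three skip conditions used by getHivePrivObjects.
public final class AuthorizableEntityFilter {

    // Keeps only the entities that should reach the authorizer.
    public static Set<Entity> authorizable(Set<? extends Entity> entities) {
        return entities.stream()
                // widen the wildcard element type to Entity
                .map(Entity.class::cast)
                // dummy read/write entities are never authorized
                .filter(e -> !e.isDummy())
                // indirect reads (e.g., tables reached only through a view) are skipped
                .filter(e -> !(e instanceof ReadEntity) || ((ReadEntity) e).isDirect())
                // temporary staging URIs are skipped
                .filter(e -> !(e instanceof WriteEntity) || !((WriteEntity) e).isTempURI())
                .collect(Collectors.toSet());
    }
}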

Example 82 with ReadEntity

Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

From the class QueryResultsCache, method entryMatches:

private boolean entryMatches(LookupInfo lookupInfo, CacheEntry entry) {
    QueryInfo queryInfo = entry.getQueryInfo();
    for (ReadEntity readEntity : queryInfo.getInputs()) {
        // Check that the tables used do not resolve to temp tables.
        if (readEntity.getType() == Type.TABLE) {
            Table tableUsed = readEntity.getTable();
            Map<String, Table> tempTables = SessionHiveMetaStoreClient.getTempTablesForDatabase(tableUsed.getDbName());
            if (tempTables != null && tempTables.containsKey(tableUsed.getTableName())) {
                LOG.info("{} resolves to a temporary table in the current session. This query cannot use the cache.", tableUsed.getTableName());
                return false;
            }
        }
    }
    return true;
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Table(org.apache.hadoop.hive.ql.metadata.Table)
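
The check exists because temporary tables are session-scoped: a cached result built from a temp table would be wrong for every other session, so any input that resolves to one disqualifies the entry. A minimal sketch of the same check factored into a helper, assuming the SessionHiveMetaStoreClient API called above; isShadowedByTempTable is a hypothetical name:

import java.util.Map;
import org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient;
import org.apache.hadoop.hive.ql.metadata.Table;

// Hypothetical helper: true if a session-local temp table shadows this table name.
static boolean isShadowedByTempTable(Table table) {
    Map<String, Table> tempTables =
            SessionHiveMetaStoreClient.getTempTablesForDatabase(table.getDbName());
    return tempTables != null && tempTables.containsKey(table.getTableName());
}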

Example 83 with ReadEntity

Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeCacheMetadata:

private void analyzeCacheMetadata(ASTNode ast) throws SemanticException {
    Table tbl = AnalyzeCommandUtils.getTable(ast, this);
    Map<String, String> partSpec = null;
    CacheMetadataDesc desc;
    // In 2 cases out of 3, we could pass the path and type directly to metastore...
    if (AnalyzeCommandUtils.isPartitionLevelStats(ast)) {
        partSpec = AnalyzeCommandUtils.getPartKeyValuePairsFromAST(tbl, ast, conf);
        Partition part = getPartition(tbl, partSpec, true);
        desc = new CacheMetadataDesc(tbl.getDbName(), tbl.getTableName(), part.getName());
        inputs.add(new ReadEntity(part));
    } else {
        // Should we get all partitions for a partitioned table?
        desc = new CacheMetadataDesc(tbl.getDbName(), tbl.getTableName(), tbl.isPartitioned());
        inputs.add(new ReadEntity(tbl));
    }
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Partition(org.apache.hadoop.hive.ql.metadata.Partition) AlterTableExchangePartition(org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition) Table(org.apache.hadoop.hive.ql.metadata.Table) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) CacheMetadataDesc(org.apache.hadoop.hive.ql.plan.CacheMetadataDesc)
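
The inputs set populated here is the same collection of ReadEntity objects that Driver later turns into HivePrivilegeObjects (Example 81), so registering the partition rather than the whole table decides the granularity at which the statement is authorized. A minimal sketch of that registration pattern, assuming the Table and ReadEntity constructors shown in these examples; the table name and the method name exampleInputs are hypothetical:

import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.metadata.Table;

// Hypothetical sketch: entities recorded at analysis time become the
// statement's inputs, which the authorizer inspects later.
static Set<ReadEntity> exampleInputs() {
    Set<ReadEntity> inputs = new HashSet<>();
    Table t = new Table("default", "sales"); // hypothetical table
    inputs.add(new ReadEntity(t));           // recorded as a statement input
    return inputs;                           // Driver authorizes these in Example 81
}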

Example 84 with ReadEntity

Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeDescDatabase:

/**
 * Describe database.
 *
 * @param ast the DESCRIBE DATABASE AST node
 * @throws SemanticException if the AST has an unexpected number of children
 */
private void analyzeDescDatabase(ASTNode ast) throws SemanticException {
    boolean isExtended;
    String dbName;
    if (ast.getChildCount() == 1) {
        dbName = stripQuotes(ast.getChild(0).getText());
        isExtended = false;
    } else if (ast.getChildCount() == 2) {
        dbName = stripQuotes(ast.getChild(0).getText());
        isExtended = true;
    } else {
        throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE");
    }
    DescDatabaseDesc descDbDesc = new DescDatabaseDesc(ctx.getResFile(), dbName, isExtended);
    inputs.add(new ReadEntity(getDatabase(dbName)));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descDbDesc)));
    setFetchTask(createFetchTask(descDbDesc.getSchema()));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) DescDatabaseDesc(org.apache.hadoop.hive.ql.plan.DescDatabaseDesc)
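
The child count carries the whole decision: DESCRIBE DATABASE db produces one child (the name) and DESCRIBE DATABASE EXTENDED db produces two. A minimal sketch collapsing the branches into a single guard, written as if inside DDLSemanticAnalyzer and using only the calls from the method above; analyzeDescDatabaseCompact is a hypothetical name:

// Hypothetical alternative: the same dispatch expressed as a single guard.
private void analyzeDescDatabaseCompact(ASTNode ast) throws SemanticException {
    int childCount = ast.getChildCount();
    if (childCount != 1 && childCount != 2) {
        throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE");
    }
    String dbName = stripQuotes(ast.getChild(0).getText());
    boolean isExtended = (childCount == 2); // EXTENDED contributes the second child
    DescDatabaseDesc descDbDesc = new DescDatabaseDesc(ctx.getResFile(), dbName, isExtended);
    inputs.add(new ReadEntity(getDatabase(dbName)));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descDbDesc)));
    setFetchTask(createFetchTask(descDbDesc.getSchema()));
}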

Example 85 with ReadEntity

Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeShowTables:

private void analyzeShowTables(ASTNode ast) throws SemanticException {
    ShowTablesDesc showTblsDesc;
    String dbName = SessionState.get().getCurrentDatabase();
    String tableNames = null;
    if (ast.getChildCount() > 3) {
        throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(ast.toStringTree()));
    }
    switch (ast.getChildCount()) {
        case 1:
            // Uses a pattern
            tableNames = unescapeSQLString(ast.getChild(0).getText());
            showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName, tableNames);
            break;
        case 2:
            // Specifies a DB
            assert (ast.getChild(0).getType() == HiveParser.TOK_FROM);
            dbName = unescapeIdentifier(ast.getChild(1).getText());
            validateDatabase(dbName);
            showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName);
            break;
        case 3:
            // Uses a pattern and specifies a DB
            assert (ast.getChild(0).getType() == HiveParser.TOK_FROM);
            dbName = unescapeIdentifier(ast.getChild(1).getText());
            tableNames = unescapeSQLString(ast.getChild(2).getText());
            validateDatabase(dbName);
            showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName, tableNames);
            break;
        default:
            // No pattern or DB
            showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName);
            break;
    }
    inputs.add(new ReadEntity(getDatabase(dbName)));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showTblsDesc)));
    setFetchTask(createFetchTask(showTblsDesc.getSchema()));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) ShowTablesDesc(org.apache.hadoop.hive.ql.plan.ShowTablesDesc) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
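
Each arity of the statement maps onto one variant: no children for plain SHOW TABLES, one for a pattern, two for a source database, three for both. A minimal sketch making that mapping explicit, assuming the ASTNode, ErrorMsg, and SemanticException APIs used above; ShowTablesVariant and variantOf are hypothetical names:

// Hypothetical helper: names the four statement shapes the switch above handles.
enum ShowTablesVariant { ALL, PATTERN, DB, DB_AND_PATTERN }

static ShowTablesVariant variantOf(ASTNode ast) throws SemanticException {
    switch (ast.getChildCount()) {
        case 0: return ShowTablesVariant.ALL;            // SHOW TABLES;
        case 1: return ShowTablesVariant.PATTERN;        // SHOW TABLES 'src*';
        case 2: return ShowTablesVariant.DB;             // SHOW TABLES IN db1;
        case 3: return ShowTablesVariant.DB_AND_PATTERN; // SHOW TABLES IN db1 'src*';
        default: throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(ast.toStringTree()));
    }
}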

Aggregations

ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity) 139
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity) 70
Table (org.apache.hadoop.hive.ql.metadata.Table) 69
DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork) 31
Partition (org.apache.hadoop.hive.ql.metadata.Partition) 29
ArrayList (java.util.ArrayList) 27
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 27
DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork) 24
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException) 22
HashMap (java.util.HashMap) 16
Test (org.testng.annotations.Test) 16
Map (java.util.Map) 13
LinkedHashMap (java.util.LinkedHashMap) 12
Path (org.apache.hadoop.fs.Path) 12
List (java.util.List) 11
Database (org.apache.hadoop.hive.metastore.api.Database) 11
AtlasEntity (org.apache.atlas.model.instance.AtlasEntity) 10
Referenceable (org.apache.atlas.typesystem.Referenceable) 10
HashSet (java.util.HashSet) 9
FileNotFoundException (java.io.FileNotFoundException) 7