
Example 1 with HiveOperation

Use of org.apache.hadoop.hive.ql.plan.HiveOperation in project hive by apache.

From class ExplainTask, method collectAuthRelatedEntities:

private JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work) throws Exception {
    BaseSemanticAnalyzer analyzer = work.getAnalyzer();
    HiveOperation operation = queryState.getHiveOperation();
    JSONObject object = new JSONObject(new LinkedHashMap<>());
    Object jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work);
    if (work.isFormatted()) {
        object.put("INPUTS", jsonInput);
    }
    Object jsonOutput = toJson("OUTPUTS", toString(analyzer.getOutputs()), out, work);
    if (work.isFormatted()) {
        object.put("OUTPUTS", jsonOutput);
    }
    String userName = SessionState.get().getAuthenticator().getUserName();
    Object jsonUser = toJson("CURRENT_USER", userName, out, work);
    if (work.isFormatted()) {
        object.put("CURRENT_USER", jsonUser);
    }
    Object jsonOperation = toJson("OPERATION", operation.name(), out, work);
    if (work.isFormatted()) {
        object.put("OPERATION", jsonOperation);
    }
    if (analyzer.skipAuthorization()) {
        return object;
    }
    final List<String> exceptions = new ArrayList<String>();
    Object delegate = SessionState.get().getActiveAuthorizer();
    if (delegate != null) {
        // Swap in a proxy authorizer whose exception handler records each
        // failure message instead of aborting the EXPLAIN.
        Class itface = SessionState.get().getAuthorizerInterface();
        Object authorizer = AuthorizationFactory.create(delegate, itface, new AuthorizationFactory.AuthorizationExceptionHandler() {

            public void exception(Exception exception) {
                exceptions.add(exception.getMessage());
            }
        });
        SessionState.get().setActiveAuthorizer(authorizer);
        try {
            // Run the normal authorization pass against the wrapping proxy.
            Driver.doAuthorization(queryState.getHiveOperation(), analyzer, "");
        } finally {
            // Always restore the real authorizer, even if authorization fails.
            SessionState.get().setActiveAuthorizer(delegate);
        }
    }
    if (!exceptions.isEmpty()) {
        Object jsonFails = toJson("AUTHORIZATION_FAILURES", exceptions, out, work);
        if (work.isFormatted()) {
            object.put("AUTHORIZATION_FAILURES", jsonFails);
        }
    }
    return object;
}
Also used : HiveOperation (org.apache.hadoop.hive.ql.plan.HiveOperation), AuthorizationFactory (org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory), BaseSemanticAnalyzer (org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer), JSONObject (org.json.JSONObject), ArrayList (java.util.ArrayList), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), JSONException (org.json.JSONException), InvocationTargetException (java.lang.reflect.InvocationTargetException)
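
The interesting move in this example is the wrapper: AuthorizationFactory.create replaces the active authorizer with a proxy whose exception handler appends each failure message to the exceptions list, so EXPLAIN can report every authorization failure instead of aborting on the first one. Below is a minimal sketch of that collect-instead-of-throw pattern using java.lang.reflect.Proxy; the Authorizer interface and all other names in it are hypothetical stand-ins, not Hive's API.

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;

public class CollectingAuthorizerSketch {

    // Hypothetical authorizer interface, for illustration only.
    public interface Authorizer {
        void checkPrivileges(String operation) throws Exception;
    }

    // Wrap a delegate so thrown exceptions are recorded rather than
    // propagated, analogous to the AuthorizationFactory.create call above.
    static Authorizer collecting(Authorizer delegate, List<String> failures) {
        InvocationHandler handler = (proxy, method, args) -> {
            try {
                return method.invoke(delegate, args);
            } catch (InvocationTargetException e) {
                // The delegate threw: record the cause and swallow it.
                failures.add(e.getCause().getMessage());
                return null;
            }
        };
        return (Authorizer) Proxy.newProxyInstance(
                Authorizer.class.getClassLoader(),
                new Class<?>[] { Authorizer.class },
                handler);
    }

    public static void main(String[] args) throws Exception {
        List<String> failures = new ArrayList<>();
        Authorizer strict = op -> {
            throw new Exception("Permission denied: " + op);
        };
        collecting(strict, failures).checkPrivileges("SELECT");
        System.out.println(failures); // prints [Permission denied: SELECT]
    }
}

The same idea generalizes to any interface: the proxy forwards every call to the delegate and converts thrown exceptions into recorded messages.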

Example 2 with HiveOperation

Use of org.apache.hadoop.hive.ql.plan.HiveOperation in project hive by apache.

From class SemanticAnalyzer, method materializeCTE:

Table materializeCTE(String cteName, CTEClause cte) throws HiveException {
    ASTNode createTable = new ASTNode(new ClassicToken(HiveParser.TOK_CREATETABLE));
    ASTNode tableName = new ASTNode(new ClassicToken(HiveParser.TOK_TABNAME));
    tableName.addChild(new ASTNode(new ClassicToken(HiveParser.Identifier, cteName)));
    ASTNode temporary = new ASTNode(new ClassicToken(HiveParser.KW_TEMPORARY, MATERIALIZATION_MARKER));
    createTable.addChild(tableName);
    createTable.addChild(temporary);
    createTable.addChild(cte.cteNode);
    SemanticAnalyzer analyzer = new SemanticAnalyzer(queryState);
    analyzer.initCtx(ctx);
    analyzer.init(false);
    // should share cte contexts
    analyzer.aliasToCTEs.putAll(aliasToCTEs);
    HiveOperation operation = queryState.getHiveOperation();
    try {
        analyzer.analyzeInternal(createTable);
    } finally {
        // analyzeInternal sees the synthetic CREATE TABLE and overwrites the
        // command type on the shared queryState; restore the original operation.
        queryState.setCommandType(operation);
    }
    Table table = analyzer.tableDesc.toTable(conf);
    Path location = table.getDataLocation();
    try {
        location.getFileSystem(conf).mkdirs(location);
    } catch (IOException e) {
        throw new HiveException(e);
    }
    table.setMaterializedTable(true);
    LOG.info(cteName + " will be materialized into " + location);
    cte.table = table;
    cte.source = analyzer;
    ctx.addMaterializedTable(cteName, table);
    return table;
}
Also used : HiveOperation (org.apache.hadoop.hive.ql.plan.HiveOperation), Path (org.apache.hadoop.fs.Path), ClassicToken (org.antlr.runtime.ClassicToken), Table (org.apache.hadoop.hive.ql.metadata.Table), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), IOException (java.io.IOException)
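
The HiveOperation-specific detail here is the snapshot-and-restore around the nested analysis: analyzeInternal treats the synthetic AST as a CREATE TABLE and overwrites the command type on the shared queryState, so the caller saves the current HiveOperation first and puts it back in a finally block. A generic sketch of that idiom, with a hypothetical MutableState standing in for Hive's QueryState:

public class SnapshotRestoreSketch {

    // Hypothetical stand-in for Hive's QueryState.
    static final class MutableState {
        private String commandType = "QUERY";
        String get() { return commandType; }
        void set(String t) { commandType = t; }
    }

    // A nested step that mutates the shared state as a side effect.
    static void nestedAnalysis(MutableState state) {
        state.set("CREATETABLE");
    }

    public static void main(String[] args) {
        MutableState state = new MutableState();
        String saved = state.get();       // snapshot before the nested call
        try {
            nestedAnalysis(state);
        } finally {
            state.set(saved);             // restore even if the nested call throws
        }
        System.out.println(state.get());  // prints QUERY
    }
}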

Example 3 with HiveOperation

Use of org.apache.hadoop.hive.ql.plan.HiveOperation in project incubator-atlas by apache.

From class HiveHook, method getProcessQualifiedName:

@VisibleForTesting
static String getProcessQualifiedName(HiveMetaStoreBridge dgiBridge, HiveEventContext eventContext, final SortedSet<ReadEntity> sortedHiveInputs, final SortedSet<WriteEntity> sortedHiveOutputs, SortedMap<ReadEntity, Referenceable> hiveInputsMap, SortedMap<WriteEntity, Referenceable> hiveOutputsMap) throws HiveException {
    HiveOperation op = eventContext.getOperation();
    if (isCreateOp(eventContext)) {
        Entity entity = getEntityByType(sortedHiveOutputs, Type.TABLE);
        if (entity != null) {
            Table outTable = entity.getTable();
            // refresh the table from the metastore before building its qualified name
            outTable = dgiBridge.hiveClient.getTable(outTable.getDbName(), outTable.getTableName());
            return HiveMetaStoreBridge.getTableProcessQualifiedName(dgiBridge.getClusterName(), outTable);
        }
    }
    StringBuilder buffer = new StringBuilder(op.getOperationName());
    boolean ignoreHDFSPathsinQFName = ignoreHDFSPathsinQFName(op, sortedHiveInputs, sortedHiveOutputs);
    if (ignoreHDFSPathsinQFName && LOG.isDebugEnabled()) {
        LOG.debug("Ignoring HDFS paths in qualifiedName for {} {} ", op, eventContext.getQueryStr());
    }
    addInputs(dgiBridge, op, sortedHiveInputs, buffer, hiveInputsMap, ignoreHDFSPathsinQFName);
    buffer.append(IO_SEP);
    addOutputs(dgiBridge, op, sortedHiveOutputs, buffer, hiveOutputsMap, ignoreHDFSPathsinQFName);
    LOG.info("Setting process qualified name to {}", buffer);
    return buffer.toString();
}
Also used : HiveOperation (org.apache.hadoop.hive.ql.plan.HiveOperation), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), Entity (org.apache.hadoop.hive.ql.hooks.Entity), Table (org.apache.hadoop.hive.ql.metadata.Table), VisibleForTesting (com.google.common.annotations.VisibleForTesting)
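
The name is built deterministically: the operation name, then the sorted inputs, then IO_SEP, then the sorted outputs, so the same logical process always produces the same qualified name regardless of the order in which entities were encountered. A simplified sketch of that construction; the ":" and "->" separators and the string-only signature are illustrative, not Atlas's actual constants or types:

import java.util.Arrays;
import java.util.SortedSet;
import java.util.TreeSet;

public class QualifiedNameSketch {

    private static final String IO_SEP = "->"; // illustrative separator

    // SortedSet inputs/outputs make the result independent of visit order.
    static String processQualifiedName(String opName,
                                       SortedSet<String> inputs,
                                       SortedSet<String> outputs) {
        StringBuilder buffer = new StringBuilder(opName);
        for (String in : inputs) {
            buffer.append(':').append(in);
        }
        buffer.append(IO_SEP);
        for (String out : outputs) {
            buffer.append(':').append(out);
        }
        return buffer.toString();
    }

    public static void main(String[] args) {
        SortedSet<String> inputs = new TreeSet<>(Arrays.asList("db.t2", "db.t1"));
        SortedSet<String> outputs = new TreeSet<>(Arrays.asList("db.t3"));
        System.out.println(processQualifiedName("QUERY", inputs, outputs));
        // prints QUERY:db.t1:db.t2->:db.t3
    }
}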

Example 4 with HiveOperation

Use of org.apache.hadoop.hive.ql.plan.HiveOperation in project hive by apache.

From class CalcitePlanner, method materializeCTE (an @Override of the SemanticAnalyzer version in Example 2):

@Override
Table materializeCTE(String cteName, CTEClause cte) throws HiveException {
    ASTNode createTable = new ASTNode(new ClassicToken(HiveParser.TOK_CREATETABLE));
    ASTNode tableName = new ASTNode(new ClassicToken(HiveParser.TOK_TABNAME));
    tableName.addChild(new ASTNode(new ClassicToken(HiveParser.Identifier, cteName)));
    ASTNode temporary = new ASTNode(new ClassicToken(HiveParser.KW_TEMPORARY, MATERIALIZATION_MARKER));
    createTable.addChild(tableName);
    createTable.addChild(temporary);
    createTable.addChild(cte.cteNode);
    CalcitePlanner analyzer = new CalcitePlanner(queryState);
    analyzer.initCtx(ctx);
    analyzer.init(false);
    // should share cte contexts
    analyzer.aliasToCTEs.putAll(aliasToCTEs);
    HiveOperation operation = queryState.getHiveOperation();
    try {
        analyzer.analyzeInternal(createTable);
    } finally {
        queryState.setCommandType(operation);
    }
    Table table = analyzer.tableDesc.toTable(conf);
    Path location = table.getDataLocation();
    try {
        location.getFileSystem(conf).mkdirs(location);
    } catch (IOException e) {
        throw new HiveException(e);
    }
    table.setMaterializedTable(true);
    LOG.info(cteName + " will be materialized into " + location);
    cte.table = table;
    cte.source = analyzer;
    ctx.addMaterializedTable(cteName, table);
    // For CalcitePlanner, store qualified name too
    ctx.addMaterializedTable(table.getDbName() + "." + table.getTableName(), table);
    return table;
}
Also used : HiveOperation (org.apache.hadoop.hive.ql.plan.HiveOperation), Path (org.apache.hadoop.fs.Path), PlanModifierForReturnPath (org.apache.hadoop.hive.ql.optimizer.calcite.translator.PlanModifierForReturnPath), ClassicToken (org.antlr.runtime.ClassicToken), DruidTable (org.apache.calcite.adapter.druid.DruidTable), RelOptHiveTable (org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable), Table (org.apache.hadoop.hive.ql.metadata.Table), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), IOException (java.io.IOException)

Example 5 with HiveOperation

Use of org.apache.hadoop.hive.ql.plan.HiveOperation in project hive by apache.

From class SemanticAnalyzerFactory, method get:

public static BaseSemanticAnalyzer get(QueryState queryState, ASTNode tree) throws SemanticException {
    if (tree.getToken() == null) {
        throw new RuntimeException("Empty Syntax Tree");
    } else {
        HiveOperation opType = commandType.get(tree.getType());
        // Record the mapped operation on the query state; specific branches
        // below may refine it (e.g. for ALTER TABLE variants).
        queryState.setCommandType(opType);
        switch(tree.getType()) {
            case HiveParser.TOK_EXPLAIN:
                return new ExplainSemanticAnalyzer(queryState);
            case HiveParser.TOK_EXPLAIN_SQ_REWRITE:
                return new ExplainSQRewriteSemanticAnalyzer(queryState);
            case HiveParser.TOK_LOAD:
                return new LoadSemanticAnalyzer(queryState);
            case HiveParser.TOK_EXPORT:
                return new ExportSemanticAnalyzer(queryState);
            case HiveParser.TOK_IMPORT:
                return new ImportSemanticAnalyzer(queryState);
            case HiveParser.TOK_REPL_DUMP:
                return new ReplicationSemanticAnalyzer(queryState);
            case HiveParser.TOK_REPL_LOAD:
                return new ReplicationSemanticAnalyzer(queryState);
            case HiveParser.TOK_REPL_STATUS:
                return new ReplicationSemanticAnalyzer(queryState);
            case HiveParser.TOK_ALTERTABLE:
                {
                    Tree child = tree.getChild(1);
                    switch(child.getType()) {
                        case HiveParser.TOK_ALTERTABLE_RENAME:
                        case HiveParser.TOK_ALTERTABLE_TOUCH:
                        case HiveParser.TOK_ALTERTABLE_ARCHIVE:
                        case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
                        case HiveParser.TOK_ALTERTABLE_ADDCOLS:
                        case HiveParser.TOK_ALTERTABLE_RENAMECOL:
                        case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
                        case HiveParser.TOK_ALTERTABLE_DROPPARTS:
                        case HiveParser.TOK_ALTERTABLE_ADDPARTS:
                        case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE:
                        case HiveParser.TOK_ALTERTABLE_PROPERTIES:
                        case HiveParser.TOK_ALTERTABLE_DROPPROPERTIES:
                        case HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
                        case HiveParser.TOK_ALTERTABLE_SKEWED:
                        case HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT:
                        case HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT:
                            queryState.setCommandType(commandType.get(child.getType()));
                            return new DDLSemanticAnalyzer(queryState);
                    }
                    // Index 1 picks the partition-level operation variant when the
                    // ALTER TABLE statement carries a partition spec (a third child).
                    opType = tablePartitionCommandType.get(child.getType())[tree.getChildCount() > 2 ? 1 : 0];
                    queryState.setCommandType(opType);
                    return new DDLSemanticAnalyzer(queryState);
                }
            case HiveParser.TOK_ALTERVIEW:
                {
                    Tree child = tree.getChild(1);
                    switch(child.getType()) {
                        case HiveParser.TOK_ALTERVIEW_PROPERTIES:
                        case HiveParser.TOK_ALTERVIEW_DROPPROPERTIES:
                        case HiveParser.TOK_ALTERVIEW_ADDPARTS:
                        case HiveParser.TOK_ALTERVIEW_DROPPARTS:
                        case HiveParser.TOK_ALTERVIEW_RENAME:
                            opType = commandType.get(child.getType());
                            queryState.setCommandType(opType);
                            return new DDLSemanticAnalyzer(queryState);
                    }
                    // TOK_ALTERVIEW_AS
                    assert child.getType() == HiveParser.TOK_QUERY;
                    queryState.setCommandType(HiveOperation.ALTERVIEW_AS);
                    return new SemanticAnalyzer(queryState);
                }
            case HiveParser.TOK_CREATEDATABASE:
            case HiveParser.TOK_DROPDATABASE:
            case HiveParser.TOK_SWITCHDATABASE:
            case HiveParser.TOK_DROPTABLE:
            case HiveParser.TOK_DROPVIEW:
            case HiveParser.TOK_DROP_MATERIALIZED_VIEW:
            case HiveParser.TOK_DESCDATABASE:
            case HiveParser.TOK_DESCTABLE:
            case HiveParser.TOK_DESCFUNCTION:
            case HiveParser.TOK_MSCK:
            case HiveParser.TOK_ALTERINDEX_REBUILD:
            case HiveParser.TOK_ALTERINDEX_PROPERTIES:
            case HiveParser.TOK_SHOWDATABASES:
            case HiveParser.TOK_SHOWTABLES:
            case HiveParser.TOK_SHOWCOLUMNS:
            case HiveParser.TOK_SHOW_TABLESTATUS:
            case HiveParser.TOK_SHOW_TBLPROPERTIES:
            case HiveParser.TOK_SHOW_CREATEDATABASE:
            case HiveParser.TOK_SHOW_CREATETABLE:
            case HiveParser.TOK_SHOWFUNCTIONS:
            case HiveParser.TOK_SHOWPARTITIONS:
            case HiveParser.TOK_SHOWINDEXES:
            case HiveParser.TOK_SHOWLOCKS:
            case HiveParser.TOK_SHOWDBLOCKS:
            case HiveParser.TOK_SHOW_COMPACTIONS:
            case HiveParser.TOK_SHOW_TRANSACTIONS:
            case HiveParser.TOK_ABORT_TRANSACTIONS:
            case HiveParser.TOK_SHOWCONF:
            case HiveParser.TOK_SHOWVIEWS:
            case HiveParser.TOK_CREATEINDEX:
            case HiveParser.TOK_DROPINDEX:
            case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
            case HiveParser.TOK_LOCKTABLE:
            case HiveParser.TOK_UNLOCKTABLE:
            case HiveParser.TOK_LOCKDB:
            case HiveParser.TOK_UNLOCKDB:
            case HiveParser.TOK_CREATEROLE:
            case HiveParser.TOK_DROPROLE:
            case HiveParser.TOK_GRANT:
            case HiveParser.TOK_REVOKE:
            case HiveParser.TOK_SHOW_GRANT:
            case HiveParser.TOK_GRANT_ROLE:
            case HiveParser.TOK_REVOKE_ROLE:
            case HiveParser.TOK_SHOW_ROLE_GRANT:
            case HiveParser.TOK_SHOW_ROLE_PRINCIPALS:
            case HiveParser.TOK_SHOW_ROLES:
            case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
            case HiveParser.TOK_ALTERDATABASE_OWNER:
            case HiveParser.TOK_TRUNCATETABLE:
            case HiveParser.TOK_SHOW_SET_ROLE:
            case HiveParser.TOK_CACHE_METADATA:
                return new DDLSemanticAnalyzer(queryState);
            case HiveParser.TOK_CREATEFUNCTION:
            case HiveParser.TOK_DROPFUNCTION:
            case HiveParser.TOK_RELOADFUNCTION:
                return new FunctionSemanticAnalyzer(queryState);
            case HiveParser.TOK_ANALYZE:
                return new ColumnStatsSemanticAnalyzer(queryState);
            case HiveParser.TOK_CREATEMACRO:
            case HiveParser.TOK_DROPMACRO:
                return new MacroSemanticAnalyzer(queryState);
            case HiveParser.TOK_UPDATE_TABLE:
            case HiveParser.TOK_DELETE_FROM:
            case HiveParser.TOK_MERGE:
                return new UpdateDeleteSemanticAnalyzer(queryState);
            case HiveParser.TOK_START_TRANSACTION:
            case HiveParser.TOK_COMMIT:
            case HiveParser.TOK_ROLLBACK:
            case HiveParser.TOK_SET_AUTOCOMMIT:
            default:
                {
                    SemanticAnalyzer semAnalyzer = HiveConf.getBoolVar(queryState.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED) ? new CalcitePlanner(queryState) : new SemanticAnalyzer(queryState);
                    return semAnalyzer;
                }
        }
    }
}
Also used : HiveOperation (org.apache.hadoop.hive.ql.plan.HiveOperation), Tree (org.antlr.runtime.tree.Tree)
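
The factory is a lookup table plus a switch: the static commandType map translates the root ANTLR token type into a HiveOperation recorded on the query state, and the switch over tree.getType() then picks the analyzer subclass. A stripped-down sketch of the same dispatch shape; the token constants and analyzer classes below are stand-ins, not Hive's:

import java.util.HashMap;
import java.util.Map;

public class AnalyzerFactorySketch {

    // Illustrative token constants; Hive's come from the ANTLR grammar.
    static final int TOK_LOAD = 1, TOK_EXPLAIN = 2, TOK_QUERY = 3;

    interface Analyzer {}
    static class LoadAnalyzer implements Analyzer {}
    static class ExplainAnalyzer implements Analyzer {}
    static class DefaultAnalyzer implements Analyzer {}

    // Token type -> command name, mirroring the factory's commandType map.
    static final Map<Integer, String> COMMAND_TYPE = new HashMap<>();
    static {
        COMMAND_TYPE.put(TOK_LOAD, "LOAD");
        COMMAND_TYPE.put(TOK_EXPLAIN, "EXPLAIN");
        COMMAND_TYPE.put(TOK_QUERY, "QUERY");
    }

    static Analyzer get(int tokenType) {
        // Record the command type first, as the real factory does, then
        // dispatch on the token type to choose the analyzer.
        System.out.println("command type = " + COMMAND_TYPE.get(tokenType));
        switch (tokenType) {
            case TOK_LOAD:
                return new LoadAnalyzer();
            case TOK_EXPLAIN:
                return new ExplainAnalyzer();
            default:
                return new DefaultAnalyzer();
        }
    }

    public static void main(String[] args) {
        // prints "command type = EXPLAIN" then "ExplainAnalyzer"
        System.out.println(get(TOK_EXPLAIN).getClass().getSimpleName());
    }
}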

Aggregations

HiveOperation (org.apache.hadoop.hive.ql.plan.HiveOperation): 5 usages
Table (org.apache.hadoop.hive.ql.metadata.Table): 3 usages
IOException (java.io.IOException): 2 usages
ClassicToken (org.antlr.runtime.ClassicToken): 2 usages
Path (org.apache.hadoop.fs.Path): 2 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 2 usages
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 1 usage
InvocationTargetException (java.lang.reflect.InvocationTargetException): 1 usage
ArrayList (java.util.ArrayList): 1 usage
Tree (org.antlr.runtime.tree.Tree): 1 usage
DruidTable (org.apache.calcite.adapter.druid.DruidTable): 1 usage
Entity (org.apache.hadoop.hive.ql.hooks.Entity): 1 usage
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 1 usage
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 1 usage
RelOptHiveTable (org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable): 1 usage
PlanModifierForReturnPath (org.apache.hadoop.hive.ql.optimizer.calcite.translator.PlanModifierForReturnPath): 1 usage
BaseSemanticAnalyzer (org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer): 1 usage
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 1 usage
AuthorizationFactory (org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory): 1 usage
JSONException (org.json.JSONException): 1 usage