Search in sources :

Example 16 with QueryState

Use of org.apache.hadoop.hive.ql.QueryState in project hive by apache.

The following example shows the run method of the ATSHook class.

/**
 * Hook entry point: snapshots the query/hook state on the calling thread and
 * asynchronously publishes a pre-exec, post-exec, or failure event for this
 * query to the ATS timeline. Queries that launch no MR or Tez jobs are
 * skipped. All errors are caught and logged so ATS reporting can never fail
 * the query itself.
 *
 * @param hookContext the Hive hook context for the current query
 * @throws Exception declared by the overridden hook interface method; in
 *         practice submission errors are caught and logged, not propagated
 */
@Override
public void run(final HookContext hookContext) throws Exception {
    // Capture state now: the actual ATS submission runs asynchronously below,
    // by which time the hook context / perf logger may have moved on.
    final long currentTime = System.currentTimeMillis();
    final HiveConf conf = new HiveConf(hookContext.getConf());
    final QueryState queryState = hookContext.getQueryState();
    final String queryId = queryState.getQueryId();
    // Snapshot per-phase durations from the PerfLogger before handing off.
    final Map<String, Long> durations = new HashMap<String, Long>();
    for (String key : hookContext.getPerfLogger().getEndTimes().keySet()) {
        durations.put(key, hookContext.getPerfLogger().getDuration(key));
    }
    try {
        setupAtsExecutor(conf);
        final String domainId = createOrGetDomain(hookContext);
        executor.submit(new Runnable() {

            @Override
            public void run() {
                try {
                    QueryPlan plan = hookContext.getQueryPlan();
                    if (plan == null) {
                        return;
                    }
                    String queryId = plan.getQueryId();
                    String opId = hookContext.getOperationId();
                    long queryStartTime = plan.getQueryStartTime();
                    String user = hookContext.getUgi().getShortUserName();
                    // Prefer the explicitly supplied request user; fall back
                    // to the UGI user when none was provided.
                    String requestuser = hookContext.getUserName();
                    if (requestuser == null) {
                        requestuser = hookContext.getUgi().getUserName();
                    }
                    int numMrJobs = Utilities.getMRTasks(plan.getRootTasks()).size();
                    int numTezJobs = Utilities.getTezTasks(plan.getRootTasks()).size();
                    if (numMrJobs + numTezJobs <= 0) {
                        // Client-only queries (no MR/Tez tasks) are not reported to ATS.
                        return;
                    }
                    switch (hookContext.getHookType()) {
                        case PRE_EXEC_HOOK:
                            ExplainConfiguration config = new ExplainConfiguration();
                            config.setFormatted(true);
                            // Positional constructor — each argument labeled.
                            ExplainWork work = new ExplainWork(
                                null,                 // resFile
                                null,                 // pCtx
                                plan.getRootTasks(),  // rootTasks
                                plan.getFetchTask(),  // fetchTask
                                null,                 // analyzer
                                config,               // explainConfig
                                null);                // cboInfo
                            @SuppressWarnings("unchecked")
                            ExplainTask explain = (ExplainTask) TaskFactory.get(work, conf);
                            explain.initialize(queryState, plan, null, null);
                            String query = plan.getQueryStr();
                            JSONObject explainPlan = explain.getJSONPlan(null, work);
                            String logID = conf.getLogIdVar(hookContext.getSessionId());
                            List<String> tablesRead = getTablesFromEntitySet(hookContext.getInputs());
                            List<String> tablesWritten = getTablesFromEntitySet(hookContext.getOutputs());
                            String executionMode = getExecutionMode(plan).name();
                            String hiveInstanceAddress = hookContext.getHiveInstanceAddress();
                            if (hiveInstanceAddress == null) {
                                hiveInstanceAddress = InetAddress.getLocalHost().getHostAddress();
                            }
                            String hiveInstanceType = hookContext.isHiveServerQuery() ? "HS2" : "CLI";
                            ApplicationId llapId = determineLlapId(conf, plan);
                            fireAndForget(createPreHookEvent(queryId, query, explainPlan, queryStartTime, user, requestuser, numMrJobs, numTezJobs, opId, hookContext.getIpAddress(), hiveInstanceAddress, hiveInstanceType, hookContext.getSessionId(), logID, hookContext.getThreadId(), executionMode, tablesRead, tablesWritten, conf, llapId, domainId));
                            break;
                        case POST_EXEC_HOOK:
                            fireAndForget(createPostHookEvent(queryId, currentTime, user, requestuser, true, opId, durations, domainId));
                            break;
                        case ON_FAILURE_HOOK:
                            fireAndForget(createPostHookEvent(queryId, currentTime, user, requestuser, false, opId, durations, domainId));
                            break;
                        default:
                            // Unknown hook type — nothing to report.
                            break;
                    }
                } catch (Exception e) {
                    // Never let ATS reporting affect the query; log and move on.
                    LOG.warn("Failed to submit plan to ATS for " + queryId, e);
                }
            }
        });
    } catch (Exception e) {
        LOG.warn("Failed to submit to ATS for " + queryId, e);
    }
}
Also used : ExplainConfiguration(org.apache.hadoop.hive.ql.parse.ExplainConfiguration) ExplainTask(org.apache.hadoop.hive.ql.exec.ExplainTask) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) ExplainWork(org.apache.hadoop.hive.ql.plan.ExplainWork) QueryState(org.apache.hadoop.hive.ql.QueryState) QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) IOException(java.io.IOException) JSONObject(org.json.JSONObject) HiveConf(org.apache.hadoop.hive.conf.HiveConf) ArrayList(java.util.ArrayList) List(java.util.List) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)

Aggregations

QueryState (org.apache.hadoop.hive.ql.QueryState)16 HiveConf (org.apache.hadoop.hive.conf.HiveConf)9 Hive (org.apache.hadoop.hive.ql.metadata.Hive)6 Before (org.junit.Before)6 Table (org.apache.hadoop.hive.ql.metadata.Table)5 ArrayList (java.util.ArrayList)4 Context (org.apache.hadoop.hive.ql.Context)4 BeforeClass (org.junit.BeforeClass)4 Partition (org.apache.hadoop.hive.ql.metadata.Partition)3 IOException (java.io.IOException)2 HashMap (java.util.HashMap)2 Path (org.apache.hadoop.fs.Path)2 CompilationOpContext (org.apache.hadoop.hive.ql.CompilationOpContext)2 LinkedHashMap (java.util.LinkedHashMap)1 List (java.util.List)1 FileStatus (org.apache.hadoop.fs.FileStatus)1 FileSystem (org.apache.hadoop.fs.FileSystem)1 FetchConverter (org.apache.hadoop.hive.common.io.FetchConverter)1 DriverContext (org.apache.hadoop.hive.ql.DriverContext)1 QueryPlan (org.apache.hadoop.hive.ql.QueryPlan)1