Usage of org.apache.hadoop.hive.ql.QueryState in the Apache Hive project:
the run method of the ATSHook class.
/**
 * Hook entry point: snapshots query metadata (query id, perf-logger durations,
 * config) on the calling thread, then asynchronously builds and fires an ATS
 * (YARN Application Timeline Server) event describing the query — a pre-exec
 * event with the explain plan, or a post-exec/failure event with durations.
 *
 * All failures are logged and swallowed so that ATS publishing can never fail
 * the query itself.
 */
@Override
public void run(final HookContext hookContext) throws Exception {
    // Capture wall-clock time and a private copy of the conf up front, on the
    // hook thread, since the async task below may run much later.
    final long currentTime = System.currentTimeMillis();
    final HiveConf conf = new HiveConf(hookContext.getConf());
    final QueryState queryState = hookContext.getQueryState();
    final String queryId = queryState.getQueryId();
    // Snapshot per-phase durations now; the PerfLogger state may change after
    // this hook returns.
    final Map<String, Long> durations = new HashMap<String, Long>();
    for (String key : hookContext.getPerfLogger().getEndTimes().keySet()) {
        durations.put(key, hookContext.getPerfLogger().getDuration(key));
    }
    try {
        setupAtsExecutor(conf);
        // Timeline domain controls read/write ACLs for the published entity.
        final String domainId = createOrGetDomain(hookContext);
        // Build and publish the event off-thread so the query is not blocked
        // on ATS availability or explain-plan serialization.
        executor.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    QueryPlan plan = hookContext.getQueryPlan();
                    if (plan == null) {
                        return;
                    }
                    // NOTE(review): this local shadows the outer captured
                    // `queryId` (from QueryState); presumably they hold the
                    // same id — confirm, or drop one of them.
                    String queryId = plan.getQueryId();
                    String opId = hookContext.getOperationId();
                    long queryStartTime = plan.getQueryStartTime();
                    String user = hookContext.getUgi().getShortUserName();
                    // Prefer the explicitly supplied user name; fall back to
                    // the UGI identity when none was set on the context.
                    String requestuser = hookContext.getUserName();
                    if (hookContext.getUserName() == null) {
                        requestuser = hookContext.getUgi().getUserName();
                    }
                    int numMrJobs = Utilities.getMRTasks(plan.getRootTasks()).size();
                    int numTezJobs = Utilities.getTezTasks(plan.getRootTasks()).size();
                    if (numMrJobs + numTezJobs <= 0) {
                        // ignore client only queries (no MR/Tez work launched)
                        return;
                    }
                    switch(hookContext.getHookType()) {
                        case PRE_EXEC_HOOK:
                            // Serialize the query plan as formatted (JSON)
                            // explain output for inclusion in the event.
                            ExplainConfiguration config = new ExplainConfiguration();
                            config.setFormatted(true);
                            ExplainWork work = new ExplainWork(
                                null,                 // resFile
                                null,                 // pCtx
                                plan.getRootTasks(),  // rootTasks
                                plan.getFetchTask(),  // fetchTask
                                null,                 // analyzer
                                config,               // explainConfig
                                null);                // cboInfo
                            @SuppressWarnings("unchecked") ExplainTask explain = (ExplainTask) TaskFactory.get(work, conf);
                            explain.initialize(queryState, plan, null, null);
                            String query = plan.getQueryStr();
                            JSONObject explainPlan = explain.getJSONPlan(null, work);
                            String logID = conf.getLogIdVar(hookContext.getSessionId());
                            List<String> tablesRead = getTablesFromEntitySet(hookContext.getInputs());
                            List<String> tablesWritten = getTablesFromEntitySet(hookContext.getOutputs());
                            String executionMode = getExecutionMode(plan).name();
                            // Fall back to the local host address when the
                            // context does not carry an instance address.
                            String hiveInstanceAddress = hookContext.getHiveInstanceAddress();
                            if (hiveInstanceAddress == null) {
                                hiveInstanceAddress = InetAddress.getLocalHost().getHostAddress();
                            }
                            String hiveInstanceType = hookContext.isHiveServerQuery() ? "HS2" : "CLI";
                            ApplicationId llapId = determineLlapId(conf, plan);
                            fireAndForget(createPreHookEvent(queryId, query, explainPlan, queryStartTime, user, requestuser, numMrJobs, numTezJobs, opId, hookContext.getIpAddress(), hiveInstanceAddress, hiveInstanceType, hookContext.getSessionId(), logID, hookContext.getThreadId(), executionMode, tablesRead, tablesWritten, conf, llapId, domainId));
                            break;
                        case POST_EXEC_HOOK:
                            // success=true distinguishes this from the failure case below
                            fireAndForget(createPostHookEvent(queryId, currentTime, user, requestuser, true, opId, durations, domainId));
                            break;
                        case ON_FAILURE_HOOK:
                            fireAndForget(createPostHookEvent(queryId, currentTime, user, requestuser, false, opId, durations, domainId));
                            break;
                        default:
                            //ignore
                            break;
                    }
                } catch (Exception e) {
                    // Best-effort publishing: never let ATS problems propagate.
                    LOG.warn("Failed to submit plan to ATS for " + queryId, e);
                }
            }
        });
    } catch (Exception e) {
        // Executor setup / domain creation failed; log and continue the query.
        LOG.warn("Failed to submit to ATS for " + queryId, e);
    }
}
Aggregations