use of org.apache.hadoop.hive.ql.parse.ExplainConfiguration in project hive by apache.
the class TestExplainTask method explainToString.
private <K, V> String explainToString(Map<K, V> explainMap) throws Exception {
  ExplainWork work = new ExplainWork();
  ParseContext pCtx = new ParseContext();
  HashMap<String, TableScanOperator> topOps = new HashMap<>();
  TableScanOperator scanOp = new DummyOperator(new DummyExplainDesc<K, V>(explainMap));
  topOps.put("sample", scanOp);
  pCtx.setTopOps(topOps);
  work.setParseContext(pCtx);
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  work.setConfig(new ExplainConfiguration());
  ExplainTask newExplainTask = new ExplainTask();
  newExplainTask.queryState = uut.queryState;
  newExplainTask.getJSONLogicalPlan(new PrintStream(baos), work);
  baos.close();
  return baos.toString();
}
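A minimal sketch of how a test might call this helper (illustrative only; the map contents and assertion are hypothetical and not taken from the actual TestExplainTask cases, and JUnit's assertTrue plus java.util.LinkedHashMap are assumed to be imported):

  // Hypothetical usage of explainToString(): the map entries should show up
  // in the logical-plan JSON emitted by ExplainTask.getJSONLogicalPlan().
  Map<String, String> explainMap = new LinkedHashMap<>();
  explainMap.put("sampleKey", "sampleValue");
  String json = explainToString(explainMap);
  assertTrue(json.contains("sampleValue"));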
use of org.apache.hadoop.hive.ql.parse.ExplainConfiguration in project hive by apache.
the class ATSHook method run.
@Override
public void run(final HookContext hookContext) throws Exception {
  final long currentTime = System.currentTimeMillis();
  final HiveConf conf = new HiveConf(hookContext.getConf());
  final QueryState queryState = hookContext.getQueryState();
  final String queryId = queryState.getQueryId();
  final Map<String, Long> durations = new HashMap<String, Long>();
  for (String key : hookContext.getPerfLogger().getEndTimes().keySet()) {
    durations.put(key, hookContext.getPerfLogger().getDuration(key));
  }
  try {
    setupAtsExecutor(conf);
    final String domainId = createOrGetDomain(hookContext);
    executor.submit(new Runnable() {
      @Override
      public void run() {
        try {
          QueryPlan plan = hookContext.getQueryPlan();
          if (plan == null) {
            return;
          }
          String queryId = plan.getQueryId();
          String opId = hookContext.getOperationId();
          long queryStartTime = plan.getQueryStartTime();
          String user = hookContext.getUgi().getShortUserName();
          String requestuser = hookContext.getUserName();
          if (hookContext.getUserName() == null) {
            requestuser = hookContext.getUgi().getUserName();
          }
          int numMrJobs = Utilities.getMRTasks(plan.getRootTasks()).size();
          int numTezJobs = Utilities.getTezTasks(plan.getRootTasks()).size();
          if (numMrJobs + numTezJobs <= 0) {
            // ignore client-only queries
            return;
          }
          switch (hookContext.getHookType()) {
            case PRE_EXEC_HOOK:
              ExplainConfiguration config = new ExplainConfiguration();
              config.setFormatted(true);
              ExplainWork work = new ExplainWork(
                  null,                 // resFile
                  null,                 // pCtx
                  plan.getRootTasks(),  // rootTasks
                  plan.getFetchTask(),  // fetchTask
                  null,                 // analyzer
                  config,               // explainConfig
                  null);                // cboInfo
              @SuppressWarnings("unchecked")
              ExplainTask explain = (ExplainTask) TaskFactory.get(work);
              explain.initialize(queryState, plan, null, null);
              String query = plan.getQueryStr();
              JSONObject explainPlan = explain.getJSONPlan(null, work);
              String logID = conf.getLogIdVar(hookContext.getSessionId());
              List<String> tablesRead = getTablesFromEntitySet(hookContext.getInputs());
              List<String> tablesWritten = getTablesFromEntitySet(hookContext.getOutputs());
              String executionMode = getExecutionMode(plan).name();
              String hiveInstanceAddress = hookContext.getHiveInstanceAddress();
              if (hiveInstanceAddress == null) {
                hiveInstanceAddress = InetAddress.getLocalHost().getHostAddress();
              }
              String hiveInstanceType = hookContext.isHiveServerQuery() ? "HS2" : "CLI";
              ApplicationId llapId = determineLlapId(conf, plan);
              fireAndForget(createPreHookEvent(queryId, query, explainPlan, queryStartTime, user,
                  requestuser, numMrJobs, numTezJobs, opId, hookContext.getIpAddress(),
                  hiveInstanceAddress, hiveInstanceType, hookContext.getSessionId(), logID,
                  hookContext.getThreadId(), executionMode, tablesRead, tablesWritten, conf,
                  llapId, domainId));
              break;
            case POST_EXEC_HOOK:
              fireAndForget(createPostHookEvent(queryId, currentTime, user, requestuser, true,
                  opId, durations, domainId));
              break;
            case ON_FAILURE_HOOK:
              fireAndForget(createPostHookEvent(queryId, currentTime, user, requestuser, false,
                  opId, durations, domainId));
              break;
            default:
              // ignore
              break;
          }
        } catch (Exception e) {
          LOG.warn("Failed to submit plan to ATS for " + queryId, e);
        }
      }
    });
  } catch (Exception e) {
    LOG.warn("Failed to submit to ATS for " + queryId, e);
  }
}
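For context, this hook only fires if ATSHook is registered with the session's HiveConf; below is a minimal sketch of wiring it up for the three hook points handled above, using Hive's standard hook properties but set programmatically here purely for illustration:

  // Register ATSHook so run() is invoked at PRE_EXEC_HOOK, POST_EXEC_HOOK and ON_FAILURE_HOOK.
  HiveConf conf = new HiveConf();
  conf.set("hive.exec.pre.hooks", "org.apache.hadoop.hive.ql.hooks.ATSHook");
  conf.set("hive.exec.post.hooks", "org.apache.hadoop.hive.ql.hooks.ATSHook");
  conf.set("hive.exec.failure.hooks", "org.apache.hadoop.hive.ql.hooks.ATSHook");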
use of org.apache.hadoop.hive.ql.parse.ExplainConfiguration in project hive by apache.
the class SparkPlan method getLongFormCallSite.
/**
 * Takes a {@link SparkTran} object and creates the long form of the RDD's {@link CallSite}.
 * It does this by creating an {@link ExplainTask} and running it over the
 * {@link SparkTran#getBaseWork()} object. The explain output is serialized to a string,
 * which is logged and returned. If any errors are encountered while creating the explain plan,
 * an error message is simply logged, but no {@link Exception} is thrown.
 *
 * @param tran the {@link SparkTran} to create the long call site for
 *
 * @return a {@link String} containing the explain plan for the given {@link SparkTran}
 */
private String getLongFormCallSite(SparkTran tran) {
  if (this.jobConf.getBoolean(HiveConf.ConfVars.HIVE_SPARK_LOG_EXPLAIN_WEBUI.varname,
      HiveConf.ConfVars.HIVE_SPARK_LOG_EXPLAIN_WEBUI.defaultBoolVal)) {
    perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.SPARK_CREATE_EXPLAIN_PLAN + tran.getName());
    ExplainWork explainWork = new ExplainWork();
    explainWork.setConfig(new ExplainConfiguration());
    ExplainTask explainTask = new ExplainTask();
    explainTask.setWork(explainWork);
    String explainOutput = "";
    try {
      ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
      explainTask.outputPlan(tran.getBaseWork(), new PrintStream(outputStream), false, false, 0, null,
          this.jobConf.getBoolean(HiveConf.ConfVars.HIVE_IN_TEST.varname,
              HiveConf.ConfVars.HIVE_IN_TEST.defaultBoolVal));
      explainOutput = StringUtils.abbreviate(tran.getName() + " Explain Plan:\n\n"
          + outputStream.toString(), 100000);
      LOG.debug(explainOutput);
    } catch (Exception e) {
      LOG.error("Error while generating explain plan for " + tran.getName(), e);
    }
    perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.SPARK_CREATE_EXPLAIN_PLAN + tran.getName());
    return explainOutput;
  }
  return "";
}
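The whole branch above is gated by the hive.spark.log.explain.webui flag read from the job's JobConf; a minimal sketch of enabling it is shown below (illustrative, not taken from SparkPlan itself, and assuming a HiveConf instance named hiveConf is at hand):

  // Turn on explain-plan logging for the Spark web UI so getLongFormCallSite()
  // returns the serialized plan instead of an empty string.
  JobConf jobConf = new JobConf(hiveConf);
  jobConf.setBoolean(HiveConf.ConfVars.HIVE_SPARK_LOG_EXPLAIN_WEBUI.varname, true);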