Usage of models.AppHeuristicResult in the dr-elephant project by LinkedIn — class Application, method restJobGraphData:
/**
 * The data for plotting the job history graph. While plotting the job history
 * graph an ajax call is made to this to fetch the graph data.
 *
 * Data Returned:
 * <pre>
 * {@code
 * [
 *   {
 *     "flowtime": <Last job's finish time>,
 *     "score": 1000,
 *     "stagescores": [
 *       { "stageid": "id", "stagescore": 500 },
 *       { "stageid": "id", "stagescore": 500 }
 *     ]
 *   },
 *   {
 *     "flowtime": <Last job's finish time>,
 *     "score": 700,
 *     "stagescores": [
 *       { "stageid": "id", "stagescore": 0 },
 *       { "stageid": "id", "stagescore": 700 }
 *     ]
 *   }
 * ]
 * }
 * </pre>
 *
 * @param jobDefId the job definition id whose execution history should be plotted
 * @return an OK result whose body is the JSON graph data; an empty JSON array
 *         when jobDefId is null or empty
 */
public static Result restJobGraphData(String jobDefId) {
  JsonArray datasets = new JsonArray();
  if (jobDefId == null || jobDefId.isEmpty()) {
    return ok(new Gson().toJson(datasets));
  }

  // Fetch available flow executions with latest JOB_HISTORY_LIMIT mr jobs.
  List<AppResult> results = getRestJobAppResults(jobDefId);
  if (results.isEmpty()) {
    logger.info("No results for Job url");
  }
  Map<IdUrlPair, List<AppResult>> flowExecIdToJobsMap = ControllerUtil.limitHistoryResults(
      ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.FLOW_EXECUTION_ID),
      results.size(), MAX_HISTORY_LIMIT);

  // Compute the graph data starting from the earliest available execution to latest.
  List<IdUrlPair> keyList = new ArrayList<IdUrlPair>(flowExecIdToJobsMap.keySet());
  for (int i = keyList.size() - 1; i >= 0; i--) {
    IdUrlPair flowExecPair = keyList.get(i);
    int jobPerfScore = 0;
    JsonArray stageScores = new JsonArray();
    List<AppResult> mrJobsList = flowExecIdToJobsMap.get(flowExecPair);
    for (AppResult appResult : mrJobsList) {
      // Each MR job triggered by jobDefId for flowExecId. Its score is the sum
      // of the scores of all heuristics that ran on it.
      int mrPerfScore = 0;
      for (AppHeuristicResult appHeuristicResult : appResult.yarnAppHeuristicResults) {
        mrPerfScore += appHeuristicResult.score;
      }
      // A particular mr stage.
      JsonObject stageScore = new JsonObject();
      stageScore.addProperty("stageid", appResult.id);
      stageScore.addProperty("stagescore", mrPerfScore);
      stageScores.add(stageScore);
      jobPerfScore += mrPerfScore;
    }
    // Execution record. The original code built a Guava reversed view only to
    // read its last element — which is exactly the first element of the
    // unreversed list, so we read index 0 directly.
    JsonObject dataset = new JsonObject();
    dataset.addProperty("flowtime", Utils.getFlowTime(mrJobsList.get(0)));
    dataset.addProperty("score", jobPerfScore);
    dataset.add("stagescores", stageScores);
    datasets.add(dataset);
  }
  JsonArray sortedDatasets = Utils.sortJsonArray(datasets);
  return ok(new Gson().toJson(sortedDatasets));
}
Usage of models.AppHeuristicResult in the dr-elephant project by LinkedIn — class Web, method search:
/**
 * Returns the search results for the given query.
 *
 * @return JsonObject of the shape:
 * <pre>
 * {
 *   search-results: {
 *     id: "id",
 *     start: 0,
 *     end: 20,
 *     total: 0,
 *     summaries: [
 *       { application_summary_object }
 *     ]
 *   }
 * }
 * </pre>
 */
public static Result search() {
  DynamicForm form = Form.form().bindFromRequest(request());
  JsonObject parent = new JsonObject();
  int offset = SEARCH_DEFAULT_PAGE_OFFSET;
  int limit = SEARCH_DEFAULT_PAGE_LIMIT;
  int end = 0;
  int total = 0;
  // Bug fix: the original used reference comparison (form.get("offset") != ""),
  // which is true even for an actual empty string, so Integer.valueOf("") would
  // throw NumberFormatException on an empty form value. Use isEmpty() instead.
  if (form.get("offset") != null && !form.get("offset").isEmpty()) {
    offset = Integer.valueOf(form.get("offset"));
  }
  if (form.get("limit") != null && !form.get("limit").isEmpty()) {
    limit = Integer.valueOf(form.get("limit"));
  }
  // Clamp paging parameters to sane bounds; a non-positive limit short-circuits
  // with an empty result object.
  if (offset < 0) {
    offset = 0;
  }
  if (limit > SEARCH_APPLICATION_MAX_OFFSET) {
    limit = SEARCH_APPLICATION_MAX_OFFSET;
  } else if (limit <= 0) {
    return ok(new Gson().toJson(parent));
  }
  Query<AppResult> query =
      Application.generateSearchQuery(AppResult.getSearchFields(), Application.getSearchParams());
  total = query.findRowCount();
  if (offset > total) {
    offset = total;
  }
  List<AppResult> results = query.setFirstRow(offset)
      .setMaxRows(limit)
      .fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields())
      .findList();
  end = offset + results.size();
  JsonArray applicationSummaryArray = new JsonArray();
  for (AppResult application : results) {
    JsonObject applicationObject = new JsonObject();
    JsonArray heuristicsArray = new JsonArray();
    List<AppHeuristicResult> appHeuristicResult = application.yarnAppHeuristicResults;
    // Summarize each heuristic as name + severity for the search result card.
    for (AppHeuristicResult heuristic : appHeuristicResult) {
      JsonObject heuristicObject = new JsonObject();
      heuristicObject.addProperty(JsonKeys.NAME, heuristic.heuristicName);
      heuristicObject.addProperty(JsonKeys.SEVERITY, heuristic.severity.getText());
      heuristicsArray.add(heuristicObject);
    }
    applicationObject.addProperty(JsonKeys.ID, application.id);
    applicationObject.addProperty(JsonKeys.USERNAME, application.username);
    applicationObject.addProperty(JsonKeys.START_TIME, application.startTime);
    applicationObject.addProperty(JsonKeys.FINISH_TIME, application.finishTime);
    applicationObject.addProperty(JsonKeys.RUNTIME, application.finishTime - application.startTime);
    applicationObject.addProperty(JsonKeys.WAITTIME, application.totalDelay);
    applicationObject.addProperty(JsonKeys.RESOURCE_USED, application.resourceUsed);
    applicationObject.addProperty(JsonKeys.RESOURCE_WASTED, application.resourceWasted);
    applicationObject.addProperty(JsonKeys.SEVERITY, application.severity.getText());
    applicationObject.addProperty(JsonKeys.QUEUE, application.queueName);
    applicationObject.add(JsonKeys.HEURISTICS_SUMMARY, heuristicsArray);
    applicationSummaryArray.add(applicationObject);
  }
  JsonObject searchResults = new JsonObject();
  searchResults.addProperty(JsonKeys.ID, query.toString());
  searchResults.addProperty(JsonKeys.START, offset);
  searchResults.addProperty(JsonKeys.END, end);
  searchResults.addProperty(JsonKeys.TOTAL, total);
  searchResults.add(JsonKeys.SUMMARIES, applicationSummaryArray);
  parent.add(JsonKeys.SEARCH_RESULTS, searchResults);
  return ok(new Gson().toJson(parent));
}
Usage of models.AppHeuristicResult in the dr-elephant project by LinkedIn — class AnalyticJob, method getAnalysis:
/**
 * Returns the analysed AppResult that could be directly serialized into DB.
 *
 * This method fetches the data using the appropriate application fetcher, runs all the heuristics on them and
 * loads it into the AppResult model.
 *
 * @throws Exception if the analysis process encountered a problem.
 * @return the analysed AppResult
 */
public AppResult getAnalysis() throws Exception {
// Fetch the raw application data with the fetcher registered for this app type.
ElephantFetcher fetcher = ElephantContext.instance().getFetcherForApplicationType(getAppType());
HadoopApplicationData data = fetcher.fetchData(this);
// Classify the job; a null match falls back to UNKNOWN_JOB_TYPE.
JobType jobType = ElephantContext.instance().matchJobType(data);
String jobTypeName = jobType == null ? UNKNOWN_JOB_TYPE : jobType.getName();
// Run all heuristics over the fetched data
List<HeuristicResult> analysisResults = new ArrayList<HeuristicResult>();
if (data == null || data.isEmpty()) {
// Example: a MR job has 0 mappers and 0 reducers
logger.info("No Data Received for analytic job: " + getAppId());
analysisResults.add(HeuristicResult.NO_DATA);
} else {
List<Heuristic> heuristics = ElephantContext.instance().getHeuristicsForApplicationType(getAppType());
for (Heuristic heuristic : heuristics) {
// A heuristic can opt out of specific job types via its exclude_jobtypes_filter
// config param (comma-separated list); skip it when this job's type is listed.
String confExcludedApps = heuristic.getHeuristicConfData().getParamMap().get(EXCLUDE_JOBTYPE);
if (confExcludedApps == null || confExcludedApps.length() == 0 || !Arrays.asList(confExcludedApps.split(",")).contains(jobTypeName)) {
HeuristicResult result = heuristic.apply(data);
if (result != null) {
analysisResults.add(result);
}
}
}
}
// Aggregate resource usage / delay / waste metrics for this app type.
// NOTE(review): aggregate(data) is reached even when data is null above —
// presumably the aggregator tolerates null input; confirm against its contract.
HadoopMetricsAggregator hadoopMetricsAggregator = ElephantContext.instance().getAggregatorForApplicationType(getAppType());
hadoopMetricsAggregator.aggregate(data);
HadoopAggregatedData hadoopAggregatedData = hadoopMetricsAggregator.getResult();
// Load app information
// Each string field is truncated to its column limit so the row fits the DB schema;
// getAppId() is passed along for log context on truncation.
AppResult result = new AppResult();
result.id = Utils.truncateField(getAppId(), AppResult.ID_LIMIT, getAppId());
result.trackingUrl = Utils.truncateField(getTrackingUrl(), AppResult.TRACKING_URL_LIMIT, getAppId());
result.queueName = Utils.truncateField(getQueueName(), AppResult.QUEUE_NAME_LIMIT, getAppId());
result.username = Utils.truncateField(getUser(), AppResult.USERNAME_LIMIT, getAppId());
result.startTime = getStartTime();
result.finishTime = getFinishTime();
result.name = Utils.truncateField(getName(), AppResult.APP_NAME_LIMIT, getAppId());
result.jobType = Utils.truncateField(jobTypeName, AppResult.JOBTYPE_LIMIT, getAppId());
result.resourceUsed = hadoopAggregatedData.getResourceUsed();
result.totalDelay = hadoopAggregatedData.getTotalDelay();
result.resourceWasted = hadoopAggregatedData.getResourceWasted();
// Load App Heuristic information
// The app's score is the sum of all heuristic scores, and its severity is the
// worst (maximum) severity across heuristics.
int jobScore = 0;
result.yarnAppHeuristicResults = new ArrayList<AppHeuristicResult>();
Severity worstSeverity = Severity.NONE;
for (HeuristicResult heuristicResult : analysisResults) {
AppHeuristicResult detail = new AppHeuristicResult();
detail.heuristicClass = Utils.truncateField(heuristicResult.getHeuristicClassName(), AppHeuristicResult.HEURISTIC_CLASS_LIMIT, getAppId());
detail.heuristicName = Utils.truncateField(heuristicResult.getHeuristicName(), AppHeuristicResult.HEURISTIC_NAME_LIMIT, getAppId());
detail.severity = heuristicResult.getSeverity();
detail.score = heuristicResult.getScore();
// Load Heuristic Details
for (HeuristicResultDetails heuristicResultDetails : heuristicResult.getHeuristicResultDetails()) {
AppHeuristicResultDetails heuristicDetail = new AppHeuristicResultDetails();
heuristicDetail.yarnAppHeuristicResult = detail;
heuristicDetail.name = Utils.truncateField(heuristicResultDetails.getName(), AppHeuristicResultDetails.NAME_LIMIT, getAppId());
heuristicDetail.value = Utils.truncateField(heuristicResultDetails.getValue(), AppHeuristicResultDetails.VALUE_LIMIT, getAppId());
heuristicDetail.details = Utils.truncateField(heuristicResultDetails.getDetails(), AppHeuristicResultDetails.DETAILS_LIMIT, getAppId());
// This was added for AnalyticTest. Commenting this out to fix a bug. Also disabling AnalyticJobTest.
// detail.yarnAppHeuristicResultDetails = new ArrayList<AppHeuristicResultDetails>();
// NOTE(review): relies on the ORM (Ebean) initializing yarnAppHeuristicResultDetails
// lazily — do not re-add the manual list initialization above.
detail.yarnAppHeuristicResultDetails.add(heuristicDetail);
}
result.yarnAppHeuristicResults.add(detail);
worstSeverity = Severity.max(worstSeverity, detail.severity);
jobScore += detail.score;
}
result.severity = worstSeverity;
result.score = jobScore;
// Retrieve information from job configuration like scheduler information and store them into result.
InfoExtractor.loadInfo(result, data);
return result;
}
Aggregations