Use of play.data.DynamicForm in the LinkedIn dr-elephant project: class Application, method getJobHistory.
/**
* Returns the job history. Returns at max MAX_HISTORY_LIMIT executions.
*
* @param version The version of job history to return
* @return The job history page based on the version.
*/
/**
 * Returns the job history. Returns at max MAX_HISTORY_LIMIT executions.
 *
 * @param version The version of job history to return
 * @return The job history page based on the version.
 */
private static Result getJobHistory(Version version) {
  DynamicForm form = Form.form().bindFromRequest(request());
  String partialJobDefId = form.get(JOB_DEF_ID);
  partialJobDefId = (partialJobDefId != null) ? partialJobDefId.trim() : null;

  // Graph type defaults to "resources" when the request does not supply one.
  String graphType = form.get("select-graph-type");
  if (graphType == null) {
    graphType = "resources";
  }

  // Without a job definition id, render the empty search page.
  if (!Utils.isSet(partialJobDefId)) {
    if (version.equals(Version.NEW)) {
      return ok(jobHistoryPage.render(partialJobDefId, graphType, jobHistoryResults.render(null, null, -1, null)));
    } else {
      return ok(oldJobHistoryPage.render(partialJobDefId, graphType, oldJobHistoryResults.render(null, null, -1, null)));
    }
  }

  IdUrlPair jobDefPair = bestSchedulerInfoMatchGivenPartialId(partialJobDefId, AppResult.TABLE.JOB_DEF_ID);

  List<AppResult> results;
  if (graphType.equals("time") || graphType.equals("resources")) {
    // We don't need APP_HEURISTIC_RESULT_DETAILS data to plot time and resources graphs.
    results = AppResult.find.select(AppResult.getSearchFields() + "," + AppResult.TABLE.FLOW_EXEC_ID + "," + AppResult.TABLE.FLOW_EXEC_URL).where().eq(AppResult.TABLE.JOB_DEF_ID, jobDefPair.getId()).order().desc(AppResult.TABLE.FINISH_TIME).setMaxRows(JOB_HISTORY_LIMIT).findList();
  } else {
    // Fetch all job executions, including heuristic results and their details.
    results = AppResult.find.select(AppResult.getSearchFields() + "," + AppResult.TABLE.FLOW_EXEC_ID + "," + AppResult.TABLE.FLOW_EXEC_URL).where().eq(AppResult.TABLE.JOB_DEF_ID, jobDefPair.getId()).order().desc(AppResult.TABLE.FINISH_TIME).setMaxRows(JOB_HISTORY_LIMIT).fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, "*").fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS + "." + AppHeuristicResult.TABLE.APP_HEURISTIC_RESULT_DETAILS, "*").findList();
  }

  // Bail out early when there is nothing to show (consistent with getFlowHistory,
  // which performs this check before scanning for Spark jobs).
  if (results.size() == 0) {
    return notFound("Unable to find record for job def id: " + jobDefPair.getId());
  }

  // Resource/time graphs are not supported for Spark in the old UI; detect Spark up front.
  boolean hasSparkJob = false;
  for (AppResult result : results) {
    if (result.jobType.equals("Spark")) {
      hasSparkJob = true;
      break; // one Spark job is enough to decide
    }
  }

  Map<IdUrlPair, List<AppResult>> flowExecIdToJobsMap = ControllerUtil.limitHistoryResults(ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.FLOW_EXECUTION_ID), results.size(), MAX_HISTORY_LIMIT);

  // Compute job execution data
  List<Long> flowExecTimeList = new ArrayList<Long>();
  int maxStages = 0;
  Map<IdUrlPair, List<AppResult>> executionMap = new LinkedHashMap<IdUrlPair, List<AppResult>>();
  for (Map.Entry<IdUrlPair, List<AppResult>> entry : flowExecIdToJobsMap.entrySet()) {
    // Reverse the list content from desc order of finish time to increasing order so that when grouping we get
    // the job list in the order of completion.
    List<AppResult> mrJobsList = Lists.reverse(entry.getValue());

    // Get the finish time of the last mr job that completed in current flow.
    flowExecTimeList.add(mrJobsList.get(mrJobsList.size() - 1).finishTime);

    // Find the maximum number of mr stages for any job execution
    int stageSize = flowExecIdToJobsMap.get(entry.getKey()).size();
    if (stageSize > maxStages) {
      maxStages = stageSize;
    }

    executionMap.put(entry.getKey(), Lists.reverse(flowExecIdToJobsMap.get(entry.getKey())));
  }

  // Cap the number of stages shown on the graph.
  if (maxStages > STAGE_LIMIT) {
    maxStages = STAGE_LIMIT;
  }

  if (version.equals(Version.NEW)) {
    if (graphType.equals("heuristics")) {
      return ok(jobHistoryPage.render(jobDefPair.getId(), graphType, jobHistoryResults.render(jobDefPair, executionMap, maxStages, flowExecTimeList)));
    } else if (graphType.equals("resources") || graphType.equals("time")) {
      return ok(jobHistoryPage.render(jobDefPair.getId(), graphType, jobMetricsHistoryResults.render(jobDefPair, graphType, executionMap, maxStages, flowExecTimeList)));
    }
  } else {
    if (graphType.equals("heuristics")) {
      return ok(oldJobHistoryPage.render(jobDefPair.getId(), graphType, oldJobHistoryResults.render(jobDefPair, executionMap, maxStages, flowExecTimeList)));
    } else if (graphType.equals("resources") || graphType.equals("time")) {
      if (hasSparkJob) {
        return notFound("Resource and time graph are not supported for spark right now");
      } else {
        return ok(oldJobHistoryPage.render(jobDefPair.getId(), graphType, oldJobMetricsHistoryResults.render(jobDefPair, graphType, executionMap, maxStages, flowExecTimeList)));
      }
    }
  }
  return notFound("Unable to find graph type: " + graphType);
}
Use of play.data.DynamicForm in the LinkedIn dr-elephant project: class Application, method getSearchParams.
/**
 * Collects the search filter parameters from the current request into a map.
 * Username and queue name are normalized (trimmed, lower-cased); all other
 * filters are copied through unchanged.
 *
 * @return map from filter-parameter name to its (possibly null) request value
 */
public static Map<String, String> getSearchParams() {
  DynamicForm requestForm = Form.form().bindFromRequest(request());
  Map<String, String> params = new HashMap<String, String>();

  // Normalize the user-identifying fields so lookups are case-insensitive.
  String user = requestForm.get(USERNAME);
  params.put(USERNAME, (user == null) ? null : user.trim().toLowerCase());
  String queue = requestForm.get(QUEUE_NAME);
  params.put(QUEUE_NAME, (queue == null) ? null : queue.trim().toLowerCase());

  // Remaining filters need no normalization; copy them through as-is.
  String[] passthroughKeys = {
      SEVERITY, JOB_TYPE, ANALYSIS,
      FINISHED_TIME_BEGIN, FINISHED_TIME_END,
      STARTED_TIME_BEGIN, STARTED_TIME_END
  };
  for (String key : passthroughKeys) {
    params.put(key, requestForm.get(key));
  }
  return params;
}
Use of play.data.DynamicForm in the LinkedIn dr-elephant project: class Application, method getFlowHistory.
/**
* Returns the flowHistory based on the version provided
*
* @param version Can be either new or old
* @return The flowhistory page based on the version provided
*/
/**
 * Returns the flowHistory based on the version provided
 *
 * @param version Can be either new or old
 * @return The flowhistory page based on the version provided
 */
private static Result getFlowHistory(Version version) {
  DynamicForm form = Form.form().bindFromRequest(request());
  String partialFlowDefId = form.get(FLOW_DEF_ID);
  partialFlowDefId = (partialFlowDefId != null) ? partialFlowDefId.trim() : null;

  // Graph type defaults to "resources" when the request does not supply one.
  String graphType = form.get("select-graph-type");
  if (graphType == null) {
    graphType = "resources";
  }

  // Without a flow definition id, render the empty search page.
  if (!Utils.isSet(partialFlowDefId)) {
    if (version.equals(Version.NEW)) {
      return ok(flowHistoryPage.render(partialFlowDefId, graphType, flowHistoryResults.render(null, null, null, null)));
    } else {
      return ok(oldFlowHistoryPage.render(partialFlowDefId, graphType, oldFlowHistoryResults.render(null, null, null, null)));
    }
  }

  IdUrlPair flowDefPair = bestSchedulerInfoMatchGivenPartialId(partialFlowDefId, AppResult.TABLE.FLOW_DEF_ID);

  List<AppResult> results;
  if (graphType.equals("time") || graphType.equals("resources")) {
    // if graph type is time or resources, we don't need the result from APP_HEURISTIC_RESULTS
    results = AppResult.find.select(AppResult.getSearchFields() + "," + AppResult.TABLE.FLOW_EXEC_ID + "," + AppResult.TABLE.FLOW_EXEC_URL + "," + AppResult.TABLE.JOB_DEF_ID + "," + AppResult.TABLE.JOB_DEF_URL + "," + AppResult.TABLE.JOB_NAME).where().eq(AppResult.TABLE.FLOW_DEF_ID, flowDefPair.getId()).order().desc(AppResult.TABLE.FINISH_TIME).setMaxRows(JOB_HISTORY_LIMIT).findList();
  } else {
    // Fetch available flow executions with latest JOB_HISTORY_LIMIT mr jobs.
    results = AppResult.find.select(AppResult.getSearchFields() + "," + AppResult.TABLE.FLOW_EXEC_ID + "," + AppResult.TABLE.FLOW_EXEC_URL + "," + AppResult.TABLE.JOB_DEF_ID + "," + AppResult.TABLE.JOB_DEF_URL + "," + AppResult.TABLE.JOB_NAME).where().eq(AppResult.TABLE.FLOW_DEF_ID, flowDefPair.getId()).order().desc(AppResult.TABLE.FINISH_TIME).setMaxRows(JOB_HISTORY_LIMIT).fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields()).findList();
  }

  if (results.size() == 0) {
    return notFound("Unable to find record for flow def id: " + flowDefPair.getId());
  }

  // Resource/time graphs are not supported for Spark in the old UI; detect Spark up front.
  boolean hasSparkJob = false;
  for (AppResult result : results) {
    if (result.jobType.equals("Spark")) {
      hasSparkJob = true;
      break; // one Spark job is enough to decide
    }
  }

  Map<IdUrlPair, List<AppResult>> flowExecIdToJobsMap = ControllerUtil.limitHistoryResults(ControllerUtil.groupJobs(results, ControllerUtil.GroupBy.FLOW_EXECUTION_ID), results.size(), MAX_HISTORY_LIMIT);

  // Compute flow execution data
  // All jobs starting from latest execution
  List<AppResult> filteredResults = new ArrayList<AppResult>();
  // To map executions to resp execution time
  List<Long> flowExecTimeList = new ArrayList<Long>();
  Map<IdUrlPair, Map<IdUrlPair, List<AppResult>>> executionMap = new LinkedHashMap<IdUrlPair, Map<IdUrlPair, List<AppResult>>>();
  for (Map.Entry<IdUrlPair, List<AppResult>> entry : flowExecIdToJobsMap.entrySet()) {
    // Reverse the list content from desc order of finish time to increasing order so that when grouping we get
    // the job list in the order of completion.
    List<AppResult> mrJobsList = Lists.reverse(entry.getValue());

    // Flow exec time is the finish time of the last mr job in the flow
    flowExecTimeList.add(mrJobsList.get(mrJobsList.size() - 1).finishTime);

    filteredResults.addAll(mrJobsList);
    executionMap.put(entry.getKey(), ControllerUtil.groupJobs(mrJobsList, ControllerUtil.GroupBy.JOB_DEFINITION_ID));
  }

  // Calculate unique list of jobs (job def url) to maintain order across executions. List will contain job def urls
  // from latest execution first followed by any other extra job def url that may appear in previous executions.
  final Map<IdUrlPair, String> idPairToJobNameMap = new ListOrderedMap();

  Map<IdUrlPair, List<AppResult>> filteredTempMap = ControllerUtil.groupJobs(filteredResults, ControllerUtil.GroupBy.JOB_DEFINITION_ID);

  List<Map.Entry<IdUrlPair, List<AppResult>>> filteredMapList = new LinkedList<Map.Entry<IdUrlPair, List<AppResult>>>(filteredTempMap.entrySet());

  // Sort job groups by the finish time of each group's first job.
  Collections.sort(filteredMapList, new Comparator<Map.Entry<IdUrlPair, List<AppResult>>>() {
    @Override
    public int compare(Map.Entry<IdUrlPair, List<AppResult>> left, Map.Entry<IdUrlPair, List<AppResult>> right) {
      // Long.compare avoids the deprecated boxing constructor new Long(...) used previously.
      return Long.compare(left.getValue().get(0).finishTime, right.getValue().get(0).finishTime);
    }
  });

  for (Map.Entry<IdUrlPair, List<AppResult>> entry : filteredMapList) {
    idPairToJobNameMap.put(entry.getKey(), entry.getValue().get(0).jobName);
  }

  if (version.equals(Version.NEW)) {
    if (graphType.equals("heuristics")) {
      return ok(flowHistoryPage.render(flowDefPair.getId(), graphType, flowHistoryResults.render(flowDefPair, executionMap, idPairToJobNameMap, flowExecTimeList)));
    } else if (graphType.equals("resources") || graphType.equals("time")) {
      return ok(flowHistoryPage.render(flowDefPair.getId(), graphType, flowMetricsHistoryResults.render(flowDefPair, graphType, executionMap, idPairToJobNameMap, flowExecTimeList)));
    }
  } else {
    if (graphType.equals("heuristics")) {
      return ok(oldFlowHistoryPage.render(flowDefPair.getId(), graphType, oldFlowHistoryResults.render(flowDefPair, executionMap, idPairToJobNameMap, flowExecTimeList)));
    } else if (graphType.equals("resources") || graphType.equals("time")) {
      if (hasSparkJob) {
        return notFound("Cannot plot graph for " + graphType + " since it contains a spark job. " + graphType + " graphs are not supported for spark right now");
      } else {
        return ok(oldFlowHistoryPage.render(flowDefPair.getId(), graphType, oldFlowMetricsHistoryResults.render(flowDefPair, graphType, executionMap, idPairToJobNameMap, flowExecTimeList)));
      }
    }
  }
  return notFound("Unable to find graph type: " + graphType);
}
Use of play.data.DynamicForm in the LinkedIn dr-elephant project: class Application, method getHelp.
/**
* Returns the help based on the version
*
* @param version The version for which help page has to be returned
* @return The help page based on the version
*/
/**
 * Returns the help page based on the version.
 *
 * @param version The version for which help page has to be returned
 * @return The help page based on the version
 */
private static Result getHelp(Version version) {
  DynamicForm requestForm = Form.form().bindFromRequest(request());
  String topic = requestForm.get("topic");

  String title = "Help";
  Html page = null;
  if (topic != null && !topic.isEmpty()) {
    // A topic may name either a heuristic or a metric; try heuristics first,
    // then fall back to the metrics views.
    page = ElephantContext.instance().getHeuristicToView().get(topic);
    if (page == null) {
      page = getMetricsNameView().get(topic);
    }
    // Only use the topic as the page title when a matching view was found.
    if (page != null) {
      title = topic;
    }
  }

  return version.equals(Version.NEW)
      ? ok(helpPage.render(title, page))
      : ok(oldHelpPage.render(title, page));
}
Use of play.data.DynamicForm in the LinkedIn dr-elephant project: class Web, method restGetUsersSummaryStats.
/**
* The rest interface to return the results for a particular user. When the date is not specified, it returns the result
* for the last seven days.
* @return The json object of the form:
* result:
* * {
* "user-details": {
* "id": "user",
* "totalapplications": 3,
* "totaljobs": 3,
* "totalworkflows": 3,
* "resourceused": 101394532,
* "resourcewasted": 15999828,
* "runtime": 312283,
* "waittime": 46234,
* "start": 0,
* "end": 3,
* "total": 3,
* "summaries": [
* {
* "id": "application_12432132131",
* "username": "user",
* "starttime": 1477389986871,
* "finishtime": 1477390004463,
* "runtime": 17592,
* "waittime": 0,
* "resourceused": 12288,
* "resourcewasted": 6360,
* "severity": "Critical",
* "queue": "spark_default",
* "heuristicsummary": [
* {
* "name": "Spark Configuration Best Practice",
* "severity": "None"
* },
* {
* "name": "Spark Memory Limit",
* "severity": "None"
* },
* {
* "name": "Spark Stage Runtime",
* "severity": "Low"
* },
* {
* "name": "Spark Job Runtime",
* "severity": "Low"
* },
* {
* "name": "Spark Executor Load Balance",
* "severity": "Critical"
* },
* {
* "name": "Spark Event Log Limit",
* "severity": "None"
* }
* ]
* }
* }
* }
*/
/**
 * The rest interface to return the summary stats for one or more users (comma-separated
 * "usernames" request parameter). When no date range is specified, it returns the result
 * for the last seven days. Supports paging via "offset"/"limit" and sorting via
 * "sortKey"/"increasing".
 *
 * @return a JSON object of the form {"user-details": {id, totalapplications, totaljobs,
 *         totalworkflows, resourceused, resourcewasted, runtime, waittime, start, end,
 *         total, summaries: [per-application summaries with heuristic severities]}};
 *         notFound when no usernames are supplied.
 */
public static Result restGetUsersSummaryStats() {
  DynamicForm form = Form.form().bindFromRequest(request());

  // Paging parameters; clamped to sane bounds below.
  int offset = SEARCH_DEFAULT_PAGE_OFFSET;
  int limit = SEARCH_DEFAULT_PAGE_LIMIT;
  int end = 0;
  int total = 0;

  // Bug fix: the original used `form.get("offset") != ""`, a reference comparison that is
  // effectively always true, so an empty parameter reached Integer.valueOf("") and threw
  // NumberFormatException. isEmpty() performs the intended content check.
  if (form.get("offset") != null && !form.get("offset").isEmpty()) {
    offset = Integer.valueOf(form.get("offset"));
  }
  if (form.get("limit") != null && !form.get("limit").isEmpty()) {
    limit = Integer.valueOf(form.get("limit"));
  }
  if (offset < 0) {
    offset = 0;
  }
  if (limit > SEARCH_APPLICATION_MAX_OFFSET) {
    limit = SEARCH_APPLICATION_MAX_OFFSET;
  } else if (limit <= 0) {
    // Nothing to page over; return an empty JSON object.
    return ok(new Gson().toJson(new JsonObject()));
  }

  String sortBy = "severity";
  boolean increasing = true;

  // Usernames are mandatory; without them there is nothing to summarize.
  String usernameString = form.get("usernames");
  if (usernameString == null || usernameString.isEmpty()) {
    JsonObject parent = new JsonObject();
    parent.add(JsonKeys.USER_RESULTS, new JsonObject());
    return notFound(new Gson().toJson(parent));
  }
  List<String> usernames = Arrays.asList(usernameString.split(","));

  Map<String, String> filterParamsForUserSummary = getFilterParamsForUserSummary();
  if (form.get("sortKey") != null) {
    sortBy = form.get("sortKey");
  }
  if (form.get("increasing") != null) {
    increasing = Boolean.valueOf(form.get("increasing"));
  }

  JsonObject userResult = new JsonObject();

  // Build a named-parameter disjunction: username=:user0 or username=:user1 or ...
  List<String> usernameQueryList = new ArrayList<String>();
  for (int i = 0; i < usernames.size(); i++) {
    usernameQueryList.add("username=:user" + i);
  }
  String usernameQueryString = StringUtils.join(usernameQueryList, " or ");

  // by default, fetch data from last week
  // week of data if not specified
  String finishedTimeBegin = String.valueOf(System.currentTimeMillis() - DAY * 7);
  String finishedTimeEnd = String.valueOf(System.currentTimeMillis());
  if (Utils.isSet(filterParamsForUserSummary.get(Application.FINISHED_TIME_BEGIN))) {
    finishedTimeBegin = filterParamsForUserSummary.get(Application.FINISHED_TIME_BEGIN);
  }
  if (Utils.isSet(filterParamsForUserSummary.get(Application.FINISHED_TIME_END))) {
    finishedTimeEnd = filterParamsForUserSummary.get(Application.FINISHED_TIME_END);
  }

  // Build the finish_time range predicate. Both bounds are non-null here (they are
  // initialized above); the null checks are kept as defensive guards.
  StringBuilder timeFilterStringBuilder = new StringBuilder();
  if (finishedTimeBegin != null) {
    timeFilterStringBuilder.append("finish_time");
    timeFilterStringBuilder.append(">=");
    timeFilterStringBuilder.append(parseTime(String.valueOf(finishedTimeBegin)));
    if (finishedTimeEnd != null) {
      timeFilterStringBuilder.append(" and ");
    }
  }
  if (finishedTimeEnd != null) {
    timeFilterStringBuilder.append("finish_time");
    timeFilterStringBuilder.append("<=");
    timeFilterStringBuilder.append(parseTime(String.valueOf(finishedTimeEnd)));
  }
  String timeFilterString = timeFilterStringBuilder.toString();

  // Aggregate totals over all matching applications. User values are bound as named
  // parameters below (no string concatenation of user input into the SQL).
  String sql;
  StringBuilder sqlBuilder = new StringBuilder();
  sqlBuilder.append("select count(id) as num_of_applications, count(distinct(job_exec_id)) as num_of_jobs, count(distinct(flow_exec_id)) as num_of_flows, sum(resource_used) as total_resource_used, sum(resource_wasted) as total_resource_wasted, sum(finish_time) - sum(start_time) as execution_time, sum(total_delay) as total_delay from yarn_app_result where");
  if (timeFilterString != null && !timeFilterString.isEmpty()) {
    sqlBuilder.append(" ( ");
    sqlBuilder.append(usernameQueryString);
    sqlBuilder.append(" ) and ");
    sqlBuilder.append(timeFilterString);
  } else {
    sqlBuilder.append(" ");
    sqlBuilder.append(usernameQueryString);
  }
  sql = sqlBuilder.toString();

  SqlQuery query = Ebean.createSqlQuery(sql);
  int iUserIndex = 0;
  for (String username : usernames) {
    query.setParameter("user" + iUserIndex, username);
    iUserIndex++;
  }
  SqlRow resultRow = query.findUnique();

  userResult.addProperty(JsonKeys.ID, usernameString);
  userResult.addProperty(JsonKeys.TOTAL_APPLICATIONS, resultRow.getLong("num_of_applications"));
  userResult.addProperty(JsonKeys.TOTAL_JOBS, resultRow.getLong("num_of_jobs"));
  userResult.addProperty(JsonKeys.TOTAL_WORKFLOWS, resultRow.getLong("num_of_flows"));
  userResult.addProperty(JsonKeys.RESOURCE_USED, resultRow.getLong("total_resource_used"));
  userResult.addProperty(JsonKeys.RESOURCE_WASTED, resultRow.getLong("total_resource_wasted"));
  userResult.addProperty(JsonKeys.RUNTIME, resultRow.getLong("execution_time"));
  userResult.addProperty(JsonKeys.WAITTIME, resultRow.getLong("total_delay"));

  // Fetch the paged per-application summaries.
  Query<AppResult> userSummaryQuery = generateUserApplicationSummaryQuery(usernames, filterParamsForUserSummary, sortBy, increasing);
  total = userSummaryQuery.findRowCount();
  List<AppResult> results = userSummaryQuery.setFirstRow(offset).setMaxRows(limit).fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields()).findList();
  end = offset + results.size();

  JsonArray applicationSummaryArray = new JsonArray();
  for (AppResult application : results) {
    JsonObject applicationObject = new JsonObject();
    JsonArray heuristicsArray = new JsonArray();
    List<AppHeuristicResult> appHeuristicResult = application.yarnAppHeuristicResults;
    for (AppHeuristicResult heuristic : appHeuristicResult) {
      JsonObject heuristicObject = new JsonObject();
      heuristicObject.addProperty(JsonKeys.NAME, heuristic.heuristicName);
      heuristicObject.addProperty(JsonKeys.SEVERITY, heuristic.severity.getText());
      heuristicsArray.add(heuristicObject);
    }
    applicationObject.addProperty(JsonKeys.ID, application.id);
    applicationObject.addProperty(JsonKeys.USERNAME, application.username);
    applicationObject.addProperty(JsonKeys.START_TIME, application.startTime);
    applicationObject.addProperty(JsonKeys.FINISH_TIME, application.finishTime);
    applicationObject.addProperty(JsonKeys.RUNTIME, application.finishTime - application.startTime);
    applicationObject.addProperty(JsonKeys.WAITTIME, application.totalDelay);
    applicationObject.addProperty(JsonKeys.RESOURCE_USED, application.resourceUsed);
    applicationObject.addProperty(JsonKeys.RESOURCE_WASTED, application.resourceWasted);
    applicationObject.addProperty(JsonKeys.SEVERITY, application.severity.getText());
    applicationObject.addProperty(JsonKeys.QUEUE, application.queueName);
    applicationObject.add(JsonKeys.HEURISTICS_SUMMARY, heuristicsArray);
    applicationSummaryArray.add(applicationObject);
  }

  userResult.addProperty(JsonKeys.START, offset);
  userResult.addProperty(JsonKeys.END, end);
  userResult.addProperty(JsonKeys.TOTAL, total);
  userResult.add(JsonKeys.SUMMARIES, applicationSummaryArray);

  JsonObject parent = new JsonObject();
  parent.add(JsonKeys.USER_DETAILS, userResult);
  return ok(new Gson().toJson(parent));
}
Aggregations