Use of com.avaje.ebean.SqlRow in project dr-elephant by LinkedIn: class Web, method restGetUsersSummaryStats.
/**
 * The rest interface to return the results for a particular user. When the date is not specified, it returns the result
 * for the last seven days.
 * @return The json object of the form:
 * result:
 * * {
 * "user-details": {
 * "id": "user",
 * "totalapplications": 3,
 * "totaljobs": 3,
 * "totalworkflows": 3,
 * "resourceused": 101394532,
 * "resourcewasted": 15999828,
 * "runtime": 312283,
 * "waittime": 46234,
 * "start": 0,
 * "end": 3,
 * "total": 3,
 * "summaries": [
 * {
 * "id": "application_12432132131",
 * "username": "user",
 * "starttime": 1477389986871,
 * "finishtime": 1477390004463,
 * "runtime": 17592,
 * "waittime": 0,
 * "resourceused": 12288,
 * "resourcewasted": 6360,
 * "severity": "Critical",
 * "queue": "spark_default",
 * "heuristicsummary": [
 * {
 * "name": "Spark Configuration Best Practice",
 * "severity": "None"
 * },
 * {
 * "name": "Spark Memory Limit",
 * "severity": "None"
 * },
 * {
 * "name": "Spark Stage Runtime",
 * "severity": "Low"
 * },
 * {
 * "name": "Spark Job Runtime",
 * "severity": "Low"
 * },
 * {
 * "name": "Spark Executor Load Balance",
 * "severity": "Critical"
 * },
 * {
 * "name": "Spark Event Log Limit",
 * "severity": "None"
 * }
 * ]
 * }
 * }
 * }
 */
public static Result restGetUsersSummaryStats() {
DynamicForm form = Form.form().bindFromRequest(request());
int offset = SEARCH_DEFAULT_PAGE_OFFSET;
int limit = SEARCH_DEFAULT_PAGE_LIMIT;
int end = 0;
int total = 0;
// FIX: the original compared request parameters with != "", which tests reference
// identity and is effectively always true for runtime-built strings; use isEmpty().
String offsetParam = form.get("offset");
if (offsetParam != null && !offsetParam.isEmpty()) {
offset = Integer.parseInt(offsetParam);
}
String limitParam = form.get("limit");
if (limitParam != null && !limitParam.isEmpty()) {
limit = Integer.parseInt(limitParam);
}
if (offset < 0) {
offset = 0;
}
if (limit > SEARCH_APPLICATION_MAX_OFFSET) {
limit = SEARCH_APPLICATION_MAX_OFFSET;
} else if (limit <= 0) {
// A non-positive page size yields an empty result.
return ok(new Gson().toJson(new JsonObject()));
}
String sortBy = "severity";
boolean increasing = true;
String usernameString = form.get("usernames");
if (usernameString == null || usernameString.isEmpty()) {
// No users requested: respond 404 with an empty user-results object.
JsonObject parent = new JsonObject();
parent.add(JsonKeys.USER_RESULTS, new JsonObject());
return notFound(new Gson().toJson(parent));
}
List<String> usernames = Arrays.asList(usernameString.split(","));
Map<String, String> filterParamsForUserSummary = getFilterParamsForUserSummary();
if (form.get("sortKey") != null) {
sortBy = form.get("sortKey");
}
if (form.get("increasing") != null) {
increasing = Boolean.valueOf(form.get("increasing"));
}
JsonObject userResult = new JsonObject();
// Build a named-parameter disjunction: "username=:user0 or username=:user1 ...".
List<String> usernameQueryList = new ArrayList<String>();
for (int i = 0; i < usernames.size(); i++) {
usernameQueryList.add("username=:user" + i);
}
String usernameQueryString = StringUtils.join(usernameQueryList, " or ");
// by default, fetch data from last week
// week of data if not specified
String finishedTimeBegin = String.valueOf(System.currentTimeMillis() - DAY * 7);
String finishedTimeEnd = String.valueOf(System.currentTimeMillis());
if (Utils.isSet(filterParamsForUserSummary.get(Application.FINISHED_TIME_BEGIN))) {
finishedTimeBegin = filterParamsForUserSummary.get(Application.FINISHED_TIME_BEGIN);
}
if (Utils.isSet(filterParamsForUserSummary.get(Application.FINISHED_TIME_END))) {
finishedTimeEnd = filterParamsForUserSummary.get(Application.FINISHED_TIME_END);
}
// Compose "finish_time>=X and finish_time<=Y" (either bound may be absent).
StringBuilder timeFilterStringBuilder = new StringBuilder();
if (finishedTimeBegin != null) {
timeFilterStringBuilder.append("finish_time>=").append(parseTime(finishedTimeBegin));
if (finishedTimeEnd != null) {
timeFilterStringBuilder.append(" and ");
}
}
if (finishedTimeEnd != null) {
timeFilterStringBuilder.append("finish_time<=").append(parseTime(finishedTimeEnd));
}
String timeFilterString = timeFilterStringBuilder.toString();
StringBuilder sqlBuilder = new StringBuilder();
sqlBuilder.append("select count(id) as num_of_applications, count(distinct(job_exec_id)) as num_of_jobs, count(distinct(flow_exec_id)) as num_of_flows, sum(resource_used) as total_resource_used, sum(resource_wasted) as total_resource_wasted, sum(finish_time) - sum(start_time) as execution_time, sum(total_delay) as total_delay from yarn_app_result where");
// StringBuilder.toString() never returns null, so only emptiness matters here.
if (!timeFilterString.isEmpty()) {
sqlBuilder.append(" ( ").append(usernameQueryString).append(" ) and ").append(timeFilterString);
} else {
sqlBuilder.append(" ").append(usernameQueryString);
}
String sql = sqlBuilder.toString();
SqlQuery query = Ebean.createSqlQuery(sql);
int iUserIndex = 0;
for (String username : usernames) {
query.setParameter("user" + iUserIndex, username);
iUserIndex++;
}
SqlRow resultRow = query.findUnique();
// Aggregate, cross-user totals.
userResult.addProperty(JsonKeys.ID, usernameString);
userResult.addProperty(JsonKeys.TOTAL_APPLICATIONS, resultRow.getLong("num_of_applications"));
userResult.addProperty(JsonKeys.TOTAL_JOBS, resultRow.getLong("num_of_jobs"));
userResult.addProperty(JsonKeys.TOTAL_WORKFLOWS, resultRow.getLong("num_of_flows"));
userResult.addProperty(JsonKeys.RESOURCE_USED, resultRow.getLong("total_resource_used"));
userResult.addProperty(JsonKeys.RESOURCE_WASTED, resultRow.getLong("total_resource_wasted"));
userResult.addProperty(JsonKeys.RUNTIME, resultRow.getLong("execution_time"));
userResult.addProperty(JsonKeys.WAITTIME, resultRow.getLong("total_delay"));
// Paged, per-application summaries with heuristic details.
Query<AppResult> userSummaryQuery = generateUserApplicationSummaryQuery(usernames, filterParamsForUserSummary, sortBy, increasing);
total = userSummaryQuery.findRowCount();
List<AppResult> results = userSummaryQuery.setFirstRow(offset).setMaxRows(limit).fetch(AppResult.TABLE.APP_HEURISTIC_RESULTS, AppHeuristicResult.getSearchFields()).findList();
end = offset + results.size();
JsonArray applicationSummaryArray = new JsonArray();
for (AppResult application : results) {
JsonObject applicationObject = new JsonObject();
JsonArray heuristicsArray = new JsonArray();
List<AppHeuristicResult> appHeuristicResult = application.yarnAppHeuristicResults;
for (AppHeuristicResult heuristic : appHeuristicResult) {
JsonObject heuristicObject = new JsonObject();
heuristicObject.addProperty(JsonKeys.NAME, heuristic.heuristicName);
heuristicObject.addProperty(JsonKeys.SEVERITY, heuristic.severity.getText());
heuristicsArray.add(heuristicObject);
}
applicationObject.addProperty(JsonKeys.ID, application.id);
applicationObject.addProperty(JsonKeys.USERNAME, application.username);
applicationObject.addProperty(JsonKeys.START_TIME, application.startTime);
applicationObject.addProperty(JsonKeys.FINISH_TIME, application.finishTime);
applicationObject.addProperty(JsonKeys.RUNTIME, application.finishTime - application.startTime);
applicationObject.addProperty(JsonKeys.WAITTIME, application.totalDelay);
applicationObject.addProperty(JsonKeys.RESOURCE_USED, application.resourceUsed);
applicationObject.addProperty(JsonKeys.RESOURCE_WASTED, application.resourceWasted);
applicationObject.addProperty(JsonKeys.SEVERITY, application.severity.getText());
applicationObject.addProperty(JsonKeys.QUEUE, application.queueName);
applicationObject.add(JsonKeys.HEURISTICS_SUMMARY, heuristicsArray);
applicationSummaryArray.add(applicationObject);
}
userResult.addProperty(JsonKeys.START, offset);
userResult.addProperty(JsonKeys.END, end);
userResult.addProperty(JsonKeys.TOTAL, total);
userResult.add(JsonKeys.SUMMARIES, applicationSummaryArray);
JsonObject parent = new JsonObject();
parent.add(JsonKeys.USER_DETAILS, userResult);
return ok(new Gson().toJson(parent));
}
Use of com.avaje.ebean.SqlRow in project dr-elephant by LinkedIn: class BaselineComputeUtil, method updateBaselineForJob.
/**
 * Computes and stores baseline metric values for a job.
 *
 * <p>Averages resource usage (GB-hours) and execution time (minutes) over the last
 * {@code _numJobsForBaseline} executions of the job, then persists them on the
 * {@link TuningJobDefinition} along with the average input size.
 *
 * @param tuningJobDefinition Job for which baseline is to be added
 */
private void updateBaselineForJob(TuningJobDefinition tuningJobDefinition) {
logger.info("Computing and updating baseline metric values for job: " + tuningJobDefinition.job.jobName);
String sql = "SELECT AVG(resource_used) AS resource_used, AVG(execution_time) AS execution_time FROM " + "(SELECT job_exec_id, SUM(resource_used/(1024 * 3600)) AS resource_used, " + "SUM((finish_time - start_time - total_delay)/(1000 * 60)) AS execution_time, " + "MAX(start_time) AS start_time " + "FROM yarn_app_result WHERE job_def_id=:jobDefId " + "GROUP BY job_exec_id " + "ORDER BY start_time DESC " + "LIMIT :num) temp";
logger.debug("Running query for baseline computation " + sql);
SqlRow baseline = Ebean.createSqlQuery(sql).setParameter("jobDefId", tuningJobDefinition.job.jobDefId).setParameter("num", _numJobsForBaseline).findUnique();
// FIX: the original initialized these to 0D and immediately overwrote them — dead code.
// NOTE(review): getDouble may return null if AVG ran over zero rows (no matching
// executions); the fields are then stored as null, matching the original behavior.
Double avgResourceUsage = baseline.getDouble("resource_used");
Double avgExecutionTime = baseline.getDouble("execution_time");
tuningJobDefinition.averageExecutionTime = avgExecutionTime;
tuningJobDefinition.averageResourceUsage = avgResourceUsage;
tuningJobDefinition.averageInputSizeInBytes = getAvgInputSizeInBytes(tuningJobDefinition.job.jobDefId);
logger.debug("Baseline metric values: Average resource usage=" + avgResourceUsage + " and Average execution time=" + avgExecutionTime);
tuningJobDefinition.update();
logger.info("Updated baseline metric value for job: " + tuningJobDefinition.job.jobName);
}
Use of com.avaje.ebean.SqlRow in project dr-elephant by LinkedIn: class BaselineComputeUtil, method getAvgInputSizeInBytes.
/**
 * Returns the average input size in bytes of a job (over last _numJobsForBaseline executions).
 *
 * @param jobDefId job definition id of the job
 * @return average input size in bytes as long, or 0 when no matching executions exist
 */
private Long getAvgInputSizeInBytes(String jobDefId) {
String sql = "SELECT AVG(inputSizeInBytes) as avgInputSizeInMB FROM " + "(SELECT job_exec_id, SUM(cast(value as decimal)) inputSizeInBytes, MAX(start_time) AS start_time " + "FROM yarn_app_result yar INNER JOIN yarn_app_heuristic_result yahr " + "ON yar.id=yahr.yarn_app_result_id " + "INNER JOIN yarn_app_heuristic_result_details yahrd " + "ON yahr.id=yahrd.yarn_app_heuristic_result_id " + "WHERE job_def_id=:jobDefId AND yahr.heuristic_name='" + CommonConstantsHeuristic.MAPPER_SPEED + "' " + "AND yahrd.name='Total input size in MB' " + "GROUP BY job_exec_id ORDER BY start_time DESC LIMIT :num ) temp";
logger.debug("Running query for average input size computation " + sql);
SqlRow baseline = Ebean.createSqlQuery(sql).setParameter("jobDefId", jobDefId).setParameter("num", _numJobsForBaseline).findUnique();
// FIX: AVG over zero rows yields SQL NULL; the original unboxed the null Double during
// the multiplication and threw a NullPointerException. Guard and report 0 instead.
Double avgInputSizeInMB = baseline.getDouble("avgInputSizeInMB");
if (avgInputSizeInMB == null) {
return 0L;
}
return (long) (avgInputSizeInMB * FileUtils.ONE_MB);
}
Aggregations