Use of com.linkedin.drelephant.mapreduce.data.MapReduceTaskData in project dr-elephant by linkedin: class ReducerSkewHeuristicTest, method analyzeJobTime.
private Severity analyzeJobTime(int numSmallTasks, int numLongTasks, long smallTimeTaken, long longTimeTaken)
    throws IOException {
  MapReduceTaskData[] reducers = new MapReduceTaskData[numSmallTasks + numLongTasks + 1];
  int i = 0;
  for (; i < numSmallTasks; i++) {
    reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    reducers[i].setTotalTimeMs(smallTimeTaken, true);
  }
  for (; i < numSmallTasks + numLongTasks; i++) {
    reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    reducers[i].setTotalTimeMs(longTimeTaken, true);
  }
  // Non-sampled task, which does not contain time data
  reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
  MapReduceApplicationData data = new MapReduceApplicationData().setReducerData(reducers);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
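A hedged sketch of how this helper might be called from a test, assuming JUnit 4 (org.junit.Test and org.junit.Assert.assertEquals would be imported at the top of the file). The expected CRITICAL severity for a 10x runtime gap is an illustrative assumption, not a value taken from the project's test suite; the actual outcome depends on the skew thresholds the heuristic is configured with.

@Test
public void testHeavilySkewedRuntimes() throws IOException {
  long tenMinutesMs = 10 * 60 * 1000L;
  // 500 reducers at 10 minutes vs. 500 reducers at 100 minutes, plus the
  // one non-sampled task the helper always appends.
  assertEquals(Severity.CRITICAL, analyzeJobTime(500, 500, tenMinutesMs, 10 * tenMinutesMs));
}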
Use of com.linkedin.drelephant.mapreduce.data.MapReduceTaskData in project dr-elephant by linkedin: class GenericGCHeuristic, method apply.
@Override
public HeuristicResult apply(MapReduceApplicationData data) {
  if (!data.getSucceeded()) {
    return null;
  }
  MapReduceTaskData[] tasks = getTasks(data);
  List<Long> gcMs = new ArrayList<Long>();
  List<Long> cpuMs = new ArrayList<Long>();
  List<Long> runtimesMs = new ArrayList<Long>();
  for (MapReduceTaskData task : tasks) {
    if (task.isTimeAndCounterDataPresent()) {
      runtimesMs.add(task.getTotalRunTimeMs());
      gcMs.add(task.getCounters().get(MapReduceCounterData.CounterName.GC_MILLISECONDS));
      cpuMs.add(task.getCounters().get(MapReduceCounterData.CounterName.CPU_MILLISECONDS));
    }
  }
  long avgRuntimeMs = Statistics.average(runtimesMs);
  long avgCpuMs = Statistics.average(cpuMs);
  long avgGcMs = Statistics.average(gcMs);
  double ratio = avgCpuMs != 0 ? avgGcMs * 1.0 / avgCpuMs : 0;
  Severity severity;
  if (tasks.length == 0) {
    severity = Severity.NONE;
  } else {
    severity = getGcRatioSeverity(avgRuntimeMs, avgCpuMs, avgGcMs);
  }
  HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
      _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));
  result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
  result.addResultDetail("Avg task runtime (ms)", Long.toString(avgRuntimeMs));
  result.addResultDetail("Avg task CPU time (ms)", Long.toString(avgCpuMs));
  result.addResultDetail("Avg task GC time (ms)", Long.toString(avgGcMs));
  result.addResultDetail("Task GC/CPU ratio", Double.toString(ratio));
  return result;
}
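Because the severity thresholds live in getGcRatioSeverity and the heuristic's configuration, the ratio itself is the portable part. Below is a minimal, standalone sketch of the same GC/CPU computation; the class name GcRatioSketch, the main method, and the 0.1 cut-off are assumptions for illustration, not values from the project.

/** Standalone sketch of the GC/CPU ratio check; the threshold is illustrative. */
public class GcRatioSketch {

  static double gcRatio(long avgGcMs, long avgCpuMs) {
    // Mirror the heuristic's division-by-zero guard.
    return avgCpuMs != 0 ? avgGcMs * 1.0 / avgCpuMs : 0.0;
  }

  public static void main(String[] args) {
    long avgCpuMs = 600000L; // 10 minutes of CPU time per task
    long avgGcMs = 90000L;   // 1.5 minutes of that spent in GC
    double ratio = gcRatio(avgGcMs, avgCpuMs);
    // Hypothetical bucket: the real limits are configurable per deployment.
    String verdict = ratio > 0.1 ? "GC-heavy; consider heap/GC tuning" : "acceptable";
    System.out.printf("GC/CPU ratio = %.3f -> %s%n", ratio, verdict);
  }
}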
Use of com.linkedin.drelephant.mapreduce.data.MapReduceTaskData in project dr-elephant by linkedin: class GenericMemoryHeuristic, method apply.
@Override
public HeuristicResult apply(MapReduceApplicationData data) {
  if (!data.getSucceeded()) {
    return null;
  }
  String containerSizeStr = data.getConf().getProperty(_containerMemConf);
  long containerMem = -1L;
  if (containerSizeStr != null) {
    try {
      containerMem = Long.parseLong(containerSizeStr);
    } catch (NumberFormatException e0) {
      // Some jobs set this config to a variable reference like "${VAR}";
      // resolve it by looking up the referenced property.
      if (containerSizeStr.startsWith("$")) {
        String realContainerConf =
            containerSizeStr.substring(containerSizeStr.indexOf("{") + 1, containerSizeStr.indexOf("}"));
        String realContainerSizeStr = data.getConf().getProperty(realContainerConf);
        try {
          containerMem = Long.parseLong(realContainerSizeStr);
        } catch (NumberFormatException e1) {
          logger.warn(realContainerConf + ": expected number [" + realContainerSizeStr + "]");
        }
      } else {
        logger.warn(_containerMemConf + ": expected number [" + containerSizeStr + "]");
      }
    }
  }
  if (containerMem < 0) {
    containerMem = getContainerMemDefaultMBytes();
  }
  containerMem *= FileUtils.ONE_MB;
  MapReduceTaskData[] tasks = getTasks(data);
  List<Long> taskPMems = new ArrayList<Long>();
  List<Long> taskVMems = new ArrayList<Long>();
  List<Long> runtimesMs = new ArrayList<Long>();
  long taskPMin = Long.MAX_VALUE;
  long taskPMax = 0;
  for (MapReduceTaskData task : tasks) {
    if (task.isTimeAndCounterDataPresent()) {
      runtimesMs.add(task.getTotalRunTimeMs());
      long taskPMem = task.getCounters().get(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES);
      long taskVMem = task.getCounters().get(MapReduceCounterData.CounterName.VIRTUAL_MEMORY_BYTES);
      taskPMems.add(taskPMem);
      taskPMin = Math.min(taskPMin, taskPMem);
      taskPMax = Math.max(taskPMax, taskPMem);
      taskVMems.add(taskVMem);
    }
  }
  if (taskPMin == Long.MAX_VALUE) {
    taskPMin = 0;
  }
  long taskPMemAvg = Statistics.average(taskPMems);
  long taskVMemAvg = Statistics.average(taskVMems);
  long averageTimeMs = Statistics.average(runtimesMs);
  Severity severity;
  if (tasks.length == 0) {
    severity = Severity.NONE;
  } else {
    severity = getTaskMemoryUtilSeverity(taskPMemAvg, containerMem);
  }
  HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
      _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));
  result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
  result.addResultDetail("Avg task runtime", Statistics.readableTimespan(averageTimeMs));
  result.addResultDetail("Avg Physical Memory (MB)", Long.toString(taskPMemAvg / FileUtils.ONE_MB));
  result.addResultDetail("Max Physical Memory (MB)", Long.toString(taskPMax / FileUtils.ONE_MB));
  result.addResultDetail("Min Physical Memory (MB)", Long.toString(taskPMin / FileUtils.ONE_MB));
  result.addResultDetail("Avg Virtual Memory (MB)", Long.toString(taskVMemAvg / FileUtils.ONE_MB));
  result.addResultDetail("Requested Container Memory", FileUtils.byteCountToDisplaySize(containerMem));
  return result;
}
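The most error-prone step here is the fallback for configuration values written as variable references. A self-contained sketch of that lookup follows; the names ContainerMemSketch and parseContainerMem are hypothetical, and the "${...}" check is deliberately stricter than the original's startsWith("$").

import java.util.Properties;

/** Standalone sketch of the container-size lookup, including "${VAR}" indirection. */
public class ContainerMemSketch {

  static long parseContainerMem(Properties conf, String key, long defaultMb) {
    String raw = conf.getProperty(key);
    if (raw != null) {
      try {
        return Long.parseLong(raw);
      } catch (NumberFormatException e) {
        // Some jobs set the value to a variable reference like "${VAR}";
        // resolve it by looking up the referenced property once.
        if (raw.startsWith("${") && raw.endsWith("}")) {
          String referenced = conf.getProperty(raw.substring(2, raw.length() - 1));
          try {
            // Long.parseLong(null) also throws NumberFormatException, so a
            // missing referenced property falls through to the default.
            return Long.parseLong(referenced);
          } catch (NumberFormatException ignored) {
            // fall through to the default
          }
        }
      }
    }
    return defaultMb;
  }

  public static void main(String[] args) {
    Properties conf = new Properties();
    conf.setProperty("mapreduce.map.memory.mb", "${MAP_MEM}");
    conf.setProperty("MAP_MEM", "2048");
    // Prints 2048: the "${MAP_MEM}" reference resolves to the MAP_MEM property.
    System.out.println(parseContainerMem(conf, "mapreduce.map.memory.mb", 1536L));
  }
}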
Use of com.linkedin.drelephant.mapreduce.data.MapReduceTaskData in project dr-elephant by linkedin: class JobQueueLimitHeuristic, method apply.
@Override
public HeuristicResult apply(MapReduceApplicationData data) {
  HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
      _heuristicConfData.getHeuristicName(), Severity.NONE, 0);
  Properties jobConf = data.getConf();
  long queueTimeoutLimitMs = TimeUnit.MINUTES.toMillis(15);
  // Fetch the queue to which the job was submitted.
  String queueName = jobConf.getProperty("mapred.job.queue.name");
  if (queueName == null) {
    throw new IllegalStateException("Queue Name not found.");
  }
  // Compute severity only if the job was submitted to the default queue;
  // otherwise leave the severity at NONE.
  MapReduceTaskData[] mapTasks = data.getMapperData();
  MapReduceTaskData[] redTasks = data.getReducerData();
  Severity[] mapTasksSeverity = new Severity[mapTasks.length];
  Severity[] redTasksSeverity = new Severity[redTasks.length];
  if (queueName.equals("default")) {
    result.addResultDetail("Queue: ", queueName, null);
    result.addResultDetail("Number of Map tasks", Integer.toString(mapTasks.length));
    result.addResultDetail("Number of Reduce tasks", Integer.toString(redTasks.length));
    // Calculate severity of mappers
    mapTasksSeverity = getTasksSeverity(mapTasks, queueTimeoutLimitMs);
    result.addResultDetail("Number of Map tasks that are in severe state (14 to 14.5 min)",
        Long.toString(getSeverityFrequency(Severity.SEVERE, mapTasksSeverity)));
    result.addResultDetail("Number of Map tasks that are in critical state (over 14.5 min)",
        Long.toString(getSeverityFrequency(Severity.CRITICAL, mapTasksSeverity)));
    // Calculate severity of reducers
    redTasksSeverity = getTasksSeverity(redTasks, queueTimeoutLimitMs);
    result.addResultDetail("Number of Reduce tasks that are in severe state (14 to 14.5 min)",
        Long.toString(getSeverityFrequency(Severity.SEVERE, redTasksSeverity)));
    result.addResultDetail("Number of Reduce tasks that are in critical state (over 14.5 min)",
        Long.toString(getSeverityFrequency(Severity.CRITICAL, redTasksSeverity)));
    // Calculate job severity
    result.setSeverity(Severity.max(Severity.max(mapTasksSeverity), Severity.max(redTasksSeverity)));
  } else {
    result.addResultDetail("Not Applicable", "This Heuristic is not applicable to " + queueName + " queue");
    result.setSeverity(Severity.NONE);
  }
  return result;
}
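The body of getTasksSeverity is not shown, but the result-detail labels imply cut points at 14 and 14.5 minutes against the 15-minute limit. Below is a standalone sketch under that assumption; all names here are hypothetical, not the project's own.

import java.util.concurrent.TimeUnit;

/** Sketch of a per-task check against a 15-minute queue timeout. */
public class QueueLimitSketch {

  enum Severity { NONE, SEVERE, CRITICAL }

  static Severity taskSeverity(long runtimeMs, long limitMs) {
    if (runtimeMs >= limitMs - TimeUnit.SECONDS.toMillis(30)) {
      return Severity.CRITICAL; // over 14.5 minutes
    }
    if (runtimeMs >= limitMs - TimeUnit.MINUTES.toMillis(1)) {
      return Severity.SEVERE;   // between 14 and 14.5 minutes
    }
    return Severity.NONE;
  }

  public static void main(String[] args) {
    long limitMs = TimeUnit.MINUTES.toMillis(15);
    System.out.println(taskSeverity(TimeUnit.MINUTES.toMillis(10), limitMs));          // NONE
    System.out.println(taskSeverity(TimeUnit.MINUTES.toMillis(14) + 10000L, limitMs)); // SEVERE
    System.out.println(taskSeverity(TimeUnit.SECONDS.toMillis(880), limitMs));         // CRITICAL
  }
}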
Use of com.linkedin.drelephant.mapreduce.data.MapReduceTaskData in project dr-elephant by linkedin: class ReducerTimeHeuristic, method apply.
@Override
public HeuristicResult apply(MapReduceApplicationData data) {
  if (!data.getSucceeded()) {
    return null;
  }
  MapReduceTaskData[] tasks = data.getReducerData();
  List<Long> runTimesMs = new ArrayList<Long>();
  long taskMinMs = Long.MAX_VALUE;
  long taskMaxMs = 0;
  for (MapReduceTaskData task : tasks) {
    if (task.isTimeDataPresent()) {
      long taskTime = task.getTotalRunTimeMs();
      runTimesMs.add(taskTime);
      taskMinMs = Math.min(taskMinMs, taskTime);
      taskMaxMs = Math.max(taskMaxMs, taskTime);
    }
  }
  if (taskMinMs == Long.MAX_VALUE) {
    taskMinMs = 0;
  }
  // Analyze data
  long averageRuntimeMs = Statistics.average(runTimesMs);
  Severity shortTimeSeverity = shortTimeSeverity(averageRuntimeMs, tasks.length);
  Severity longTimeSeverity = longTimeSeverity(averageRuntimeMs, tasks.length);
  Severity severity = Severity.max(shortTimeSeverity, longTimeSeverity);
  HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
      _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));
  result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
  result.addResultDetail("Average task runtime", Statistics.readableTimespan(averageRuntimeMs));
  result.addResultDetail("Max task runtime", Statistics.readableTimespan(taskMaxMs));
  result.addResultDetail("Min task runtime", Statistics.readableTimespan(taskMinMs));
  return result;
}
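The min/max/average bookkeeping above, including the Long.MAX_VALUE sentinel reset for the no-data case, is easy to isolate. A minimal standalone sketch follows, with hypothetical names; the project's Statistics.average is reproduced here as a simple mean that returns 0 for an empty list, which is an assumption.

import java.util.ArrayList;
import java.util.List;

/** Standalone sketch of the runtime min/max/average aggregation. */
public class RuntimeStatsSketch {

  public static void main(String[] args) {
    long[] reportedMs = {120000L, 95000L, 310000L}; // runtimes of sampled tasks only
    List<Long> runTimesMs = new ArrayList<Long>();
    long minMs = Long.MAX_VALUE;
    long maxMs = 0;
    for (long t : reportedMs) {
      runTimesMs.add(t);
      minMs = Math.min(minMs, t);
      maxMs = Math.max(maxMs, t);
    }
    // If no task carried time data, the sentinel would survive; report 0 instead.
    if (minMs == Long.MAX_VALUE) {
      minMs = 0;
    }
    long avgMs = runTimesMs.isEmpty() ? 0
        : runTimesMs.stream().mapToLong(Long::longValue).sum() / runTimesMs.size();
    System.out.printf("min=%dms max=%dms avg=%dms%n", minMs, maxMs, avgMs);
  }
}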