Usage of com.linkedin.drelephant.analysis.Severity in the dr-elephant project by LinkedIn — class MapperTimeHeuristic, method apply:
/**
 * Evaluates mapper runtime for a succeeded MapReduce application.
 * Aggregates per-task input bytes (summed over the configured counters) and
 * runtimes, derives a severity from both the too-short and too-long task
 * heuristics, and reports count/size/runtime statistics as result details.
 *
 * @param data the MapReduce application to analyze
 * @return the heuristic result, or {@code null} for applications that did not succeed
 */
@Override
public HeuristicResult apply(MapReduceApplicationData data) {
  // Failed applications are not analyzed.
  if (!data.getSucceeded()) {
    return null;
  }
  MapReduceTaskData[] tasks = data.getMapperData();

  List<Long> inputBytes = new ArrayList<Long>();
  List<Long> runtimesMs = new ArrayList<Long>();
  long minRuntimeMs = Long.MAX_VALUE;
  long maxRuntimeMs = 0;

  for (MapReduceTaskData task : tasks) {
    // Tasks without collected time/counter data are skipped entirely.
    if (!task.isTimeAndCounterDataPresent()) {
      continue;
    }
    long bytesForTask = 0;
    for (MapReduceCounterData.CounterName counterName : _counterNames) {
      bytesForTask += task.getCounters().get(counterName);
    }
    inputBytes.add(bytesForTask);

    long runtimeMs = task.getTotalRunTimeMs();
    runtimesMs.add(runtimeMs);
    if (runtimeMs < minRuntimeMs) {
      minRuntimeMs = runtimeMs;
    }
    if (runtimeMs > maxRuntimeMs) {
      maxRuntimeMs = runtimeMs;
    }
  }

  // If no task was sampled, replace the MAX_VALUE sentinel with 0 for reporting.
  if (minRuntimeMs == Long.MAX_VALUE) {
    minRuntimeMs = 0;
  }

  long averageSize = Statistics.average(inputBytes);
  long averageTimeMs = Statistics.average(runtimesMs);

  // Overall severity is the worse of the short-task and long-task findings.
  Severity severity = Severity.max(shortTaskSeverity(tasks.length, averageTimeMs), longTaskSeverity(tasks.length, averageTimeMs));

  HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(), _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));
  result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
  result.addResultDetail("Average task input size", FileUtils.byteCountToDisplaySize(averageSize));
  result.addResultDetail("Average task runtime", Statistics.readableTimespan(averageTimeMs));
  result.addResultDetail("Max task runtime", Statistics.readableTimespan(maxRuntimeMs));
  result.addResultDetail("Min task runtime", Statistics.readableTimespan(minRuntimeMs));
  return result;
}
Usage of com.linkedin.drelephant.analysis.Severity in the dr-elephant project by LinkedIn — class GenericGCHeuristic, method apply:
/**
 * Analyzes GC pressure for a succeeded Tez application.
 * Averages runtime, CPU time, and GC time over the sampled tasks, derives a
 * severity from the GC/CPU relationship, and reports the averages plus the
 * GC/CPU ratio as result details.
 *
 * @param data the Tez application to analyze
 * @return the heuristic result, or {@code null} for applications that did not succeed
 */
public HeuristicResult apply(TezApplicationData data) {
  // Failed applications are not analyzed.
  if (!data.getSucceeded()) {
    return null;
  }
  TezTaskData[] tasks = getTasks(data);

  List<Long> gcTimesMs = new ArrayList<Long>();
  List<Long> cpuTimesMs = new ArrayList<Long>();
  List<Long> runTimesMs = new ArrayList<Long>();
  for (TezTaskData task : tasks) {
    // Only sampled tasks carry trustworthy counter data.
    if (!task.isSampled()) {
      continue;
    }
    runTimesMs.add(task.getTotalRunTimeMs());
    gcTimesMs.add(task.getCounters().get(TezCounterData.CounterName.GC_TIME_MILLIS));
    cpuTimesMs.add(task.getCounters().get(TezCounterData.CounterName.CPU_MILLISECONDS));
  }

  long avgRuntimeMs = Statistics.average(runTimesMs);
  long avgCpuMs = Statistics.average(cpuTimesMs);
  long avgGcMs = Statistics.average(gcTimesMs);

  // Guard against division by zero when no CPU time was recorded.
  double ratio = (avgCpuMs == 0) ? 0 : ((double) avgGcMs) / avgCpuMs;

  // An application with no tasks has nothing to flag.
  Severity severity = (tasks.length == 0) ? Severity.NONE : getGcRatioSeverity(avgRuntimeMs, avgCpuMs, avgGcMs);

  HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(), _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));
  result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
  result.addResultDetail("Avg task runtime (ms)", Long.toString(avgRuntimeMs));
  result.addResultDetail("Avg task CPU time (ms)", Long.toString(avgCpuMs));
  result.addResultDetail("Avg task GC time (ms)", Long.toString(avgGcMs));
  result.addResultDetail("Task GC/CPU ratio", Double.toString(ratio));
  return result;
}
Usage of com.linkedin.drelephant.analysis.Severity in the dr-elephant project by LinkedIn — class GenericMemoryHeuristic, method apply:
/**
 * Analyzes container memory utilization for a succeeded Tez application.
 * Compares the average physical memory used by sampled tasks against the
 * requested container size and derives a severity from that ratio.
 *
 * @param data the Tez application to analyze
 * @return the heuristic result, or {@code null} for applications that did not succeed
 */
public HeuristicResult apply(TezApplicationData data) {
  // Failed applications are not analyzed.
  if (!data.getSucceeded()) {
    return null;
  }
  TezTaskData[] tasks = getTasks(data);

  List<Long> totalPhysicalMemory = new LinkedList<Long>();
  List<Long> totalVirtualMemory = new LinkedList<Long>();
  List<Long> runTime = new LinkedList<Long>();
  for (TezTaskData task : tasks) {
    // Only sampled tasks carry trustworthy counter data.
    if (task.isSampled()) {
      totalPhysicalMemory.add(task.getCounters().get(TezCounterData.CounterName.PHYSICAL_MEMORY_BYTES));
      totalVirtualMemory.add(task.getCounters().get(TezCounterData.CounterName.VIRTUAL_MEMORY_BYTES));
      runTime.add(task.getTotalRunTimeMs());
    }
  }

  long averagePMem = Statistics.average(totalPhysicalMemory);
  long averageVMem = Statistics.average(totalVirtualMemory);
  long maxPMem;
  long minPMem;
  // FIX: check emptiness explicitly instead of routing the NoSuchElementException
  // thrown by Collections.max/min on an empty list through a broad catch(Exception),
  // which would also have masked unrelated failures.
  if (totalPhysicalMemory.isEmpty()) {
    maxPMem = 0;
    minPMem = 0;
  } else {
    maxPMem = Collections.max(totalPhysicalMemory);
    minPMem = Collections.min(totalPhysicalMemory);
  }
  long averageRunTime = Statistics.average(runTime);

  // Container size: prefer the job-configured value, fall back to the default (in MB).
  String containerSizeStr = data.getConf().getProperty(_containerMemConf);
  if (Strings.isNullOrEmpty(containerSizeStr)) {
    containerSizeStr = getContainerMemDefaultMBytes();
  }
  long containerSize = Long.parseLong(containerSizeStr) * FileUtils.ONE_MB;

  // FIX: cast before dividing. The original divided two longs first
  // (averagePMem / ONE_MB and containerSize / ONE_MB), truncating both values
  // to whole MB and losing sub-MB precision in the utilization ratio.
  double averageMemMb = (double) averagePMem / FileUtils.ONE_MB;
  double ratio = averageMemMb / ((double) containerSize / FileUtils.ONE_MB);

  Severity severity;
  if (tasks.length == 0) {
    // An application with no tasks has nothing to flag.
    severity = Severity.NONE;
  } else {
    severity = getMemoryRatioSeverity(ratio);
  }

  HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(), _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));
  result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
  result.addResultDetail("Maximum Physical Memory (MB)", tasks.length == 0 ? "0" : Long.toString(maxPMem / FileUtils.ONE_MB));
  result.addResultDetail("Minimum Physical memory (MB)", tasks.length == 0 ? "0" : Long.toString(minPMem / FileUtils.ONE_MB));
  result.addResultDetail("Average Physical Memory (MB)", tasks.length == 0 ? "0" : Long.toString(averagePMem / FileUtils.ONE_MB));
  result.addResultDetail("Average Virtual Memory (MB)", tasks.length == 0 ? "0" : Long.toString(averageVMem / FileUtils.ONE_MB));
  result.addResultDetail("Average Task RunTime", tasks.length == 0 ? "0" : Statistics.readableTimespan(averageRunTime));
  result.addResultDetail("Requested Container Memory (MB)", (tasks.length == 0 || containerSize == 0 || containerSize == -1) ? "0" : String.valueOf(containerSize / FileUtils.ONE_MB));
  return result;
}
Usage of com.linkedin.drelephant.analysis.Severity in the dr-elephant project by LinkedIn — class ShuffleSortHeuristic, method getShuffleSortSeverity:
/**
 * Derives a severity for a shuffle/sort phase from its absolute runtime and
 * its runtime relative to the task's code (compute) time.
 * When code time is non-positive there is no meaningful ratio, so only the
 * absolute-runtime severity applies; otherwise the final severity is capped
 * by the ratio-based severity (the minimum of the two).
 *
 * @param runtimeMs  shuffle/sort time in milliseconds
 * @param codetimeMs compute time in milliseconds
 * @return the combined severity
 */
private Severity getShuffleSortSeverity(long runtimeMs, long codetimeMs) {
  Severity absoluteSeverity =
      Severity.getSeverityAscending(runtimeMs, runtimeLimits[0], runtimeLimits[1], runtimeLimits[2], runtimeLimits[3]);
  if (codetimeMs <= 0) {
    return absoluteSeverity;
  }
  // Scale by 2 so the ratio thresholds operate on (2 * runtime / codetime).
  long scaledRatio = runtimeMs * 2 / codetimeMs;
  Severity ratioSeverity =
      Severity.getSeverityAscending(scaledRatio, runtimeRatioLimits[0], runtimeRatioLimits[1], runtimeRatioLimits[2], runtimeRatioLimits[3]);
  return Severity.min(absoluteSeverity, ratioSeverity);
}
Aggregations