Use of com.linkedin.drelephant.analysis.HeuristicResult in project dr-elephant by LinkedIn.
From the class DistributedCacheLimitHeuristicTest, method testHeuristicResultArchiveCacheFilesAndSizeLengthMismatch.
/**
 * File sizes are not reported for all of the files in the archive cache.
 */
@Test
public void testHeuristicResultArchiveCacheFilesAndSizeLengthMismatch() {
  jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,300");
  jobConf.setProperty("mapreduce.job.cache.archives.filesizes", "400,500");
  MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf);
  HeuristicResult result = _heuristic.apply(data);
  assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.MODERATE);
}
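These tests read a shared jobConf fixture that the snippet does not show. For the size-count mismatch above to trigger MODERATE, the fixture must register more archives than there are reported archive sizes. A minimal sketch of a plausible setup, with hypothetical paths (the project's actual fixture values may differ):

// Hypothetical fixture: the heuristic compares the number of cached
// paths against the number of reported sizes, so three files and
// three archives are registered here. Paths are illustrative.
private Properties jobConf;

@Before
public void setup() {
  jobConf = new Properties();
  jobConf.setProperty("mapreduce.job.cache.files",
      "/cache/file1,/cache/file2,/cache/file3");
  jobConf.setProperty("mapreduce.job.cache.archives",
      "/cache/archive1,/cache/archive2,/cache/archive3");
}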
Use of com.linkedin.drelephant.analysis.HeuristicResult in project dr-elephant by LinkedIn.
From the class DistributedCacheLimitHeuristicTest, method testHeuristicResultArchiveCacheFileLimitViolated.
/**
 * File size limit exceeded for a file in the archive cache.
 */
@Test
public void testHeuristicResultArchiveCacheFileLimitViolated() {
  jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,300");
  jobConf.setProperty("mapreduce.job.cache.archives.filesizes", "400,500,600000000");
  MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf);
  HeuristicResult result = _heuristic.apply(data);
  assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.CRITICAL);
}
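The CRITICAL result comes from the last archive entry: 600000000 bytes is roughly 572 MB, above the heuristic's per-file cache limit. A sketch of that check, assuming a 500 MB limit (the actual constant and logic live inside DistributedCacheLimitHeuristic and may differ):

// Assumed limit; dr-elephant's real value is defined in the heuristic class.
private static final long CACHE_FILE_SIZE_LIMIT = 500L * 1024 * 1024;

// Returns true if any comma-separated size exceeds the limit.
private static boolean exceedsLimit(String fileSizesCsv) {
  for (String size : fileSizesCsv.split(",")) {
    if (Long.parseLong(size.trim()) > CACHE_FILE_SIZE_LIMIT) {
      return true; // 600000000 bytes (~572 MB) trips this branch
    }
  }
  return false;
}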
Use of com.linkedin.drelephant.analysis.HeuristicResult in project dr-elephant by LinkedIn.
From the class DistributedCacheLimitHeuristicTest, method testHeuristicResultWithEmptyArchiveCacheFiles.
/**
 * Archive cache not used by the application.
 */
@Test
public void testHeuristicResultWithEmptyArchiveCacheFiles() {
  jobConf.remove("mapreduce.job.cache.archives");
  jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,300");
  MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf);
  HeuristicResult result = _heuristic.apply(data);
  assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.NONE);
}
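Removing mapreduce.job.cache.archives leaves the heuristic with no archive list to validate, so only the file cache is checked and the severity stays NONE. An illustrative guard that captures this behavior (not the project's exact code):

// Illustrative: archive checks are skipped when the job registered no archives.
private static boolean usesArchiveCache(Properties conf) {
  String archives = conf.getProperty("mapreduce.job.cache.archives");
  return archives != null && !archives.isEmpty();
}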
Use of com.linkedin.drelephant.analysis.HeuristicResult in project dr-elephant by LinkedIn.
From the class MapperGCHeuristicTest, method analyzeJob.
private Severity analyzeJob(long runtimeMs, long cpuMs, long gcMs) throws IOException {
  TezCounterData jobCounter = new TezCounterData();
  TezTaskData[] mappers = new TezTaskData[NUMTASKS + 1];
  TezCounterData counter = new TezCounterData();
  counter.set(TezCounterData.CounterName.CPU_MILLISECONDS, cpuMs);
  counter.set(TezCounterData.CounterName.GC_TIME_MILLIS, gcMs);
  int i = 0;
  for (; i < NUMTASKS; i++) {
    mappers[i] = new TezTaskData("task-id-" + i, "task-attempt-id-" + i);
    mappers[i].setTimeAndCounter(new long[] { runtimeMs, 0, 0, 0, 0 }, counter);
  }
  // Non-sampled task, which does not contain time and counter data
  mappers[i] = new TezTaskData("task-id-" + i, "task-attempt-id-" + i);
  TezApplicationData data = new TezApplicationData().setCounters(jobCounter).setMapTaskData(mappers);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
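A hypothetical caller, to show how this helper is meant to be exercised: the heuristic ranks mappers by the fraction of CPU time spent in garbage collection. The thresholds are configurable, so the expected severities below are assumptions, not the project's actual test cases:

@Test
public void testGcSeverity() throws IOException {
  // 70% of CPU time in GC: expect a severe finding
  // (CRITICAL is an assumption; thresholds are configurable).
  assertEquals(Severity.CRITICAL, analyzeJob(2000000L, 1000000L, 700000L));
  // Negligible GC time: expect no finding.
  assertEquals(Severity.NONE, analyzeJob(2000000L, 1000000L, 50L));
}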
Use of com.linkedin.drelephant.analysis.HeuristicResult in project dr-elephant by LinkedIn.
From the class MapperSpeedHeuristicTest, method analyzeJob.
private Severity analyzeJob(long runtimeMs, long readBytes) throws IOException {
  TezCounterData jobCounter = new TezCounterData();
  TezTaskData[] mappers = new TezTaskData[NUMTASKS + 1];
  TezCounterData counter = new TezCounterData();
  counter.set(TezCounterData.CounterName.HDFS_BYTES_READ, readBytes / 2);
  counter.set(TezCounterData.CounterName.S3A_BYTES_READ, readBytes / 2);
  int i = 0;
  for (; i < NUMTASKS; i++) {
    mappers[i] = new TezTaskData(counter, new long[] { runtimeMs, 0, 0, 0, 0 });
  }
  // Non-sampled task, which does not contain time and counter data
  mappers[i] = new TezTaskData("task-id-" + i, "task-attempt-id-" + i);
  TezApplicationData data = new TezApplicationData().setCounters(jobCounter).setMapTaskData(mappers);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
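As above, a hypothetical caller: the read volume is split evenly across HDFS and S3A counters, and the heuristic rates mappers by throughput. The MB/s thresholds are configurable, so the expected severity is an assumption:

@Test
public void testSlowMappers() throws IOException {
  long twoHours = 2L * 60 * 60 * 1000;
  long tenMb = 10L * 1024 * 1024; // split evenly across HDFS and S3A reads
  // 10 MB over two hours is an extremely low read speed
  // (CRITICAL is an assumption; thresholds are configurable).
  assertEquals(Severity.CRITICAL, analyzeJob(twoHours, tenMb));
}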