Use of com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData in project dr-elephant by linkedin.
Class ReducerTimeHeuristicTest, method analyzeJob:
private Severity analyzeJob(long runtimeMs, int numTasks) throws IOException {
  MapReduceCounterData dummyCounter = new MapReduceCounterData();
  MapReduceTaskData[] reducers = new MapReduceTaskData[numTasks + 1];
  int i = 0;
  for (; i < numTasks; i++) {
    reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    reducers[i].setTimeAndCounter(new long[] { runtimeMs, 0, 0, 0, 0 }, dummyCounter);
  }
  // Non-sampled task, which does not contain time and counter data
  reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
  MapReduceApplicationData data = new MapReduceApplicationData().setCounters(dummyCounter).setReducerData(reducers);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
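The helper builds numTasks sampled reducers that all report the same runtime, appends one non-sampled task (mirroring real fetcher output, where some tasks carry no time or counter data), and returns the severity the heuristic assigns. A test built on it might look like the following sketch; the runtime value, reducer count, and expected outcome are illustrative assumptions, not the project's actual thresholds.

@Test
public void testShortRunningReducers() throws IOException {
  // Hypothetical inputs: a 30-second average reducer runtime across 1000
  // reducers. Whether this crosses a severity threshold depends on the
  // heuristic's configuration, so the expected result is an assumption.
  Severity severity = analyzeJob(30 * 1000L, 1000);
  assertTrue("Expected short-running reducers to be flagged", severity != Severity.NONE);
}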
Use of com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData in project dr-elephant by linkedin.
Class DistributedCacheLimitHeuristicTest, method testHeuristicResultCacheFileLimitViolated:
/**
 * File size limit exceeded for file in cache.
 */
@Test
public void testHeuristicResultCacheFileLimitViolated() {
  jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,600000000");
  jobConf.setProperty("mapreduce.job.cache.archives.filesizes", "400,500,600");
  MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf);
  HeuristicResult result = _heuristic.apply(data);
  assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.CRITICAL);
}
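The DistributedCacheLimitHeuristicTest cases here all rely on a shared jobConf (a java.util.Properties, as setJobConf expects) and a _heuristic field initialized in a setup method. A minimal sketch of that fixture, assuming placeholder cache paths; the heuristic's construction is elided because it takes project-specific configuration:

private Properties jobConf;
// _heuristic is a DistributedCacheLimitHeuristic; its construction from the
// project's heuristic configuration is elided here.

@Before
public void setup() {
  jobConf = new Properties();
  // Three cache files and three archives, matching the three-entry size
  // lists set in the tests; the paths themselves are placeholders.
  jobConf.setProperty("mapreduce.job.cache.files", "/cache/file1,/cache/file2,/cache/file3");
  jobConf.setProperty("mapreduce.job.cache.archives", "/cache/archive1,/cache/archive2,/cache/archive3");
}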
Use of com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData in project dr-elephant by linkedin.
Class DistributedCacheLimitHeuristicTest, method testHeuristicResultArchiveCacheFilesAndSizeLengthMismatch:
/**
 * File size not found for all the files in archive cache.
 */
@Test
public void testHeuristicResultArchiveCacheFilesAndSizeLengthMismatch() {
  jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,300");
  jobConf.setProperty("mapreduce.job.cache.archives.filesizes", "400,500");
  MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf);
  HeuristicResult result = _heuristic.apply(data);
  assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.MODERATE);
}
Use of com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData in project dr-elephant by linkedin.
Class DistributedCacheLimitHeuristicTest, method testHeuristicResultArchiveCacheFileLimitViolated:
/**
 * File size limit exceeded for file in archive cache.
 */
@Test
public void testHeuristicResultArchiveCacheFileLimitViolated() {
  jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,300");
  jobConf.setProperty("mapreduce.job.cache.archives.filesizes", "400,500,600000000");
  MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf);
  HeuristicResult result = _heuristic.apply(data);
  assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.CRITICAL);
}
Use of com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData in project dr-elephant by linkedin.
Class DistributedCacheLimitHeuristicTest, method testHeuristicResultWithEmptyArchiveCacheFiles:
/**
 * Archive cache not used by the application.
 */
@Test
public void testHeuristicResultWithEmptyArchiveCacheFiles() {
  jobConf.remove("mapreduce.job.cache.archives");
  jobConf.setProperty("mapreduce.job.cache.files.filesizes", "100,200,300");
  MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf);
  HeuristicResult result = _heuristic.apply(data);
  assertTrue("Failed to match on expected severity", result.getSeverity() == Severity.NONE);
}
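The three severity-producing tests differ only in the configured size strings and the expected severity, so they collapse naturally into a parameterized form. A sketch assuming JUnit 4's Parameterized runner and the same fixture as above (the class name and fixture wiring are illustrative, not part of the project):

@RunWith(Parameterized.class)
public class CacheLimitSeverityTest {
  private Properties jobConf;
  private DistributedCacheLimitHeuristic _heuristic;  // construction elided, as in the fixture sketch

  private final String fileSizes;
  private final String archiveSizes;
  private final Severity expected;

  public CacheLimitSeverityTest(String fileSizes, String archiveSizes, Severity expected) {
    this.fileSizes = fileSizes;
    this.archiveSizes = archiveSizes;
    this.expected = expected;
  }

  @Parameterized.Parameters
  public static Collection<Object[]> cases() {
    return Arrays.asList(new Object[][] {
        // cache file exceeds the per-file size limit
        { "100,200,600000000", "400,500,600", Severity.CRITICAL },
        // fewer archive sizes than archive entries
        { "100,200,300", "400,500", Severity.MODERATE },
        // archive exceeds the per-file size limit
        { "100,200,300", "400,500,600000000", Severity.CRITICAL },
    });
  }

  @Before
  public void setup() {
    jobConf = new Properties();
    jobConf.setProperty("mapreduce.job.cache.files", "/cache/file1,/cache/file2,/cache/file3");
    jobConf.setProperty("mapreduce.job.cache.archives", "/cache/archive1,/cache/archive2,/cache/archive3");
  }

  @Test
  public void severityMatches() {
    jobConf.setProperty("mapreduce.job.cache.files.filesizes", fileSizes);
    jobConf.setProperty("mapreduce.job.cache.archives.filesizes", archiveSizes);
    MapReduceApplicationData data = new MapReduceApplicationData().setJobConf(jobConf);
    assertEquals("Failed to match on expected severity", expected, _heuristic.apply(data).getSeverity());
  }
}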