Use of com.linkedin.drelephant.analysis.HeuristicResult in project dr-elephant by LinkedIn: class MapperSpillHeuristicTest, method analyzeJob.
private Severity analyzeJob(long spilledRecords, long mapRecords, int numTasks) throws IOException {
  MapReduceCounterData jobCounter = new MapReduceCounterData();
  MapReduceTaskData[] mappers = new MapReduceTaskData[numTasks + 1];

  MapReduceCounterData counter = new MapReduceCounterData();
  counter.set(MapReduceCounterData.CounterName.SPILLED_RECORDS, spilledRecords);
  counter.set(MapReduceCounterData.CounterName.MAP_OUTPUT_RECORDS, mapRecords);

  int i = 0;
  for (; i < numTasks; i++) {
    mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    // Zeroed five-element time array: timing is a placeholder for this test.
    mappers[i].setTimeAndCounter(new long[5], counter);
  }
  // Non-sampled task, which does not contain time and counter data
  mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);

  MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
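A test built on this helper might look like the following sketch. The method name testMapperSpillSeverity, the spill-to-output ratios, and the expected severities are illustrative assumptions, not values from MapperSpillHeuristic's configuration; JUnit's @Test and a static import of org.junit.Assert.assertEquals are assumed.

@Test
public void testMapperSpillSeverity() throws IOException {
  // Roughly one spill per output record: assumed to rate as benign.
  assertEquals(Severity.NONE, analyzeJob(1000, 1000, 100));
  // Each record spilled about three times: assumed to rate as critical.
  assertEquals(Severity.CRITICAL, analyzeJob(3000, 1000, 100));
}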
Use of com.linkedin.drelephant.analysis.HeuristicResult in project dr-elephant by LinkedIn: class MapperTimeHeuristicTest, method analyzeJob.
private Severity analyzeJob(int numTasks, long runtime) throws IOException {
  MapReduceCounterData jobCounter = new MapReduceCounterData();
  MapReduceTaskData[] mappers = new MapReduceTaskData[numTasks + 1];

  // Spread the dummy input evenly across the four supported filesystem counters.
  MapReduceCounterData taskCounter = new MapReduceCounterData();
  taskCounter.set(MapReduceCounterData.CounterName.HDFS_BYTES_READ, DUMMY_INPUT_SIZE / 4);
  taskCounter.set(MapReduceCounterData.CounterName.S3_BYTES_READ, DUMMY_INPUT_SIZE / 4);
  taskCounter.set(MapReduceCounterData.CounterName.S3A_BYTES_READ, DUMMY_INPUT_SIZE / 4);
  taskCounter.set(MapReduceCounterData.CounterName.S3N_BYTES_READ, DUMMY_INPUT_SIZE / 4);

  int i = 0;
  for (; i < numTasks; i++) {
    mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    // The first slot of the five-element time array carries the task runtime.
    mappers[i].setTimeAndCounter(new long[] { runtime, 0, 0, 0, 0 }, taskCounter);
  }
  // Non-sampled task, which does not contain time and counter data
  mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);

  MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
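A similar hedged sketch for the runtime helper; testMapperRuntimeSeverity, the runtimes, and the expected severities are assumptions rather than MapperTimeHeuristic's real thresholds. DUMMY_INPUT_SIZE and _heuristic are assumed to be defined on the test class, as in the snippet above.

@Test
public void testMapperRuntimeSeverity() throws IOException {
  long minuteMs = 60 * 1000L;
  // Many mappers each finishing in about a minute: assumed to flag over-partitioning.
  assertEquals(Severity.CRITICAL, analyzeJob(1000, minuteMs));
  // A modest task count with a healthy average runtime: assumed benign.
  assertEquals(Severity.NONE, analyzeJob(50, 20 * minuteMs));
}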
Use of com.linkedin.drelephant.analysis.HeuristicResult in project dr-elephant by LinkedIn: class ReducerMemoryHeuristicTest, method analyzeJob.
private Severity analyzeJob(long taskAvgMemMB, long containerMemMB) throws IOException {
  MapReduceCounterData jobCounter = new MapReduceCounterData();
  MapReduceTaskData[] reducers = new MapReduceTaskData[NUMTASKS + 1];

  MapReduceCounterData counter = new MapReduceCounterData();
  counter.set(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES, taskAvgMemMB * FileUtils.ONE_MB);

  Properties p = new Properties();
  p.setProperty(ReducerMemoryHeuristic.REDUCER_MEMORY_CONF, Long.toString(containerMemMB));

  int i = 0;
  for (; i < NUMTASKS; i++) {
    reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    reducers[i].setTimeAndCounter(new long[5], counter);
  }
  // Non-sampled task, which does not contain time and counter data
  reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);

  MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setReducerData(reducers);
  data.setJobConf(p);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
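Since this helper compares average physical memory against the configured container size, a usage sketch can probe both ends of the utilization range; the megabyte values and expected severities below are assumptions.

@Test
public void testReducerMemorySeverity() throws IOException {
  // Average usage close to an 8 GB container: assumed good utilization.
  assertEquals(Severity.NONE, analyzeJob(6144, 8192));
  // 1 GB average inside an 8 GB container: assumed heavily over-provisioned.
  assertEquals(Severity.CRITICAL, analyzeJob(1024, 8192));
}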
Use of com.linkedin.drelephant.analysis.HeuristicResult in project dr-elephant by LinkedIn: class ReducerSkewHeuristicTest, method analyzeJob.
private Severity analyzeJob(int numSmallTasks, int numLargeTasks, long smallInputSize, long largeInputSize)
    throws IOException {
  MapReduceCounterData jobCounter = new MapReduceCounterData();
  MapReduceTaskData[] reducers = new MapReduceTaskData[numSmallTasks + numLargeTasks + 1];

  MapReduceCounterData smallCounter = new MapReduceCounterData();
  smallCounter.set(MapReduceCounterData.CounterName.REDUCE_SHUFFLE_BYTES, smallInputSize);
  MapReduceCounterData largeCounter = new MapReduceCounterData();
  largeCounter.set(MapReduceCounterData.CounterName.REDUCE_SHUFFLE_BYTES, largeInputSize);

  int i = 0;
  for (; i < numSmallTasks; i++) {
    reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    reducers[i].setTimeAndCounter(new long[5], smallCounter);
  }
  for (; i < numSmallTasks + numLargeTasks; i++) {
    reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    reducers[i].setTimeAndCounter(new long[5], largeCounter);
  }
  // Non-sampled task, which does not contain time and counter data
  reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);

  MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setReducerData(reducers);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
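Because the helper builds two reducer groups with different shuffle sizes, a usage sketch can contrast balanced and skewed inputs; the group sizes, byte counts, and expected severities are assumptions. FileUtils.ONE_MB comes from org.apache.commons.io, as in the memory tests.

@Test
public void testReducerSkewSeverity() throws IOException {
  // Two equal groups shuffling equal bytes: assumed no skew.
  assertEquals(Severity.NONE, analyzeJob(100, 100, 500 * FileUtils.ONE_MB, 500 * FileUtils.ONE_MB));
  // Half the reducers shuffle 1000x more data than the other half: assumed critical skew.
  assertEquals(Severity.CRITICAL, analyzeJob(100, 100, FileUtils.ONE_MB, 1000 * FileUtils.ONE_MB));
}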
Use of com.linkedin.drelephant.analysis.HeuristicResult in project dr-elephant by LinkedIn: class MapperMemoryHeuristicTest (Tez), method analyzeJob.
private Severity analyzeJob(long taskAvgMemMB, long containerMemMB) throws IOException {
  TezCounterData jobCounter = new TezCounterData();
  TezTaskData[] mappers = new TezTaskData[NUMTASKS + 1];

  TezCounterData counter = new TezCounterData();
  counter.set(TezCounterData.CounterName.PHYSICAL_MEMORY_BYTES, taskAvgMemMB * FileUtils.ONE_MB);

  Properties p = new Properties();
  p.setProperty(MapperMemoryHeuristic.MAPPER_MEMORY_CONF, Long.toString(containerMemMB));

  int i = 0;
  for (; i < NUMTASKS; i++) {
    mappers[i] = new TezTaskData("task-id-" + i, "task-attempt-id-" + i);
    mappers[i].setTime(new long[5]);
    mappers[i].setCounter(counter);
  }
  // Non-sampled task, which does not contain time and counter data
  mappers[i] = new TezTaskData("task-id-" + i, "task-attempt-id-" + i);

  TezApplicationData data = new TezApplicationData().setCounters(jobCounter).setMapTaskData(mappers);
  data.setConf(p);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
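A parallel sketch for the Tez variant, mirroring the reducer-memory example above; the values and expected severities are again assumptions.

@Test
public void testTezMapperMemorySeverity() throws IOException {
  // Average physical memory near the configured container size: assumed benign.
  assertEquals(Severity.NONE, analyzeJob(6144, 8192));
  // Severe under-utilization of the container: assumed critical.
  assertEquals(Severity.CRITICAL, analyzeJob(1024, 8192));
}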