Use of com.linkedin.drelephant.mapreduce.data.MapReduceTaskData in project dr-elephant by LinkedIn.
The analyzeJob helper of the MapperMemoryHeuristicTest class builds a synthetic set of mapper tasks that all report the same physical-memory counter, wires the container size into the job configuration, and returns the severity assigned by the mapper-memory heuristic:
private Severity analyzeJob(long taskAvgMemMB, long containerMemMB) throws IOException {
  MapReduceCounterData jobCounter = new MapReduceCounterData();
  MapReduceTaskData[] mappers = new MapReduceTaskData[NUMTASKS + 1];

  // Every sampled task reports the same average physical memory usage.
  MapReduceCounterData counter = new MapReduceCounterData();
  counter.set(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES, taskAvgMemMB * FileUtils.ONE_MB);

  // The requested container size goes into the job configuration.
  Properties p = new Properties();
  p.setProperty(MapperMemoryHeuristic.MAPPER_MEMORY_CONF, Long.toString(containerMemMB));

  int i = 0;
  for (; i < NUMTASKS; i++) {
    mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    // new long[5] is a zero-filled time array; only the counters matter here.
    mappers[i].setTimeAndCounter(new long[5], counter);
  }
  // Non-sampled task, which does not contain time and counter data
  mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);

  MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers);
  data.setJobConf(p);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
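A minimal sketch of how this helper might be exercised from a test in the same class (assuming JUnit's assertTrue is statically imported). The memory values and the expected ordering are illustrative assumptions, not thresholds read from MapperMemoryHeuristic:

@Test
public void testOverProvisionedContainerIsFlagged() throws IOException {
  // Assumed scenario: tasks averaging ~600 MB inside 8 GB containers waste
  // memory, so their severity should be at least as bad as a snug fit.
  Severity snugFit = analyzeJob(2000, 2048);
  Severity oversized = analyzeJob(600, 8192);
  assertTrue(oversized.getValue() >= snugFit.getValue());
}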
Use of com.linkedin.drelephant.mapreduce.data.MapReduceTaskData in project dr-elephant by LinkedIn.
The analyzeJob helper of the MapperSpeedHeuristicTest class gives every mapper the same runtime and spreads readBytes evenly across the HDFS and S3 read counters, then returns the severity from the mapper-speed heuristic:
private Severity analyzeJob(long runtimeMs, long readBytes) throws IOException {
  MapReduceCounterData jobCounter = new MapReduceCounterData();
  MapReduceTaskData[] mappers = new MapReduceTaskData[NUMTASKS + 1];

  // Spread the input evenly across the four supported read counters.
  MapReduceCounterData counter = new MapReduceCounterData();
  counter.set(MapReduceCounterData.CounterName.HDFS_BYTES_READ, readBytes / 4);
  counter.set(MapReduceCounterData.CounterName.S3_BYTES_READ, readBytes / 4);
  counter.set(MapReduceCounterData.CounterName.S3A_BYTES_READ, readBytes / 4);
  counter.set(MapReduceCounterData.CounterName.S3N_BYTES_READ, readBytes / 4);

  int i = 0;
  for (; i < NUMTASKS; i++) {
    mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    // The first slot of the time array carries the task's total runtime.
    mappers[i].setTimeAndCounter(new long[] { runtimeMs, 0, 0, 0, 0 }, counter);
  }
  // Non-sampled task, which does not contain time and counter data
  mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);

  MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
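A hedged usage sketch: the byte and runtime figures below are made up for illustration, and the real severity thresholds live in MapperSpeedHeuristic's configuration. The intuition is that the same input processed over a much longer runtime should not look healthier:

@Test
public void testSlowMappersAreAtLeastAsSevereAsFastOnes() throws IOException {
  long oneGB = 1024L * FileUtils.ONE_MB;
  // Same input size; one minute versus two hours of runtime (assumed values).
  Severity fast = analyzeJob(60 * 1000L, oneGB);
  Severity slow = analyzeJob(2 * 60 * 60 * 1000L, oneGB);
  assertTrue(slow.getValue() >= fast.getValue());
}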
Use of com.linkedin.drelephant.mapreduce.data.MapReduceTaskData in project dr-elephant by LinkedIn.
The analyzeJob helper of the MapperSpillHeuristicTest class sets the spilled-records and map-output-records counters on each mapper and returns the severity from the mapper-spill heuristic:
private Severity analyzeJob(long spilledRecords, long mapRecords, int numTasks) throws IOException {
  MapReduceCounterData jobCounter = new MapReduceCounterData();
  MapReduceTaskData[] mappers = new MapReduceTaskData[numTasks + 1];

  // Each sampled task reports the same spill and output record counts.
  MapReduceCounterData counter = new MapReduceCounterData();
  counter.set(MapReduceCounterData.CounterName.SPILLED_RECORDS, spilledRecords);
  counter.set(MapReduceCounterData.CounterName.MAP_OUTPUT_RECORDS, mapRecords);

  int i = 0;
  for (; i < numTasks; i++) {
    mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    mappers[i].setTimeAndCounter(new long[5], counter);
  }
  // Non-sampled task, which does not contain time and counter data
  mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);

  MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
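An illustrative sketch with assumed numbers: a spill ratio of 1.0 (every record spilled exactly once, the minimum possible) should not be rated worse than a ratio of 3.0:

@Test
public void testHigherSpillRatioIsAtLeastAsSevere() throws IOException {
  // 1,000,000 output records per task; spill ratios of 1.0 and 3.0 (assumed).
  Severity singleSpill = analyzeJob(1_000_000L, 1_000_000L, 100);
  Severity tripleSpill = analyzeJob(3_000_000L, 1_000_000L, 100);
  assertTrue(tripleSpill.getValue() >= singleSpill.getValue());
}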
Use of com.linkedin.drelephant.mapreduce.data.MapReduceTaskData in project dr-elephant by LinkedIn.
The analyzeJob helper of the MapperTimeHeuristicTest class creates numTasks mappers that share the given runtime and a fixed dummy input size, then returns the severity from the mapper-time heuristic:
private Severity analyzeJob(int numTasks, long runtime) throws IOException {
  MapReduceCounterData jobCounter = new MapReduceCounterData();
  MapReduceTaskData[] mappers = new MapReduceTaskData[numTasks + 1];

  // Split a fixed dummy input size evenly across the four read counters.
  MapReduceCounterData taskCounter = new MapReduceCounterData();
  taskCounter.set(MapReduceCounterData.CounterName.HDFS_BYTES_READ, DUMMY_INPUT_SIZE / 4);
  taskCounter.set(MapReduceCounterData.CounterName.S3_BYTES_READ, DUMMY_INPUT_SIZE / 4);
  taskCounter.set(MapReduceCounterData.CounterName.S3A_BYTES_READ, DUMMY_INPUT_SIZE / 4);
  taskCounter.set(MapReduceCounterData.CounterName.S3N_BYTES_READ, DUMMY_INPUT_SIZE / 4);

  int i = 0;
  for (; i < numTasks; i++) {
    mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    // The first slot of the time array carries the task's total runtime.
    mappers[i].setTimeAndCounter(new long[] { runtime, 0, 0, 0, 0 }, taskCounter);
  }
  // Non-sampled task, which does not contain time and counter data
  mappers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);

  MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setMapperData(mappers);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
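A sketch of how the two knobs might be combined in a test. The task counts, runtimes, and the expected ordering are assumptions for illustration only; the actual averaging and thresholds belong to MapperTimeHeuristic:

@Test
public void testManyShortTasksVersusFewerLongerTasks() throws IOException {
  // Assumed scenario: 1000 mappers of ~5 seconds each look more suspicious
  // than 100 mappers of ~15 minutes each.
  Severity manyShort = analyzeJob(1000, 5 * 1000L);
  Severity fewLong = analyzeJob(100, 15 * 60 * 1000L);
  assertTrue(manyShort.getValue() >= fewLong.getValue());
}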
Use of com.linkedin.drelephant.mapreduce.data.MapReduceTaskData in project dr-elephant by LinkedIn.
The analyzeJob helper of the ReducerMemoryHeuristicTest class mirrors the mapper-memory test above for reducers, wiring the container size into the job configuration and returning the severity from the reducer-memory heuristic:
private Severity analyzeJob(long taskAvgMemMB, long containerMemMB) throws IOException {
  MapReduceCounterData jobCounter = new MapReduceCounterData();
  MapReduceTaskData[] reducers = new MapReduceTaskData[NUMTASKS + 1];

  // Every sampled reducer reports the same average physical memory usage.
  MapReduceCounterData counter = new MapReduceCounterData();
  counter.set(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES, taskAvgMemMB * FileUtils.ONE_MB);

  // The requested container size goes into the job configuration.
  Properties p = new Properties();
  p.setProperty(ReducerMemoryHeuristic.REDUCER_MEMORY_CONF, Long.toString(containerMemMB));

  int i = 0;
  for (; i < NUMTASKS; i++) {
    reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);
    reducers[i].setTimeAndCounter(new long[5], counter);
  }
  // Non-sampled task, which does not contain time and counter data
  reducers[i] = new MapReduceTaskData("task-id-" + i, "task-attempt-id-" + i);

  MapReduceApplicationData data = new MapReduceApplicationData().setCounters(jobCounter).setReducerData(reducers);
  data.setJobConf(p);
  HeuristicResult result = _heuristic.apply(data);
  return result.getSeverity();
}
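The reducer variant can be exercised the same way as the mapper-memory helper earlier; the values below are again illustrative assumptions rather than ReducerMemoryHeuristic's real thresholds:

@Test
public void testReducerContainerSizing() throws IOException {
  // Assumed scenario: ~600 MB of actual use inside 8 GB containers should
  // rate at least as severe as a container sized close to actual usage.
  Severity snugFit = analyzeJob(2000, 2048);
  Severity oversized = analyzeJob(600, 8192);
  assertTrue(oversized.getValue() >= snugFit.getValue());
}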