Use of com.linkedin.drelephant.mapreduce.data.MapReduceCounterData in project dr-elephant by linkedin.
The class MapReduceFSFetcherHadoop2, method getTaskData:
protected MapReduceTaskData[] getTaskData(String jobId, List<JobHistoryParser.TaskInfo> infoList) {
    // Sample the task list and get the number of tasks to process.
    int sampleSize = sampleAndGetSize(jobId, infoList);
    List<MapReduceTaskData> taskList = new ArrayList<MapReduceTaskData>();
    for (int i = 0; i < sampleSize; i++) {
      JobHistoryParser.TaskInfo tInfo = infoList.get(i);
      String taskId = tInfo.getTaskId().toString();
      // Pick the attempt that decided the task's final status.
      TaskAttemptID attemptId = null;
      if (tInfo.getTaskStatus().equals("SUCCEEDED")) {
        attemptId = tInfo.getSuccessfulAttemptId();
      } else {
        attemptId = tInfo.getFailedDueToAttemptId();
      }
      MapReduceTaskData taskData =
          new MapReduceTaskData(taskId, attemptId == null ? "" : attemptId.toString(), tInfo.getTaskStatus());
      MapReduceCounterData taskCounterData = getCounterData(tInfo.getCounters());
      // Execution times are only available when an attempt exists.
      long[] taskExecTime = null;
      if (attemptId != null) {
        taskExecTime = getTaskExecTime(tInfo.getAllTaskAttempts().get(attemptId));
      }
      taskData.setTimeAndCounter(taskExecTime, taskCounterData);
      taskList.add(taskData);
    }
    return taskList.toArray(new MapReduceTaskData[taskList.size()]);
}
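The getCounterData helper referenced above converts the Hadoop Counters attached to each TaskInfo into a MapReduceCounterData. A minimal sketch of such a conversion, assuming MapReduceCounterData exposes a set(groupName, counterName, value) mutator (that method name is an assumption, not shown in the snippet above):

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;

// Sketch only: copies every group/name/value triple into the
// dr-elephant counter holder. Assumes a set(String, String, long)
// method on MapReduceCounterData.
private MapReduceCounterData getCounterData(Counters counters) {
    MapReduceCounterData counterData = new MapReduceCounterData();
    if (counters != null) {
      // Counters is iterable over its counter groups, and each
      // CounterGroup is iterable over its individual counters.
      for (CounterGroup group : counters) {
        for (Counter counter : group) {
          counterData.set(group.getName(), counter.getName(), counter.getValue());
        }
      }
    }
    return counterData;
}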
Use of com.linkedin.drelephant.mapreduce.data.MapReduceCounterData in project dr-elephant by linkedin.
The class MapReduceFetcherHadoop2, method fetchData:
@Override
public MapReduceApplicationData fetchData(AnalyticJob analyticJob) throws IOException, AuthenticationException {
    String appId = analyticJob.getAppId();
    MapReduceApplicationData jobData = new MapReduceApplicationData();
    // Derive the job id from the application id (application_* -> job_*).
    String jobId = Utils.getJobIdFromApplicationId(appId);
    jobData.setAppId(appId).setJobId(jobId);
    // Change the job tracking url to the job history page
    analyticJob.setTrackingUrl(_jhistoryWebAddr + jobId);
    try {
      // Fetch job config
      Properties jobConf = _jsonFactory.getProperties(_urlFactory.getJobConfigURL(jobId));
      jobData.setJobConf(jobConf);

      URL jobURL = _urlFactory.getJobURL(jobId);
      String state = _jsonFactory.getState(jobURL);

      jobData.setSubmitTime(_jsonFactory.getSubmitTime(jobURL));
      jobData.setStartTime(_jsonFactory.getStartTime(jobURL));
      jobData.setFinishTime(_jsonFactory.getFinishTime(jobURL));

      if (state.equals("SUCCEEDED")) {
        jobData.setSucceeded(true);
        // Fetch job counters
        MapReduceCounterData jobCounter = _jsonFactory.getJobCounter(_urlFactory.getJobCounterURL(jobId));
        // Fetch task data
        URL taskListURL = _urlFactory.getTaskListURL(jobId);
        List<MapReduceTaskData> mapperList = new ArrayList<MapReduceTaskData>();
        List<MapReduceTaskData> reducerList = new ArrayList<MapReduceTaskData>();
        _jsonFactory.getTaskDataAll(taskListURL, jobId, mapperList, reducerList);
        MapReduceTaskData[] mapperData = mapperList.toArray(new MapReduceTaskData[mapperList.size()]);
        MapReduceTaskData[] reducerData = reducerList.toArray(new MapReduceTaskData[reducerList.size()]);
        jobData.setCounters(jobCounter).setMapperData(mapperData).setReducerData(reducerData);
      } else if (state.equals("FAILED")) {
        jobData.setSucceeded(false);
        // Fetch job counters and task data, same as the SUCCEEDED branch
        MapReduceCounterData jobCounter = _jsonFactory.getJobCounter(_urlFactory.getJobCounterURL(jobId));
        URL taskListURL = _urlFactory.getTaskListURL(jobId);
        List<MapReduceTaskData> mapperList = new ArrayList<MapReduceTaskData>();
        List<MapReduceTaskData> reducerList = new ArrayList<MapReduceTaskData>();
        _jsonFactory.getTaskDataAll(taskListURL, jobId, mapperList, reducerList);
        MapReduceTaskData[] mapperData = mapperList.toArray(new MapReduceTaskData[mapperList.size()]);
        MapReduceTaskData[] reducerData = reducerList.toArray(new MapReduceTaskData[reducerList.size()]);
        jobData.setCounters(jobCounter).setMapperData(mapperData).setReducerData(reducerData);
        // Additionally record the diagnostic info for the failure.
        String diagnosticInfo;
        try {
          diagnosticInfo = parseException(jobData.getJobId(), _jsonFactory.getDiagnosticInfo(jobURL));
        } catch (Exception e) {
          diagnosticInfo = null;
          logger.warn("Failed getting diagnostic info for failed job " + jobData.getJobId());
        }
        jobData.setDiagnosticInfo(diagnosticInfo);
      } else {
        // Should not reach here
        throw new RuntimeException("Job state not supported. Should be either SUCCEEDED or FAILED");
      }
    } finally {
      ThreadContextMR2.updateAuthToken();
    }
    return jobData;
}
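The _urlFactory used by fetchData builds endpoints on the MapReduce History Server REST API. A minimal sketch of such a factory, assuming the standard /ws/v1/history/mapreduce/jobs paths; the class name, constructor argument, and field are illustrative, not the actual dr-elephant code:

import java.net.MalformedURLException;
import java.net.URL;

// Illustrative URL factory over the History Server REST API.
// historyServerAddr is assumed to be host:port, e.g. "jh.example.com:19888".
class UrlFactory {
    private final String _restRoot;

    UrlFactory(String historyServerAddr) {
      _restRoot = "http://" + historyServerAddr + "/ws/v1/history/mapreduce/jobs";
    }

    URL getJobURL(String jobId) throws MalformedURLException {
      // Job overview, including state and submit/start/finish times
      return new URL(_restRoot + "/" + jobId);
    }

    URL getJobConfigURL(String jobId) throws MalformedURLException {
      return new URL(_restRoot + "/" + jobId + "/conf");
    }

    URL getJobCounterURL(String jobId) throws MalformedURLException {
      return new URL(_restRoot + "/" + jobId + "/counters");
    }

    URL getTaskListURL(String jobId) throws MalformedURLException {
      return new URL(_restRoot + "/" + jobId + "/tasks");
    }
}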