
Example 6 with JobId

Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.

From the class TestClientServiceDelegate, method getJobReportRequest:

private GetJobReportRequest getJobReportRequest() {
    GetJobReportRequest request = Records.newRecord(GetJobReportRequest.class);
    request.setJobId(jobId);
    return request;
}
Also used : GetJobReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest)
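
Building on the fixture above, here is a minimal sketch of how the same request might be assembled from a classic JobID, using the TypeConverter and Records helpers that appear elsewhere in these examples. The class and method names below are illustrative, not part of Hadoop.

import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.yarn.util.Records;

public class JobReportRequestSketch {
    // Builds a GetJobReportRequest for a classic JobID by first converting it
    // to the YARN-side JobId record, mirroring the test fixture above.
    static GetJobReportRequest requestFor(JobID classicJobId) {
        JobId jobId = TypeConverter.toYarn(classicJobId);
        GetJobReportRequest request = Records.newRecord(GetJobReportRequest.class);
        request.setJobId(jobId);
        return request;
    }
}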

Example 7 with JobId

Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.

From the class ClientServiceDelegate, method getTaskCompletionEvents:

public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1, int arg2) throws IOException, InterruptedException {
    // arg0: the job, arg1: the first event id to fetch, arg2: the maximum number of events to return.
    org.apache.hadoop.mapreduce.v2.api.records.JobId jobID = TypeConverter.toYarn(arg0);
    GetTaskAttemptCompletionEventsRequest request = recordFactory.newRecordInstance(GetTaskAttemptCompletionEventsRequest.class);
    request.setJobId(jobID);
    request.setFromEventId(arg1);
    request.setMaxEvents(arg2);
    List<org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent> list = ((GetTaskAttemptCompletionEventsResponse) invoke("getTaskAttemptCompletionEvents", GetTaskAttemptCompletionEventsRequest.class, request)).getCompletionEventList();
    return TypeConverter.fromYarn(list.toArray(new org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent[0]));
}
Also used : GetTaskAttemptCompletionEventsRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest) GetTaskAttemptCompletionEventsResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsResponse)
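
A hedged usage sketch of the method above: paging through completion events in fixed-size batches. It assumes an already-constructed ClientServiceDelegate (construction elided); the class name, batch size and printing are illustrative only.

import java.io.IOException;
import org.apache.hadoop.mapred.ClientServiceDelegate;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskCompletionEvent;

class CompletionEventPager {
    // Pulls completion events in batches until an empty batch is returned.
    // The ClientServiceDelegate is assumed to be constructed elsewhere.
    static void printAllEvents(ClientServiceDelegate delegate, JobID jobId)
            throws IOException, InterruptedException {
        final int batchSize = 10;
        int fromEventId = 0;
        TaskCompletionEvent[] events;
        do {
            events = delegate.getTaskCompletionEvents(jobId, fromEventId, batchSize);
            for (TaskCompletionEvent event : events) {
                System.out.println(event);   // attempt id, status, etc.
            }
            fromEventId += events.length;
        } while (events.length > 0);
    }
}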

Example 8 with JobId

Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.

From the class ClientServiceDelegate, method invoke:

private synchronized Object invoke(String method, Class argClass, Object args) throws IOException {
    Method methodOb = null;
    try {
        methodOb = MRClientProtocol.class.getMethod(method, argClass);
    } catch (SecurityException e) {
        throw new YarnRuntimeException(e);
    } catch (NoSuchMethodException e) {
        throw new YarnRuntimeException("Method name mismatch", e);
    }
    maxClientRetry = this.conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES);
    IOException lastException = null;
    while (maxClientRetry > 0) {
        MRClientProtocol MRClientProxy = null;
        try {
            MRClientProxy = getProxy();
            return methodOb.invoke(MRClientProxy, args);
        } catch (InvocationTargetException e) {
            // Will not throw out YarnException anymore
            LOG.debug("Failed to contact AM/History for job " + jobId + " retrying..", e.getTargetException());
            // Force reconnection by setting the proxy to null.
            realProxy = null;
            if (e.getCause() instanceof AuthorizationException) {
                throw new IOException(e.getTargetException());
            }
            // If the failed call went through the AM proxy, the AM may simply be
            // restarting, so do not count this attempt against maxClientRetry; wait
            // for its AM to be restarted.
            if (!usingAMProxy.get()) {
                maxClientRetry--;
            }
            usingAMProxy.set(false);
            lastException = new IOException(e.getTargetException());
            try {
                Thread.sleep(100);
            } catch (InterruptedException ie) {
                LOG.warn("ClientServiceDelegate invoke call interrupted", ie);
                throw new YarnRuntimeException(ie);
            }
        } catch (Exception e) {
            LOG.debug("Failed to contact AM/History for job " + jobId + "  Will retry..", e);
            // Force reconnection by setting the proxy to null.
            realProxy = null;
            // Any other failure (for example an RM shutdown) counts against the retry limit.
            maxClientRetry--;
            lastException = new IOException(e.getMessage());
            try {
                Thread.sleep(100);
            } catch (InterruptedException ie) {
                LOG.warn("ClientServiceDelegate invoke call interrupted", ie);
                throw new YarnRuntimeException(ie);
            }
        }
    }
    throw lastException;
}
Also used : YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) AuthorizationException(org.apache.hadoop.security.authorize.AuthorizationException) Method(java.lang.reflect.Method) IOException(java.io.IOException) InvocationTargetException(java.lang.reflect.InvocationTargetException) ApplicationNotFoundException(org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) MRClientProtocol(org.apache.hadoop.mapreduce.v2.api.MRClientProtocol)
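
The core of invoke() is: resolve the MRClientProtocol method once via reflection, then call it through the current proxy in a bounded retry loop, forcing a reconnect after every failure. Below is a stripped-down sketch of that reflective-retry shape, with none of the proxy or AM-specific handling; the class and helper names are illustrative, not from Hadoop.

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

final class RetryingInvoker {
    // Looks the method up once, then retries the reflective call a bounded
    // number of times, sleeping briefly between attempts.
    static Object invoke(Object target, String methodName, Class<?> argClass,
                         Object arg, int maxRetries) throws IOException {
        Method method;
        try {
            method = target.getClass().getMethod(methodName, argClass);
        } catch (NoSuchMethodException e) {
            throw new IllegalArgumentException("Method name mismatch", e);
        }
        IOException lastException =
            new IOException("no attempt made for " + methodName);
        while (maxRetries-- > 0) {
            try {
                return method.invoke(target, arg);
            } catch (IllegalAccessException | InvocationTargetException e) {
                Throwable cause = e.getCause() != null ? e.getCause() : e;
                lastException = new IOException(cause);
                try {
                    Thread.sleep(100L);   // brief back-off before the next attempt
                } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt();
                    throw new IOException("interrupted while retrying", ie);
                }
            }
        }
        throw lastException;
    }
}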

Example 9 with JobId

Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.

From the class ClientServiceDelegate, method getLogFilePath:

public LogParams getLogFilePath(JobID oldJobID, TaskAttemptID oldTaskAttemptID) throws IOException {
    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = TypeConverter.toYarn(oldJobID);
    GetJobReportRequest request = recordFactory.newRecordInstance(GetJobReportRequest.class);
    request.setJobId(jobId);
    JobReport report = ((GetJobReportResponse) invoke("getJobReport", GetJobReportRequest.class, request)).getJobReport();
    // Log locations are only available once the job has reached a terminal state.
    if (EnumSet.of(JobState.SUCCEEDED, JobState.FAILED, JobState.KILLED, JobState.ERROR).contains(report.getJobState())) {
        if (oldTaskAttemptID != null) {
            GetTaskAttemptReportRequest taRequest = recordFactory.newRecordInstance(GetTaskAttemptReportRequest.class);
            taRequest.setTaskAttemptId(TypeConverter.toYarn(oldTaskAttemptID));
            TaskAttemptReport taReport = ((GetTaskAttemptReportResponse) invoke("getTaskAttemptReport", GetTaskAttemptReportRequest.class, taRequest)).getTaskAttemptReport();
            if (taReport.getContainerId() == null || taReport.getNodeManagerHost() == null) {
                throw new IOException("Unable to get log information for task: " + oldTaskAttemptID);
            }
            return new LogParams(taReport.getContainerId().toString(), taReport.getContainerId().getApplicationAttemptId().getApplicationId().toString(), NodeId.newInstance(taReport.getNodeManagerHost(), taReport.getNodeManagerPort()).toString(), report.getUser());
        } else {
            if (report.getAMInfos() == null || report.getAMInfos().size() == 0) {
                throw new IOException("Unable to get log information for job: " + oldJobID);
            }
            // No task attempt was given: fall back to the job's most recent AM attempt.
            AMInfo amInfo = report.getAMInfos().get(report.getAMInfos().size() - 1);
            return new LogParams(amInfo.getContainerId().toString(), amInfo.getAppAttemptId().getApplicationId().toString(), NodeId.newInstance(amInfo.getNodeManagerHost(), amInfo.getNodeManagerPort()).toString(), report.getUser());
        }
    } else {
        throw new IOException("Cannot get log path for a in-progress job");
    }
}
Also used : GetTaskAttemptReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest) IOException(java.io.IOException) GetJobReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) GetJobReportResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse) LogParams(org.apache.hadoop.mapreduce.v2.LogParams) AMInfo(org.apache.hadoop.mapreduce.v2.api.records.AMInfo) TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) GetTaskAttemptReportResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportResponse)
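
A hedged usage sketch of getLogFilePath(): the four strings packed into LogParams identify the container, application, node and owner needed to locate the logs; here they are only printed. It assumes an already-constructed ClientServiceDelegate and that LogParams exposes plain getters for the values it is built with; the class name is illustrative.

import java.io.IOException;
import org.apache.hadoop.mapred.ClientServiceDelegate;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.v2.LogParams;

class LogLocationPrinter {
    // attemptId may be null, in which case getLogFilePath() falls back to the
    // job's most recent AM attempt, as shown in the example above.
    static void printLogLocation(ClientServiceDelegate delegate, JobID jobId,
                                 TaskAttemptID attemptId) throws IOException {
        LogParams params = delegate.getLogFilePath(jobId, attemptId);
        System.out.println("container   = " + params.getContainerId());
        System.out.println("application = " + params.getApplicationId());
        System.out.println("node        = " + params.getNodeId());
        System.out.println("owner       = " + params.getOwner());
    }
}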

Example 10 with JobId

Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.

From the class ClientServiceDelegate, method getJobStatus:

public JobStatus getJobStatus(JobID oldJobID) throws IOException {
    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = TypeConverter.toYarn(oldJobID);
    GetJobReportRequest request = recordFactory.newRecordInstance(GetJobReportRequest.class);
    request.setJobId(jobId);
    JobReport report = ((GetJobReportResponse) invoke("getJobReport", GetJobReportRequest.class, request)).getJobReport();
    JobStatus jobStatus = null;
    if (report != null) {
        // Fill in the job file path if the report did not carry one.
        if (StringUtils.isEmpty(report.getJobFile())) {
            String jobFile = MRApps.getJobFile(conf, report.getUser(), oldJobID);
            report.setJobFile(jobFile);
        }
        String historyTrackingUrl = report.getTrackingUrl();
        String url = StringUtils.isNotEmpty(historyTrackingUrl) ? historyTrackingUrl : trackingUrl;
        jobStatus = TypeConverter.fromYarn(report, url);
    }
    return jobStatus;
}
Also used : JobStatus(org.apache.hadoop.mapreduce.JobStatus) GetJobReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) GetJobReportResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse)
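
A hedged sketch of how getJobStatus() is commonly consumed: poll until the returned JobStatus reports completion. The delegate, the poll interval and the class name are assumptions for illustration.

import java.io.IOException;
import org.apache.hadoop.mapred.ClientServiceDelegate;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.JobStatus;

class JobStatusPoller {
    // Polls the delegate until the job reaches a terminal state.
    // A null return means no report was available for the job.
    static JobStatus waitForCompletion(ClientServiceDelegate delegate, JobID jobId)
            throws IOException, InterruptedException {
        JobStatus status = delegate.getJobStatus(jobId);
        while (status != null && !status.isJobComplete()) {
            Thread.sleep(1000L);   // fixed one-second poll interval
            status = delegate.getJobStatus(jobId);
        }
        return status;
    }
}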

Aggregations

JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 276 usages
Test (org.junit.Test): 238 usages
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 212 usages
ClientResponse (com.sun.jersey.api.client.ClientResponse): 103 usages
WebResource (com.sun.jersey.api.client.WebResource): 103 usages
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 88 usages
JSONObject (org.codehaus.jettison.json.JSONObject): 81 usages
Configuration (org.apache.hadoop.conf.Configuration): 77 usages
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 61 usages
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 55 usages
Path (org.apache.hadoop.fs.Path): 52 usages
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 51 usages
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 47 usages
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 41 usages
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 36 usages
AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext): 35 usages
IOException (java.io.IOException): 32 usages
JobConf (org.apache.hadoop.mapred.JobConf): 28 usages
HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo): 25 usages
JobID (org.apache.hadoop.mapreduce.JobID): 23 usages