
Example 1 with JobStatus

Use of org.apache.hadoop.mapreduce.JobStatus in project hadoop by apache.

From class CLI, method displayJobList.

@Private
public void displayJobList(JobStatus[] jobs, PrintWriter writer) {
    writer.println("Total jobs:" + jobs.length);
    writer.printf(headerPattern, "JobId", "JobName", "State", "StartTime",
        "UserName", "Queue", "Priority", "UsedContainers", "RsvdContainers",
        "UsedMem", "RsvdMem", "NeededMem", "AM info");
    for (JobStatus job : jobs) {
        int numUsedSlots = job.getNumUsedSlots();
        int numReservedSlots = job.getNumReservedSlots();
        long usedMem = job.getUsedMem();
        long rsvdMem = job.getReservedMem();
        long neededMem = job.getNeededMem();
        int jobNameLength = job.getJobName().length();
        writer.printf(dataPattern,
            job.getJobID().toString(),
            job.getJobName().substring(0, Math.min(jobNameLength, 20)),
            job.getState(), job.getStartTime(), job.getUsername(),
            job.getQueue(), job.getPriority().name(),
            numUsedSlots < 0 ? UNAVAILABLE : numUsedSlots,
            numReservedSlots < 0 ? UNAVAILABLE : numReservedSlots,
            usedMem < 0 ? UNAVAILABLE : String.format(memPattern, usedMem),
            rsvdMem < 0 ? UNAVAILABLE : String.format(memPattern, rsvdMem),
            neededMem < 0 ? UNAVAILABLE : String.format(memPattern, neededMem),
            job.getSchedulingInfo());
    }
    writer.flush();
}
Also used: JobStatus (org.apache.hadoop.mapreduce.JobStatus), Private (org.apache.hadoop.classification.InterfaceAudience.Private)
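The snippet above only formats a JobStatus[] that the CLI fetches from the cluster elsewhere. A minimal sketch of that surrounding plumbing, assuming a cluster reachable through the usual mapreduce.framework.name configuration; note that displayJobList is @Private, so calling it directly is for illustration only (Cluster.getAllJobStatuses is the public call that backs the "-list all" subcommand):

import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.tools.CLI;

public class ListJobsSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Cluster resolves the client protocol from mapreduce.framework.name
        Cluster cluster = new Cluster(conf);
        try {
            // Same call that backs the "-list all" subcommand
            JobStatus[] jobs = cluster.getAllJobStatuses();
            PrintWriter writer = new PrintWriter(new OutputStreamWriter(System.out));
            new CLI(conf).displayJobList(jobs, writer);
        } finally {
            cluster.close();
        }
    }
}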

Example 2 with JobStatus

Use of org.apache.hadoop.mapreduce.JobStatus in project hadoop by apache.

From class CLI, method run.

public int run(String[] argv) throws Exception {
    int exitCode = -1;
    if (argv.length < 1) {
        displayUsage("");
        return exitCode;
    }
    // process arguments
    String cmd = argv[0];
    String submitJobFile = null;
    String jobid = null;
    String taskid = null;
    String historyFileOrJobId = null;
    String historyOutFile = null;
    String historyOutFormat = HistoryViewer.HUMAN_FORMAT;
    String counterGroupName = null;
    String counterName = null;
    JobPriority jp = null;
    String taskType = null;
    String taskState = null;
    int fromEvent = 0;
    int nEvents = 0;
    int jpvalue = 0;
    String configOutFile = null;
    boolean getStatus = false;
    boolean getCounter = false;
    boolean killJob = false;
    boolean listEvents = false;
    boolean viewHistory = false;
    boolean viewAllHistory = false;
    boolean listJobs = false;
    boolean listAllJobs = false;
    boolean listActiveTrackers = false;
    boolean listBlacklistedTrackers = false;
    boolean displayTasks = false;
    boolean killTask = false;
    boolean failTask = false;
    boolean setJobPriority = false;
    boolean logs = false;
    boolean downloadConfig = false;
    if ("-submit".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        submitJobFile = argv[1];
    } else if ("-status".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        getStatus = true;
    } else if ("-counter".equals(cmd)) {
        if (argv.length != 4) {
            displayUsage(cmd);
            return exitCode;
        }
        getCounter = true;
        jobid = argv[1];
        counterGroupName = argv[2];
        counterName = argv[3];
    } else if ("-kill".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        killJob = true;
    } else if ("-set-priority".equals(cmd)) {
        if (argv.length != 3) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        try {
            jp = JobPriority.valueOf(argv[2]);
        } catch (IllegalArgumentException iae) {
            try {
                jpvalue = Integer.parseInt(argv[2]);
            } catch (NumberFormatException ne) {
                LOG.info(ne);
                displayUsage(cmd);
                return exitCode;
            }
        }
        setJobPriority = true;
    } else if ("-events".equals(cmd)) {
        if (argv.length != 4) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        fromEvent = Integer.parseInt(argv[2]);
        nEvents = Integer.parseInt(argv[3]);
        listEvents = true;
    } else if ("-history".equals(cmd)) {
        viewHistory = true;
        if (argv.length < 2 || argv.length > 7) {
            displayUsage(cmd);
            return exitCode;
        }
        // Some arguments are optional while others are not, and some require
        // a second argument.  Given this, the indexing can vary depending on
        // what's specified and what's left out, as summarized in the table below:
        // [all] <jobHistoryFile|jobId> [-outfile <file>] [-format <human|json>]
        //   1                  2            3       4         5         6
        //   1                  2            3       4
        //   1                  2                              3         4
        //   1                  2
        //                      1            2       3         4         5
        //                      1            2       3
        //                      1                              2         3
        //                      1
        // "all" is optional, but comes first if specified
        int index = 1;
        if ("all".equals(argv[index])) {
            index++;
            viewAllHistory = true;
            if (argv.length == 2) {
                displayUsage(cmd);
                return exitCode;
            }
        }
        // Get the job history file or job id argument
        historyFileOrJobId = argv[index++];
        // "-outfile" is optional, but if specified requires a second argument
        if (argv.length > index + 1 && "-outfile".equals(argv[index])) {
            index++;
            historyOutFile = argv[index++];
        }
        // "-format" is optional, but if specified required a second argument
        if (argv.length > index + 1 && "-format".equals(argv[index])) {
            index++;
            historyOutFormat = argv[index++];
        }
        // Check for any extra arguments that don't belong here
        if (argv.length > index) {
            displayUsage(cmd);
            return exitCode;
        }
    } else if ("-list".equals(cmd)) {
        if (argv.length != 1 && !(argv.length == 2 && "all".equals(argv[1]))) {
            displayUsage(cmd);
            return exitCode;
        }
        if (argv.length == 2 && "all".equals(argv[1])) {
            listAllJobs = true;
        } else {
            listJobs = true;
        }
    } else if ("-kill-task".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        killTask = true;
        taskid = argv[1];
    } else if ("-fail-task".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        failTask = true;
        taskid = argv[1];
    } else if ("-list-active-trackers".equals(cmd)) {
        if (argv.length != 1) {
            displayUsage(cmd);
            return exitCode;
        }
        listActiveTrackers = true;
    } else if ("-list-blacklisted-trackers".equals(cmd)) {
        if (argv.length != 1) {
            displayUsage(cmd);
            return exitCode;
        }
        listBlacklistedTrackers = true;
    } else if ("-list-attempt-ids".equals(cmd)) {
        if (argv.length != 4) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        taskType = argv[2];
        taskState = argv[3];
        displayTasks = true;
        if (!taskTypes.contains(org.apache.hadoop.util.StringUtils.toUpperCase(taskType))) {
            System.out.println("Error: Invalid task-type: " + taskType);
            displayUsage(cmd);
            return exitCode;
        }
        if (!taskStates.contains(org.apache.hadoop.util.StringUtils.toLowerCase(taskState))) {
            System.out.println("Error: Invalid task-state: " + taskState);
            displayUsage(cmd);
            return exitCode;
        }
    } else if ("-logs".equals(cmd)) {
        if (argv.length == 2 || argv.length == 3) {
            logs = true;
            jobid = argv[1];
            if (argv.length == 3) {
                taskid = argv[2];
            } else {
                taskid = null;
            }
        } else {
            displayUsage(cmd);
            return exitCode;
        }
    } else if ("-config".equals(cmd)) {
        downloadConfig = true;
        if (argv.length != 3) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        configOutFile = argv[2];
    } else {
        displayUsage(cmd);
        return exitCode;
    }
    // initialize cluster
    cluster = createCluster();
    // Submit the request
    try {
        if (submitJobFile != null) {
            Job job = Job.getInstance(new JobConf(submitJobFile));
            job.submit();
            System.out.println("Created job " + job.getJobID());
            exitCode = 0;
        } else if (getStatus) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                Counters counters = job.getCounters();
                System.out.println();
                System.out.println(job);
                if (counters != null) {
                    System.out.println(counters);
                } else {
                    System.out.println("Counters not available. Job is retired.");
                }
                exitCode = 0;
            }
        } else if (getCounter) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                Counters counters = job.getCounters();
                if (counters == null) {
                    System.out.println("Counters not available for retired job " + jobid);
                    exitCode = -1;
                } else {
                    System.out.println(getCounter(counters, counterGroupName, counterName));
                    exitCode = 0;
                }
            }
        } else if (killJob) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                JobStatus jobStatus = job.getStatus();
                if (jobStatus.getState() == JobStatus.State.FAILED) {
                    System.out.println("Could not mark the job " + jobid + " as killed, as it has already failed.");
                    exitCode = -1;
                } else if (jobStatus.getState() == JobStatus.State.KILLED) {
                    System.out.println("The job " + jobid + " has already been killed.");
                    exitCode = -1;
                } else if (jobStatus.getState() == JobStatus.State.SUCCEEDED) {
                    System.out.println("Could not kill the job " + jobid + ", as it has already succeeded.");
                    exitCode = -1;
                } else {
                    job.killJob();
                    System.out.println("Killed job " + jobid);
                    exitCode = 0;
                }
            }
        } else if (setJobPriority) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                if (jp != null) {
                    job.setPriority(jp);
                } else {
                    job.setPriorityAsInteger(jpvalue);
                }
                System.out.println("Changed job priority.");
                exitCode = 0;
            }
        } else if (viewHistory) {
        // A ".jhist" suffix means a history file; otherwise treat it as a job ID
            if (historyFileOrJobId.endsWith(".jhist")) {
                viewHistory(historyFileOrJobId, viewAllHistory, historyOutFile, historyOutFormat);
                exitCode = 0;
            } else {
                Job job = getJob(JobID.forName(historyFileOrJobId));
                if (job == null) {
                    System.out.println("Could not find job " + jobid);
                } else {
                    String historyUrl = job.getHistoryUrl();
                    if (historyUrl == null || historyUrl.isEmpty()) {
                        System.out.println("History file for job " + historyFileOrJobId + " is currently unavailable.");
                    } else {
                        viewHistory(historyUrl, viewAllHistory, historyOutFile, historyOutFormat);
                        exitCode = 0;
                    }
                }
            }
        } else if (listEvents) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                listEvents(job, fromEvent, nEvents);
                exitCode = 0;
            }
        } else if (listJobs) {
            listJobs(cluster);
            exitCode = 0;
        } else if (listAllJobs) {
            listAllJobs(cluster);
            exitCode = 0;
        } else if (listActiveTrackers) {
            listActiveTrackers(cluster);
            exitCode = 0;
        } else if (listBlacklistedTrackers) {
            listBlacklistedTrackers(cluster);
            exitCode = 0;
        } else if (displayTasks) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                displayTasks(job, taskType, taskState);
                exitCode = 0;
            }
        } else if (killTask) {
            TaskAttemptID taskID = TaskAttemptID.forName(taskid);
            Job job = getJob(taskID.getJobID());
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else if (job.killTask(taskID, false)) {
                System.out.println("Killed task " + taskid);
                exitCode = 0;
            } else {
                System.out.println("Could not kill task " + taskid);
                exitCode = -1;
            }
        } else if (failTask) {
            TaskAttemptID taskID = TaskAttemptID.forName(taskid);
            Job job = getJob(taskID.getJobID());
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else if (job.killTask(taskID, true)) {
                System.out.println("Killed task " + taskID + " by failing it");
                exitCode = 0;
            } else {
                System.out.println("Could not fail task " + taskid);
                exitCode = -1;
            }
        } else if (logs) {
            JobID jobID = JobID.forName(jobid);
            if (getJob(jobID) == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                try {
                    TaskAttemptID taskAttemptID = TaskAttemptID.forName(taskid);
                    LogParams logParams = cluster.getLogParams(jobID, taskAttemptID);
                    LogCLIHelpers logDumper = new LogCLIHelpers();
                    logDumper.setConf(getConf());
                    exitCode = logDumper.dumpAContainersLogs(logParams.getApplicationId(), logParams.getContainerId(), logParams.getNodeId(), logParams.getOwner());
                } catch (IOException e) {
                    if (e instanceof RemoteException) {
                        throw e;
                    }
                    System.out.println(e.getMessage());
                }
            }
        } else if (downloadConfig) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                String jobFile = job.getJobFile();
                if (jobFile == null || jobFile.isEmpty()) {
                    System.out.println("Config file for job " + jobFile + " could not be found.");
                } else {
                    Path configPath = new Path(jobFile);
                    FileSystem fs = FileSystem.get(getConf());
                    fs.copyToLocalFile(configPath, new Path(configOutFile));
                    exitCode = 0;
                }
            }
        }
    } catch (RemoteException re) {
        IOException unwrappedException = re.unwrapRemoteException();
        if (unwrappedException instanceof AccessControlException) {
            System.out.println(unwrappedException.getMessage());
        } else {
            throw re;
        }
    } finally {
        cluster.close();
    }
    return exitCode;
}
Also used: Path (org.apache.hadoop.fs.Path), TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID), JobPriority (org.apache.hadoop.mapreduce.JobPriority), AccessControlException (org.apache.hadoop.security.AccessControlException), IOException (java.io.IOException), LogParams (org.apache.hadoop.mapreduce.v2.LogParams), JobStatus (org.apache.hadoop.mapreduce.JobStatus), FileSystem (org.apache.hadoop.fs.FileSystem), LogCLIHelpers (org.apache.hadoop.yarn.logaggregation.LogCLIHelpers), Counters (org.apache.hadoop.mapreduce.Counters), Job (org.apache.hadoop.mapreduce.Job), RemoteException (org.apache.hadoop.ipc.RemoteException), JobConf (org.apache.hadoop.mapred.JobConf), JobID (org.apache.hadoop.mapreduce.JobID)
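Since CLI implements Tool, the run method above is normally driven through ToolRunner, which is what the mapred job shell command does under the hood. A minimal driver sketch, assuming default configuration resources on the classpath:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.util.ToolRunner;

public class JobClientDriver {
    public static void main(String[] args) throws Exception {
        // Equivalent to: mapred job -list all
        int rc = ToolRunner.run(new Configuration(), new CLI(),
            new String[] { "-list", "all" });
        System.exit(rc);
    }
}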

Example 3 with JobStatus

Use of org.apache.hadoop.mapreduce.JobStatus in project hadoop by apache.

From class TestClientServiceDelegate, method testHistoryServerNotConfigured.

@Test
public void testHistoryServerNotConfigured() throws Exception {
    // RM has no app report, and the job history server is not configured
    ClientServiceDelegate clientServiceDelegate = getClientServiceDelegate(null, getRMDelegate());
    JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
    Assert.assertEquals("N/A", jobStatus.getUsername());
    Assert.assertEquals(JobStatus.State.PREP, jobStatus.getState());
    // RM has an app report, and the job history server is not configured
    ResourceMgrDelegate rm = mock(ResourceMgrDelegate.class);
    ApplicationReport applicationReport = getFinishedApplicationReport();
    when(rm.getApplicationReport(jobId.getAppId())).thenReturn(applicationReport);
    clientServiceDelegate = getClientServiceDelegate(null, rm);
    jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
    Assert.assertEquals(applicationReport.getUser(), jobStatus.getUsername());
    Assert.assertEquals(JobStatus.State.SUCCEEDED, jobStatus.getState());
}
Also used: JobStatus (org.apache.hadoop.mapreduce.JobStatus), ApplicationReport (org.apache.hadoop.yarn.api.records.ApplicationReport), Test (org.junit.Test)
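The assertions above turn on JobStatus.State, the same enum a real client inspects when polling a job. A minimal client-side sketch of that check; the Cluster and JobID here are assumed to be set up elsewhere, and the method name is illustrative:

static void reportState(Cluster cluster, JobID jobId) throws Exception {
    // getJob returns null when neither the RM nor the history server knows the job
    Job job = cluster.getJob(jobId);
    if (job == null) {
        System.out.println("Could not find job " + jobId);
        return;
    }
    JobStatus status = job.getStatus();
    switch (status.getState()) {
        case PREP:
        case RUNNING:
            System.out.println("Job still in progress: " + status.getState());
            break;
        case SUCCEEDED:
            System.out.println("Job finished successfully");
            break;
        default:
            // FAILED or KILLED
            System.out.println("Job ended in state " + status.getState());
            break;
    }
}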

Example 4 with JobStatus

Use of org.apache.hadoop.mapreduce.JobStatus in project hadoop by apache.

From class TestClientServiceDelegate, method testRMDownRestoreForJobStatusBeforeGetAMReport.

@Test
public void testRMDownRestoreForJobStatusBeforeGetAMReport() throws IOException {
    Configuration conf = new YarnConfiguration();
    conf.setInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, 3);
    conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
    conf.setBoolean(MRJobConfig.JOB_AM_ACCESS_DISABLED, !isAMReachableFromClient);
    MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);
    when(historyServerProxy.getJobReport(any(GetJobReportRequest.class))).thenReturn(getJobReportResponse());
    ResourceMgrDelegate rmDelegate = mock(ResourceMgrDelegate.class);
    try {
        when(rmDelegate.getApplicationReport(jobId.getAppId()))
            .thenThrow(new java.lang.reflect.UndeclaredThrowableException(
                new IOException("Connection refused1")))
            .thenThrow(new java.lang.reflect.UndeclaredThrowableException(
                new IOException("Connection refused2")))
            .thenReturn(getFinishedApplicationReport());
        ClientServiceDelegate clientServiceDelegate = new ClientServiceDelegate(conf, rmDelegate, oldJobId, historyServerProxy);
        JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
        verify(rmDelegate, times(3)).getApplicationReport(any(ApplicationId.class));
        Assert.assertNotNull(jobStatus);
    } catch (YarnException e) {
        throw new IOException(e);
    }
}
Also used: YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), Configuration (org.apache.hadoop.conf.Configuration), IOException (java.io.IOException), GetJobReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), MRClientProtocol (org.apache.hadoop.mapreduce.v2.api.MRClientProtocol), JobStatus (org.apache.hadoop.mapreduce.JobStatus), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), Test (org.junit.Test)

Example 5 with JobStatus

Use of org.apache.hadoop.mapreduce.JobStatus in project hadoop by apache.

From class TestClientServiceDelegate, method testJobReportFromHistoryServer.

@Test
public void testJobReportFromHistoryServer() throws Exception {
    MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);
    when(historyServerProxy.getJobReport(getJobReportRequest())).thenReturn(getJobReportResponseFromHistoryServer());
    ResourceMgrDelegate rm = mock(ResourceMgrDelegate.class);
    when(rm.getApplicationReport(TypeConverter.toYarn(oldJobId).getAppId())).thenReturn(null);
    ClientServiceDelegate clientServiceDelegate = getClientServiceDelegate(historyServerProxy, rm);
    JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
    Assert.assertNotNull(jobStatus);
    Assert.assertEquals("TestJobFilePath", jobStatus.getJobFile());
    Assert.assertEquals("http://TestTrackingUrl", jobStatus.getTrackingUrl());
    Assert.assertEquals(1.0f, jobStatus.getMapProgress(), 0.0f);
    Assert.assertEquals(1.0f, jobStatus.getReduceProgress(), 0.0f);
}
Also used: JobStatus (org.apache.hadoop.mapreduce.JobStatus), MRClientProtocol (org.apache.hadoop.mapreduce.v2.api.MRClientProtocol), Test (org.junit.Test)
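The map and reduce progress fields asserted above are the same ones a live client polls while a job runs. A minimal monitoring sketch, assuming job is an org.apache.hadoop.mapreduce.Job handle obtained from the cluster; the method name is illustrative:

static void waitForJob(Job job) throws Exception {
    // Poll the same JobStatus accessors the test asserts on
    while (!job.isComplete()) {
        JobStatus s = job.getStatus();
        System.out.printf("map %.0f%% reduce %.0f%%%n",
            s.getMapProgress() * 100, s.getReduceProgress() * 100);
        Thread.sleep(5000);
    }
}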

Aggregations

JobStatus (org.apache.hadoop.mapreduce.JobStatus): 22
Test (org.junit.Test): 10
IOException (java.io.IOException): 7
MRClientProtocol (org.apache.hadoop.mapreduce.v2.api.MRClientProtocol): 7
Job (org.apache.hadoop.mapreduce.Job): 5
GetJobReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest): 5
JobID (org.apache.hadoop.mapreduce.JobID): 4
InetSocketAddress (java.net.InetSocketAddress): 3
Configuration (org.apache.hadoop.conf.Configuration): 3
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 3
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 3
OutputStreamWriter (java.io.OutputStreamWriter): 2
PrintWriter (java.io.PrintWriter): 2
ArrayList (java.util.ArrayList): 2
Path (org.apache.hadoop.fs.Path): 2
GetJobReportResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse): 2
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 2
ApplicationReport (org.apache.hadoop.yarn.api.records.ApplicationReport): 2
AggregationPhaseJob (com.linkedin.thirdeye.hadoop.aggregation.AggregationPhaseJob): 1
BackfillPhaseJob (com.linkedin.thirdeye.hadoop.backfill.BackfillPhaseJob): 1