Example 16 with JobStatus

use of org.apache.hadoop.mapreduce.JobStatus in project hadoop by apache.

the class JobQueueClient method displayQueueInfo.

/**
   * Method used to display information pertaining to a single JobQueue
   * registered with the {@link QueueManager}. Display of the jobs is
   * determined by the boolean showJobs argument.
   *
   * @throws IOException
   * @throws InterruptedException
   */
private void displayQueueInfo(String queue, boolean showJobs) throws IOException, InterruptedException {
    JobQueueInfo jobQueueInfo = jc.getQueueInfo(queue);
    if (jobQueueInfo == null) {
        System.out.println("Queue \"" + queue + "\" does not exist.");
        return;
    }
    printJobQueueInfo(jobQueueInfo, new PrintWriter(new OutputStreamWriter(System.out, Charsets.UTF_8)));
    if (showJobs && (jobQueueInfo.getChildren() == null || jobQueueInfo.getChildren().size() == 0)) {
        JobStatus[] jobs = jobQueueInfo.getJobStatuses();
        if (jobs == null)
            jobs = new JobStatus[0];
        jc.displayJobList(jobs);
    }
}
Also used: JobStatus (org.apache.hadoop.mapreduce.JobStatus), OutputStreamWriter (java.io.OutputStreamWriter), PrintWriter (java.io.PrintWriter)
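
A minimal standalone sketch (not from the Hadoop sources) of the same idea against the newer org.apache.hadoop.mapreduce.Cluster API: look up a queue and print the status of each job registered with it. The queue name "default" and the default Configuration are placeholder assumptions for whatever cluster the client points at.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.QueueInfo;

public class QueueJobLister {
    public static void main(String[] args) throws Exception {
        // Connect to the cluster described by the default configuration.
        Cluster cluster = new Cluster(new Configuration());
        // "default" is a placeholder; substitute a queue known to your cluster.
        QueueInfo queueInfo = cluster.getQueue("default");
        if (queueInfo == null) {
            System.out.println("Queue \"default\" does not exist.");
            return;
        }
        JobStatus[] jobs = queueInfo.getJobStatuses();
        if (jobs != null) {
            for (JobStatus status : jobs) {
                System.out.println(status.getJobID() + "\t" + status.getState());
            }
        }
        cluster.close();
    }
}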

Example 17 with JobStatus

use of org.apache.hadoop.mapreduce.JobStatus in project hadoop by apache.

the class CLI method listJobs.

/**
   * Dump a list of currently running jobs.
   *
   * @throws IOException
   * @throws InterruptedException
   */
private void listJobs(Cluster cluster) throws IOException, InterruptedException {
    List<JobStatus> runningJobs = new ArrayList<JobStatus>();
    for (JobStatus job : cluster.getAllJobStatuses()) {
        if (!job.isJobComplete()) {
            runningJobs.add(job);
        }
    }
    displayJobList(runningJobs.toArray(new JobStatus[0]));
}
Also used: JobStatus (org.apache.hadoop.mapreduce.JobStatus), ArrayList (java.util.ArrayList)
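
As a follow-on usage sketch (not from the Hadoop sources), the same Cluster.getAllJobStatuses() call can also drive a per-state summary instead of a flat listing. The default Configuration is again a placeholder for a real client configuration.

import java.util.EnumMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.JobStatus;

public class JobStateSummary {
    public static void main(String[] args) throws Exception {
        Cluster cluster = new Cluster(new Configuration());
        // Count jobs per JobStatus.State (PREP, RUNNING, SUCCEEDED, FAILED, KILLED).
        Map<JobStatus.State, Integer> counts = new EnumMap<>(JobStatus.State.class);
        for (JobStatus status : cluster.getAllJobStatuses()) {
            counts.merge(status.getState(), 1, Integer::sum);
        }
        counts.forEach((state, count) -> System.out.println(state + ": " + count));
        cluster.close();
    }
}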

Example 18 with JobStatus

use of org.apache.hadoop.mapreduce.JobStatus in project hadoop by apache.

the class JobClientUnitTest method testShowJob.

@Test
public void testShowJob() throws Exception {
    TestJobClient client = new TestJobClient(new JobConf());
    long startTime = System.currentTimeMillis();
    JobID jobID = new JobID(String.valueOf(startTime), 12345);
    JobStatus mockJobStatus = mock(JobStatus.class);
    when(mockJobStatus.getJobID()).thenReturn(jobID);
    when(mockJobStatus.getJobName()).thenReturn(jobID.toString());
    when(mockJobStatus.getState()).thenReturn(JobStatus.State.RUNNING);
    when(mockJobStatus.getStartTime()).thenReturn(startTime);
    when(mockJobStatus.getUsername()).thenReturn("mockuser");
    when(mockJobStatus.getQueue()).thenReturn("mockqueue");
    when(mockJobStatus.getPriority()).thenReturn(JobPriority.NORMAL);
    when(mockJobStatus.getNumUsedSlots()).thenReturn(1);
    when(mockJobStatus.getNumReservedSlots()).thenReturn(1);
    when(mockJobStatus.getUsedMem()).thenReturn(1024);
    when(mockJobStatus.getReservedMem()).thenReturn(512);
    when(mockJobStatus.getNeededMem()).thenReturn(2048);
    when(mockJobStatus.getSchedulingInfo()).thenReturn("NA");
    Job mockJob = mock(Job.class);
    when(mockJob.getTaskReports(isA(TaskType.class))).thenReturn(new TaskReport[5]);
    Cluster mockCluster = mock(Cluster.class);
    when(mockCluster.getJob(jobID)).thenReturn(mockJob);
    client.setCluster(mockCluster);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    client.displayJobList(new JobStatus[] { mockJobStatus }, new PrintWriter(out));
    String commandLineOutput = out.toString();
    System.out.println(commandLineOutput);
    Assert.assertTrue(commandLineOutput.contains("Total jobs:1"));
    verify(mockJobStatus, atLeastOnce()).getJobID();
    verify(mockJobStatus).getState();
    verify(mockJobStatus).getStartTime();
    verify(mockJobStatus).getUsername();
    verify(mockJobStatus).getQueue();
    verify(mockJobStatus).getPriority();
    verify(mockJobStatus).getNumUsedSlots();
    verify(mockJobStatus).getNumReservedSlots();
    verify(mockJobStatus).getUsedMem();
    verify(mockJobStatus).getReservedMem();
    verify(mockJobStatus).getNeededMem();
    verify(mockJobStatus).getSchedulingInfo();
    // This call should not go to each AM.
    verify(mockCluster, never()).getJob(jobID);
    verify(mockJob, never()).getTaskReports(isA(TaskType.class));
}
Also used: JobStatus (org.apache.hadoop.mapreduce.JobStatus), TaskType (org.apache.hadoop.mapreduce.TaskType), Cluster (org.apache.hadoop.mapreduce.Cluster), ByteArrayOutputStream (java.io.ByteArrayOutputStream), Job (org.apache.hadoop.mapreduce.Job), PrintWriter (java.io.PrintWriter), Test (org.junit.Test)
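
For reuse in other unit tests, the mocking pattern above can be boiled down to a small helper. This is a sketch rather than Hadoop code; the cluster timestamp, job number, user, and queue values are arbitrary placeholders.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.JobStatus;

public class JobStatusMocks {

    /** Builds a RUNNING JobStatus stub with just enough fields for display code. */
    static JobStatus runningJobStatus() {
        // Placeholder cluster timestamp and job number.
        JobID jobID = new JobID("1234567890123", 1);
        JobStatus status = mock(JobStatus.class);
        when(status.getJobID()).thenReturn(jobID);
        when(status.getJobName()).thenReturn(jobID.toString());
        when(status.getState()).thenReturn(JobStatus.State.RUNNING);
        when(status.getUsername()).thenReturn("testuser");
        when(status.getQueue()).thenReturn("default");
        return status;
    }
}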

Example 19 with JobStatus

use of org.apache.hadoop.mapreduce.JobStatus in project hadoop by apache.

the class TestClientServiceDelegate method testRetriesOnAMConnectionFailures.

@Test
public void testRetriesOnAMConnectionFailures() throws Exception {
    if (!isAMReachableFromClient) {
        return;
    }
    ResourceMgrDelegate rm = mock(ResourceMgrDelegate.class);
    when(rm.getApplicationReport(TypeConverter.toYarn(oldJobId).getAppId())).thenReturn(getRunningApplicationReport("am1", 78));
    // Throw an exception on the 1st, 2nd, 3rd and 4th calls to getJobReport,
    // and succeed on the 5th call.
    final MRClientProtocol amProxy = mock(MRClientProtocol.class);
    when(amProxy.getJobReport(any(GetJobReportRequest.class)))
        .thenThrow(new RuntimeException("11"))
        .thenThrow(new RuntimeException("22"))
        .thenThrow(new RuntimeException("33"))
        .thenThrow(new RuntimeException("44"))
        .thenReturn(getJobReportResponse());
    Configuration conf = new YarnConfiguration();
    conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
    conf.setBoolean(MRJobConfig.JOB_AM_ACCESS_DISABLED, !isAMReachableFromClient);
    ClientServiceDelegate clientServiceDelegate = new ClientServiceDelegate(conf, rm, oldJobId, null) {

        @Override
        MRClientProtocol instantiateAMProxy(final InetSocketAddress serviceAddr) throws IOException {
            super.instantiateAMProxy(serviceAddr);
            return amProxy;
        }
    };
    JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
    Assert.assertNotNull(jobStatus);
    // assert maxClientRetry is not decremented.
    Assert.assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES), clientServiceDelegate.getMaxClientRetry());
    verify(amProxy, times(5)).getJobReport(any(GetJobReportRequest.class));
}
Also used: JobStatus (org.apache.hadoop.mapreduce.JobStatus), Configuration (org.apache.hadoop.conf.Configuration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), InetSocketAddress (java.net.InetSocketAddress), GetJobReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest), MRClientProtocol (org.apache.hadoop.mapreduce.v2.api.MRClientProtocol), Test (org.junit.Test)
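
The retry budget asserted at the end of the test comes straight from the client configuration. A minimal sketch of adjusting it, assuming only the MRJobConfig.MR_CLIENT_MAX_RETRIES and DEFAULT_MR_CLIENT_MAX_RETRIES constants already used in the test above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class ClientRetryConfig {
    public static void main(String[] args) {
        Configuration conf = new YarnConfiguration();
        conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
        // Raise the number of times the client retries a failed AM connection;
        // the shipped default is MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES.
        conf.setInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, 5);
        System.out.println(MRJobConfig.MR_CLIENT_MAX_RETRIES + " = "
            + conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES,
                          MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES));
    }
}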

Example 20 with JobStatus

use of org.apache.hadoop.mapreduce.JobStatus in project hadoop by apache.

the class TestClientServiceDelegate method testAMAccessDisabled.

@Test
public void testAMAccessDisabled() throws IOException {
    // This test is only applicable when the AM is not reachable.
    if (isAMReachableFromClient) {
        return;
    }
    MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);
    when(historyServerProxy.getJobReport(getJobReportRequest())).thenReturn(getJobReportResponseFromHistoryServer());
    ResourceMgrDelegate rmDelegate = mock(ResourceMgrDelegate.class);
    try {
        when(rmDelegate.getApplicationReport(jobId.getAppId()))
            .thenReturn(getRunningApplicationReport("am1", 78))
            .thenReturn(getRunningApplicationReport("am1", 78))
            .thenReturn(getRunningApplicationReport("am1", 78))
            .thenReturn(getFinishedApplicationReport());
    } catch (YarnException e) {
        throw new IOException(e);
    }
    ClientServiceDelegate clientServiceDelegate = spy(getClientServiceDelegate(historyServerProxy, rmDelegate));
    JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
    Assert.assertNotNull(jobStatus);
    Assert.assertEquals("N/A", jobStatus.getJobName());
    verify(clientServiceDelegate, times(0)).instantiateAMProxy(any(InetSocketAddress.class));
    // Should not reach the AM on the second and third calls either.
    jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
    Assert.assertNotNull(jobStatus);
    Assert.assertEquals("N/A", jobStatus.getJobName());
    verify(clientServiceDelegate, times(0)).instantiateAMProxy(any(InetSocketAddress.class));
    jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
    Assert.assertNotNull(jobStatus);
    Assert.assertEquals("N/A", jobStatus.getJobName());
    verify(clientServiceDelegate, times(0)).instantiateAMProxy(any(InetSocketAddress.class));
    // The fourth time around, the app is completed, so the call should go to the JHS.
    JobStatus jobStatus1 = clientServiceDelegate.getJobStatus(oldJobId);
    Assert.assertNotNull(jobStatus1);
    Assert.assertEquals("TestJobFilePath", jobStatus1.getJobFile());
    Assert.assertEquals("http://TestTrackingUrl", jobStatus1.getTrackingUrl());
    Assert.assertEquals(1.0f, jobStatus1.getMapProgress(), 0.0f);
    Assert.assertEquals(1.0f, jobStatus1.getReduceProgress(), 0.0f);
    verify(clientServiceDelegate, times(0)).instantiateAMProxy(any(InetSocketAddress.class));
}
Also used: JobStatus (org.apache.hadoop.mapreduce.JobStatus), InetSocketAddress (java.net.InetSocketAddress), IOException (java.io.IOException), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), MRClientProtocol (org.apache.hadoop.mapreduce.v2.api.MRClientProtocol), Test (org.junit.Test)
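
The fields checked on jobStatus1 (job file, tracking URL, map and reduce progress) can also be read from a real job through the public Cluster/Job API. A hedged sketch, assuming a reachable cluster and a job id passed on the command line:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.JobStatus;

public class JobStatusPrinter {
    public static void main(String[] args) throws Exception {
        Cluster cluster = new Cluster(new Configuration());
        // Expects a job id string such as those printed by the job listing above.
        Job job = cluster.getJob(JobID.forName(args[0]));
        if (job == null) {
            System.out.println("Job not found (it may have been retired).");
            return;
        }
        JobStatus status = job.getStatus();
        System.out.println("name:     " + status.getJobName());
        System.out.println("file:     " + status.getJobFile());
        System.out.println("tracking: " + status.getTrackingUrl());
        System.out.println("progress: map=" + status.getMapProgress()
            + " reduce=" + status.getReduceProgress());
        cluster.close();
    }
}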

Aggregations

JobStatus (org.apache.hadoop.mapreduce.JobStatus): 22
Test (org.junit.Test): 10
IOException (java.io.IOException): 7
MRClientProtocol (org.apache.hadoop.mapreduce.v2.api.MRClientProtocol): 7
Job (org.apache.hadoop.mapreduce.Job): 5
GetJobReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest): 5
JobID (org.apache.hadoop.mapreduce.JobID): 4
InetSocketAddress (java.net.InetSocketAddress): 3
Configuration (org.apache.hadoop.conf.Configuration): 3
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 3
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 3
OutputStreamWriter (java.io.OutputStreamWriter): 2
PrintWriter (java.io.PrintWriter): 2
ArrayList (java.util.ArrayList): 2
Path (org.apache.hadoop.fs.Path): 2
GetJobReportResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse): 2
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 2
ApplicationReport (org.apache.hadoop.yarn.api.records.ApplicationReport): 2
AggregationPhaseJob (com.linkedin.thirdeye.hadoop.aggregation.AggregationPhaseJob): 1
BackfillPhaseJob (com.linkedin.thirdeye.hadoop.backfill.BackfillPhaseJob): 1