Use of org.apache.hadoop.mapreduce.Cluster in project hadoop by apache.
From the class JobClientUnitTest, method testShowJob.
@Test
public void testShowJob() throws Exception {
  TestJobClient client = new TestJobClient(new JobConf());

  long startTime = System.currentTimeMillis();
  JobID jobID = new JobID(String.valueOf(startTime), 12345);

  JobStatus mockJobStatus = mock(JobStatus.class);
  when(mockJobStatus.getJobID()).thenReturn(jobID);
  when(mockJobStatus.getJobName()).thenReturn(jobID.toString());
  when(mockJobStatus.getState()).thenReturn(JobStatus.State.RUNNING);
  when(mockJobStatus.getStartTime()).thenReturn(startTime);
  when(mockJobStatus.getUsername()).thenReturn("mockuser");
  when(mockJobStatus.getQueue()).thenReturn("mockqueue");
  when(mockJobStatus.getPriority()).thenReturn(JobPriority.NORMAL);
  when(mockJobStatus.getNumUsedSlots()).thenReturn(1);
  when(mockJobStatus.getNumReservedSlots()).thenReturn(1);
  when(mockJobStatus.getUsedMem()).thenReturn(1024);
  when(mockJobStatus.getReservedMem()).thenReturn(512);
  when(mockJobStatus.getNeededMem()).thenReturn(2048);
  when(mockJobStatus.getSchedulingInfo()).thenReturn("NA");

  Job mockJob = mock(Job.class);
  when(mockJob.getTaskReports(isA(TaskType.class))).thenReturn(new TaskReport[5]);

  Cluster mockCluster = mock(Cluster.class);
  when(mockCluster.getJob(jobID)).thenReturn(mockJob);

  client.setCluster(mockCluster);

  ByteArrayOutputStream out = new ByteArrayOutputStream();
  client.displayJobList(new JobStatus[] { mockJobStatus }, new PrintWriter(out));
  String commandLineOutput = out.toString();
  System.out.println(commandLineOutput);
  Assert.assertTrue(commandLineOutput.contains("Total jobs:1"));

  verify(mockJobStatus, atLeastOnce()).getJobID();
  verify(mockJobStatus).getState();
  verify(mockJobStatus).getStartTime();
  verify(mockJobStatus).getUsername();
  verify(mockJobStatus).getQueue();
  verify(mockJobStatus).getPriority();
  verify(mockJobStatus).getNumUsedSlots();
  verify(mockJobStatus).getNumReservedSlots();
  verify(mockJobStatus).getUsedMem();
  verify(mockJobStatus).getReservedMem();
  verify(mockJobStatus).getNeededMem();
  verify(mockJobStatus).getSchedulingInfo();

  // This call should not go to each AM.
  verify(mockCluster, never()).getJob(jobID);
  verify(mockJob, never()).getTaskReports(isA(TaskType.class));
}
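These JobClientUnitTest snippets call client.setCluster(...), a hook that is not part of the snippet itself. A minimal sketch of what such a TestJobClient helper might look like, assuming JobClient keeps its Cluster handle in a field (or setter) the subclass can reach; the field name here is an assumption, not verified against the Hadoop source:

import java.io.IOException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Cluster;

// Hypothetical test helper: lets the tests inject a Mockito-mocked Cluster
// into the JobClient under test instead of connecting to a real cluster.
class TestJobClient extends JobClient {

  TestJobClient(JobConf jobConf) throws IOException {
    super(jobConf);
  }

  // Assumes the Cluster handle is stored in a field visible to subclasses
  // in the same package; adjust to the real field or setter if it differs.
  void setCluster(Cluster cluster) {
    this.cluster = cluster;
  }
}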
Use of org.apache.hadoop.mapreduce.Cluster in project hadoop by apache.
From the class JobClientUnitTest, method testSetupTaskReportsWithNullJob.
@Test
public void testSetupTaskReportsWithNullJob() throws Exception {
  TestJobClient client = new TestJobClient(new JobConf());
  Cluster mockCluster = mock(Cluster.class);
  client.setCluster(mockCluster);
  JobID id = new JobID("test", 0);

  when(mockCluster.getJob(id)).thenReturn(null);

  TaskReport[] result = client.getSetupTaskReports(id);
  assertEquals(0, result.length);

  verify(mockCluster).getJob(id);
}
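The interesting assertion is that an unknown job yields an empty TaskReport array rather than null or an exception. A hedged sketch of the guard this implies inside the client, with the constant and the exact signature as assumptions rather than the verified Hadoop implementation:

// Illustrative sketch: when the cluster no longer knows the job, hand back
// an empty array so callers never have to null-check.
private static final TaskReport[] EMPTY_TASK_REPORTS = new TaskReport[0];

public TaskReport[] getSetupTaskReports(JobID jobId)
    throws IOException, InterruptedException {
  Job job = cluster.getJob(jobId);   // returns null for an unknown/retired job
  if (job == null) {
    return EMPTY_TASK_REPORTS;       // matches assertEquals(0, result.length) above
  }
  return job.getTaskReports(TaskType.JOB_SETUP);
}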
Use of org.apache.hadoop.mapreduce.Cluster in project hadoop by apache.
From the class JobClientUnitTest, method testReduceTaskReportsWithNullJob.
@Test
public void testReduceTaskReportsWithNullJob() throws Exception {
  TestJobClient client = new TestJobClient(new JobConf());
  Cluster mockCluster = mock(Cluster.class);
  client.setCluster(mockCluster);
  JobID id = new JobID("test", 0);

  when(mockCluster.getJob(id)).thenReturn(null);

  TaskReport[] result = client.getReduceTaskReports(id);
  assertEquals(0, result.length);

  verify(mockCluster).getJob(id);
}
Use of org.apache.hadoop.mapreduce.Cluster in project hadoop by apache.
From the class JobClientUnitTest, method testMapTaskReportsWithNullJob.
@Test
public void testMapTaskReportsWithNullJob() throws Exception {
  TestJobClient client = new TestJobClient(new JobConf());
  Cluster mockCluster = mock(Cluster.class);
  client.setCluster(mockCluster);
  JobID id = new JobID("test", 0);

  when(mockCluster.getJob(id)).thenReturn(null);

  TaskReport[] result = client.getMapTaskReports(id);
  assertEquals(0, result.length);

  verify(mockCluster).getJob(id);
}
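The map, reduce, and setup variants above repeat the same arrange/act/verify steps and differ only in which getter they call. One way to collapse that duplication, sketched as an illustration rather than taken from the Hadoop source:

// Illustrative refactoring: a tiny throwing functional interface plus one
// helper lets each null-job test become a single line.
interface ReportCall {
  TaskReport[] get(TestJobClient client, JobID id) throws Exception;
}

private void assertEmptyReportsForUnknownJob(ReportCall call) throws Exception {
  TestJobClient client = new TestJobClient(new JobConf());
  Cluster mockCluster = mock(Cluster.class);
  client.setCluster(mockCluster);
  JobID id = new JobID("test", 0);
  when(mockCluster.getJob(id)).thenReturn(null);   // the cluster has no such job

  TaskReport[] result = call.get(client, id);

  assertEquals(0, result.length);                  // empty array, never null
  verify(mockCluster).getJob(id);
}

@Test
public void testMapTaskReportsWithNullJob() throws Exception {
  assertEmptyReportsForUnknownJob((client, id) -> client.getMapTaskReports(id));
}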
Use of org.apache.hadoop.mapreduce.Cluster in project hadoop by apache.
From the class TestExternalCall, method testCleanupTestViaToolRunner.
/**
 * Test the main method of DistCp. The method should call System.exit().
 */
@Test
public void testCleanupTestViaToolRunner() throws IOException, InterruptedException {
  Configuration conf = getConf();

  Path stagingDir = JobSubmissionFiles.getStagingDir(new Cluster(conf), conf);
  stagingDir.getFileSystem(conf).mkdirs(stagingDir);

  Path source = createFile("tmp.txt");
  Path target = createFile("target.txt");
  try {
    String[] arg = { target.toString(), source.toString() };
    DistCp.main(arg);
    Assert.fail();
  } catch (ExitException t) {
    Assert.assertTrue(fs.exists(target));
    Assert.assertEquals(t.status, 0);
    Assert.assertEquals(stagingDir.getFileSystem(conf).listStatus(stagingDir).length, 0);
  }
}
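DistCp.main() finishes by calling System.exit(), so the test can only observe the exit status if something intercepts that call; that is where the ExitException caught above comes from. A hedged sketch of such an exit-trapping harness, a common JUnit pattern; the class names and the status field mirror the catch block but are assumptions about the surrounding test class:

// Illustrative exit-trapping harness: a SecurityManager turns
// System.exit(status) into an exception the test can catch and inspect.
static class ExitException extends SecurityException {
  final int status;

  ExitException(int status) {
    super("System.exit(" + status + ") intercepted");
    this.status = status;
  }
}

static class NoExitSecurityManager extends SecurityManager {
  @Override
  public void checkPermission(java.security.Permission perm) {
    // allow everything except exiting the JVM
  }

  @Override
  public void checkExit(int status) {
    throw new ExitException(status);
  }
}

// Typically installed in @Before and removed in @After:
//   System.setSecurityManager(new NoExitSecurityManager());
//   ...
//   System.setSecurityManager(null);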