Usage of org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics in the Apache Hadoop project:
class TestJobImpl, method testMetaInfoSizeOverMax.
@Test
public void testMetaInfoSizeOverMax() throws Exception {
  // Verifies that when split creation fails during job init (simulated here
  // by throwing a YarnRuntimeException from createSplits), the InitTransition
  // leaves the job in the NEW state and records the failure in the job
  // diagnostics.
  Configuration conf = new Configuration();
  JobID jobID = JobID.forName("job_1234567890000_0001");
  JobId jobId = TypeConverter.toYarn(jobID);
  MRAppMetrics mrAppMetrics = MRAppMetrics.create();
  JobImpl job = new JobImpl(jobId,
      ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 0), 0),
      conf, mock(EventHandler.class), null, new JobTokenSecretManager(),
      new Credentials(), null, null, mrAppMetrics, null, true, null, 0, null,
      null, null, null);
  InitTransition initTransition = new InitTransition() {
    @Override
    protected TaskSplitMetaInfo[] createSplits(JobImpl job, JobId jobId) {
      // Simulate a split meta-info failure during initialization.
      throw new YarnRuntimeException(EXCEPTIONMSG);
    }
  };
  JobEvent mockJobEvent = mock(JobEvent.class);

  JobStateInternal jobSI = initTransition.transition(job, mockJobEvent);
  // assertEquals yields an expected-vs-actual failure message, unlike
  // assertTrue(a.equals(b)).
  Assert.assertEquals(
      "When init fails, return value from InitTransition.transition should equal NEW.",
      JobStateInternal.NEW, jobSI);
  // Fetch diagnostics once rather than re-stringifying per assertion.
  String diagnostics = job.getDiagnostics().toString();
  Assert.assertTrue("Job diagnostics should contain YarnRuntimeException",
      diagnostics.contains("YarnRuntimeException"));
  Assert.assertTrue("Job diagnostics should contain " + EXCEPTIONMSG,
      diagnostics.contains(EXCEPTIONMSG));
}
Usage of org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics in the Apache Hadoop project:
class TestJobImpl, method testUberDecision.
/**
 * Runs the job init transition (with 2 splits) under the given configuration
 * and reports whether the job was marked as an uber job.
 *
 * @param conf the job configuration controlling the uber decision
 * @return {@code true} if the initialized job is uberized
 */
private boolean testUberDecision(Configuration conf) {
  JobID jobID = JobID.forName("job_1234567890000_0001");
  JobId jobId = TypeConverter.toYarn(jobID);
  MRAppMetrics mrAppMetrics = MRAppMetrics.create();
  JobImpl job = new JobImpl(jobId,
      ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 0), 0),
      conf, mock(EventHandler.class), null, new JobTokenSecretManager(),
      new Credentials(), null, null, mrAppMetrics, null, true, null, 0, null,
      null, null, null);
  InitTransition initTransition = getInitTransition(2);
  JobEvent mockJobEvent = mock(JobEvent.class);
  initTransition.transition(job, mockJobEvent);
  // Redundant temporary removed: return the decision directly.
  return job.isUber();
}
Usage of org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics in the Apache Hadoop project:
class TestMRAppMetrics, method testNames.
@Test
public void testNames() {
  // Drive the MRAppMetrics counters through three full job lifecycles
  // (failed, killed, completed) and verify the aggregated counts.
  Job job = mock(Job.class);
  Task mapTask = make(stub(Task.class).returning(TaskType.MAP).from.getType());
  Task reduceTask =
      make(stub(Task.class).returning(TaskType.REDUCE).from.getType());
  MRAppMetrics metrics = MRAppMetrics.create();

  // Submit three jobs, each entering preparation with one waiting map task
  // and one waiting reduce task.
  for (int round = 0; round < 3; round++) {
    metrics.submittedJob(job);
    metrics.waitingTask(mapTask);
    metrics.waitingTask(reduceTask);
    metrics.preparingJob(job);
  }
  // All three jobs finish preparation.
  for (int round = 0; round < 3; round++) {
    metrics.endPreparingJob(job);
  }

  // Job 1: map task fails, job fails.
  metrics.runningJob(job);
  metrics.launchedTask(mapTask);
  metrics.runningTask(mapTask);
  metrics.failedTask(mapTask);
  metrics.endWaitingTask(reduceTask);
  metrics.endRunningTask(mapTask);
  metrics.endRunningJob(job);
  metrics.failedJob(job);

  // Job 2: map task is killed, job is killed.
  metrics.runningJob(job);
  metrics.launchedTask(mapTask);
  metrics.runningTask(mapTask);
  metrics.killedTask(mapTask);
  metrics.endWaitingTask(reduceTask);
  metrics.endRunningTask(mapTask);
  metrics.endRunningJob(job);
  metrics.killedJob(job);

  // Job 3: map and reduce both complete, job completes.
  metrics.runningJob(job);
  metrics.launchedTask(mapTask);
  metrics.runningTask(mapTask);
  metrics.completedTask(mapTask);
  metrics.endRunningTask(mapTask);
  metrics.launchedTask(reduceTask);
  metrics.runningTask(reduceTask);
  metrics.completedTask(reduceTask);
  metrics.endRunningTask(reduceTask);
  metrics.endRunningJob(job);
  metrics.completedJob(job);

  checkMetrics(/*job*/
  3, 1, 1, 1, 0, 0, /*map*/
  3, 1, 1, 1, 0, 0, /*reduce*/
  1, 1, 0, 0, 0, 0);
}
Usage of org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics in the Apache Hadoop project:
class TestJobImpl, method testReportDiagnostics.
@Test
public void testReportDiagnostics() throws Exception {
  // Checks that a JobDiagnosticsUpdateEvent's message shows up in the job
  // report, both for a freshly created job and for a job that has already
  // been sent a kill event.
  final String diagMsg = "some diagnostic message";
  JobID jobID = JobID.forName("job_1234567890000_0001");
  JobId jobId = TypeConverter.toYarn(jobID);
  final JobDiagnosticsUpdateEvent diagUpdateEvent =
      new JobDiagnosticsUpdateEvent(jobId, diagMsg);

  AppContext mockContext = mock(AppContext.class);
  when(mockContext.hasSuccessfullyUnregistered()).thenReturn(true);
  MRAppMetrics mrAppMetrics = MRAppMetrics.create();

  // Case 1: diagnostics delivered to a new job.
  JobImpl job = new JobImpl(jobId, Records.newRecord(ApplicationAttemptId.class),
      new Configuration(), mock(EventHandler.class), null,
      mock(JobTokenSecretManager.class), null, SystemClock.getInstance(), null,
      mrAppMetrics, null, true, null, 0, null, mockContext, null, null);
  job.handle(diagUpdateEvent);
  String diagnostics = job.getReport().getDiagnostics();
  Assert.assertNotNull(diagnostics);
  Assert.assertTrue(diagnostics.contains(diagMsg));

  // Case 2: diagnostics delivered after the job received a kill event.
  job = new JobImpl(jobId, Records.newRecord(ApplicationAttemptId.class),
      new Configuration(), mock(EventHandler.class), null,
      mock(JobTokenSecretManager.class), null, SystemClock.getInstance(), null,
      mrAppMetrics, null, true, null, 0, null, mockContext, null, null);
  job.handle(new JobEvent(jobId, JobEventType.JOB_KILL));
  job.handle(diagUpdateEvent);
  diagnostics = job.getReport().getDiagnostics();
  Assert.assertNotNull(diagnostics);
  Assert.assertTrue(diagnostics.contains(diagMsg));
}
Usage of org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics in the Apache Hadoop project:
class TestRecovery, method getMockMapTask.
/**
 * Builds a MapTaskImpl backed mostly by mocks, suitable for recovery tests.
 *
 * @param clusterTimestamp cluster timestamp used to form the application id
 * @param eh               event handler wired into the task
 * @return a new MapTaskImpl with mocked collaborators
 */
private MapTaskImpl getMockMapTask(long clusterTimestamp, EventHandler eh) {
  // Identity for the task's enclosing job.
  ApplicationId appId = ApplicationId.newInstance(clusterTimestamp, 1);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  int partitions = 2;
  int appAttemptId = 3;

  // Mocked collaborators; only the interactions below are stubbed.
  Path remoteJobConfFile = mock(Path.class);
  JobConf conf = new JobConf();
  TaskAttemptListener taskAttemptListener = mock(TaskAttemptListener.class);
  Token<JobTokenIdentifier> jobToken =
      (Token<JobTokenIdentifier>) mock(Token.class);
  Credentials credentials = null;
  Clock clock = SystemClock.getInstance();
  MRAppMetrics metrics = mock(MRAppMetrics.class);

  Resource minContainerRequirements = mock(Resource.class);
  when(minContainerRequirements.getMemorySize()).thenReturn(1000L);

  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  AppContext appContext = mock(AppContext.class);
  when(appContext.getClusterInfo()).thenReturn(clusterInfo);

  TaskSplitMetaInfo taskSplitMetaInfo = mock(TaskSplitMetaInfo.class);
  return new MapTaskImpl(jobId, partitions, eh, remoteJobConfFile, conf,
      taskSplitMetaInfo, taskAttemptListener, jobToken, credentials, clock,
      appAttemptId, metrics, appContext);
}
Aggregations