Example 26 with TaskAttemptId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId in the Apache Hadoop project.

From the class TestTaskAttemptListenerImpl, method createTce.

private static TaskAttemptCompletionEvent createTce(int eventId, boolean isMap, TaskAttemptCompletionEventStatus status) {
    JobId jid = MRBuilderUtils.newJobId(12345, 1, 1);
    TaskId tid = MRBuilderUtils.newTaskId(jid, 0, isMap ? org.apache.hadoop.mapreduce.v2.api.records.TaskType.MAP : org.apache.hadoop.mapreduce.v2.api.records.TaskType.REDUCE);
    TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(tid, 0);
    RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
    TaskAttemptCompletionEvent tce = recordFactory.newRecordInstance(TaskAttemptCompletionEvent.class);
    tce.setEventId(eventId);
    tce.setAttemptId(attemptId);
    tce.setStatus(status);
    return tce;
}
Also used : TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) RecordFactory(org.apache.hadoop.yarn.factories.RecordFactory) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) TaskAttemptCompletionEvent(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId)
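A minimal usage sketch (not taken from the Hadoop source; the variable names and the chosen statuses are illustrative), showing how a test method in the same class could build a couple of completion events with this helper, using TaskAttemptCompletionEventStatus values from the MR v2 records API:

// Illustrative only: one successful map event and one failed reduce event.
TaskAttemptCompletionEvent mapSucceeded =
    createTce(0, true, TaskAttemptCompletionEventStatus.SUCCEEDED);
TaskAttemptCompletionEvent reduceFailed =
    createTce(1, false, TaskAttemptCompletionEventStatus.FAILED);
// Both events share the same job (12345_1_1) but differ in task type and status.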

Example 27 with TaskAttemptId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId in the Apache Hadoop project.

From the class TestTaskAttemptListenerImpl, method testCheckpointIDTracking.

@Test
public void testCheckpointIDTracking() throws IOException, InterruptedException {
    SystemClock clock = SystemClock.getInstance();
    org.apache.hadoop.mapreduce.v2.app.job.Task mockTask = mock(org.apache.hadoop.mapreduce.v2.app.job.Task.class);
    when(mockTask.canCommit(any(TaskAttemptId.class))).thenReturn(true);
    Job mockJob = mock(Job.class);
    when(mockJob.getTask(any(TaskId.class))).thenReturn(mockTask);
    Dispatcher dispatcher = mock(Dispatcher.class);
    @SuppressWarnings("unchecked") EventHandler<Event> ea = mock(EventHandler.class);
    when(dispatcher.getEventHandler()).thenReturn(ea);
    RMHeartbeatHandler rmHeartbeatHandler = mock(RMHeartbeatHandler.class);
    AppContext appCtx = mock(AppContext.class);
    when(appCtx.getJob(any(JobId.class))).thenReturn(mockJob);
    when(appCtx.getClock()).thenReturn(clock);
    when(appCtx.getEventHandler()).thenReturn(ea);
    JobTokenSecretManager secret = mock(JobTokenSecretManager.class);
    final TaskHeartbeatHandler hbHandler = mock(TaskHeartbeatHandler.class);
    when(appCtx.getEventHandler()).thenReturn(ea);
    CheckpointAMPreemptionPolicy policy = new CheckpointAMPreemptionPolicy();
    policy.init(appCtx);
    TaskAttemptListenerImpl listener = new MockTaskAttemptListenerImpl(appCtx, secret, rmHeartbeatHandler, policy) {

        @Override
        protected void registerHeartbeatHandler(Configuration conf) {
            taskHeartbeatHandler = hbHandler;
        }
    };
    Configuration conf = new Configuration();
    conf.setBoolean(MRJobConfig.TASK_PREEMPTION, true);
    //conf.setBoolean("preemption.reduce", true);
    listener.init(conf);
    listener.start();
    TaskAttemptID tid = new TaskAttemptID("12345", 1, TaskType.REDUCE, 1, 0);
    List<Path> partialOut = new ArrayList<Path>();
    partialOut.add(new Path("/prev1"));
    partialOut.add(new Path("/prev2"));
    Counters counters = mock(Counters.class);
    final long CBYTES = 64L * 1024 * 1024;
    final long CTIME = 4344L;
    final Path CLOC = new Path("/test/1");
    Counter cbytes = mock(Counter.class);
    when(cbytes.getValue()).thenReturn(CBYTES);
    Counter ctime = mock(Counter.class);
    when(ctime.getValue()).thenReturn(CTIME);
    when(counters.findCounter(eq(EnumCounter.CHECKPOINT_BYTES))).thenReturn(cbytes);
    when(counters.findCounter(eq(EnumCounter.CHECKPOINT_MS))).thenReturn(ctime);
    // propagating a taskstatus that contains a checkpoint id
    TaskCheckpointID incid = new TaskCheckpointID(new FSCheckpointID(CLOC), partialOut, counters);
    listener.setCheckpointID(org.apache.hadoop.mapred.TaskID.downgrade(tid.getTaskID()), incid);
    // and try to get it back
    CheckpointID outcid = listener.getCheckpointID(tid.getTaskID());
    TaskCheckpointID tcid = (TaskCheckpointID) outcid;
    assertEquals(CBYTES, tcid.getCheckpointBytes());
    assertEquals(CTIME, tcid.getCheckpointTime());
    assertTrue(partialOut.containsAll(tcid.getPartialCommittedOutput()));
    assertTrue(tcid.getPartialCommittedOutput().containsAll(partialOut));
    //assert it worked
    assert outcid == incid;
    listener.stop();
}
Also used : RMHeartbeatHandler(org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) Configuration(org.apache.hadoop.conf.Configuration) ArrayList(java.util.ArrayList) Dispatcher(org.apache.hadoop.yarn.event.Dispatcher) TaskCheckpointID(org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID) EnumCounter(org.apache.hadoop.mapreduce.checkpoint.EnumCounter) Counter(org.apache.hadoop.mapred.Counters.Counter) JobTokenSecretManager(org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager) FSCheckpointID(org.apache.hadoop.mapreduce.checkpoint.FSCheckpointID) CheckpointID(org.apache.hadoop.mapreduce.checkpoint.CheckpointID) TaskHeartbeatHandler(org.apache.hadoop.mapreduce.v2.app.TaskHeartbeatHandler) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Path(org.apache.hadoop.fs.Path) SystemClock(org.apache.hadoop.yarn.util.SystemClock) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) AppContext(org.apache.hadoop.mapreduce.v2.app.AppContext) CheckpointAMPreemptionPolicy(org.apache.hadoop.mapreduce.v2.app.rm.preemption.CheckpointAMPreemptionPolicy) TaskAttemptCompletionEvent(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent) Event(org.apache.hadoop.yarn.event.Event) Test(org.junit.Test)
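For context, a short sketch of the object the test round-trips through the listener (illustrative values; the constructor and the two accessors are exactly the ones exercised above, while counters here is the same Mockito mock the test builds rather than counters from a running task):

// A TaskCheckpointID bundles the filesystem checkpoint location, the paths of
// partially committed output, and the checkpoint counters.
FSCheckpointID fsId = new FSCheckpointID(new Path("/test/1"));
List<Path> partial = new ArrayList<Path>();
partial.add(new Path("/prev1"));
partial.add(new Path("/prev2"));
TaskCheckpointID cid = new TaskCheckpointID(fsId, partial, counters);
long checkpointBytes = cid.getCheckpointBytes(); // backed by CHECKPOINT_BYTES
long checkpointMs = cid.getCheckpointTime();     // backed by CHECKPOINT_MS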

Example 28 with TaskAttemptId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId in the Apache Hadoop project.

From the class MockJobs, method newTaskAttempt.

public static TaskAttempt newTaskAttempt(TaskId tid, int i) {
    final TaskAttemptId taid = Records.newRecord(TaskAttemptId.class);
    taid.setTaskId(tid);
    taid.setId(i);
    final TaskAttemptReport report = newTaskAttemptReport(taid);
    return new TaskAttempt() {

        @Override
        public NodeId getNodeId() throws UnsupportedOperationException {
            throw new UnsupportedOperationException();
        }

        @Override
        public TaskAttemptId getID() {
            return taid;
        }

        @Override
        public TaskAttemptReport getReport() {
            return report;
        }

        @Override
        public long getLaunchTime() {
            return report.getStartTime();
        }

        @Override
        public long getFinishTime() {
            return report.getFinishTime();
        }

        @Override
        public int getShufflePort() {
            return ShuffleHandler.DEFAULT_SHUFFLE_PORT;
        }

        @Override
        public Counters getCounters() {
            if (report != null && report.getCounters() != null) {
                return new Counters(TypeConverter.fromYarn(report.getCounters()));
            }
            return null;
        }

        @Override
        public float getProgress() {
            return report.getProgress();
        }

        @Override
        public Phase getPhase() {
            return report.getPhase();
        }

        @Override
        public TaskAttemptState getState() {
            return report.getTaskAttemptState();
        }

        @Override
        public boolean isFinished() {
            switch(report.getTaskAttemptState()) {
                case SUCCEEDED:
                case FAILED:
                case KILLED:
                    return true;
            }
            return false;
        }

        @Override
        public ContainerId getAssignedContainerID() {
            ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(taid.getTaskId().getJobId().getAppId(), 0);
            ContainerId id = ContainerId.newContainerId(appAttemptId, 0);
            return id;
        }

        @Override
        public String getNodeHttpAddress() {
            return "localhost:8042";
        }

        @Override
        public List<String> getDiagnostics() {
            return Lists.newArrayList(report.getDiagnosticInfo());
        }

        @Override
        public String getAssignedContainerMgrAddress() {
            return "localhost:9998";
        }

        @Override
        public long getShuffleFinishTime() {
            return report.getShuffleFinishTime();
        }

        @Override
        public long getSortFinishTime() {
            return report.getSortFinishTime();
        }

        @Override
        public String getNodeRackName() {
            return "/default-rack";
        }
    };
}
Also used : TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) Counters(org.apache.hadoop.mapreduce.Counters) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)
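A hypothetical caller (the job/task indices and the map task type are arbitrary; the builder calls mirror those in Example 26, and the accessors are the ones stubbed by the anonymous TaskAttempt above):

// Build ids the same way Example 26 does, then ask MockJobs for a stub attempt.
JobId jobId = MRBuilderUtils.newJobId(12345, 1, 1);
TaskId taskId = MRBuilderUtils.newTaskId(jobId, 0,
    org.apache.hadoop.mapreduce.v2.api.records.TaskType.MAP);
TaskAttempt attempt = MockJobs.newTaskAttempt(taskId, 0);
TaskAttemptId attemptId = attempt.getID();     // taskId plus attempt index 0
float progress = attempt.getProgress();        // comes from the generated report
String rack = attempt.getNodeRackName();       // hard-coded "/default-rack"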

Example 29 with TaskAttemptId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId in the Apache Hadoop project.

From the class MockJobs, method newTaskAttempts.

public static Map<TaskAttemptId, TaskAttempt> newTaskAttempts(TaskId tid, int m) {
    Map<TaskAttemptId, TaskAttempt> map = Maps.newHashMap();
    for (int i = 0; i < m; ++i) {
        TaskAttempt ta = newTaskAttempt(tid, i);
        map.put(ta.getID(), ta);
    }
    return map;
}
Also used : TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt)
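A brief sketch of how the returned map might be consumed (illustrative; the attempt count of 3 is arbitrary and taskId is assumed to be built as in the previous examples):

// Create three attempts for one task and walk the map keyed by TaskAttemptId.
Map<TaskAttemptId, TaskAttempt> attempts = MockJobs.newTaskAttempts(taskId, 3);
for (Map.Entry<TaskAttemptId, TaskAttempt> e : attempts.entrySet()) {
    System.out.println(e.getKey() + " -> " + e.getValue().getState());
}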

Example 30 with TaskAttemptId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId in the Apache Hadoop project.

From the class TaskAttemptCompletionEventPBImpl, method getAttemptId.

@Override
public TaskAttemptId getAttemptId() {
    TaskAttemptCompletionEventProtoOrBuilder p = viaProto ? proto : builder;
    if (this.taskAttemptId != null) {
        return this.taskAttemptId;
    }
    if (!p.hasAttemptId()) {
        return null;
    }
    this.taskAttemptId = convertFromProtoFormat(p.getAttemptId());
    return this.taskAttemptId;
}
Also used : TaskAttemptCompletionEventProtoOrBuilder(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptCompletionEventProtoOrBuilder)
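The getter follows the usual lazy-conversion pattern of the MR v2 PBImpl record classes: a previously cached TaskAttemptId wins, otherwise the proto field is converted once and memoized. A hedged sketch of the matching setter as it conventionally looks in these classes (the helper name maybeInitBuilder follows the common PBImpl convention; the exact body in the Hadoop source may differ):

@Override
public void setAttemptId(TaskAttemptId attemptId) {
    // Conventional PBImpl pattern (sketch): switch to the mutable builder,
    // clear the proto field when unsetting, and cache the record locally so
    // getAttemptId() can return it without another proto conversion.
    maybeInitBuilder();
    if (attemptId == null) {
        builder.clearAttemptId();
    }
    this.taskAttemptId = attemptId;
}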

Aggregations

TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 111 usages
Test (org.junit.Test): 72 usages
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 61 usages
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 57 usages
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 51 usages
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 48 usages
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 45 usages
TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent): 33 usages
Configuration (org.apache.hadoop.conf.Configuration): 32 usages
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 28 usages
AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext): 27 usages
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 21 usages
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 20 usages
ClientResponse (com.sun.jersey.api.client.ClientResponse): 16 usages
WebResource (com.sun.jersey.api.client.WebResource): 16 usages
HashMap (java.util.HashMap): 16 usages
Container (org.apache.hadoop.yarn.api.records.Container): 16 usages
Path (org.apache.hadoop.fs.Path): 15 usages
TaskAttemptContainerLaunchedEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent): 14 usages
NodeId (org.apache.hadoop.yarn.api.records.NodeId): 14 usages