
Example 1 with TimelineClientImpl

Use of org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl in project hadoop by apache.

From the class JobHistoryFileReplayMapperV1, method map:

public void map(IntWritable key, IntWritable val, Context context) throws IOException {
    // collect the apps it needs to process
    TimelineClient tlc = new TimelineClientImpl();
    TimelineEntityConverterV1 converter = new TimelineEntityConverterV1();
    JobHistoryFileReplayHelper helper = new JobHistoryFileReplayHelper(context);
    int replayMode = helper.getReplayMode();
    Collection<JobFiles> jobs = helper.getJobFiles();
    JobHistoryFileParser parser = helper.getParser();
    if (jobs.isEmpty()) {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process no jobs");
    } else {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process " + jobs.size() + " jobs");
    }
    for (JobFiles job : jobs) {
        // process each job
        String jobIdStr = job.getJobId();
        LOG.info("processing " + jobIdStr + "...");
        JobId jobId = TypeConverter.toYarn(JobID.forName(jobIdStr));
        ApplicationId appId = jobId.getAppId();
        try {
            // parse the job info and configuration
            Path historyFilePath = job.getJobHistoryFilePath();
            Path confFilePath = job.getJobConfFilePath();
            if ((historyFilePath == null) || (confFilePath == null)) {
                continue;
            }
            JobInfo jobInfo = parser.parseHistoryFile(historyFilePath);
            Configuration jobConf = parser.parseConfiguration(confFilePath);
            LOG.info("parsed the job history file and the configuration file for job " + jobIdStr);
            // create entities from job history and write them
            long totalTime = 0;
            Set<TimelineEntity> entitySet = converter.createTimelineEntities(jobInfo, jobConf);
            LOG.info("converted them into timeline entities for job " + jobIdStr);
            // use the current user for this purpose
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            long startWrite = System.nanoTime();
            try {
                switch(replayMode) {
                    case JobHistoryFileReplayHelper.WRITE_ALL_AT_ONCE:
                        writeAllEntities(tlc, entitySet, ugi);
                        break;
                    case JobHistoryFileReplayHelper.WRITE_PER_ENTITY:
                        writePerEntity(tlc, entitySet, ugi);
                        break;
                    default:
                        break;
                }
            } catch (Exception e) {
                context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
                LOG.error("writing to the timeline service failed", e);
            }
            long endWrite = System.nanoTime();
            totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
            int numEntities = entitySet.size();
            LOG.info("wrote " + numEntities + " entities in " + totalTime + " ms");
            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(numEntities);
        } finally {
            // move it along
            context.progress();
        }
    }
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), JobFiles (org.apache.hadoop.mapreduce.JobHistoryFileReplayHelper.JobFiles), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), IOException (java.io.IOException), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), TimelineClient (org.apache.hadoop.yarn.client.api.TimelineClient), JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo), TimelineClientImpl (org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
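
A note on the client lifecycle, since the mapper above only constructs the client: TimelineClient extends YARN's AbstractService, so it must be initialized with a Configuration and started before putEntities is called, and stopped when done (Example 3 below shows init, start and stop explicitly). A minimal standalone sketch of that lifecycle; the entity id and type are hypothetical, and the TimelineClient.createTimelineClient() factory is used instead of instantiating TimelineClientImpl directly:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class TimelineClientLifecycle {
    public static void main(String[] args) throws Exception {
        Configuration conf = new YarnConfiguration();
        TimelineClient client = TimelineClient.createTimelineClient();
        // the client is a YARN service: init and start before any write
        client.init(conf);
        client.start();
        try {
            TimelineEntity entity = new TimelineEntity();
            entity.setEntityId("demo_entity_0");   // hypothetical id
            entity.setEntityType("DEMO_ATTEMPT");  // hypothetical type
            client.putEntities(entity);
        } finally {
            // stop tears down the client's underlying resources
            client.stop();
        }
    }
}

Constructing TimelineClientImpl directly, as the mapper does, follows the same init/start/stop contract; the factory merely hides the implementation class.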

Example 2 with TimelineClientImpl

Use of org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl in project hadoop by apache.

From the class SimpleEntityWriterV1, method map:

public void map(IntWritable key, IntWritable val, Context context) throws IOException {
    TimelineClient tlc = new TimelineClientImpl();
    Configuration conf = context.getConfiguration();
    final int kbs = conf.getInt(KBS_SENT, KBS_SENT_DEFAULT);
    long totalTime = 0;
    final int testtimes = conf.getInt(TEST_TIMES, TEST_TIMES_DEFAULT);
    final Random rand = new Random();
    final TaskAttemptID taskAttemptId = context.getTaskAttemptID();
    final char[] payLoad = new char[kbs * 1024];
    for (int i = 0; i < testtimes; i++) {
        // Generate a fixed length random payload
        for (int xx = 0; xx < kbs * 1024; xx++) {
            int alphaNumIdx = rand.nextInt(ALPHA_NUMS.length);
            payLoad[xx] = ALPHA_NUMS[alphaNumIdx];
        }
        String entId = taskAttemptId + "_" + Integer.toString(i);
        final TimelineEntity entity = new TimelineEntity();
        entity.setEntityId(entId);
        entity.setEntityType("FOO_ATTEMPT");
        entity.addOtherInfo("PERF_TEST", payLoad);
        // add an event
        TimelineEvent event = new TimelineEvent();
        event.setTimestamp(System.currentTimeMillis());
        event.setEventType("foo_event");
        entity.addEvent(event);
        // use the current user for this purpose
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        long startWrite = System.nanoTime();
        try {
            tlc.putEntities(entity);
        } catch (Exception e) {
            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
            LOG.error("writing to the timeline service failed", e);
        }
        long endWrite = System.nanoTime();
        totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
    }
    LOG.info("wrote " + testtimes + " entities (" + kbs * testtimes + " kB) in " + totalTime + " ms");
    context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
    context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(testtimes);
    context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_KBS).increment(kbs * testtimes);
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), Configuration (org.apache.hadoop.conf.Configuration), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), IOException (java.io.IOException), TimelineClient (org.apache.hadoop.yarn.client.api.TimelineClient), Random (java.util.Random), TimelineClientImpl (org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
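
The catch block in this benchmark only counts transport-level failures; the timeline service can also reject individual entities, and those rejections come back in the TimelinePutResponse rather than as an exception. A small sketch of inspecting that response, assuming an already started client; the helper name putAndCountErrors is made up for illustration:

import java.util.List;

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
import org.apache.hadoop.yarn.client.api.TimelineClient;

public final class TimelinePutCheck {
    // hypothetical helper: surface per-entity rejections, which putEntities
    // reports in its response instead of throwing
    static int putAndCountErrors(TimelineClient client, TimelineEntity entity)
            throws Exception {
        TimelinePutResponse response = client.putEntities(entity);
        List<TimelinePutError> errors = response.getErrors();
        for (TimelinePutError error : errors) {
            System.err.println("rejected " + error.getEntityType() + "/"
                + error.getEntityId() + ", error code " + error.getErrorCode());
        }
        return errors.size();
    }
}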

Example 3 with TimelineClientImpl

Use of org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl in project hadoop by apache.

From the class TestDistributedShell, method testDSTimelineClientWithConnectionRefuse:

@Test
public void testDSTimelineClientWithConnectionRefuse() throws Exception {
    ApplicationMaster am = new ApplicationMaster();
    TimelineClientImpl client = new TimelineClientImpl() {

        @Override
        protected TimelineWriter createTimelineWriter(Configuration conf, UserGroupInformation authUgi, com.sun.jersey.api.client.Client client, URI resURI) throws IOException {
            TimelineWriter timelineWriter = new DirectTimelineWriter(authUgi, client, resURI);
            spyTimelineWriter = spy(timelineWriter);
            return spyTimelineWriter;
        }
    };
    client.init(conf);
    client.start();
    TestTimelineClient.mockEntityClientResponse(spyTimelineWriter, null, false, true);
    try {
        UserGroupInformation ugi = mock(UserGroupInformation.class);
        when(ugi.getShortUserName()).thenReturn("user1");
        // verify that no ClientHandlerException gets thrown out
        am.publishContainerEndEvent(client, ContainerStatus.newInstance(BuilderUtils.newContainerId(1, 1, 1, 1), ContainerState.COMPLETE, "", 1), "domainId", ugi);
    } finally {
        client.stop();
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), DirectTimelineWriter (org.apache.hadoop.yarn.client.api.impl.DirectTimelineWriter), TimelineWriter (org.apache.hadoop.yarn.client.api.impl.TimelineWriter), TimelineClientImpl (org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl), TestTimelineClient (org.apache.hadoop.yarn.client.api.impl.TestTimelineClient), YarnClient (org.apache.hadoop.yarn.client.api.YarnClient), URI (java.net.URI), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Test (org.junit.Test)
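
The injection trick in this test is that createTimelineWriter is a protected factory method of TimelineClientImpl, so a subclass can wrap the writer in a Mockito spy before the client ever uses it. A minimal sketch of the same pattern as a reusable test helper; the class and field names are illustrative, not from the Hadoop test:

import static org.mockito.Mockito.spy;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.client.api.impl.DirectTimelineWriter;
import org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl;
import org.apache.hadoop.yarn.client.api.impl.TimelineWriter;

// illustrative subclass: capture a spy on the writer so a test can stub
// responses or verify interactions without a running timeline server
public class SpyingTimelineClient extends TimelineClientImpl {
    public volatile TimelineWriter spyWriter;

    @Override
    protected TimelineWriter createTimelineWriter(Configuration conf,
            UserGroupInformation authUgi, com.sun.jersey.api.client.Client client,
            URI resURI) throws IOException {
        spyWriter = spy(new DirectTimelineWriter(authUgi, client, resURI));
        return spyWriter;
    }
}

After client.init(conf) and client.start(), the spy field is populated and can be stubbed or verified, which is what TestTimelineClient.mockEntityClientResponse does above before the container end event is published.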

Aggregations

Configuration (org.apache.hadoop.conf.Configuration): 3
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 3
TimelineClientImpl (org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl): 3
IOException (java.io.IOException): 2
TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity): 2
TimelineClient (org.apache.hadoop.yarn.client.api.TimelineClient): 2
URI (java.net.URI): 1
Random (java.util.Random): 1
Path (org.apache.hadoop.fs.Path): 1
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 1
JobFiles (org.apache.hadoop.mapreduce.JobHistoryFileReplayHelper.JobFiles): 1
JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo): 1
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 1
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 1
TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent): 1
YarnClient (org.apache.hadoop.yarn.client.api.YarnClient): 1
DirectTimelineWriter (org.apache.hadoop.yarn.client.api.impl.DirectTimelineWriter): 1
TestTimelineClient (org.apache.hadoop.yarn.client.api.impl.TestTimelineClient): 1
TimelineWriter (org.apache.hadoop.yarn.client.api.impl.TimelineWriter): 1
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 1