
Example 1 with AppLevelTimelineCollector

Use of org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector in the Apache Hadoop project.

From the class SimpleEntityWriterV2, method writeEntities:

protected void writeEntities(Configuration tlConf, TimelineCollectorManager manager, Context context) throws IOException {
    Configuration conf = context.getConfiguration();
    // simulate the app id with the task id
    int taskId = context.getTaskAttemptID().getTaskID().getId();
    long timestamp = conf.getLong(TIMELINE_SERVICE_PERFORMANCE_RUN_ID, 0);
    ApplicationId appId = ApplicationId.newInstance(timestamp, taskId);
    // create the app level timeline collector
    AppLevelTimelineCollector collector = new AppLevelTimelineCollector(appId);
    manager.putIfAbsent(appId, collector);
    try {
        // set the context
        // flow name: job name, flow run id: run timestamp, user id: task user
        TimelineCollectorContext tlContext = collector.getTimelineEntityContext();
        tlContext.setFlowName(context.getJobName());
        tlContext.setFlowRunId(timestamp);
        tlContext.setUserId(context.getUser());
        final int kbs = conf.getInt(KBS_SENT, KBS_SENT_DEFAULT);
        long totalTime = 0;
        final int testtimes = conf.getInt(TEST_TIMES, TEST_TIMES_DEFAULT);
        final Random rand = new Random();
        final TaskAttemptID taskAttemptId = context.getTaskAttemptID();
        final char[] payLoad = new char[kbs * 1024];
        for (int i = 0; i < testtimes; i++) {
            // Generate a fixed length random payload
            for (int xx = 0; xx < kbs * 1024; xx++) {
                int alphaNumIdx = rand.nextInt(ALPHA_NUMS.length);
                payLoad[xx] = ALPHA_NUMS[alphaNumIdx];
            }
            String entId = taskAttemptId + "_" + i;
            final TimelineEntity entity = new TimelineEntity();
            entity.setId(entId);
            entity.setType("FOO_ATTEMPT");
            entity.addInfo("PERF_TEST", payLoad);
            // add an event
            TimelineEvent event = new TimelineEvent();
            event.setId("foo_event_id");
            event.setTimestamp(System.currentTimeMillis());
            event.addInfo("foo_event", "test");
            entity.addEvent(event);
            // add a metric
            TimelineMetric metric = new TimelineMetric();
            metric.setId("foo_metric");
            metric.addValue(System.currentTimeMillis(), 123456789L);
            entity.addMetric(metric);
            // add a config
            entity.addConfig("foo", "bar");
            TimelineEntities entities = new TimelineEntities();
            entities.addEntity(entity);
            // write as the current user
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            long startWrite = System.nanoTime();
            try {
                collector.putEntities(entities, ugi);
            } catch (Exception e) {
                context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
                LOG.error("writing to the timeline service failed", e);
            }
            long endWrite = System.nanoTime();
            totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
        }
        LOG.info("wrote " + testtimes + " entities (" + kbs * testtimes + " kB) in " + totalTime + " ms");
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(testtimes);
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_KBS).increment(kbs * testtimes);
    } finally {
        // clean up
        manager.remove(appId);
    }
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent), TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric), Configuration (org.apache.hadoop.conf.Configuration), AppLevelTimelineCollector (org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), IOException (java.io.IOException), Random (java.util.Random), TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities), TimelineCollectorContext (org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
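
Distilled from Example 1, a minimal hedged sketch of the register, write, and remove lifecycle the example follows. The helper name writeOnce is hypothetical, the flow name and run id are placeholders, and the imports are the ones listed under "Also used" above:

static void writeOnce(TimelineCollectorManager manager, ApplicationId appId, TimelineEntities entities) throws IOException {
    // register the app-level collector (no-op if one is already present)
    AppLevelTimelineCollector collector = new AppLevelTimelineCollector(appId);
    manager.putIfAbsent(appId, collector);
    try {
        // set the write context: flow name, flow run id, user id
        TimelineCollectorContext ctx = collector.getTimelineEntityContext();
        // placeholder flow name and run id; real callers use job name / run timestamp
        ctx.setFlowName("example-flow");
        ctx.setFlowRunId(System.currentTimeMillis());
        ctx.setUserId(UserGroupInformation.getCurrentUser().getShortUserName());
        // write as the current user
        collector.putEntities(entities, UserGroupInformation.getCurrentUser());
    } finally {
        // always unregister so the manager does not keep a stale collector
        manager.remove(appId);
    }
}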

Example 2 with AppLevelTimelineCollector

Use of org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector in the Apache Hadoop project.

From the class TestSystemMetricsPublisherForV2, method createAppAndRegister:

private RMApp createAppAndRegister(ApplicationId appId) {
    RMApp app = createRMApp(appId);
    // registration steps that are normally handled inside RMAppImpl
    rmAppsMapInContext.putIfAbsent(appId, app);
    AppLevelTimelineCollector collector = new AppLevelTimelineCollector(appId);
    rmTimelineCollectorManager.putIfAbsent(appId, collector);
    return app;
}
Also used: RMApp (org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp), AppLevelTimelineCollector (org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector)
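
As a follow-up, a test could check that the registration took effect. A hedged sketch, assuming TimelineCollectorManager#get(ApplicationId) and JUnit's org.junit.Assert are available:

// hedged sketch: the collector registered above should be retrievable
AppLevelTimelineCollector registered = (AppLevelTimelineCollector) rmTimelineCollectorManager.get(appId);
Assert.assertNotNull("no collector registered for " + appId, registered);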

Example 3 with AppLevelTimelineCollector

Use of org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector in the Apache Hadoop project.

From the class JobHistoryFileReplayMapperV2, method writeEntities:

@Override
protected void writeEntities(Configuration tlConf, TimelineCollectorManager manager, Context context) throws IOException {
    JobHistoryFileReplayHelper helper = new JobHistoryFileReplayHelper(context);
    int replayMode = helper.getReplayMode();
    JobHistoryFileParser parser = helper.getParser();
    TimelineEntityConverterV2 converter = new TimelineEntityConverterV2();
    // collect the apps it needs to process
    Collection<JobFiles> jobs = helper.getJobFiles();
    if (jobs.isEmpty()) {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process no jobs");
    } else {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process " + jobs.size() + " jobs");
    }
    for (JobFiles job : jobs) {
        // process each job
        String jobIdStr = job.getJobId();
        // skip if either file is missing
        if (job.getJobConfFilePath() == null || job.getJobHistoryFilePath() == null) {
            LOG.info(jobIdStr + " is missing either the job history file or the " + "configuration file. Skipping.");
            continue;
        }
        LOG.info("processing " + jobIdStr + "...");
        JobId jobId = TypeConverter.toYarn(JobID.forName(jobIdStr));
        ApplicationId appId = jobId.getAppId();
        // create the app level timeline collector and start it
        AppLevelTimelineCollector collector = new AppLevelTimelineCollector(appId);
        manager.putIfAbsent(appId, collector);
        try {
            // parse the job info and configuration
            JobInfo jobInfo = parser.parseHistoryFile(job.getJobHistoryFilePath());
            Configuration jobConf = parser.parseConfiguration(job.getJobConfFilePath());
            LOG.info("parsed the job history file and the configuration file " + "for job " + jobIdStr);
            // set the context
            // flow name: job name, flow run id: job submit time, user id: job user
            TimelineCollectorContext tlContext = collector.getTimelineEntityContext();
            tlContext.setFlowName(jobInfo.getJobname());
            tlContext.setFlowRunId(jobInfo.getSubmitTime());
            tlContext.setUserId(jobInfo.getUsername());
            // create entities from job history and write them
            long totalTime = 0;
            List<TimelineEntity> entitySet = converter.createTimelineEntities(jobInfo, jobConf);
            LOG.info("converted them into timeline entities for job " + jobIdStr);
            // write as the current user
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            long startWrite = System.nanoTime();
            try {
                switch(replayMode) {
                    case JobHistoryFileReplayHelper.WRITE_ALL_AT_ONCE:
                        writeAllEntities(collector, entitySet, ugi);
                        break;
                    case JobHistoryFileReplayHelper.WRITE_PER_ENTITY:
                        writePerEntity(collector, entitySet, ugi);
                        break;
                    default:
                        break;
                }
            } catch (Exception e) {
                context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
                LOG.error("writing to the timeline service failed", e);
            }
            long endWrite = System.nanoTime();
            totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
            int numEntities = entitySet.size();
            LOG.info("wrote " + numEntities + " entities in " + totalTime + " ms");
            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(numEntities);
        } finally {
            manager.remove(appId);
            // move it along
            context.progress();
        }
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), AppLevelTimelineCollector (org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector), JobFiles (org.apache.hadoop.mapreduce.JobHistoryFileReplayHelper.JobFiles), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), IOException (java.io.IOException), JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo), TimelineCollectorContext (org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
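
The writeAllEntities and writePerEntity helpers are not shown on this page. Given the two replay modes above, a hedged sketch of what they plausibly look like, one bulk put versus one put per entity:

private void writeAllEntities(AppLevelTimelineCollector collector, List<TimelineEntity> entitySet, UserGroupInformation ugi) throws IOException {
    // one bulk write: wrap the whole list and put it in a single call
    TimelineEntities entities = new TimelineEntities();
    entities.setEntities(entitySet);
    collector.putEntities(entities, ugi);
}

private void writePerEntity(AppLevelTimelineCollector collector, List<TimelineEntity> entitySet, UserGroupInformation ugi) throws IOException {
    // one write per entity: exercises the collector with many small puts
    for (TimelineEntity entity : entitySet) {
        TimelineEntities entities = new TimelineEntities();
        entities.addEntity(entity);
        collector.putEntities(entities, ugi);
    }
}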

Example 4 with AppLevelTimelineCollector

Use of org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector in the Apache Hadoop project.

From the class RMAppImpl, method startTimelineCollector:

/**
   * Starts the application level timeline collector for this app. This should
   * be used only if the timeline service v.2 is enabled.
   */
public void startTimelineCollector() {
    AppLevelTimelineCollector collector = new AppLevelTimelineCollector(applicationId);
    rmContext.getRMTimelineCollectorManager().putIfAbsent(applicationId, collector);
}
Also used: AppLevelTimelineCollector (org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector)
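
Given the Javadoc's caveat, callers would typically guard this behind the v2 feature check. A minimal hedged sketch; app and conf are caller-supplied, and the static check YarnConfiguration.timelineServiceV2Enabled(Configuration) is assumed available as in recent Hadoop versions:

// hedged sketch: only start the app-level collector when timeline service
// v2 is enabled, per the Javadoc above
if (YarnConfiguration.timelineServiceV2Enabled(conf)) {
    app.startTimelineCollector();
}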

Aggregations

AppLevelTimelineCollector (org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector): 4 usages
IOException (java.io.IOException): 2 usages
Configuration (org.apache.hadoop.conf.Configuration): 2 usages
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 2 usages
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 2 usages
TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity): 2 usages
TimelineCollectorContext (org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext): 2 usages
Random (java.util.Random): 1 usage
JobFiles (org.apache.hadoop.mapreduce.JobHistoryFileReplayHelper.JobFiles): 1 usage
JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo): 1 usage
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 1 usage
TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities): 1 usage
TimelineEvent (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent): 1 usage
TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric): 1 usage
RMApp (org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp): 1 usage