Example 1 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

From the class TimelineServiceV2Publisher, method containerCreated.

@SuppressWarnings("unchecked")
@Override
public void containerCreated(RMContainer container, long createdTime) {
    if (publishContainerEvents) {
        TimelineEntity entity = createContainerEntity(container.getContainerId());
        entity.setCreatedTime(createdTime);
        TimelineEvent tEvent = new TimelineEvent();
        tEvent.setId(ContainerMetricsConstants.CREATED_IN_RM_EVENT_TYPE);
        tEvent.setTimestamp(createdTime);
        entity.addEvent(tEvent);
        // updated as event info instead of entity info, as entity info is updated
        // by NM
        Map<String, Object> entityInfo = new HashMap<String, Object>();
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_MEMORY_INFO,
            container.getAllocatedResource().getMemorySize());
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_VCORE_INFO,
            container.getAllocatedResource().getVirtualCores());
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_INFO,
            container.getAllocatedNode().getHost());
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_INFO,
            container.getAllocatedNode().getPort());
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO,
            container.getAllocatedPriority().getPriority());
        entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO,
            container.getNodeHttpAddress());
        entity.setInfo(entityInfo);
        getDispatcher().getEventHandler().handle(new TimelineV2PublishEvent(
            SystemMetricsEventType.PUBLISH_ENTITY, entity,
            container.getContainerId().getApplicationAttemptId().getApplicationId()));
    }
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent), HashMap (java.util.HashMap), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity)
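
The createContainerEntity helper called above is defined elsewhere in TimelineServiceV2Publisher and is not shown on this page. A minimal sketch of what such a helper plausibly does, keying a container-typed entity by its ContainerId; the exact fields it sets are an assumption, not the confirmed implementation:

// Hypothetical sketch only; the real helper in TimelineServiceV2Publisher
// may also wire up a parent entity or other identifiers.
private static TimelineEntity createContainerEntity(ContainerId containerId) {
    TimelineEntity entity = new TimelineEntity();
    entity.setType(TimelineEntityType.YARN_CONTAINER.name());
    entity.setId(containerId.toString());
    return entity;
}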

Example 2 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

From the class TimelineEntityConverterV2, method createTaskAttemptEntities.

private Set<TimelineEntity> createTaskAttemptEntities(TaskInfo taskInfo) {
    Set<TimelineEntity> taskAttempts = new HashSet<TimelineEntity>();
    Map<TaskAttemptID, TaskAttemptInfo> taskAttemptInfoMap = taskInfo.getAllTaskAttempts();
    LOG.info("task " + taskInfo.getTaskId() + " has " + taskAttemptInfoMap.size() + " task attempts");
    for (TaskAttemptInfo taskAttemptInfo : taskAttemptInfoMap.values()) {
        TimelineEntity taskAttempt = createTaskAttemptEntity(taskAttemptInfo);
        taskAttempts.add(taskAttempt);
    }
    return taskAttempts;
}
Also used: TaskAttemptInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), HashSet (java.util.HashSet)
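
The per-attempt conversion happens in createTaskAttemptEntity, which is not shown here. As a hedged sketch, it plausibly builds an entity from the attempt id and start time; the type string and the set of copied fields below are assumptions, and the real converter populates far more detail from TaskAttemptInfo:

// Hypothetical sketch only; the type name and fields are assumptions about
// what TimelineEntityConverterV2 actually sets.
private TimelineEntity createTaskAttemptEntity(TaskAttemptInfo taskAttemptInfo) {
    TimelineEntity taskAttempt = new TimelineEntity();
    taskAttempt.setType("MAPREDUCE_TASK_ATTEMPT");
    taskAttempt.setId(taskAttemptInfo.getAttemptId().toString());
    taskAttempt.setCreatedTime(taskAttemptInfo.getStartTime());
    return taskAttempt;
}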

Example 3 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

From the class TimelineEntityConverterV2, method createTaskAndTaskAttemptEntities.

private List<TimelineEntity> createTaskAndTaskAttemptEntities(JobInfo jobInfo) {
    List<TimelineEntity> entities = new ArrayList<>();
    Map<TaskID, TaskInfo> taskInfoMap = jobInfo.getAllTasks();
    LOG.info("job " + jobInfo.getJobId() + " has " + taskInfoMap.size() + " tasks");
    for (TaskInfo taskInfo : taskInfoMap.values()) {
        TimelineEntity task = createTaskEntity(taskInfo);
        entities.add(task);
        // add the task attempts from this task
        Set<TimelineEntity> taskAttempts = createTaskAttemptEntities(taskInfo);
        entities.addAll(taskAttempts);
    }
    return entities;
}
Also used: TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo), ArrayList (java.util.ArrayList), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity)
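
As a usage sketch, the combined task and task-attempt entities could then be published through a TimelineV2Client. The setup below (a YARN Configuration, a valid ApplicationId, and a reachable timeline collector) is assumed context rather than part of the converter:

// Hedged usage sketch; assumes ATSv2 is enabled and the client has been
// pointed at a running timeline collector.
TimelineV2Client client = TimelineV2Client.createTimelineClient(appId);
client.init(conf);
client.start();
try {
    List<TimelineEntity> entities = createTaskAndTaskAttemptEntities(jobInfo);
    client.putEntities(entities.toArray(new TimelineEntity[0]));
} finally {
    client.stop();
}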

Example 4 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

From the class SimpleEntityWriterV2, method writeEntities.

protected void writeEntities(Configuration tlConf, TimelineCollectorManager manager, Context context) throws IOException {
    Configuration conf = context.getConfiguration();
    // simulate the app id with the task id
    int taskId = context.getTaskAttemptID().getTaskID().getId();
    long timestamp = conf.getLong(TIMELINE_SERVICE_PERFORMANCE_RUN_ID, 0);
    ApplicationId appId = ApplicationId.newInstance(timestamp, taskId);
    // create the app level timeline collector
    AppLevelTimelineCollector collector = new AppLevelTimelineCollector(appId);
    manager.putIfAbsent(appId, collector);
    try {
        // set the context
        // flow id: job name, flow run id: timestamp, user id
        TimelineCollectorContext tlContext = collector.getTimelineEntityContext();
        tlContext.setFlowName(context.getJobName());
        tlContext.setFlowRunId(timestamp);
        tlContext.setUserId(context.getUser());
        final int kbs = conf.getInt(KBS_SENT, KBS_SENT_DEFAULT);
        long totalTime = 0;
        final int testtimes = conf.getInt(TEST_TIMES, TEST_TIMES_DEFAULT);
        final Random rand = new Random();
        final TaskAttemptID taskAttemptId = context.getTaskAttemptID();
        final char[] payLoad = new char[kbs * 1024];
        for (int i = 0; i < testtimes; i++) {
            // Generate a fixed length random payload
            for (int xx = 0; xx < kbs * 1024; xx++) {
                int alphaNumIdx = rand.nextInt(ALPHA_NUMS.length);
                payLoad[xx] = ALPHA_NUMS[alphaNumIdx];
            }
            String entId = taskAttemptId + "_" + Integer.toString(i);
            final TimelineEntity entity = new TimelineEntity();
            entity.setId(entId);
            entity.setType("FOO_ATTEMPT");
            entity.addInfo("PERF_TEST", payLoad);
            // add an event
            TimelineEvent event = new TimelineEvent();
            event.setId("foo_event_id");
            event.setTimestamp(System.currentTimeMillis());
            event.addInfo("foo_event", "test");
            entity.addEvent(event);
            // add a metric
            TimelineMetric metric = new TimelineMetric();
            metric.setId("foo_metric");
            metric.addValue(System.currentTimeMillis(), 123456789L);
            entity.addMetric(metric);
            // add a config
            entity.addConfig("foo", "bar");
            TimelineEntities entities = new TimelineEntities();
            entities.addEntity(entity);
            // use the current user for this purpose
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            long startWrite = System.nanoTime();
            try {
                collector.putEntities(entities, ugi);
            } catch (Exception e) {
                context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
                LOG.error("writing to the timeline service failed", e);
            }
            long endWrite = System.nanoTime();
            totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
        }
        LOG.info("wrote " + testtimes + " entities (" + kbs * testtimes + " kB) in " + totalTime + " ms");
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(testtimes);
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_KBS).increment(kbs * testtimes);
    } finally {
        // clean up
        manager.remove(appId);
    }
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent), TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric), Configuration (org.apache.hadoop.conf.Configuration), AppLevelTimelineCollector (org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), IOException (java.io.IOException), Random (java.util.Random), TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities), TimelineCollectorContext (org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
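
The ALPHA_NUMS array sampled in the payload loop above is a constant defined elsewhere in the test code. A plausible definition, with the exact character set being an assumption:

// Hypothetical definition; the real constant in the Hadoop performance
// test may use a slightly different character set.
private static final char[] ALPHA_NUMS =
    "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 "
        .toCharArray();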

Example 5 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

From the class TestSystemMetricsPublisherForV2, method verifyEntity.

private static void verifyEntity(File entityFile, long expectedEvents, String eventForCreatedTime, long expectedMetrics) throws IOException {
    BufferedReader reader = null;
    String strLine;
    long count = 0;
    long metricsCount = 0;
    try {
        reader = new BufferedReader(new FileReader(entityFile));
        while ((strLine = reader.readLine()) != null) {
            if (strLine.trim().length() > 0) {
                TimelineEntity entity = FileSystemTimelineReaderImpl.getTimelineRecordFromJSON(strLine.trim(), TimelineEntity.class);
                // Overwritten on each line, so after the loop this holds the
                // metric count of the last entity read from the file.
                metricsCount = entity.getMetrics().size();
                for (TimelineEvent event : entity.getEvents()) {
                    if (event.getId().equals(eventForCreatedTime)) {
                        assertTrue(entity.getCreatedTime() > 0);
                        break;
                    }
                }
                count++;
            }
        }
    } finally {
        if (reader != null) {
            reader.close();
        }
    }
    assertEquals("Expected " + expectedEvents + " events to be published", expectedEvents, count);
    assertEquals("Expected " + expectedMetrics + " metrics is incorrect", expectedMetrics, metricsCount);
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent), BufferedReader (java.io.BufferedReader), FileReader (java.io.FileReader), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity)
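
The entity files this test reads are produced by the file-system timeline writer. A hedged sketch of the wiring that selects that writer; the configuration keys are real YARN properties, but the snippet simplifies the full test setup:

// Hedged sketch; a complete test also configures the storage root
// directory and starts the relevant timeline services.
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
conf.setClass(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
    FileSystemTimelineWriterImpl.class, TimelineWriter.class);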

Aggregations

TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity): 155 uses
Test (org.junit.Test): 98 uses
TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve): 54 uses
TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext): 54 uses
TimelineEntityFilters (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters): 46 uses
HashSet (java.util.HashSet): 37 uses
Client (com.sun.jersey.api.client.Client): 36 uses
ClientResponse (com.sun.jersey.api.client.ClientResponse): 36 uses
URI (java.net.URI): 36 uses
TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric): 33 uses
Set (java.util.Set): 32 uses
TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities): 27 uses
TimelineEvent (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent): 26 uses
TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList): 26 uses
HashMap (java.util.HashMap): 23 uses
Configuration (org.apache.hadoop.conf.Configuration): 21 uses
GenericType (com.sun.jersey.api.client.GenericType): 14 uses
HBaseTimelineWriterImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl): 12 uses
TimelinePrefixFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter): 10 uses
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 9 uses