
Example 31 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.

From the class ApplicationHistoryManagerOnTimelineStore, method getContainers:

@Override
public Map<ContainerId, ContainerReport> getContainers(ApplicationAttemptId appAttemptId) throws YarnException, IOException {
    ApplicationReportExt app = getApplication(appAttemptId.getApplicationId(), ApplicationReportField.USER_AND_ACLS);
    checkAccess(app);
    // Query the timeline store for all container entities whose parent
    // primary filter matches this attempt. Note: PARENT_PRIMARIY_FILTER is
    // spelled this way in the Hadoop constant itself.
    TimelineEntities entities = timelineDataManager.getEntities(
            ContainerMetricsConstants.ENTITY_TYPE,
            new NameValuePair(ContainerMetricsConstants.PARENT_PRIMARIY_FILTER,
                    appAttemptId.toString()),
            null, null, null, null, null, Long.MAX_VALUE,
            EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser());
    Map<ContainerId, ContainerReport> containers = new LinkedHashMap<ContainerId, ContainerReport>();
    if (entities != null && entities.getEntities() != null) {
        for (TimelineEntity entity : entities.getEntities()) {
            ContainerReport container = convertToContainerReport(entity, serverHttpAddress, app.appReport.getUser());
            containers.put(container.getContainerId(), container);
        }
    }
    return containers;
}
Also used: NameValuePair (org.apache.hadoop.yarn.server.timeline.NameValuePair), Field (org.apache.hadoop.yarn.server.timeline.TimelineReader.Field), TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities), ContainerId (org.apache.hadoop.yarn.api.records.ContainerId), ContainerReport (org.apache.hadoop.yarn.api.records.ContainerReport), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), LinkedHashMap (java.util.LinkedHashMap)
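
For orientation, a caller uses this method roughly as follows. This is a minimal sketch: historyManager stands in for an initialized ApplicationHistoryManagerOnTimelineStore, and the IDs are invented for illustration.

ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
// Returns one ContainerReport per container the timeline store knows
// about for this attempt, keyed by container ID.
Map<ContainerId, ContainerReport> reports = historyManager.getContainers(attemptId);
for (ContainerReport report : reports.values()) {
    System.out.println(report.getContainerId() + " -> " + report.getContainerState());
}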

Example 32 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.

From the class JobHistoryFileReplayMapperV1, method map:

public void map(IntWritable key, IntWritable val, Context context) throws IOException {
    // collect the apps it needs to process
    TimelineClient tlc = new TimelineClientImpl();
    // The timeline client is a YARN service; it must be initialized and
    // started before entities can be written through it.
    tlc.init(context.getConfiguration());
    tlc.start();
    TimelineEntityConverterV1 converter = new TimelineEntityConverterV1();
    JobHistoryFileReplayHelper helper = new JobHistoryFileReplayHelper(context);
    int replayMode = helper.getReplayMode();
    Collection<JobFiles> jobs = helper.getJobFiles();
    JobHistoryFileParser parser = helper.getParser();
    if (jobs.isEmpty()) {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process no jobs");
    } else {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process " + jobs.size() + " jobs");
    }
    for (JobFiles job : jobs) {
        // process each job
        String jobIdStr = job.getJobId();
        LOG.info("processing " + jobIdStr + "...");
        JobId jobId = TypeConverter.toYarn(JobID.forName(jobIdStr));
        ApplicationId appId = jobId.getAppId();
        try {
            // parse the job info and configuration
            Path historyFilePath = job.getJobHistoryFilePath();
            Path confFilePath = job.getJobConfFilePath();
            if ((historyFilePath == null) || (confFilePath == null)) {
                continue;
            }
            JobInfo jobInfo = parser.parseHistoryFile(historyFilePath);
            Configuration jobConf = parser.parseConfiguration(confFilePath);
            LOG.info("parsed the job history file and the configuration file for job " + jobIdStr);
            // create entities from job history and write them
            long totalTime = 0;
            Set<TimelineEntity> entitySet = converter.createTimelineEntities(jobInfo, jobConf);
            LOG.info("converted them into timeline entities for job " + jobIdStr);
            // use the current user for this purpose
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            long startWrite = System.nanoTime();
            try {
                switch(replayMode) {
                    case JobHistoryFileReplayHelper.WRITE_ALL_AT_ONCE:
                        writeAllEntities(tlc, entitySet, ugi);
                        break;
                    case JobHistoryFileReplayHelper.WRITE_PER_ENTITY:
                        writePerEntity(tlc, entitySet, ugi);
                        break;
                    default:
                        break;
                }
            } catch (Exception e) {
                context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
                LOG.error("writing to the timeline service failed", e);
            }
            long endWrite = System.nanoTime();
            totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
            int numEntities = entitySet.size();
            LOG.info("wrote " + numEntities + " entities in " + totalTime + " ms");
            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(numEntities);
        } finally {
            // move it along
            context.progress();
        }
    }
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), JobFiles (org.apache.hadoop.mapreduce.JobHistoryFileReplayHelper.JobFiles), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), IOException (java.io.IOException), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), TimelineClient (org.apache.hadoop.yarn.client.api.TimelineClient), JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo), TimelineClientImpl (org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
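
The switch above dispatches to two private helpers whose bodies are not shown in this excerpt. The following is a hedged sketch of their likely shape (LOG and the throws clauses as in the surrounding class); the real implementations in JobHistoryFileReplayMapperV1 may differ.

private void writeAllEntities(TimelineClient tlc, Set<TimelineEntity> entitySet, UserGroupInformation ugi) throws IOException, YarnException {
    // Single batched call: one RPC carries every entity for the job.
    tlc.putEntities(entitySet.toArray(new TimelineEntity[entitySet.size()]));
}

private void writePerEntity(TimelineClient tlc, Set<TimelineEntity> entitySet, UserGroupInformation ugi) throws IOException, YarnException {
    // One RPC per entity, modeling fine-grained writers. The ugi parameter
    // mirrors the call sites; a real implementation might wrap the put in
    // ugi.doAs().
    for (TimelineEntity entity : entitySet) {
        tlc.putEntities(entity);
    }
}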

Example 33 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.

From the class ApplicationMaster, method publishContainerEndEvent:

@VisibleForTesting
void publishContainerEndEvent(final TimelineClient timelineClient, ContainerStatus container, String domainId, UserGroupInformation ugi) {
    final TimelineEntity entity = new TimelineEntity();
    entity.setEntityId(container.getContainerId().toString());
    entity.setEntityType(DSEntity.DS_CONTAINER.toString());
    entity.setDomainId(domainId);
    entity.addPrimaryFilter(USER_TIMELINE_FILTER_NAME, ugi.getShortUserName());
    entity.addPrimaryFilter(APPID_TIMELINE_FILTER_NAME, container.getContainerId().getApplicationAttemptId().getApplicationId().toString());
    TimelineEvent event = new TimelineEvent();
    event.setTimestamp(System.currentTimeMillis());
    event.setEventType(DSEvent.DS_CONTAINER_END.toString());
    event.addEventInfo("State", container.getState().name());
    event.addEventInfo("Exit Status", container.getExitStatus());
    entity.addEvent(event);
    try {
        processTimelineResponseErrors(putContainerEntity(timelineClient, container.getContainerId().getApplicationAttemptId(), entity));
    } catch (YarnException | IOException | ClientHandlerException e) {
        LOG.error("Container end event could not be published for " + container.getContainerId().toString(), e);
    }
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException), IOException (java.io.IOException), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), VisibleForTesting (com.google.common.annotations.VisibleForTesting)
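
The helper processTimelineResponseErrors referenced above is not part of this excerpt. A minimal sketch of such a check, assuming it only logs server-side put errors and passes the response through (java.util.List is also needed):

private TimelinePutResponse processTimelineResponseErrors(TimelinePutResponse response) {
    List<TimelinePutResponse.TimelinePutError> errors = response.getErrors();
    if (errors.isEmpty()) {
        LOG.debug("Timeline entities were put successfully");
    } else {
        // Each error identifies the offending entity and a server-side code.
        for (TimelinePutResponse.TimelinePutError error : errors) {
            LOG.error("Error putting entity [" + error.getEntityType() + ":"
                + error.getEntityId() + "], error code: " + error.getErrorCode());
        }
    }
    return response;
}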

Example 34 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.

From the class FileSystemTimelineWriter, method putEntities:

@Override
public TimelinePutResponse putEntities(ApplicationAttemptId appAttemptId, TimelineEntityGroupId groupId, TimelineEntity... entities) throws IOException, YarnException {
    if (appAttemptId == null) {
        return putEntities(entities);
    }
    List<TimelineEntity> entitiesToDBStore = new ArrayList<TimelineEntity>();
    List<TimelineEntity> entitiesToSummaryCache = new ArrayList<TimelineEntity>();
    List<TimelineEntity> entitiesToEntityCache = new ArrayList<TimelineEntity>();
    Path attemptDir = attemptDirCache.getAppAttemptDir(appAttemptId);
    for (TimelineEntity entity : entities) {
        if (summaryEntityTypes.contains(entity.getEntityType())) {
            entitiesToSummaryCache.add(entity);
        } else {
            if (groupId != null) {
                entitiesToEntityCache.add(entity);
            } else {
                entitiesToDBStore.add(entity);
            }
        }
    }
    if (!entitiesToSummaryCache.isEmpty()) {
        Path summaryLogPath = new Path(attemptDir, SUMMARY_LOG_PREFIX + appAttemptId.toString());
        if (LOG.isDebugEnabled()) {
            LOG.debug("Writing summary log for " + appAttemptId.toString() + " to " + summaryLogPath);
        }
        this.logFDsCache.writeSummaryEntityLogs(fs, summaryLogPath, objMapper, appAttemptId, entitiesToSummaryCache, isAppendSupported);
    }
    if (!entitiesToEntityCache.isEmpty()) {
        Path entityLogPath = new Path(attemptDir, ENTITY_LOG_PREFIX + groupId.toString());
        if (LOG.isDebugEnabled()) {
            LOG.debug("Writing entity log for " + groupId.toString() + " to " + entityLogPath);
        }
        this.logFDsCache.writeEntityLogs(fs, entityLogPath, objMapper, appAttemptId, groupId, entitiesToEntityCache, isAppendSupported);
    }
    if (!entitiesToDBStore.isEmpty()) {
        putEntities(entitiesToDBStore.toArray(new TimelineEntity[entitiesToDBStore.size()]));
    }
    return new TimelinePutResponse();
}
Also used: Path (org.apache.hadoop.fs.Path), ArrayList (java.util.ArrayList), TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity)
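
To exercise the entity-cache branch above, a caller supplies a non-null TimelineEntityGroupId (org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId). A small illustrative sketch; writer stands in for a configured FileSystemTimelineWriter, and all identifiers are invented.

ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance(appId, "group_1");
TimelineEntity entity = new TimelineEntity();
entity.setEntityId("entity_1");
// Any type not registered as a summary type, combined with a non-null
// groupId, routes the entity to the per-group entity log.
entity.setEntityType("SOME_ENTITY_TYPE");
writer.putEntities(attemptId, groupId, entity);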

Example 35 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.

From the class TimelineClientImpl, method putTimelineDataInJSONFile:

/**
   * Put timeline data in a JSON file via command line.
   * 
   * @param path
   *          path to the timeline data JSON file
   * @param type
   *          the type of the timeline data in the JSON file
   */
private static void putTimelineDataInJSONFile(String path, String type) {
    File jsonFile = new File(path);
    if (!jsonFile.exists()) {
        LOG.error("File [" + jsonFile.getAbsolutePath() + "] doesn't exist");
        return;
    }
    YarnJacksonJaxbJsonProvider.configObjectMapper(MAPPER);
    TimelineEntities entities = null;
    TimelineDomains domains = null;
    try {
        if (type.equals(ENTITY_DATA_TYPE)) {
            entities = MAPPER.readValue(jsonFile, TimelineEntities.class);
        } else if (type.equals(DOMAIN_DATA_TYPE)) {
            domains = MAPPER.readValue(jsonFile, TimelineDomains.class);
        }
    } catch (Exception e) {
        LOG.error("Error when reading  " + e.getMessage());
        e.printStackTrace(System.err);
        return;
    }
    Configuration conf = new YarnConfiguration();
    TimelineClient client = TimelineClient.createTimelineClient();
    client.init(conf);
    client.start();
    try {
        if (UserGroupInformation.isSecurityEnabled() && conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, false)) {
            Token<TimelineDelegationTokenIdentifier> token = client.getDelegationToken(UserGroupInformation.getCurrentUser().getUserName());
            UserGroupInformation.getCurrentUser().addToken(token);
        }
        if (type.equals(ENTITY_DATA_TYPE)) {
            TimelinePutResponse response = client.putEntities(entities.getEntities().toArray(new TimelineEntity[entities.getEntities().size()]));
            if (response.getErrors().size() == 0) {
                LOG.info("Timeline entities are successfully put");
            } else {
                for (TimelinePutResponse.TimelinePutError error : response.getErrors()) {
                    LOG.error("TimelineEntity [" + error.getEntityType() + ":" + error.getEntityId() + "] is not successfully put. Error code: " + error.getErrorCode());
                }
            }
        } else if (type.equals(DOMAIN_DATA_TYPE) && domains != null) {
            boolean hasError = false;
            for (TimelineDomain domain : domains.getDomains()) {
                try {
                    client.putDomain(domain);
                } catch (Exception e) {
                    LOG.error("Error when putting domain " + domain.getId(), e);
                    hasError = true;
                }
            }
            if (!hasError) {
                LOG.info("Timeline domains are successfully put");
            }
        }
    } catch (Exception e) {
        LOG.error("Error when putting the timeline data", e);
    } finally {
        client.stop();
    }
}
Also used: TimelineDomains (org.apache.hadoop.yarn.api.records.timeline.TimelineDomains), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), Configuration (org.apache.hadoop.conf.Configuration), TimelineDelegationTokenIdentifier (org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier), TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), IOException (java.io.IOException), TimelineClient (org.apache.hadoop.yarn.client.api.TimelineClient), TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities), TimelineDomain (org.apache.hadoop.yarn.api.records.timeline.TimelineDomain), File (java.io.File)
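
The JSON file this code path reads is simply a serialized TimelineEntities record. One hedged way to produce a compatible file is with YARN's TimelineUtils (org.apache.hadoop.yarn.util.timeline.TimelineUtils), which applies the same Jackson configuration as the reader above; the entity values and the file name entities.json are illustrative.

public static void main(String[] args) throws Exception {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("entity_1");
    entity.setEntityType("MY_TYPE");
    entity.setStartTime(System.currentTimeMillis());
    TimelineEntities entities = new TimelineEntities();
    entities.addEntity(entity);
    // Pretty-print the record so the resulting file is easy to inspect.
    String json = TimelineUtils.dumpTimelineRecordtoJSON(entities, true);
    java.nio.file.Files.write(java.nio.file.Paths.get("entities.json"),
        json.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}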

Aggregations

TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity): 148
TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent): 66
Test (org.junit.Test): 61
TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities): 30
HashMap (java.util.HashMap): 23
TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse): 21
IOException (java.io.IOException): 17
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 16
ClientResponse (com.sun.jersey.api.client.ClientResponse): 13
WebResource (com.sun.jersey.api.client.WebResource): 12
Set (java.util.Set): 12
Configuration (org.apache.hadoop.conf.Configuration): 11
HashSet (java.util.HashSet): 8
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 8
Map (java.util.Map): 7
Path (org.apache.hadoop.fs.Path): 7
Field (org.apache.hadoop.yarn.server.timeline.TimelineReader.Field): 6
DAGHistoryEvent (org.apache.tez.dag.history.DAGHistoryEvent): 6
TezDAGID (org.apache.tez.dag.records.TezDAGID): 6
ArrayList (java.util.ArrayList): 5