Search in sources :

Example 11 with ApplicationEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity in the Apache Hadoop project.

The class TestHBaseTimelineStorageApps, method testNonIntegralMetricValues:

/**
 * Verifies that the HBase timeline writer rejects metrics with
 * non-integral (floating point) values, both for an application entity
 * and for a generic timeline entity. The writer is expected to throw an
 * IOException in each case.
 */
@Test
public void testNonIntegralMetricValues() throws IOException {
    TimelineEntities teApp = new TimelineEntities();
    ApplicationEntity entityApp = new ApplicationEntity();
    String appId = "application_1000178881110_2002";
    entityApp.setId(appId);
    entityApp.setCreatedTime(1425016501000L);
    // Add a time-series metric whose values are floating point — this is
    // the invalid input the writer must reject.
    Set<TimelineMetric> metricsApp = new HashSet<>();
    TimelineMetric mApp = new TimelineMetric();
    mApp.setId("MAP_SLOT_MILLIS");
    Map<Long, Number> metricAppValues = new HashMap<Long, Number>();
    long ts = System.currentTimeMillis();
    metricAppValues.put(ts - 20, 10.5);
    metricAppValues.put(ts - 10, 20.5);
    mApp.setType(Type.TIME_SERIES);
    mApp.setValues(metricAppValues);
    metricsApp.add(mApp);
    entityApp.addMetrics(metricsApp);
    teApp.addEntity(entityApp);
    // Same invalid floating-point metric on a generic (non-application) entity.
    TimelineEntities teEntity = new TimelineEntities();
    TimelineEntity entity = new TimelineEntity();
    entity.setId("hello");
    entity.setType("world");
    entity.setCreatedTime(1425016501000L);
    Set<TimelineMetric> metricsEntity = new HashSet<>();
    TimelineMetric mEntity = new TimelineMetric();
    mEntity.setId("MAP_SLOT_MILLIS");
    mEntity.addValue(ts - 20, 10.5);
    metricsEntity.add(mEntity);
    entity.addMetrics(metricsEntity);
    teEntity.addEntity(entity);
    HBaseTimelineWriterImpl hbi = null;
    try {
        Configuration c1 = util.getConfiguration();
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        hbi.start();
        // Writing application entity.
        try {
            hbi.write("c1", "u1", "f1", "v1", 1002345678919L, appId, teApp);
            Assert.fail("Expected an exception as metric values are non integral");
        } catch (IOException expected) {
            // expected: non-integral metric values must be rejected
        }
        // Writing generic entity.
        try {
            hbi.write("c1", "u1", "f1", "v1", 1002345678919L, appId, teEntity);
            Assert.fail("Expected an exception as metric values are non integral");
        } catch (IOException expected) {
            // expected: non-integral metric values must be rejected
        }
        // No explicit stop() here — the finally block always stops and
        // closes the writer, so stopping twice was redundant.
    } finally {
        if (hbi != null) {
            hbi.stop();
            hbi.close();
        }
    }
}
Also used : TimelineMetric(org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) IOException(java.io.IOException) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) TimelineEntities(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities) ApplicationEntity(org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 12 with ApplicationEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity in the Apache Hadoop project.

The class JobHistoryEventHandler, method publishConfigsOnJobSubmittedEvent:

/**
 * Publishes the submitted job's configuration to the timeline service,
 * once as a job entity and once as an application entity. Because a
 * single entity may not exceed the publish size limit, the configs are
 * flushed and the entities re-created whenever the accumulated key+value
 * size crosses ATS_CONFIG_PUBLISH_SIZE_BYTES.
 *
 * No-op when the event carries no job configuration.
 */
private void publishConfigsOnJobSubmittedEvent(JobSubmittedEvent event, JobId jobId) {
    if (event.getJobConf() == null) {
        return;
    }
    // Publish job configurations both as job and app entity.
    // Configs are split into multiple entities if they exceed 100kb in size.
    org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity jobEntityForConfigs = createJobEntity(jobId);
    ApplicationEntity appEntityForConfigs = new ApplicationEntity();
    String appId = jobId.getAppId().toString();
    appEntityForConfigs.setId(appId);
    try {
        // Running total of key+value bytes accumulated in the current
        // (not yet published) pair of entities.
        int configSize = 0;
        for (Map.Entry<String, String> entry : event.getJobConf()) {
            int size = entry.getKey().length() + entry.getValue().length();
            configSize += size;
            if (configSize > JobHistoryEventUtils.ATS_CONFIG_PUBLISH_SIZE_BYTES) {
                // Flush what we have so far BEFORE adding the entry that
                // overflowed, then start fresh entities for it. The
                // size() > 0 guard skips the flush when the very first
                // entry alone exceeds the limit (nothing accumulated yet).
                if (jobEntityForConfigs.getConfigs().size() > 0) {
                    timelineV2Client.putEntities(jobEntityForConfigs);
                    timelineV2Client.putEntities(appEntityForConfigs);
                    jobEntityForConfigs = createJobEntity(jobId);
                    appEntityForConfigs = new ApplicationEntity();
                    appEntityForConfigs.setId(appId);
                }
                // The overflowing entry becomes the first of the new batch.
                configSize = size;
            }
            jobEntityForConfigs.addConfig(entry.getKey(), entry.getValue());
            appEntityForConfigs.addConfig(entry.getKey(), entry.getValue());
        }
        // Publish the final, partially-filled batch (if any configs remain).
        if (configSize > 0) {
            timelineV2Client.putEntities(jobEntityForConfigs);
            timelineV2Client.putEntities(appEntityForConfigs);
        }
    } catch (IOException | YarnException e) {
        // Best-effort publish: log and continue rather than failing the job.
        LOG.error("Exception while publishing configs on JOB_SUBMITTED Event " + " for the job : " + jobId, e);
    }
}
Also used : IOException(java.io.IOException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) ApplicationEntity(org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity) Map(java.util.Map) HashMap(java.util.HashMap)

Example 13 with ApplicationEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity in the Apache Hadoop project.

The class TimelineServiceV2Publisher, method appACLsUpdated:

@SuppressWarnings("unchecked")
@Override
public void appACLsUpdated(RMApp app, String appViewACLs, long updatedTime) {
    // Build the application entity that carries the updated view ACLs
    // (an empty string stands in for a null ACL value).
    ApplicationEntity appEntity = createApplicationEntity(app.getApplicationId());
    Map<String, Object> info = new HashMap<String, Object>();
    String acls = (appViewACLs == null) ? "" : appViewACLs;
    info.put(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO, acls);
    appEntity.setInfo(info);
    // Record the ACLS_UPDATED event at the supplied timestamp.
    TimelineEvent aclsUpdatedEvent = new TimelineEvent();
    aclsUpdatedEvent.setId(ApplicationMetricsConstants.ACLS_UPDATED_EVENT_TYPE);
    aclsUpdatedEvent.setTimestamp(updatedTime);
    appEntity.addEvent(aclsUpdatedEvent);
    // Hand the entity off to the dispatcher for asynchronous publishing.
    getDispatcher().getEventHandler().handle(new TimelineV2PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY, appEntity, app.getApplicationId()));
}
Also used : TimelineEvent(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent) HashMap(java.util.HashMap) ApplicationEntity(org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity)

Example 14 with ApplicationEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity in the Apache Hadoop project.

The class TimelineServiceV2Publisher, method appFinished:

@SuppressWarnings("unchecked")
@Override
public void appFinished(RMApp app, RMAppState state, long finishedTime) {
    ApplicationEntity appEntity = createApplicationEntity(app.getApplicationId());
    // FINISHED event stamped with the application's finish time.
    TimelineEvent finishedEvent = new TimelineEvent();
    finishedEvent.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
    finishedEvent.setTimestamp(finishedTime);
    appEntity.addEvent(finishedEvent);
    // Final snapshot of diagnostics, final status and state goes into
    // the entity info map.
    Map<String, Object> info = new HashMap<String, Object>();
    info.put(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO, app.getDiagnostics().toString());
    info.put(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO, app.getFinalApplicationStatus().toString());
    info.put(ApplicationMetricsConstants.STATE_EVENT_INFO, RMServerUtils.createApplicationState(state).toString());
    // Include the latest attempt id when a current attempt exists.
    if (app.getCurrentAppAttempt() != null) {
        ApplicationAttemptId latestAttemptId = app.getCurrentAppAttempt().getAppAttemptId();
        if (latestAttemptId != null) {
            info.put(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO, latestAttemptId.toString());
        }
    }
    appEntity.setInfo(info);
    // Attach the aggregated app metrics computed as of the finish time.
    RMAppMetrics appMetrics = app.getRMAppMetrics();
    Set<TimelineMetric> appTimelineMetrics = getTimelinelineAppMetrics(appMetrics, finishedTime);
    appEntity.setMetrics(appTimelineMetrics);
    // Dispatch for asynchronous publishing to the timeline service.
    getDispatcher().getEventHandler().handle(new TimelineV2PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY, appEntity, app.getApplicationId()));
}
Also used : TimelineEvent(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent) TimelineMetric(org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric) HashMap(java.util.HashMap) ApplicationEntity(org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity) RMAppMetrics(org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppMetrics) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)

Aggregations

ApplicationEntity (org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity)14 HashMap (java.util.HashMap)11 TimelineEvent (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent)7 Test (org.junit.Test)6 Configuration (org.apache.hadoop.conf.Configuration)5 TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities)5 Map (java.util.Map)4 TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity)4 NavigableMap (java.util.NavigableMap)3 Connection (org.apache.hadoop.hbase.client.Connection)3 Result (org.apache.hadoop.hbase.client.Result)3 TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric)3 TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve)3 TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext)3 ApplicationTable (org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable)3 IOException (java.io.IOException)2 HashSet (java.util.HashSet)2 Get (org.apache.hadoop.hbase.client.Get)2 ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId)2 ApplicationRowKey (org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey)2