Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity in project hadoop by apache.
From the class TestHBaseTimelineStorageApps, method testNonIntegralMetricValues:
@Test
public void testNonIntegralMetricValues() throws IOException {
  TimelineEntities teApp = new TimelineEntities();
  ApplicationEntity entityApp = new ApplicationEntity();
  String appId = "application_1000178881110_2002";
  entityApp.setId(appId);
  entityApp.setCreatedTime(1425016501000L);
  // add metrics with floating point values
  Set<TimelineMetric> metricsApp = new HashSet<>();
  TimelineMetric mApp = new TimelineMetric();
  mApp.setId("MAP_SLOT_MILLIS");
  Map<Long, Number> metricAppValues = new HashMap<Long, Number>();
  long ts = System.currentTimeMillis();
  metricAppValues.put(ts - 20, 10.5);
  metricAppValues.put(ts - 10, 20.5);
  mApp.setType(Type.TIME_SERIES);
  mApp.setValues(metricAppValues);
  metricsApp.add(mApp);
  entityApp.addMetrics(metricsApp);
  teApp.addEntity(entityApp);

  TimelineEntities teEntity = new TimelineEntities();
  TimelineEntity entity = new TimelineEntity();
  entity.setId("hello");
  entity.setType("world");
  entity.setCreatedTime(1425016501000L);
  // add metrics with floating point values
  Set<TimelineMetric> metricsEntity = new HashSet<>();
  TimelineMetric mEntity = new TimelineMetric();
  mEntity.setId("MAP_SLOT_MILLIS");
  mEntity.addValue(ts - 20, 10.5);
  metricsEntity.add(mEntity);
  entity.addMetrics(metricsEntity);
  teEntity.addEntity(entity);

  HBaseTimelineWriterImpl hbi = null;
  try {
    Configuration c1 = util.getConfiguration();
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.start();
    // Writing application entity.
    try {
      hbi.write("c1", "u1", "f1", "v1", 1002345678919L, appId, teApp);
      Assert.fail("Expected an exception as metric values are non integral");
    } catch (IOException e) {
      // expected: the writer rejects non-integral metric values
    }
    // Writing generic entity.
    try {
      hbi.write("c1", "u1", "f1", "v1", 1002345678919L, appId, teEntity);
      Assert.fail("Expected an exception as metric values are non integral");
    } catch (IOException e) {
      // expected: the writer rejects non-integral metric values
    }
    hbi.stop();
  } finally {
    if (hbi != null) {
      hbi.stop();
      hbi.close();
    }
  }
}
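For contrast, the HBase writer stores metric values as longs, so whole-number values are what it accepts; this test only asserts the rejection of floating-point values. A minimal sketch of a metric the writer would accept, assuming the same imports and test setup as above:

// Sketch: an integral-valued metric (assumes the imports used by the test).
// Whole-number values can be stored as longs, so no IOException is expected.
TimelineMetric okMetric = new TimelineMetric();
okMetric.setId("MAP_SLOT_MILLIS");
Map<Long, Number> okValues = new HashMap<Long, Number>();
long now = System.currentTimeMillis();
okValues.put(now - 20, 10L); // integral value
okValues.put(now - 10, 20L); // integral value
okMetric.setType(Type.TIME_SERIES);
okMetric.setValues(okValues);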
Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity in project hadoop by apache.
From the class JobHistoryEventHandler, method publishConfigsOnJobSubmittedEvent:
private void publishConfigsOnJobSubmittedEvent(JobSubmittedEvent event, JobId jobId) {
  if (event.getJobConf() == null) {
    return;
  }
  // Publish job configurations both as job and app entity.
  // Configs are split into multiple entities if they exceed 100kb in size.
  org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity jobEntityForConfigs =
      createJobEntity(jobId);
  ApplicationEntity appEntityForConfigs = new ApplicationEntity();
  String appId = jobId.getAppId().toString();
  appEntityForConfigs.setId(appId);
  try {
    int configSize = 0;
    for (Map.Entry<String, String> entry : event.getJobConf()) {
      int size = entry.getKey().length() + entry.getValue().length();
      configSize += size;
      if (configSize > JobHistoryEventUtils.ATS_CONFIG_PUBLISH_SIZE_BYTES) {
        if (jobEntityForConfigs.getConfigs().size() > 0) {
          timelineV2Client.putEntities(jobEntityForConfigs);
          timelineV2Client.putEntities(appEntityForConfigs);
          jobEntityForConfigs = createJobEntity(jobId);
          appEntityForConfigs = new ApplicationEntity();
          appEntityForConfigs.setId(appId);
        }
        configSize = size;
      }
      jobEntityForConfigs.addConfig(entry.getKey(), entry.getValue());
      appEntityForConfigs.addConfig(entry.getKey(), entry.getValue());
    }
    if (configSize > 0) {
      timelineV2Client.putEntities(jobEntityForConfigs);
      timelineV2Client.putEntities(appEntityForConfigs);
    }
  } catch (IOException | YarnException e) {
    LOG.error("Exception while publishing configs on JOB_SUBMITTED event for the job : "
        + jobId, e);
  }
}
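The createJobEntity(jobId) helper is not part of this excerpt. A plausible minimal sketch of what it might look like, mirroring how the app entity is built above; the entity type string is an assumption for illustration, not the real constant from the MapReduce timeline code:

// Hypothetical sketch of the createJobEntity(JobId) helper used above.
// The "MAPREDUCE_JOB" type string is an assumed stand-in.
private org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
    createJobEntity(JobId jobId) {
  org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
      new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity();
  entity.setId(jobId.toString());
  entity.setType("MAPREDUCE_JOB"); // assumed entity type
  return entity;
}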
Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity in project hadoop by apache.
From the class TimelineServiceV2Publisher, method appACLsUpdated:
@SuppressWarnings("unchecked")
@Override
public void appACLsUpdated(RMApp app, String appViewACLs, long updatedTime) {
  ApplicationEntity entity = createApplicationEntity(app.getApplicationId());
  TimelineEvent tEvent = new TimelineEvent();
  Map<String, Object> entityInfo = new HashMap<String, Object>();
  entityInfo.put(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO,
      (appViewACLs == null) ? "" : appViewACLs);
  entity.setInfo(entityInfo);
  tEvent.setId(ApplicationMetricsConstants.ACLS_UPDATED_EVENT_TYPE);
  tEvent.setTimestamp(updatedTime);
  entity.addEvent(tEvent);
  getDispatcher().getEventHandler().handle(new TimelineV2PublishEvent(
      SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
}
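createApplicationEntity(applicationId) is used by both publisher methods in this listing but is not shown. A minimal sketch of such a helper, assuming it only wraps the ApplicationId in a new entity:

// Minimal sketch of the createApplicationEntity helper used above,
// assuming it simply sets the entity id from the application id.
private static ApplicationEntity createApplicationEntity(
    ApplicationId applicationId) {
  ApplicationEntity entity = new ApplicationEntity();
  entity.setId(applicationId.toString());
  return entity;
}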
Use of org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity in project hadoop by apache.
From the class TimelineServiceV2Publisher, method appFinished:
@SuppressWarnings("unchecked")
@Override
public void appFinished(RMApp app, RMAppState state, long finishedTime) {
  ApplicationEntity entity = createApplicationEntity(app.getApplicationId());
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
  tEvent.setTimestamp(finishedTime);
  entity.addEvent(tEvent);
  Map<String, Object> entityInfo = new HashMap<String, Object>();
  entityInfo.put(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO,
      app.getDiagnostics().toString());
  entityInfo.put(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO,
      app.getFinalApplicationStatus().toString());
  entityInfo.put(ApplicationMetricsConstants.STATE_EVENT_INFO,
      RMServerUtils.createApplicationState(state).toString());
  ApplicationAttemptId appAttemptId = app.getCurrentAppAttempt() == null
      ? null : app.getCurrentAppAttempt().getAppAttemptId();
  if (appAttemptId != null) {
    entityInfo.put(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO,
        appAttemptId.toString());
  }
  entity.setInfo(entityInfo);
  RMAppMetrics appMetrics = app.getRMAppMetrics();
  Set<TimelineMetric> entityMetrics =
      getTimelinelineAppMetrics(appMetrics, finishedTime);
  entity.setMetrics(entityMetrics);
  getDispatcher().getEventHandler().handle(new TimelineV2PublishEvent(
      SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
}
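getTimelinelineAppMetrics converts the RMAppMetrics counters into timeline metrics stamped with the finish time; it is not shown here. A hedged sketch of how one such single-value metric could be built; the metric id is a hypothetical stand-in for the real constants in ApplicationMetricsConstants:

// Hypothetical sketch of one single-value metric as the helper might build it.
// "CPU" is an assumed id, not the real constant from ApplicationMetricsConstants.
TimelineMetric cpuMetric = new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
cpuMetric.setId("CPU"); // assumed metric id
cpuMetric.addValue(finishedTime, appMetrics.getVcoreSeconds());
entityMetrics.add(cpuMetric);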