Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
The class TestTimelineWebServices, method testGetEventsWithYarnACLsEnabled.
@Test
public void testGetEventsWithYarnACLsEnabled() {
  AdminACLsManager oldAdminACLsManager =
      timelineACLsManager.setAdminACLsManager(adminACLsManager);
  try {
    // Put entity [5, 5] in domain 1
    TimelineEntities entities = new TimelineEntities();
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("test id 5");
    entity.setEntityType("test type 5");
    entity.setStartTime(System.currentTimeMillis());
    entity.setDomainId("domain_id_1");
    TimelineEvent event = new TimelineEvent();
    event.setEventType("event type 1");
    event.setTimestamp(System.currentTimeMillis());
    entity.addEvent(event);
    entities.addEntity(entity);
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("timeline")
        .queryParam("user.name", "writer_user_1")
        .accept(MediaType.APPLICATION_JSON)
        .type(MediaType.APPLICATION_JSON)
        .post(ClientResponse.class, entities);
    assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    TimelinePutResponse putResponse =
        response.getEntity(TimelinePutResponse.class);
    Assert.assertEquals(0, putResponse.getErrors().size());
    // Put entity [5, 6] in domain 2
    entities = new TimelineEntities();
    entity = new TimelineEntity();
    entity.setEntityId("test id 6");
    entity.setEntityType("test type 5");
    entity.setStartTime(System.currentTimeMillis());
    entity.setDomainId("domain_id_2");
    event = new TimelineEvent();
    event.setEventType("event type 2");
    event.setTimestamp(System.currentTimeMillis());
    entity.addEvent(event);
    entities.addEntity(entity);
    r = resource();
    response = r.path("ws").path("v1").path("timeline")
        .queryParam("user.name", "writer_user_3")
        .accept(MediaType.APPLICATION_JSON)
        .type(MediaType.APPLICATION_JSON)
        .post(ClientResponse.class, entities);
    assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    putResponse = response.getEntity(TimelinePutResponse.class);
    Assert.assertEquals(0, putResponse.getErrors().size());
    // Query events belonging to the entities of type 5
    response = r.path("ws").path("v1").path("timeline")
        .path("test type 5").path("events")
        .queryParam("user.name", "reader_user_1")
        .queryParam("entityId", "test id 5,test id 6")
        .accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    TimelineEvents events = response.getEntity(TimelineEvents.class);
    // Reader 1 should only have access to the events of entity [5, 5]
    assertEquals(1, events.getAllEvents().size());
    assertEquals("test id 5", events.getAllEvents().get(0).getEntityId());
  } finally {
    timelineACLsManager.setAdminACLsManager(oldAdminACLsManager);
  }
}
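The ACL behavior exercised above depends on the two timeline domains ("domain_id_1" and "domain_id_2") having been registered with different reader and writer ACLs before the entities are posted. Outside the test harness, such domains are typically created through the public TimelineClient API. The following is a minimal sketch of that step; the description text and user lists are illustrative assumptions, not values taken from the test setup:

// Illustrative sketch: registering a timeline domain with reader/writer ACLs.
// The description and user names below are assumptions for demonstration only.
import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class TimelineDomainSetup {
  public static void main(String[] args) throws Exception {
    TimelineClient client = TimelineClient.createTimelineClient();
    client.init(new YarnConfiguration());
    client.start();
    try {
      TimelineDomain domain = new TimelineDomain();
      domain.setId("domain_id_1");
      domain.setDescription("domain readable only by reader_user_1");
      // Readers and writers are space-separated lists of users and groups.
      domain.setReaders("reader_user_1");
      domain.setWriters("writer_user_1");
      client.putDomain(domain);
    } finally {
      client.stop();
    }
  }
}

With domains registered this way, entities posted with setDomainId("domain_id_1") are only readable by the users and groups named in that domain's reader ACL, which is what the assertions at the end of the test rely on.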
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
The class TimelineServiceV1Publisher, method appCreated.
@SuppressWarnings("unchecked")
@Override
public void appCreated(RMApp app, long createdTime) {
  TimelineEntity entity = createApplicationEntity(app.getApplicationId());
  // Record static application metadata as "other info" on the entity.
  Map<String, Object> entityInfo = new HashMap<String, Object>();
  entityInfo.put(ApplicationMetricsConstants.NAME_ENTITY_INFO, app.getName());
  entityInfo.put(ApplicationMetricsConstants.TYPE_ENTITY_INFO,
      app.getApplicationType());
  entityInfo.put(ApplicationMetricsConstants.USER_ENTITY_INFO, app.getUser());
  entityInfo.put(ApplicationMetricsConstants.QUEUE_ENTITY_INFO, app.getQueue());
  entityInfo.put(ApplicationMetricsConstants.SUBMITTED_TIME_ENTITY_INFO,
      app.getSubmitTime());
  entityInfo.put(ApplicationMetricsConstants.APP_TAGS_INFO,
      app.getApplicationTags());
  entityInfo.put(ApplicationMetricsConstants.UNMANAGED_APPLICATION_ENTITY_INFO,
      app.getApplicationSubmissionContext().getUnmanagedAM());
  entityInfo.put(ApplicationMetricsConstants.APPLICATION_PRIORITY_INFO,
      app.getApplicationPriority().getPriority());
  entityInfo.put(ApplicationMetricsConstants.AM_NODE_LABEL_EXPRESSION,
      app.getAmNodeLabelExpression());
  entityInfo.put(ApplicationMetricsConstants.APP_NODE_LABEL_EXPRESSION,
      app.getAppNodeLabelExpression());
  if (app.getCallerContext() != null) {
    if (app.getCallerContext().getContext() != null) {
      entityInfo.put(ApplicationMetricsConstants.YARN_APP_CALLER_CONTEXT,
          app.getCallerContext().getContext());
    }
    if (app.getCallerContext().getSignature() != null) {
      entityInfo.put(ApplicationMetricsConstants.YARN_APP_CALLER_SIGNATURE,
          app.getCallerContext().getSignature());
    }
  }
  ContainerLaunchContext amContainerSpec =
      app.getApplicationSubmissionContext().getAMContainerSpec();
  entityInfo.put(ApplicationMetricsConstants.AM_CONTAINER_LAUNCH_COMMAND,
      amContainerSpec.getCommands());
  entity.setOtherInfo(entityInfo);
  // Attach the CREATED event and hand the entity to the async dispatcher.
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setEventType(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  tEvent.setTimestamp(createdTime);
  entity.addEvent(tEvent);
  getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
      SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
}
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
The class TimelineServiceV1Publisher, method appAttemptRegistered.
@SuppressWarnings("unchecked")
@Override
public void appAttemptRegistered(RMAppAttempt appAttempt,
    long registeredTime) {
  TimelineEntity entity = createAppAttemptEntity(appAttempt.getAppAttemptId());
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setEventType(AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE);
  tEvent.setTimestamp(registeredTime);
  Map<String, Object> eventInfo = new HashMap<String, Object>();
  eventInfo.put(AppAttemptMetricsConstants.TRACKING_URL_INFO,
      appAttempt.getTrackingUrl());
  eventInfo.put(AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO,
      appAttempt.getOriginalTrackingUrl());
  eventInfo.put(AppAttemptMetricsConstants.HOST_INFO, appAttempt.getHost());
  eventInfo.put(AppAttemptMetricsConstants.RPC_PORT_INFO,
      appAttempt.getRpcPort());
  if (appAttempt.getMasterContainer() != null) {
    eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO,
        appAttempt.getMasterContainer().getId().toString());
  }
  tEvent.setEventInfo(eventInfo);
  entity.addEvent(tEvent);
  getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
      SystemMetricsEventType.PUBLISH_ENTITY, entity,
      appAttempt.getAppAttemptId().getApplicationId()));
}
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
The class TimelineServiceV1Publisher, method appFinished.
@Override
public void appFinished(RMApp app, RMAppState state, long finishedTime) {
  TimelineEntity entity = createApplicationEntity(app.getApplicationId());
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setEventType(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
  tEvent.setTimestamp(finishedTime);
  Map<String, Object> eventInfo = new HashMap<String, Object>();
  eventInfo.put(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO,
      app.getDiagnostics().toString());
  eventInfo.put(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO,
      app.getFinalApplicationStatus().toString());
  eventInfo.put(ApplicationMetricsConstants.STATE_EVENT_INFO,
      RMServerUtils.createApplicationState(state).toString());
  String latestApplicationAttemptId = app.getCurrentAppAttempt() == null
      ? null : app.getCurrentAppAttempt().getAppAttemptId().toString();
  if (latestApplicationAttemptId != null) {
    eventInfo.put(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO,
        latestApplicationAttemptId);
  }
  RMAppMetrics appMetrics = app.getRMAppMetrics();
  entity.addOtherInfo(ApplicationMetricsConstants.APP_CPU_METRICS,
      appMetrics.getVcoreSeconds());
  entity.addOtherInfo(ApplicationMetricsConstants.APP_MEM_METRICS,
      appMetrics.getMemorySeconds());
  entity.addOtherInfo(ApplicationMetricsConstants.APP_MEM_PREEMPT_METRICS,
      appMetrics.getPreemptedMemorySeconds());
  entity.addOtherInfo(ApplicationMetricsConstants.APP_CPU_PREEMPT_METRICS,
      appMetrics.getPreemptedVcoreSeconds());
  tEvent.setEventInfo(eventInfo);
  entity.addEvent(tEvent);
  // Send the finish event synchronously so the application's finished state
  // cannot be saved in the RMStateStore without also being published to ATS.
  putEntity(entity);
}
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
The class TimelineServiceV1Publisher, method containerFinished.
@SuppressWarnings("unchecked")
@Override
public void containerFinished(RMContainer container, long finishedTime) {
  TimelineEntity entity = createContainerEntity(container.getContainerId());
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setEventType(ContainerMetricsConstants.FINISHED_EVENT_TYPE);
  tEvent.setTimestamp(finishedTime);
  Map<String, Object> eventInfo = new HashMap<String, Object>();
  eventInfo.put(ContainerMetricsConstants.DIAGNOSTICS_INFO,
      container.getDiagnosticsInfo());
  eventInfo.put(ContainerMetricsConstants.EXIT_STATUS_INFO,
      container.getContainerExitStatus());
  eventInfo.put(ContainerMetricsConstants.STATE_INFO,
      container.getContainerState().toString());
  Map<String, Object> entityInfo = new HashMap<String, Object>();
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_INFO,
      container.getAllocatedNode().getHost());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_INFO,
      container.getAllocatedNode().getPort());
  entity.setOtherInfo(entityInfo);
  tEvent.setEventInfo(eventInfo);
  entity.addEvent(tEvent);
  getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
      SystemMetricsEventType.PUBLISH_ENTITY, entity,
      container.getContainerId().getApplicationAttemptId().getApplicationId()));
}
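All of the publisher methods above follow the same pattern: build a TimelineEntity, attach a TimelineEvent carrying an event type, timestamp, and optional event info, and hand the entity off for publication (via the dispatcher, or putEntity for the synchronous finish case). As a rough, self-contained sketch of that pattern using the public TimelineClient API rather than the ResourceManager's internal dispatcher, the entity and event identifiers below are illustrative placeholders, not values from the snippets above:

// Minimal sketch: attach a TimelineEvent to a TimelineEntity and publish it
// through TimelineClient. Identifiers below are illustrative placeholders.
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class TimelineEventPublishExample {
  public static void main(String[] args) throws Exception {
    TimelineClient client = TimelineClient.createTimelineClient();
    client.init(new YarnConfiguration());
    client.start();
    try {
      TimelineEntity entity = new TimelineEntity();
      entity.setEntityId("example_entity_1");
      entity.setEntityType("EXAMPLE_TYPE");
      entity.setStartTime(System.currentTimeMillis());

      TimelineEvent event = new TimelineEvent();
      event.setEventType("EXAMPLE_EVENT");
      event.setTimestamp(System.currentTimeMillis());
      Map<String, Object> eventInfo = new HashMap<String, Object>();
      eventInfo.put("detail", "illustrative event info");
      event.setEventInfo(eventInfo);
      entity.addEvent(event);

      // putEntities is synchronous and reports per-entity errors, if any.
      TimelinePutResponse response = client.putEntities(entity);
      System.out.println("put errors: " + response.getErrors().size());
    } finally {
      client.stop();
    }
  }
}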