Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
From the class TestSystemMetricsPublisher, method testPublishContainerMetrics.
@Test(timeout = 10000)
public void testPublishContainerMetrics() throws Exception {
ContainerId containerId = ContainerId.newContainerId(ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1), 1);
RMContainer container = createRMContainer(containerId);
metricsPublisher.containerCreated(container, container.getCreationTime());
metricsPublisher.containerFinished(container, container.getFinishTime());
TimelineEntity entity = null;
do {
entity = store.getEntity(containerId.toString(), ContainerMetricsConstants.ENTITY_TYPE, EnumSet.allOf(Field.class));
// ensure two events are both published before leaving the loop
} while (entity == null || entity.getEvents().size() < 2);
// verify all the fields
Assert.assertEquals(ContainerMetricsConstants.ENTITY_TYPE, entity.getEntityType());
Assert.assertEquals(containerId.toString(), entity.getEntityId());
Assert.assertEquals(containerId.getApplicationAttemptId().toString(), entity.getPrimaryFilters().get(ContainerMetricsConstants.PARENT_PRIMARIY_FILTER).iterator().next());
Assert.assertEquals(container.getAllocatedNode().getHost(), entity.getOtherInfo().get(ContainerMetricsConstants.ALLOCATED_HOST_INFO));
Assert.assertEquals(container.getAllocatedNode().getPort(), entity.getOtherInfo().get(ContainerMetricsConstants.ALLOCATED_PORT_INFO));
Assert.assertEquals(container.getAllocatedResource().getMemorySize(),
// the store returns the allocated memory as an Integer, so widen it to long to match the type of the expected value
((Integer) entity.getOtherInfo().get(ContainerMetricsConstants.ALLOCATED_MEMORY_INFO)).longValue());
Assert.assertEquals(container.getAllocatedResource().getVirtualCores(), entity.getOtherInfo().get(ContainerMetricsConstants.ALLOCATED_VCORE_INFO));
Assert.assertEquals(container.getAllocatedPriority().getPriority(), entity.getOtherInfo().get(ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO));
boolean hasCreatedEvent = false;
boolean hasFinishedEvent = false;
for (TimelineEvent event : entity.getEvents()) {
if (event.getEventType().equals(ContainerMetricsConstants.CREATED_EVENT_TYPE)) {
hasCreatedEvent = true;
Assert.assertEquals(container.getCreationTime(), event.getTimestamp());
} else if (event.getEventType().equals(ContainerMetricsConstants.FINISHED_EVENT_TYPE)) {
hasFinishedEvent = true;
Assert.assertEquals(container.getFinishTime(), event.getTimestamp());
Assert.assertEquals(container.getDiagnosticsInfo(), event.getEventInfo().get(ContainerMetricsConstants.DIAGNOSTICS_INFO));
Assert.assertEquals(container.getContainerExitStatus(), event.getEventInfo().get(ContainerMetricsConstants.EXIT_STATUS_INFO));
Assert.assertEquals(container.getContainerState().toString(), event.getEventInfo().get(ContainerMetricsConstants.STATE_INFO));
}
}
Assert.assertTrue(hasCreatedEvent && hasFinishedEvent);
}
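The createRMContainer helper referenced above is defined elsewhere in TestSystemMetricsPublisher. A minimal sketch of what such a helper could look like, assuming Mockito is available; the host, port, resource, and time values below are illustrative placeholders, not the real test's values.
private static RMContainer createRMContainer(ContainerId containerId) {
// hypothetical stub: only the getters used by the publisher and the assertions are mocked
RMContainer container = mock(RMContainer.class);
when(container.getContainerId()).thenReturn(containerId);
when(container.getAllocatedNode()).thenReturn(NodeId.newInstance("test-host", 1234));
when(container.getAllocatedResource()).thenReturn(Resource.newInstance(1024, 1));
when(container.getAllocatedPriority()).thenReturn(Priority.newInstance(0));
when(container.getCreationTime()).thenReturn(System.currentTimeMillis());
when(container.getFinishTime()).thenReturn(System.currentTimeMillis() + 1000);
when(container.getDiagnosticsInfo()).thenReturn("test diagnostics");
when(container.getContainerExitStatus()).thenReturn(0);
when(container.getContainerState()).thenReturn(ContainerState.COMPLETE);
return container;
}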
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
From the class JobHistoryEventHandler, method processEventForTimelineServer.
private void processEventForTimelineServer(HistoryEvent event, JobId jobId, long timestamp) {
TimelineEvent tEvent = new TimelineEvent();
tEvent.setEventType(StringUtils.toUpperCase(event.getEventType().name()));
tEvent.setTimestamp(timestamp);
TimelineEntity tEntity = new TimelineEntity();
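// Dispatch on the history event type: job-level events are attached to the MapReduce job entity,
// while task and task-attempt events are attached to the task entity and related back to the job.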
switch(event.getEventType()) {
case JOB_SUBMITTED:
JobSubmittedEvent jse = (JobSubmittedEvent) event;
tEvent.addEventInfo("SUBMIT_TIME", jse.getSubmitTime());
tEvent.addEventInfo("QUEUE_NAME", jse.getJobQueueName());
tEvent.addEventInfo("JOB_NAME", jse.getJobName());
tEvent.addEventInfo("USER_NAME", jse.getUserName());
tEvent.addEventInfo("JOB_CONF_PATH", jse.getJobConfPath());
tEvent.addEventInfo("ACLS", jse.getJobAcls());
tEvent.addEventInfo("JOB_QUEUE_NAME", jse.getJobQueueName());
tEvent.addEventInfo("WORKFLOW_ID", jse.getWorkflowId());
tEvent.addEventInfo("WORKFLOW_NAME", jse.getWorkflowName());
tEvent.addEventInfo("WORKFLOW_NAME_NAME", jse.getWorkflowNodeName());
tEvent.addEventInfo("WORKFLOW_ADJACENCIES", jse.getWorkflowAdjacencies());
tEvent.addEventInfo("WORKFLOW_TAGS", jse.getWorkflowTags());
tEntity.addEvent(tEvent);
tEntity.setEntityId(jobId.toString());
tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
break;
case JOB_STATUS_CHANGED:
JobStatusChangedEvent jsce = (JobStatusChangedEvent) event;
tEvent.addEventInfo("STATUS", jsce.getStatus());
tEntity.addEvent(tEvent);
tEntity.setEntityId(jobId.toString());
tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
break;
case JOB_INFO_CHANGED:
JobInfoChangeEvent jice = (JobInfoChangeEvent) event;
tEvent.addEventInfo("SUBMIT_TIME", jice.getSubmitTime());
tEvent.addEventInfo("LAUNCH_TIME", jice.getLaunchTime());
tEntity.addEvent(tEvent);
tEntity.setEntityId(jobId.toString());
tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
break;
case JOB_INITED:
JobInitedEvent jie = (JobInitedEvent) event;
tEvent.addEventInfo("START_TIME", jie.getLaunchTime());
tEvent.addEventInfo("STATUS", jie.getStatus());
tEvent.addEventInfo("TOTAL_MAPS", jie.getTotalMaps());
tEvent.addEventInfo("TOTAL_REDUCES", jie.getTotalReduces());
tEvent.addEventInfo("UBERIZED", jie.getUberized());
tEntity.setStartTime(jie.getLaunchTime());
tEntity.addEvent(tEvent);
tEntity.setEntityId(jobId.toString());
tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
break;
case JOB_PRIORITY_CHANGED:
JobPriorityChangeEvent jpce = (JobPriorityChangeEvent) event;
tEvent.addEventInfo("PRIORITY", jpce.getPriority().toString());
tEntity.addEvent(tEvent);
tEntity.setEntityId(jobId.toString());
tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
break;
case JOB_QUEUE_CHANGED:
JobQueueChangeEvent jqe = (JobQueueChangeEvent) event;
tEvent.addEventInfo("QUEUE_NAMES", jqe.getJobQueueName());
tEntity.addEvent(tEvent);
tEntity.setEntityId(jobId.toString());
tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
break;
case JOB_FAILED:
case JOB_KILLED:
case JOB_ERROR:
JobUnsuccessfulCompletionEvent juce = (JobUnsuccessfulCompletionEvent) event;
tEvent.addEventInfo("FINISH_TIME", juce.getFinishTime());
tEvent.addEventInfo("NUM_MAPS", juce.getFinishedMaps());
tEvent.addEventInfo("NUM_REDUCES", juce.getFinishedReduces());
tEvent.addEventInfo("JOB_STATUS", juce.getStatus());
tEvent.addEventInfo("DIAGNOSTICS", juce.getDiagnostics());
tEvent.addEventInfo("FINISHED_MAPS", juce.getFinishedMaps());
tEvent.addEventInfo("FINISHED_REDUCES", juce.getFinishedReduces());
tEntity.addEvent(tEvent);
tEntity.setEntityId(jobId.toString());
tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
break;
case JOB_FINISHED:
JobFinishedEvent jfe = (JobFinishedEvent) event;
tEvent.addEventInfo("FINISH_TIME", jfe.getFinishTime());
tEvent.addEventInfo("NUM_MAPS", jfe.getFinishedMaps());
tEvent.addEventInfo("NUM_REDUCES", jfe.getFinishedReduces());
tEvent.addEventInfo("FAILED_MAPS", jfe.getFailedMaps());
tEvent.addEventInfo("FAILED_REDUCES", jfe.getFailedReduces());
tEvent.addEventInfo("FINISHED_MAPS", jfe.getFinishedMaps());
tEvent.addEventInfo("FINISHED_REDUCES", jfe.getFinishedReduces());
tEvent.addEventInfo("MAP_COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(jfe.getMapCounters()));
tEvent.addEventInfo("REDUCE_COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(jfe.getReduceCounters()));
tEvent.addEventInfo("TOTAL_COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(jfe.getTotalCounters()));
tEvent.addEventInfo("JOB_STATUS", JobState.SUCCEEDED.toString());
tEntity.addEvent(tEvent);
tEntity.setEntityId(jobId.toString());
tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
break;
case TASK_STARTED:
TaskStartedEvent tse = (TaskStartedEvent) event;
tEvent.addEventInfo("TASK_TYPE", tse.getTaskType().toString());
tEvent.addEventInfo("START_TIME", tse.getStartTime());
tEvent.addEventInfo("SPLIT_LOCATIONS", tse.getSplitLocations());
tEntity.addEvent(tEvent);
tEntity.setEntityId(tse.getTaskId().toString());
tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
break;
case TASK_FAILED:
TaskFailedEvent tfe = (TaskFailedEvent) event;
tEvent.addEventInfo("TASK_TYPE", tfe.getTaskType().toString());
tEvent.addEventInfo("STATUS", TaskStatus.State.FAILED.toString());
tEvent.addEventInfo("FINISH_TIME", tfe.getFinishTime());
tEvent.addEventInfo("ERROR", tfe.getError());
tEvent.addEventInfo("FAILED_ATTEMPT_ID", tfe.getFailedAttemptID() == null ? "" : tfe.getFailedAttemptID().toString());
tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(tfe.getCounters()));
tEntity.addEvent(tEvent);
tEntity.setEntityId(tfe.getTaskId().toString());
tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
break;
case TASK_UPDATED:
TaskUpdatedEvent tue = (TaskUpdatedEvent) event;
tEvent.addEventInfo("FINISH_TIME", tue.getFinishTime());
tEntity.addEvent(tEvent);
tEntity.setEntityId(tue.getTaskId().toString());
tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
break;
case TASK_FINISHED:
TaskFinishedEvent tfe2 = (TaskFinishedEvent) event;
tEvent.addEventInfo("TASK_TYPE", tfe2.getTaskType().toString());
tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(tfe2.getCounters()));
tEvent.addEventInfo("FINISH_TIME", tfe2.getFinishTime());
tEvent.addEventInfo("STATUS", TaskStatus.State.SUCCEEDED.toString());
tEvent.addEventInfo("SUCCESSFUL_TASK_ATTEMPT_ID", tfe2.getSuccessfulTaskAttemptId() == null ? "" : tfe2.getSuccessfulTaskAttemptId().toString());
tEntity.addEvent(tEvent);
tEntity.setEntityId(tfe2.getTaskId().toString());
tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
break;
case MAP_ATTEMPT_STARTED:
case CLEANUP_ATTEMPT_STARTED:
case REDUCE_ATTEMPT_STARTED:
case SETUP_ATTEMPT_STARTED:
TaskAttemptStartedEvent tase = (TaskAttemptStartedEvent) event;
tEvent.addEventInfo("TASK_TYPE", tase.getTaskType().toString());
tEvent.addEventInfo("TASK_ATTEMPT_ID", tase.getTaskAttemptId().toString());
tEvent.addEventInfo("START_TIME", tase.getStartTime());
tEvent.addEventInfo("HTTP_PORT", tase.getHttpPort());
tEvent.addEventInfo("TRACKER_NAME", tase.getTrackerName());
tEvent.addEventInfo("SHUFFLE_PORT", tase.getShufflePort());
tEvent.addEventInfo("CONTAINER_ID", tase.getContainerId() == null ? "" : tase.getContainerId().toString());
tEntity.addEvent(tEvent);
tEntity.setEntityId(tase.getTaskId().toString());
tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
break;
case MAP_ATTEMPT_FAILED:
case CLEANUP_ATTEMPT_FAILED:
case REDUCE_ATTEMPT_FAILED:
case SETUP_ATTEMPT_FAILED:
case MAP_ATTEMPT_KILLED:
case CLEANUP_ATTEMPT_KILLED:
case REDUCE_ATTEMPT_KILLED:
case SETUP_ATTEMPT_KILLED:
TaskAttemptUnsuccessfulCompletionEvent tauce = (TaskAttemptUnsuccessfulCompletionEvent) event;
tEvent.addEventInfo("TASK_TYPE", tauce.getTaskType().toString());
tEvent.addEventInfo("TASK_ATTEMPT_ID", tauce.getTaskAttemptId() == null ? "" : tauce.getTaskAttemptId().toString());
tEvent.addEventInfo("FINISH_TIME", tauce.getFinishTime());
tEvent.addEventInfo("ERROR", tauce.getError());
tEvent.addEventInfo("STATUS", tauce.getTaskStatus());
tEvent.addEventInfo("HOSTNAME", tauce.getHostname());
tEvent.addEventInfo("PORT", tauce.getPort());
tEvent.addEventInfo("RACK_NAME", tauce.getRackName());
tEvent.addEventInfo("SHUFFLE_FINISH_TIME", tauce.getFinishTime());
tEvent.addEventInfo("SORT_FINISH_TIME", tauce.getFinishTime());
tEvent.addEventInfo("MAP_FINISH_TIME", tauce.getFinishTime());
tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(tauce.getCounters()));
tEntity.addEvent(tEvent);
tEntity.setEntityId(tauce.getTaskId().toString());
tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
break;
case MAP_ATTEMPT_FINISHED:
MapAttemptFinishedEvent mafe = (MapAttemptFinishedEvent) event;
tEvent.addEventInfo("TASK_TYPE", mafe.getTaskType().toString());
tEvent.addEventInfo("FINISH_TIME", mafe.getFinishTime());
tEvent.addEventInfo("STATUS", mafe.getTaskStatus());
tEvent.addEventInfo("STATE", mafe.getState());
tEvent.addEventInfo("MAP_FINISH_TIME", mafe.getMapFinishTime());
tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(mafe.getCounters()));
tEvent.addEventInfo("HOSTNAME", mafe.getHostname());
tEvent.addEventInfo("PORT", mafe.getPort());
tEvent.addEventInfo("RACK_NAME", mafe.getRackName());
tEvent.addEventInfo("ATTEMPT_ID", mafe.getAttemptId() == null ? "" : mafe.getAttemptId().toString());
tEntity.addEvent(tEvent);
tEntity.setEntityId(mafe.getTaskId().toString());
tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
break;
case REDUCE_ATTEMPT_FINISHED:
ReduceAttemptFinishedEvent rafe = (ReduceAttemptFinishedEvent) event;
tEvent.addEventInfo("TASK_TYPE", rafe.getTaskType().toString());
tEvent.addEventInfo("ATTEMPT_ID", rafe.getAttemptId() == null ? "" : rafe.getAttemptId().toString());
tEvent.addEventInfo("FINISH_TIME", rafe.getFinishTime());
tEvent.addEventInfo("STATUS", rafe.getTaskStatus());
tEvent.addEventInfo("STATE", rafe.getState());
tEvent.addEventInfo("SHUFFLE_FINISH_TIME", rafe.getShuffleFinishTime());
tEvent.addEventInfo("SORT_FINISH_TIME", rafe.getSortFinishTime());
tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(rafe.getCounters()));
tEvent.addEventInfo("HOSTNAME", rafe.getHostname());
tEvent.addEventInfo("PORT", rafe.getPort());
tEvent.addEventInfo("RACK_NAME", rafe.getRackName());
tEntity.addEvent(tEvent);
tEntity.setEntityId(rafe.getTaskId().toString());
tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
break;
case SETUP_ATTEMPT_FINISHED:
case CLEANUP_ATTEMPT_FINISHED:
TaskAttemptFinishedEvent tafe = (TaskAttemptFinishedEvent) event;
tEvent.addEventInfo("TASK_TYPE", tafe.getTaskType().toString());
tEvent.addEventInfo("ATTEMPT_ID", tafe.getAttemptId() == null ? "" : tafe.getAttemptId().toString());
tEvent.addEventInfo("FINISH_TIME", tafe.getFinishTime());
tEvent.addEventInfo("STATUS", tafe.getTaskStatus());
tEvent.addEventInfo("STATE", tafe.getState());
tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(tafe.getCounters()));
tEvent.addEventInfo("HOSTNAME", tafe.getHostname());
tEntity.addEvent(tEvent);
tEntity.setEntityId(tafe.getTaskId().toString());
tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
break;
case AM_STARTED:
AMStartedEvent ase = (AMStartedEvent) event;
tEvent.addEventInfo("APPLICATION_ATTEMPT_ID", ase.getAppAttemptId() == null ? "" : ase.getAppAttemptId().toString());
tEvent.addEventInfo("CONTAINER_ID", ase.getContainerId() == null ? "" : ase.getContainerId().toString());
tEvent.addEventInfo("NODE_MANAGER_HOST", ase.getNodeManagerHost());
tEvent.addEventInfo("NODE_MANAGER_PORT", ase.getNodeManagerPort());
tEvent.addEventInfo("NODE_MANAGER_HTTP_PORT", ase.getNodeManagerHttpPort());
tEvent.addEventInfo("START_TIME", ase.getStartTime());
tEvent.addEventInfo("SUBMIT_TIME", ase.getSubmitTime());
tEntity.addEvent(tEvent);
tEntity.setEntityId(jobId.toString());
tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
break;
default:
break;
}
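// Publish the entity to the Timeline Server; put errors are logged per entity rather than failing the event handler.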
try {
TimelinePutResponse response = timelineClient.putEntities(tEntity);
List<TimelinePutResponse.TimelinePutError> errors = response.getErrors();
if (errors.size() == 0) {
if (LOG.isDebugEnabled()) {
LOG.debug("Timeline entities are successfully put in event " + event.getEventType());
}
} else {
for (TimelinePutResponse.TimelinePutError error : errors) {
LOG.error("Error when publishing entity [" + error.getEntityType() + "," + error.getEntityId() + "], server side error code: " + error.getErrorCode());
}
}
} catch (YarnException | IOException | ClientHandlerException ex) {
LOG.error("Error putting entity " + tEntity.getEntityId() + " to Timeline" + "Server", ex);
}
}
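A hedged sketch of how this method is typically reached: when the timeline service is enabled, the handler unwraps the queued JobHistoryEvent and forwards the wrapped HistoryEvent together with its job id and timestamp. The method name handleTimelineEvent and the null check on timelineClient are illustrative assumptions, not the handler's actual control flow.
private void handleTimelineEvent(JobHistoryEvent historyEvent) {
if (timelineClient != null) {
// publish only when ATS integration is configured
processEventForTimelineServer(historyEvent.getHistoryEvent(), historyEvent.getJobID(), historyEvent.getTimestamp());
}
}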
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hive by apache.
From the class ATSHook, method createPreHookEvent.
TimelineEntity createPreHookEvent(String queryId, String query, JSONObject explainPlan, long startTime, String user, String requestuser, int numMrJobs, int numTezJobs, String opId, String clientIpAddress, String hiveInstanceAddress, String hiveInstanceType, String sessionID, String logID, String threadId, String executionMode, List<String> tablesRead, List<String> tablesWritten, HiveConf conf, ApplicationId llapAppId, String domainId) throws Exception {
JSONObject queryObj = new JSONObject(new LinkedHashMap<>());
queryObj.put("queryText", query);
queryObj.put("queryPlan", explainPlan);
LOG.info("Received pre-hook notification for :" + queryId);
if (LOG.isDebugEnabled()) {
LOG.debug("Otherinfo: " + queryObj.toString());
LOG.debug("Operation id: <" + opId + ">");
}
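// Remove values marked as hidden (e.g. passwords) before the configuration is serialized into the entity.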
conf.stripHiddenConfigurations(conf);
Map<String, String> confMap = new HashMap<String, String>();
for (Map.Entry<String, String> setting : conf) {
confMap.put(setting.getKey(), setting.getValue());
}
JSONObject confObj = new JSONObject((Map) confMap);
TimelineEntity atsEntity = new TimelineEntity();
atsEntity.setEntityId(queryId);
atsEntity.setEntityType(EntityTypes.HIVE_QUERY_ID.name());
atsEntity.addPrimaryFilter(PrimaryFilterTypes.user.name(), user);
atsEntity.addPrimaryFilter(PrimaryFilterTypes.requestuser.name(), requestuser);
atsEntity.addPrimaryFilter(PrimaryFilterTypes.executionmode.name(), executionMode);
atsEntity.addPrimaryFilter(PrimaryFilterTypes.queue.name(), conf.get("mapreduce.job.queuename"));
if (opId != null) {
atsEntity.addPrimaryFilter(PrimaryFilterTypes.operationid.name(), opId);
}
for (String tabName : tablesRead) {
atsEntity.addPrimaryFilter(PrimaryFilterTypes.tablesread.name(), tabName);
}
for (String tabName : tablesWritten) {
atsEntity.addPrimaryFilter(PrimaryFilterTypes.tableswritten.name(), tabName);
}
TimelineEvent startEvt = new TimelineEvent();
startEvt.setEventType(EventTypes.QUERY_SUBMITTED.name());
startEvt.setTimestamp(startTime);
atsEntity.addEvent(startEvt);
atsEntity.addOtherInfo(OtherInfoTypes.QUERY.name(), queryObj.toString());
atsEntity.addOtherInfo(OtherInfoTypes.TEZ.name(), numTezJobs > 0);
atsEntity.addOtherInfo(OtherInfoTypes.MAPRED.name(), numMrJobs > 0);
atsEntity.addOtherInfo(OtherInfoTypes.SESSION_ID.name(), sessionID);
atsEntity.addOtherInfo(OtherInfoTypes.INVOKER_INFO.name(), logID);
atsEntity.addOtherInfo(OtherInfoTypes.THREAD_NAME.name(), threadId);
atsEntity.addOtherInfo(OtherInfoTypes.VERSION.name(), VERSION);
if (clientIpAddress != null) {
atsEntity.addOtherInfo(OtherInfoTypes.CLIENT_IP_ADDRESS.name(), clientIpAddress);
}
atsEntity.addOtherInfo(OtherInfoTypes.HIVE_ADDRESS.name(), hiveInstanceAddress);
atsEntity.addOtherInfo(OtherInfoTypes.HIVE_INSTANCE_TYPE.name(), hiveInstanceType);
atsEntity.addOtherInfo(OtherInfoTypes.CONF.name(), confObj.toString());
if (llapAppId != null) {
atsEntity.addOtherInfo(OtherInfoTypes.LLAP_APP_ID.name(), llapAppId.toString());
}
atsEntity.setDomainId(domainId);
return atsEntity;
}
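A hedged usage sketch: in ATSHook the entity built here is eventually handed to a TimelineClient on a background executor; the direct synchronous call and variable names below are simplifications for illustration, not the hook's actual wiring.
TimelineEntity preHookEntity = createPreHookEvent(queryId, query, explainPlan, startTime, user, requestuser, numMrJobs, numTezJobs, opId, clientIpAddress, hiveInstanceAddress, hiveInstanceType, sessionID, logID, threadId, executionMode, tablesRead, tablesWritten, conf, llapAppId, domainId);
timelineClient.putEntities(preHookEntity);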
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hive by apache.
From the class ATSHook, method createPostHookEvent.
TimelineEntity createPostHookEvent(String queryId, long stopTime, String user, String requestuser, boolean success, String opId, Map<String, Long> durations, String domainId) throws Exception {
LOG.info("Received post-hook notification for :" + queryId);
TimelineEntity atsEntity = new TimelineEntity();
atsEntity.setEntityId(queryId);
atsEntity.setEntityType(EntityTypes.HIVE_QUERY_ID.name());
atsEntity.addPrimaryFilter(PrimaryFilterTypes.user.name(), user);
atsEntity.addPrimaryFilter(PrimaryFilterTypes.requestuser.name(), requestuser);
if (opId != null) {
atsEntity.addPrimaryFilter(PrimaryFilterTypes.operationid.name(), opId);
}
TimelineEvent stopEvt = new TimelineEvent();
stopEvt.setEventType(EventTypes.QUERY_COMPLETED.name());
stopEvt.setTimestamp(stopTime);
atsEntity.addEvent(stopEvt);
atsEntity.addOtherInfo(OtherInfoTypes.STATUS.name(), success);
// Perf times
JSONObject perfObj = new JSONObject(new LinkedHashMap<>());
for (Map.Entry<String, Long> entry : durations.entrySet()) {
perfObj.put(entry.getKey(), entry.getValue());
}
atsEntity.addOtherInfo(OtherInfoTypes.PERF.name(), perfObj.toString());
atsEntity.setDomainId(domainId);
return atsEntity;
}
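An illustrative (assumed) call site: in ATSHook the durations map normally comes from Hive's PerfLogger, so the keys and values below are placeholders for the sketch only.
Map<String, Long> durations = new LinkedHashMap<>();
durations.put("compile", 1250L);
durations.put("Driver.execute", 54000L);
TimelineEntity postHookEntity = createPostHookEvent(queryId, System.currentTimeMillis(), user, requestuser, true, opId, durations, domainId);
timelineClient.putEntities(postHookEntity);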
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
From the class TestLogInfo, method writeEntitiesLeaveOpen.
// TestLogInfo needs to keep the opened HDFS files around between writes, so we
// provide our own write methods
private void writeEntitiesLeaveOpen(TimelineEntities entities, Path logPath) throws IOException {
if (outStream == null) {
outStream = PluginStoreTestUtils.createLogFile(logPath, fs);
jsonGenerator = new JsonFactory().createGenerator(outStream);
jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
}
for (TimelineEntity entity : entities.getEntities()) {
objMapper.writeValue(jsonGenerator, entity);
}
outStream.hflush();
}
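Since writeEntitiesLeaveOpen deliberately keeps the stream open across calls, the test needs a matching teardown. A minimal sketch of such a close helper, assuming the same outStream and jsonGenerator fields; the real test may name or structure this differently.
private void closeWriter() throws IOException {
if (jsonGenerator != null) {
jsonGenerator.close();
jsonGenerator = null;
}
if (outStream != null) {
outStream.close();
outStream = null;
}
}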