
Example 1 with ClientHandlerException

Use of com.sun.jersey.api.client.ClientHandlerException in project hadoop by apache.

From the class JobHistoryEventHandler, method processEventForTimelineServer.

private void processEventForTimelineServer(HistoryEvent event, JobId jobId, long timestamp) {
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setEventType(StringUtils.toUpperCase(event.getEventType().name()));
    tEvent.setTimestamp(timestamp);
    TimelineEntity tEntity = new TimelineEntity();
    switch(event.getEventType()) {
        case JOB_SUBMITTED:
            JobSubmittedEvent jse = (JobSubmittedEvent) event;
            tEvent.addEventInfo("SUBMIT_TIME", jse.getSubmitTime());
            tEvent.addEventInfo("QUEUE_NAME", jse.getJobQueueName());
            tEvent.addEventInfo("JOB_NAME", jse.getJobName());
            tEvent.addEventInfo("USER_NAME", jse.getUserName());
            tEvent.addEventInfo("JOB_CONF_PATH", jse.getJobConfPath());
            tEvent.addEventInfo("ACLS", jse.getJobAcls());
            tEvent.addEventInfo("JOB_QUEUE_NAME", jse.getJobQueueName());
            tEvent.addEventInfo("WORKFLOW_ID", jse.getWorkflowId());
            tEvent.addEventInfo("WORKFLOW_NAME", jse.getWorkflowName());
            tEvent.addEventInfo("WORKFLOW_NAME_NAME", jse.getWorkflowNodeName());
            tEvent.addEventInfo("WORKFLOW_ADJACENCIES", jse.getWorkflowAdjacencies());
            tEvent.addEventInfo("WORKFLOW_TAGS", jse.getWorkflowTags());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(jobId.toString());
            tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
            break;
        case JOB_STATUS_CHANGED:
            JobStatusChangedEvent jsce = (JobStatusChangedEvent) event;
            tEvent.addEventInfo("STATUS", jsce.getStatus());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(jobId.toString());
            tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
            break;
        case JOB_INFO_CHANGED:
            JobInfoChangeEvent jice = (JobInfoChangeEvent) event;
            tEvent.addEventInfo("SUBMIT_TIME", jice.getSubmitTime());
            tEvent.addEventInfo("LAUNCH_TIME", jice.getLaunchTime());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(jobId.toString());
            tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
            break;
        case JOB_INITED:
            JobInitedEvent jie = (JobInitedEvent) event;
            tEvent.addEventInfo("START_TIME", jie.getLaunchTime());
            tEvent.addEventInfo("STATUS", jie.getStatus());
            tEvent.addEventInfo("TOTAL_MAPS", jie.getTotalMaps());
            tEvent.addEventInfo("TOTAL_REDUCES", jie.getTotalReduces());
            tEvent.addEventInfo("UBERIZED", jie.getUberized());
            tEntity.setStartTime(jie.getLaunchTime());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(jobId.toString());
            tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
            break;
        case JOB_PRIORITY_CHANGED:
            JobPriorityChangeEvent jpce = (JobPriorityChangeEvent) event;
            tEvent.addEventInfo("PRIORITY", jpce.getPriority().toString());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(jobId.toString());
            tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
            break;
        case JOB_QUEUE_CHANGED:
            JobQueueChangeEvent jqe = (JobQueueChangeEvent) event;
            tEvent.addEventInfo("QUEUE_NAMES", jqe.getJobQueueName());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(jobId.toString());
            tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
            break;
        case JOB_FAILED:
        case JOB_KILLED:
        case JOB_ERROR:
            JobUnsuccessfulCompletionEvent juce = (JobUnsuccessfulCompletionEvent) event;
            tEvent.addEventInfo("FINISH_TIME", juce.getFinishTime());
            tEvent.addEventInfo("NUM_MAPS", juce.getFinishedMaps());
            tEvent.addEventInfo("NUM_REDUCES", juce.getFinishedReduces());
            tEvent.addEventInfo("JOB_STATUS", juce.getStatus());
            tEvent.addEventInfo("DIAGNOSTICS", juce.getDiagnostics());
            tEvent.addEventInfo("FINISHED_MAPS", juce.getFinishedMaps());
            tEvent.addEventInfo("FINISHED_REDUCES", juce.getFinishedReduces());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(jobId.toString());
            tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
            break;
        case JOB_FINISHED:
            JobFinishedEvent jfe = (JobFinishedEvent) event;
            tEvent.addEventInfo("FINISH_TIME", jfe.getFinishTime());
            tEvent.addEventInfo("NUM_MAPS", jfe.getFinishedMaps());
            tEvent.addEventInfo("NUM_REDUCES", jfe.getFinishedReduces());
            tEvent.addEventInfo("FAILED_MAPS", jfe.getFailedMaps());
            tEvent.addEventInfo("FAILED_REDUCES", jfe.getFailedReduces());
            tEvent.addEventInfo("FINISHED_MAPS", jfe.getFinishedMaps());
            tEvent.addEventInfo("FINISHED_REDUCES", jfe.getFinishedReduces());
            tEvent.addEventInfo("MAP_COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(jfe.getMapCounters()));
            tEvent.addEventInfo("REDUCE_COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(jfe.getReduceCounters()));
            tEvent.addEventInfo("TOTAL_COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(jfe.getTotalCounters()));
            tEvent.addEventInfo("JOB_STATUS", JobState.SUCCEEDED.toString());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(jobId.toString());
            tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
            break;
        case TASK_STARTED:
            TaskStartedEvent tse = (TaskStartedEvent) event;
            tEvent.addEventInfo("TASK_TYPE", tse.getTaskType().toString());
            tEvent.addEventInfo("START_TIME", tse.getStartTime());
            tEvent.addEventInfo("SPLIT_LOCATIONS", tse.getSplitLocations());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(tse.getTaskId().toString());
            tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
            tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
            break;
        case TASK_FAILED:
            TaskFailedEvent tfe = (TaskFailedEvent) event;
            tEvent.addEventInfo("TASK_TYPE", tfe.getTaskType().toString());
            tEvent.addEventInfo("STATUS", TaskStatus.State.FAILED.toString());
            tEvent.addEventInfo("FINISH_TIME", tfe.getFinishTime());
            tEvent.addEventInfo("ERROR", tfe.getError());
            tEvent.addEventInfo("FAILED_ATTEMPT_ID", tfe.getFailedAttemptID() == null ? "" : tfe.getFailedAttemptID().toString());
            tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(tfe.getCounters()));
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(tfe.getTaskId().toString());
            tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
            tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
            break;
        case TASK_UPDATED:
            TaskUpdatedEvent tue = (TaskUpdatedEvent) event;
            tEvent.addEventInfo("FINISH_TIME", tue.getFinishTime());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(tue.getTaskId().toString());
            tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
            tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
            break;
        case TASK_FINISHED:
            TaskFinishedEvent tfe2 = (TaskFinishedEvent) event;
            tEvent.addEventInfo("TASK_TYPE", tfe2.getTaskType().toString());
            tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(tfe2.getCounters()));
            tEvent.addEventInfo("FINISH_TIME", tfe2.getFinishTime());
            tEvent.addEventInfo("STATUS", TaskStatus.State.SUCCEEDED.toString());
            tEvent.addEventInfo("SUCCESSFUL_TASK_ATTEMPT_ID", tfe2.getSuccessfulTaskAttemptId() == null ? "" : tfe2.getSuccessfulTaskAttemptId().toString());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(tfe2.getTaskId().toString());
            tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
            tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
            break;
        case MAP_ATTEMPT_STARTED:
        case CLEANUP_ATTEMPT_STARTED:
        case REDUCE_ATTEMPT_STARTED:
        case SETUP_ATTEMPT_STARTED:
            TaskAttemptStartedEvent tase = (TaskAttemptStartedEvent) event;
            tEvent.addEventInfo("TASK_TYPE", tase.getTaskType().toString());
            tEvent.addEventInfo("TASK_ATTEMPT_ID", tase.getTaskAttemptId().toString());
            tEvent.addEventInfo("START_TIME", tase.getStartTime());
            tEvent.addEventInfo("HTTP_PORT", tase.getHttpPort());
            tEvent.addEventInfo("TRACKER_NAME", tase.getTrackerName());
            tEvent.addEventInfo("SHUFFLE_PORT", tase.getShufflePort());
            tEvent.addEventInfo("CONTAINER_ID", tase.getContainerId() == null ? "" : tase.getContainerId().toString());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(tase.getTaskId().toString());
            tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
            tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
            break;
        case MAP_ATTEMPT_FAILED:
        case CLEANUP_ATTEMPT_FAILED:
        case REDUCE_ATTEMPT_FAILED:
        case SETUP_ATTEMPT_FAILED:
        case MAP_ATTEMPT_KILLED:
        case CLEANUP_ATTEMPT_KILLED:
        case REDUCE_ATTEMPT_KILLED:
        case SETUP_ATTEMPT_KILLED:
            TaskAttemptUnsuccessfulCompletionEvent tauce = (TaskAttemptUnsuccessfulCompletionEvent) event;
            tEvent.addEventInfo("TASK_TYPE", tauce.getTaskType().toString());
            tEvent.addEventInfo("TASK_ATTEMPT_ID", tauce.getTaskAttemptId() == null ? "" : tauce.getTaskAttemptId().toString());
            tEvent.addEventInfo("FINISH_TIME", tauce.getFinishTime());
            tEvent.addEventInfo("ERROR", tauce.getError());
            tEvent.addEventInfo("STATUS", tauce.getTaskStatus());
            tEvent.addEventInfo("HOSTNAME", tauce.getHostname());
            tEvent.addEventInfo("PORT", tauce.getPort());
            tEvent.addEventInfo("RACK_NAME", tauce.getRackName());
            tEvent.addEventInfo("SHUFFLE_FINISH_TIME", tauce.getFinishTime());
            tEvent.addEventInfo("SORT_FINISH_TIME", tauce.getFinishTime());
            tEvent.addEventInfo("MAP_FINISH_TIME", tauce.getFinishTime());
            tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(tauce.getCounters()));
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(tauce.getTaskId().toString());
            tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
            tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
            break;
        case MAP_ATTEMPT_FINISHED:
            MapAttemptFinishedEvent mafe = (MapAttemptFinishedEvent) event;
            tEvent.addEventInfo("TASK_TYPE", mafe.getTaskType().toString());
            tEvent.addEventInfo("FINISH_TIME", mafe.getFinishTime());
            tEvent.addEventInfo("STATUS", mafe.getTaskStatus());
            tEvent.addEventInfo("STATE", mafe.getState());
            tEvent.addEventInfo("MAP_FINISH_TIME", mafe.getMapFinishTime());
            tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(mafe.getCounters()));
            tEvent.addEventInfo("HOSTNAME", mafe.getHostname());
            tEvent.addEventInfo("PORT", mafe.getPort());
            tEvent.addEventInfo("RACK_NAME", mafe.getRackName());
            tEvent.addEventInfo("ATTEMPT_ID", mafe.getAttemptId() == null ? "" : mafe.getAttemptId().toString());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(mafe.getTaskId().toString());
            tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
            tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
            break;
        case REDUCE_ATTEMPT_FINISHED:
            ReduceAttemptFinishedEvent rafe = (ReduceAttemptFinishedEvent) event;
            tEvent.addEventInfo("TASK_TYPE", rafe.getTaskType().toString());
            tEvent.addEventInfo("ATTEMPT_ID", rafe.getAttemptId() == null ? "" : rafe.getAttemptId().toString());
            tEvent.addEventInfo("FINISH_TIME", rafe.getFinishTime());
            tEvent.addEventInfo("STATUS", rafe.getTaskStatus());
            tEvent.addEventInfo("STATE", rafe.getState());
            tEvent.addEventInfo("SHUFFLE_FINISH_TIME", rafe.getShuffleFinishTime());
            tEvent.addEventInfo("SORT_FINISH_TIME", rafe.getSortFinishTime());
            tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(rafe.getCounters()));
            tEvent.addEventInfo("HOSTNAME", rafe.getHostname());
            tEvent.addEventInfo("PORT", rafe.getPort());
            tEvent.addEventInfo("RACK_NAME", rafe.getRackName());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(rafe.getTaskId().toString());
            tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
            tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
            break;
        case SETUP_ATTEMPT_FINISHED:
        case CLEANUP_ATTEMPT_FINISHED:
            TaskAttemptFinishedEvent tafe = (TaskAttemptFinishedEvent) event;
            tEvent.addEventInfo("TASK_TYPE", tafe.getTaskType().toString());
            tEvent.addEventInfo("ATTEMPT_ID", tafe.getAttemptId() == null ? "" : tafe.getAttemptId().toString());
            tEvent.addEventInfo("FINISH_TIME", tafe.getFinishTime());
            tEvent.addEventInfo("STATUS", tafe.getTaskStatus());
            tEvent.addEventInfo("STATE", tafe.getState());
            tEvent.addEventInfo("COUNTERS_GROUPS", JobHistoryEventUtils.countersToJSON(tafe.getCounters()));
            tEvent.addEventInfo("HOSTNAME", tafe.getHostname());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(tafe.getTaskId().toString());
            tEntity.setEntityType(MAPREDUCE_TASK_ENTITY_TYPE);
            tEntity.addRelatedEntity(MAPREDUCE_JOB_ENTITY_TYPE, jobId.toString());
            break;
        case AM_STARTED:
            AMStartedEvent ase = (AMStartedEvent) event;
            tEvent.addEventInfo("APPLICATION_ATTEMPT_ID", ase.getAppAttemptId() == null ? "" : ase.getAppAttemptId().toString());
            tEvent.addEventInfo("CONTAINER_ID", ase.getContainerId() == null ? "" : ase.getContainerId().toString());
            tEvent.addEventInfo("NODE_MANAGER_HOST", ase.getNodeManagerHost());
            tEvent.addEventInfo("NODE_MANAGER_PORT", ase.getNodeManagerPort());
            tEvent.addEventInfo("NODE_MANAGER_HTTP_PORT", ase.getNodeManagerHttpPort());
            tEvent.addEventInfo("START_TIME", ase.getStartTime());
            tEvent.addEventInfo("SUBMIT_TIME", ase.getSubmitTime());
            tEntity.addEvent(tEvent);
            tEntity.setEntityId(jobId.toString());
            tEntity.setEntityType(MAPREDUCE_JOB_ENTITY_TYPE);
            break;
        default:
            break;
    }
    try {
        TimelinePutResponse response = timelineClient.putEntities(tEntity);
        List<TimelinePutResponse.TimelinePutError> errors = response.getErrors();
        if (errors.size() == 0) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Timeline entities are successfully put in event " + event.getEventType());
            }
        } else {
            for (TimelinePutResponse.TimelinePutError error : errors) {
                LOG.error("Error when publishing entity [" + error.getEntityType() + "," + error.getEntityId() + "], server side error code: " + error.getErrorCode());
            }
        }
    } catch (YarnException | IOException | ClientHandlerException ex) {
        LOG.error("Error putting entity " + tEntity.getEntityId() + " to Timeline" + "Server", ex);
    }
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException), TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse), IOException (java.io.IOException), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity)
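
ClientHandlerException is Jersey 1.x's unchecked wrapper for transport-level failures (connection refused, timeout, broken stream). That is why the catch block above names it explicitly alongside the checked YarnException and IOException: the compiler will not force a handler for it, and an unreachable timeline server would otherwise crash the event handler with a bare RuntimeException. A minimal sketch of the same pattern in isolation, assuming a hypothetical endpoint URL:

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientHandlerException;
import com.sun.jersey.api.client.ClientResponse;

public class TimelinePutSketch {
    public static void main(String[] args) {
        Client client = Client.create();
        try {
            // The POST itself can throw ClientHandlerException if the
            // connection cannot be established; it is unchecked, so no
            // catch is required by the compiler.
            ClientResponse response = client
                .resource("http://timeline.example:8188/ws/v1/timeline") // hypothetical URL
                .type("application/json")
                .post(ClientResponse.class, "{}");
            System.out.println("HTTP " + response.getStatus());
        } catch (ClientHandlerException e) {
            // Connection-level problems surface here, not as IOException.
            System.err.println("Transport-level failure: " + e.getMessage());
        } finally {
            client.destroy();
        }
    }
}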

Example 2 with ClientHandlerException

Use of com.sun.jersey.api.client.ClientHandlerException in project hadoop by apache.

From the class TestAHSWebServices, method setupClass.

@BeforeClass
public static void setupClass() throws Exception {
    conf = new YarnConfiguration();
    TimelineStore store = TestApplicationHistoryManagerOnTimelineStore.createStore(MAX_APPS);
    TimelineACLsManager aclsManager = new TimelineACLsManager(conf);
    aclsManager.setTimelineStore(store);
    TimelineDataManager dataManager = new TimelineDataManager(store, aclsManager);
    conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
    conf.set(YarnConfiguration.YARN_ADMIN_ACL, "foo");
    conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
    conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, remoteLogRootDir);
    dataManager.init(conf);
    ApplicationACLsManager appAclsManager = new ApplicationACLsManager(conf);
    ApplicationHistoryManagerOnTimelineStore historyManager = new ApplicationHistoryManagerOnTimelineStore(dataManager, appAclsManager);
    historyManager.init(conf);
    historyClientService = new ApplicationHistoryClientService(historyManager) {

        @Override
        protected void serviceStart() throws Exception {
            // Do Nothing
        }
    };
    historyClientService.init(conf);
    historyClientService.start();
    ahsWebservice = new AHSWebServices(historyClientService, conf) {

        @Override
        public String getNMWebAddressFromRM(Configuration configuration, String nodeId) throws ClientHandlerException, UniformInterfaceException, JSONException {
            if (nodeId.equals(NM_ID)) {
                return NM_WEBADDRESS;
            }
            return null;
        }
    };
    fs = FileSystem.get(conf);
    GuiceServletConfig.setInjector(Guice.createInjector(new WebServletModule()));
}
Also used: ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException), Configuration (org.apache.hadoop.conf.Configuration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), ApplicationHistoryClientService (org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryClientService), JSONException (org.codehaus.jettison.json.JSONException), TimelineACLsManager (org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager), ServletException (javax.servlet.ServletException), UniformInterfaceException (com.sun.jersey.api.client.UniformInterfaceException), TimelineDataManager (org.apache.hadoop.yarn.server.timeline.TimelineDataManager), ApplicationACLsManager (org.apache.hadoop.yarn.server.security.ApplicationACLsManager), ApplicationHistoryManagerOnTimelineStore (org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManagerOnTimelineStore), TestApplicationHistoryManagerOnTimelineStore (org.apache.hadoop.yarn.server.applicationhistoryservice.TestApplicationHistoryManagerOnTimelineStore), TimelineStore (org.apache.hadoop.yarn.server.timeline.TimelineStore), BeforeClass (org.junit.BeforeClass)
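
The anonymous subclass above overrides getNMWebAddressFromRM so the test never performs the REST call to the ResourceManager that can raise ClientHandlerException, UniformInterfaceException, or JSONException; a canned address keeps the test hermetic. The same stub-by-subclass pattern in a stripped-down sketch; every name except the Jersey exception types is hypothetical:

import com.sun.jersey.api.client.ClientHandlerException;
import com.sun.jersey.api.client.UniformInterfaceException;

class NodeAddressResolver {
    // Stand-in for the production method: a real implementation would issue
    // an HTTP request and may throw Jersey's unchecked exceptions (declaring
    // them documents the contract even though they are RuntimeExceptions).
    public String getNMWebAddress(String nodeId)
            throws ClientHandlerException, UniformInterfaceException {
        throw new UnsupportedOperationException("network call elided");
    }
}

class StubbedNodeAddressResolver extends NodeAddressResolver {
    @Override
    public String getNMWebAddress(String nodeId) {
        // Canned answer: no ResourceManager, no HTTP, no flakiness.
        return "known-node".equals(nodeId) ? "known-node-host:8042" : null;
    }
}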

Example 3 with ClientHandlerException

Use of com.sun.jersey.api.client.ClientHandlerException in project hadoop by apache.

From the class LogsCLI, method getContainerLogFiles.

private List<Pair<PerContainerLogFileInfo, String>> getContainerLogFiles(Configuration conf, String containerIdStr, String nodeHttpAddress) throws IOException {
    List<Pair<PerContainerLogFileInfo, String>> logFileInfos = new ArrayList<>();
    Client webServiceClient = Client.create();
    try {
        WebResource webResource = webServiceClient.resource(WebAppUtils.getHttpSchemePrefix(conf) + nodeHttpAddress);
        ClientResponse response = webResource.path("ws").path("v1")
            .path("node").path("containers").path(containerIdStr).path("logs")
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        if (response.getStatusInfo().getStatusCode() == ClientResponse.Status.OK.getStatusCode()) {
            try {
                JSONArray array = new JSONArray();
                JSONObject json = response.getEntity(JSONObject.class);
                Object logsInfoObj = json.get("containerLogsInfo");
                if (logsInfoObj instanceof JSONObject) {
                    array.put((JSONObject) logsInfoObj);
                } else if (logsInfoObj instanceof JSONArray) {
                    JSONArray logsArray = (JSONArray) logsInfoObj;
                    for (int i = 0; i < logsArray.length(); i++) {
                        array.put(logsArray.getJSONObject(i));
                    }
                }
                for (int i = 0; i < array.length(); i++) {
                    JSONObject log = array.getJSONObject(i);
                    String aggregateType = log.has("logAggregationType") ? log.getString("logAggregationType") : "N/A";
                    Object ob = log.get("containerLogInfo");
                    if (ob instanceof JSONArray) {
                        JSONArray obArray = (JSONArray) ob;
                        for (int j = 0; j < obArray.length(); j++) {
                            logFileInfos.add(new Pair<PerContainerLogFileInfo, String>(generatePerContainerLogFileInfoFromJSON(obArray.getJSONObject(j)), aggregateType));
                        }
                    } else if (ob instanceof JSONObject) {
                        logFileInfos.add(new Pair<PerContainerLogFileInfo, String>(generatePerContainerLogFileInfoFromJSON((JSONObject) ob), aggregateType));
                    }
                }
            } catch (Exception e) {
                System.err.println("Unable to parse json from webservice. Error:");
                System.err.println(e.getMessage());
                throw new IOException(e);
            }
        }
    } catch (ClientHandlerException | UniformInterfaceException ex) {
        System.err.println("Unable to fetch log files list");
        throw new IOException(ex);
    }
    return logFileInfos;
}
Also used: ClientResponse (com.sun.jersey.api.client.ClientResponse), ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException), ArrayList (java.util.ArrayList), JSONArray (org.codehaus.jettison.json.JSONArray), WebResource (com.sun.jersey.api.client.WebResource), IOException (java.io.IOException), PerContainerLogFileInfo (org.apache.hadoop.yarn.logaggregation.PerContainerLogFileInfo), ParseException (org.apache.commons.cli.ParseException), UniformInterfaceException (com.sun.jersey.api.client.UniformInterfaceException), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), JSONException (org.codehaus.jettison.json.JSONException), JSONObject (org.codehaus.jettison.json.JSONObject), YarnClient (org.apache.hadoop.yarn.client.api.YarnClient), Client (com.sun.jersey.api.client.Client), Pair (org.apache.commons.math3.util.Pair)
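
A detail worth isolating from getContainerLogFiles is the shape probing on containerLogsInfo and containerLogInfo: with Jettison, a field holding one element deserializes as a JSONObject while a field holding several deserializes as a JSONArray, so the code checks the runtime type before iterating. A small helper capturing that idiom (the class and method names are illustrative):

import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;

final class JsonShapes {
    private JsonShapes() {
    }

    // Returns the named field as an array regardless of whether the server
    // serialized it as a single object or as a list of objects.
    static JSONArray asArray(JSONObject json, String field) throws JSONException {
        Object value = json.get(field);
        JSONArray result = new JSONArray();
        if (value instanceof JSONArray) {
            JSONArray list = (JSONArray) value;
            for (int i = 0; i < list.length(); i++) {
                result.put(list.getJSONObject(i));
            }
        } else if (value instanceof JSONObject) {
            result.put((JSONObject) value);
        }
        return result;
    }
}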

Example 4 with ClientHandlerException

Use of com.sun.jersey.api.client.ClientHandlerException in project hadoop by apache.

From the class TestTimelineClient, method mockEntityClientResponse.

public static ClientResponse mockEntityClientResponse(TimelineWriter spyTimelineWriter, ClientResponse.Status status, boolean hasError, boolean hasRuntimeError) {
    ClientResponse response = mock(ClientResponse.class);
    if (hasRuntimeError) {
        doThrow(new ClientHandlerException(new ConnectException())).when(spyTimelineWriter).doPostingObject(any(TimelineEntities.class), any(String.class));
        return response;
    }
    doReturn(response).when(spyTimelineWriter).doPostingObject(any(TimelineEntities.class), any(String.class));
    when(response.getStatusInfo()).thenReturn(status);
    TimelinePutResponse.TimelinePutError error = new TimelinePutResponse.TimelinePutError();
    error.setEntityId("test entity id");
    error.setEntityType("test entity type");
    error.setErrorCode(TimelinePutResponse.TimelinePutError.IO_EXCEPTION);
    TimelinePutResponse putResponse = new TimelinePutResponse();
    if (hasError) {
        putResponse.addError(error);
    }
    when(response.getEntity(TimelinePutResponse.class)).thenReturn(putResponse);
    return response;
}
Also used: ClientResponse (com.sun.jersey.api.client.ClientResponse), ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException), TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities), TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse), ConnectException (java.net.ConnectException)
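
The hasRuntimeError branch is the part specific to ClientHandlerException: Mockito's doThrow arms the spied writer so the next doPostingObject call throws the exception wrapping a ConnectException, simulating a timeline server that is unreachable rather than one that answers with an error payload. The same stubbing pattern as a self-contained sketch; the Poster interface is hypothetical:

import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;

import java.net.ConnectException;

import com.sun.jersey.api.client.ClientHandlerException;

public class DoThrowSketch {
    interface Poster {
        void post(String payload);
    }

    public static void main(String[] args) {
        Poster poster = mock(Poster.class);
        // Arm the mock: the next matching call throws the wrapped
        // connection failure instead of returning normally.
        doThrow(new ClientHandlerException(new ConnectException()))
            .when(poster).post("payload");
        try {
            poster.post("payload");
        } catch (ClientHandlerException e) {
            System.out.println("caught cause: " + e.getCause());
        }
    }
}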

Example 5 with ClientHandlerException

Use of com.sun.jersey.api.client.ClientHandlerException in project hadoop by apache.

From the class ApplicationMaster, method publishContainerEndEvent.

@VisibleForTesting
void publishContainerEndEvent(final TimelineClient timelineClient, ContainerStatus container, String domainId, UserGroupInformation ugi) {
    final TimelineEntity entity = new TimelineEntity();
    entity.setEntityId(container.getContainerId().toString());
    entity.setEntityType(DSEntity.DS_CONTAINER.toString());
    entity.setDomainId(domainId);
    entity.addPrimaryFilter(USER_TIMELINE_FILTER_NAME, ugi.getShortUserName());
    entity.addPrimaryFilter(APPID_TIMELINE_FILTER_NAME, container.getContainerId().getApplicationAttemptId().getApplicationId().toString());
    TimelineEvent event = new TimelineEvent();
    event.setTimestamp(System.currentTimeMillis());
    event.setEventType(DSEvent.DS_CONTAINER_END.toString());
    event.addEventInfo("State", container.getState().name());
    event.addEventInfo("Exit Status", container.getExitStatus());
    entity.addEvent(event);
    try {
        processTimelineResponseErrors(putContainerEntity(timelineClient, container.getContainerId().getApplicationAttemptId(), entity));
    } catch (YarnException | IOException | ClientHandlerException e) {
        LOG.error("Container end event could not be published for " + container.getContainerId().toString(), e);
    }
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException), IOException (java.io.IOException), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), VisibleForTesting (com.google.common.annotations.VisibleForTesting)
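
The multi-catch here is the recurring shape across all of these examples: putEntities declares the checked YarnException and IOException, while Jersey's transport layer can additionally throw the unchecked ClientHandlerException, and all three are trapped so a flaky timeline server cannot take down the ApplicationMaster. A condensed, hedged sketch of that publish-and-log pattern; the helper class and message wording are illustrative:

import java.io.IOException;

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.exceptions.YarnException;

import com.sun.jersey.api.client.ClientHandlerException;

final class TimelinePublishSketch {
    private TimelinePublishSketch() {
    }

    // Builds a single-event entity and publishes it, logging rather than
    // rethrowing so timeline failures stay non-fatal to the caller.
    static void publish(TimelineClient client, String entityId,
            String entityType, String eventType) {
        TimelineEntity entity = new TimelineEntity();
        entity.setEntityId(entityId);
        entity.setEntityType(entityType);
        TimelineEvent event = new TimelineEvent();
        event.setEventType(eventType);
        event.setTimestamp(System.currentTimeMillis());
        entity.addEvent(event);
        try {
            // putEntities declares the checked exceptions; the unchecked
            // ClientHandlerException escapes from Jersey's transport layer.
            client.putEntities(entity);
        } catch (YarnException | IOException | ClientHandlerException e) {
            System.err.println("Could not publish " + entityId + ": " + e);
        }
    }
}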

Aggregations

ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException): 21 usages
ClientResponse (com.sun.jersey.api.client.ClientResponse): 12 usages
WebResource (com.sun.jersey.api.client.WebResource): 9 usages
IOException (java.io.IOException): 6 usages
Test (org.testng.annotations.Test): 6 usages
ConnectException (java.net.ConnectException): 5 usages
URI (java.net.URI): 5 usages
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 5 usages
Matchers.anyString (org.mockito.Matchers.anyString): 5 usages
UniformInterfaceException (com.sun.jersey.api.client.UniformInterfaceException): 4 usages
TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity): 4 usages
TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent): 4 usages
JSONException (org.codehaus.jettison.json.JSONException): 4 usages
TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse): 3 usages
JSONObject (org.codehaus.jettison.json.JSONObject): 3 usages
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 2 usages
HttpRequest (com.netflix.client.http.HttpRequest): 2 usages
Client (com.sun.jersey.api.client.Client): 2 usages
InputStream (java.io.InputStream): 2 usages
AbstractConfiguration (org.apache.commons.configuration.AbstractConfiguration): 2 usages