Use of com.webank.wedatasphere.qualitis.exception.TaskNotExistException in project Qualitis by WeBankFinTech.
Class LinkisJobSubmitter, method getJobPartialLog.
@Override
public LogResult getJobPartialLog(Long taskId, Integer begin, String user, String remoteAddress, String clusterName) throws LogPartialException, ClusterInfoNotConfigException {
    Integer begin1 = 0;
    String jobStatus = null;
    String logPath = null;
    String execId = null;
    try {
        Map response = getTaskDetail(taskId, user, remoteAddress, clusterName);
        jobStatus = (String) ((Map) ((Map) response.get("data")).get("task")).get("status");
        logPath = (String) ((Map) ((Map) response.get("data")).get("task")).get("logPath");
        execId = (String) ((Map) ((Map) response.get("data")).get("task")).get("strongerExecId");
    } catch (TaskNotExistException e) {
        throw new LogPartialException(e);
    }
    String log = "";
    if (isTaskRunning(jobStatus)) {
        // Running task: fetch the partial log by execution ID.
        String url = getPath(remoteAddress).path(linkisConfig.getRunningLog()).toString();
        url = url.replace("{id}", execId);
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        headers.add("Token-User", user);
        headers.add("Token-Code", getToken(clusterName));
        HttpEntity entity = new HttpEntity<>(headers);
        LOGGER.info("Start to get job log from linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity);
        Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody();
        LOGGER.info("Succeed to get job log from linkis. response: {}", response);
        if (!checkResponse(response)) {
            throw new LogPartialException("Failed to get partial logs, task ID: " + taskId);
        }
        log = (String) ((List) ((Map) response.get("data")).get("log")).get(3);
    } else {
        // Finished task: fetch the full log file by its path.
        String url = getPath(remoteAddress).path(linkisConfig.getFinishLog()).toString() + "?path=" + logPath;
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        headers.add("Token-User", user);
        headers.add("Token-Code", getToken(clusterName));
        HttpEntity entity = new HttpEntity<>(headers);
        LOGGER.info("Start to get job log from linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity);
        Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody();
        LOGGER.info("Succeed to get job log from linkis. response: {}", response);
        if (!checkResponse(response)) {
            throw new LogPartialException("Failed to get partial logs, task ID: " + taskId);
        }
        log = (String) ((List) ((Map) response.get("data")).get("log")).get(3);
    }
    // Mask sensitive account information in the log, replacing it with ******
    log = maskAccountInfo(log);
    Integer end = getEnd(log) + begin1;
    return new LogResult(log, begin1, end, getLast(log));
}
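The three fields above come from the Linkis task-detail response as nested maps under data.task. Below is a minimal, self-contained sketch of that navigation; the class name and the sample field values are invented for illustration and are not part of Qualitis.

import java.util.HashMap;
import java.util.Map;

public class TaskDetailParseSketch {
    public static void main(String[] args) {
        // Invented sample payload mirroring the shape used above: data -> task -> {status, logPath, strongerExecId}.
        Map<String, Object> task = new HashMap<>();
        task.put("status", "Running");
        task.put("logPath", "hdfs:///tmp/linkis/log/sample.log");
        task.put("strongerExecId", "exec_id_sample_123");

        Map<String, Object> data = new HashMap<>();
        data.put("task", task);

        Map<String, Object> response = new HashMap<>();
        response.put("data", data);

        // Same unchecked casts as in getJobPartialLog.
        String jobStatus = (String) ((Map) ((Map) response.get("data")).get("task")).get("status");
        String logPath = (String) ((Map) ((Map) response.get("data")).get("task")).get("logPath");
        String execId = (String) ((Map) ((Map) response.get("data")).get("task")).get("strongerExecId");

        System.out.println(jobStatus + " " + logPath + " " + execId);
    }
}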
Use of com.webank.wedatasphere.qualitis.exception.TaskNotExistException in project Qualitis by WeBankFinTech.
Class LinkisJobSubmitter, method getTaskDetail.
private Map getTaskDetail(Long taskId, String user, String ujesAddress, String clusterName) throws TaskNotExistException, ClusterInfoNotConfigException {
    String url = getPath(ujesAddress).path(linkisConfig.getStatus()).toString();
    url = url.replace("{id}", String.valueOf(taskId));
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    headers.add("Token-User", user);
    headers.add("Token-Code", getToken(clusterName));
    HttpEntity entity = new HttpEntity<>(headers);
    LOGGER.info("Start to get job detail from linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity);
    Map response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody();
    LOGGER.info("Succeed to get job detail from linkis. response: {}", response);
    if (!checkResponse(response)) {
        throw new TaskNotExistException("Can not get detail of task, task ID: " + taskId);
    }
    Object taskObj = ((Map) response.get("data")).get("task");
    if (taskObj == null) {
        throw new TaskNotExistException("Job ID: " + taskId + " {&DOES_NOT_EXIST}");
    }
    return response;
}
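Both methods build the same Token-User/Token-Code request entity before calling restTemplate.exchange. A possible way to factor that out is sketched below; the helper class and method names are hypothetical and not part of Qualitis.

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;

// Hypothetical helper, not part of Qualitis: builds the Token-User / Token-Code
// entity that getTaskDetail and getJobPartialLog construct inline.
final class LinkisRequestSketch {
    static HttpEntity<Object> tokenEntity(String user, String token) {
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        headers.add("Token-User", user);   // token-authentication user header, as in the snippets above
        headers.add("Token-Code", token);  // token value returned by getToken(clusterName) in the snippets above
        return new HttpEntity<>(headers);
    }
}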
Use of com.webank.wedatasphere.qualitis.exception.TaskNotExistException in project Qualitis by WeBankFinTech.
Class TaskChecker, method checkTaskStatus.
@Override
@Transactional(rollbackFor = Exception.class)
public void checkTaskStatus(JobChecker jobChecker) {
    try {
        Map<String, Object> taskInfos = monitorManager.getTaskStatus(jobChecker.getTaskId(), jobChecker.getUsername(), jobChecker.getUjesAddress(), jobChecker.getClusterName());
        String jobStatus = ((String) taskInfos.get("status")).toUpperCase();
        Integer errCode = (Integer) taskInfos.get("errCode");
        LOGGER.info("Task status: {}", jobStatus);
        if (!jobStatus.equals(jobChecker.getOldStatus())) {
            LOGGER.info("Start to update task status. old status: {}, new status: {}, task_id: {}", jobChecker.getOldStatus(), jobStatus, jobChecker.getTaskId());
            writeDb(jobChecker, jobStatus, errCode);
            LOGGER.info("Succeed to update task status. old status: {}, new status: {}, task_id: {}", jobChecker.getOldStatus(), jobStatus, jobChecker.getTaskId());
        }
        // Compute how long the task has stayed at the same progress.
        if (linkisConfig.getKillStuckTasks() && TaskStatusEnum.RUNNING.getState().equals(jobStatus)) {
            Task taskInDb = taskDao.findByRemoteTaskIdAndClusterName(jobChecker.getTaskId(), jobChecker.getClusterName());
            Double progress = (Double) taskInfos.get("progress");
            LOGGER.info("Old time progress[{}].", jobChecker.getOldProgress());
            LOGGER.info("Current time progress[{}].", progress);
            long runningTime = System.currentTimeMillis() - taskInDb.getRunningTime();
            LOGGER.info("Current task running time [{}] minutes.", runningTime / (60 * 1000));
            if (progress.equals(jobChecker.getOldProgress())) {
                // Progress has not moved: kill the task if it has been stuck longer than the configured limit.
                long diffMillis = System.currentTimeMillis() - taskInDb.getNewProgressTime();
                LOGGER.info("Time in same progress[{}]: {} minutes. Config max time: {} minutes.", progress, diffMillis / (60 * 1000), linkisConfig.getKillStuckTasksTime().longValue() / (60 * 1000));
                if (diffMillis > linkisConfig.getKillStuckTasksTime().longValue()) {
                    killTimeoutTask(applicationDao.findById(jobChecker.getApplicationId()), taskInDb, jobChecker);
                }
            } else {
                // Progress moved forward: record the new progress and its timestamp,
                // but still kill the task if its total running time exceeds the limit.
                LOGGER.info("Progress is updating, so is the task's new progress time.");
                taskInDb.setNewProgressTime(System.currentTimeMillis());
                taskInDb.setProgress(progress);
                if (runningTime > linkisConfig.getKillStuckTasksTime().longValue()) {
                    killTimeoutTask(applicationDao.findById(jobChecker.getApplicationId()), taskInDb, jobChecker);
                }
            }
            taskDao.save(taskInDb);
        }
    } catch (TaskNotExistException e) {
        LOGGER.error("Spark Task [{}] does not exist, application id : [{}]", jobChecker.getTaskId(), jobChecker.getApplicationId(), e);
        jobChecker.getTask().setStatus(TaskStatusEnum.TASK_NOT_EXIST.getCode());
        taskDao.save(jobChecker.getTask());
        jobChecker.getTask().getApplication().addAbnormalTaskNum();
        applicationDao.saveApplication(jobChecker.getTask().getApplication());
    } catch (Exception e) {
        LOGGER.error("Check task id:[{}] failed, application id:[{}]", jobChecker.getTaskId(), jobChecker.getApplicationId(), e);
    }
}
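The kill decision above reduces to comparing an elapsed time in milliseconds against linkisConfig.getKillStuckTasksTime(). The sketch below shows that arithmetic in isolation; the class, method, threshold, and timestamps are invented sample values, not Qualitis code or defaults.

// Hypothetical standalone illustration of the stuck-task check above; the
// threshold and timestamps are sample values, not Qualitis defaults.
public class StuckTaskCheckSketch {
    static boolean shouldKill(long nowMillis, long newProgressTimeMillis, long maxStuckMillis) {
        // Same comparison as checkTaskStatus: time spent at the same progress vs. the configured limit.
        return (nowMillis - newProgressTimeMillis) > maxStuckMillis;
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        long lastProgressChange = now - 45L * 60 * 1000;   // progress last moved 45 minutes ago (sample)
        long maxStuck = 30L * 60 * 1000;                    // assume a 30-minute limit (sample)
        System.out.println(shouldKill(now, lastProgressChange, maxStuck)); // prints true
    }
}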