Usage of com.dtstack.taier.scheduler.vo.action.ActionLogVO in the Taier project by DTStack.
From class BatchServerLogService, method getLogsByJobId:
/**
 * Assembles the full log view for a batch job.
 * <p>
 * Pulls the logInfo/engineLog from the action service, resolves the SQL text that was
 * actually executed (preferring the job's task-version SQL when present), appends retry
 * logs, and builds a download-log link for stopped non-sync/non-virtual/non-workflow jobs.
 *
 * @param jobId    scheduler job id; a blank id yields null
 * @param pageInfo page index used when paging retry logs
 * @return populated BatchServerLogVO, or null when jobId is blank
 * @throws RdosDefineException when the job or its task shade cannot be found
 */
public BatchServerLogVO getLogsByJobId(String jobId, Integer pageInfo) {
if (StringUtils.isBlank(jobId)) {
return null;
}
final ScheduleJob job = scheduleJobService.getByJobId(jobId);
if (Objects.isNull(job)) {
LOGGER.info("can not find job by id:{}.", jobId);
throw new RdosDefineException(ErrorCode.CAN_NOT_FIND_JOB);
}
final Long tenantId = job.getTenantId();
final ScheduleTaskShade scheduleTaskShade = this.taskService.findTaskByTaskId(job.getTaskId());
if (Objects.isNull(scheduleTaskShade)) {
LOGGER.info("can not find task shade by jobId:{}.", jobId);
throw new RdosDefineException(ErrorCode.SERVER_EXCEPTION);
}
final BatchServerLogVO batchServerLogVO = new BatchServerLogVO();
// Logs are fetched from the engine side.
final JSONObject logsBody = new JSONObject(2);
logsBody.put("jobId", jobId);
logsBody.put("computeType", ComputeType.BATCH.getType());
ActionLogVO actionLogVO = actionService.log(jobId);
JSONObject info = new JSONObject();
if (!Strings.isNullOrEmpty(actionLogVO.getLogInfo())) {
try {
info = JSON.parseObject(actionLogVO.getLogInfo());
} catch (final Exception e) {
LOGGER.error(String.format("parse jobId: %s logInfo:%s", jobId, actionLogVO.getLogInfo()), e);
// Non-JSON logInfo: fall back to exposing the raw text under msg_info.
info.put("msg_info", actionLogVO.getLogInfo());
}
}
if (Objects.nonNull(job.getVersionId())) {
// Need the SQL of the task version that was in effect when the job actually ran.
BatchTaskVersionDetailDTO taskVersion = this.batchTaskVersionService.getByVersionId((long) job.getVersionId());
if (Objects.nonNull(taskVersion)) {
if (StringUtils.isEmpty(taskVersion.getOriginSql())) {
String jsonSql = StringUtils.isEmpty(taskVersion.getSqlText()) ? "{}" : taskVersion.getSqlText();
scheduleTaskShade.setSqlText(jsonSql);
} else {
scheduleTaskShade.setSqlText(taskVersion.getOriginSql());
}
}
}
info.put("status", job.getStatus());
if (EScheduleJobType.SPARK_SQL.getVal().equals(scheduleTaskShade.getTaskType())) {
// Handle SQL comments: base64-encode the comments first, then substitute the
// custom parameters in the non-comment text, then restore the comments.
String sql = SqlFormatterUtil.dealAnnotationBefore(scheduleTaskShade.getSqlText());
final List<BatchTaskParamShade> taskParamsToReplace = this.batchTaskParamShadeService.getTaskParam(scheduleTaskShade.getId());
sql = this.jobParamReplace.paramReplace(sql, taskParamsToReplace, job.getCycTime());
sql = SqlFormatterUtil.dealAnnotationAfter(sql);
info.put("sql", sql);
} else if (EScheduleJobType.SYNC.getVal().equals(scheduleTaskShade.getTaskType())) {
final JSONObject jobJson;
// The taskShade SQL text may be base64-encoded; try decoding first.
JSONObject sqlJson = null;
try {
sqlJson = JSON.parseObject(Base64Util.baseDecode(scheduleTaskShade.getSqlText()));
} catch (final Exception e) {
// Not base64 — parse the raw text directly.
sqlJson = JSON.parseObject(scheduleTaskShade.getSqlText());
}
jobJson = sqlJson.getJSONObject("job");
// Mask passwords before the sync config is shown to users.
DataFilter.passwordFilter(jobJson);
String jobStr = jobJson.toJSONString();
final List<BatchTaskParamShade> taskParamsToReplace = this.batchTaskParamShadeService.getTaskParam(scheduleTaskShade.getId());
jobStr = this.jobParamReplace.paramReplace(jobStr, taskParamsToReplace, job.getCycTime());
info.put("sql", JsonUtils.formatJSON(jobStr));
if (Objects.nonNull(job.getExecEndTime()) && Objects.nonNull(job.getExecStartTime())) {
List<ActionJobEntityVO> engineEntities = actionService.entitys(Collections.singletonList(logsBody.getString("jobId")));
String engineJobId = "";
if (CollectionUtils.isNotEmpty(engineEntities)) {
engineJobId = engineEntities.get(0).getEngineJobId();
}
this.parseIncreInfo(info, jobStr, tenantId, engineJobId, job.getExecStartTime().getTime(), job.getExecEndTime().getTime(), "");
}
}
if (job.getJobId() != null) {
try {
if (StringUtils.isNotBlank(actionLogVO.getEngineLog())) {
final Map<String, Object> engineLogMap = BatchServerLogService.objectMapper.readValue(actionLogVO.getEngineLog(), Map.class);
this.dealPerfLog(engineLogMap);
info.putAll(engineLogMap);
// Drop the statistics info: not shown in the UI, only used by the scheduler side.
info.remove("countInfo");
}
} catch (Exception e) {
// Non-JSON engine logs are still returned to the caller as-is.
info.put("msg_info", actionLogVO.getEngineLog());
LOGGER.error("", e);
}
}
// Append retry logs.
final String retryLog = this.buildRetryLog(jobId, pageInfo, batchServerLogVO);
this.formatForLogInfo(info, job.getType(), scheduleTaskShade.getTaskType(), retryLog, null, null, null, batchServerLogVO, tenantId, jobId);
if (!scheduleTaskShade.getTaskType().equals(EScheduleJobType.SYNC.getVal()) && !scheduleTaskShade.getTaskType().equals(EScheduleJobType.VIRTUAL.getVal()) && !scheduleTaskShade.getTaskType().equals(EScheduleJobType.WORK_FLOW.getVal()) && TaskStatus.getStoppedStatus().contains(job.getStatus())) {
// NOTE(review): the last format argument is a hard-coded 0L — presumably a
// placeholder slot in the DOWNLOAD_LOG URL template; confirm against the template.
batchServerLogVO.setDownloadLog(String.format(DOWNLOAD_LOG, jobId, scheduleTaskShade.getTaskType(), 0L));
}
batchServerLogVO.setName(scheduleTaskShade.getName());
batchServerLogVO.setComputeType(scheduleTaskShade.getComputeType());
batchServerLogVO.setTaskType(scheduleTaskShade.getTaskType());
return batchServerLogVO;
}
Usage of com.dtstack.taier.scheduler.vo.action.ActionLogVO in the Taier project by DTStack.
From class ScheduleActionService, method log:
/**
 * Queries the logs of a job by its jobId.
 * <p>
 * Returns the persisted logInfo/engineLog from the job's expand record; when no
 * engine log has been persisted yet, falls back to fetching it live.
 *
 * @param jobId scheduler job id, must not be blank
 * @return an ActionLogVO (fields may be null when no expand record exists)
 * @throws RdosDefineException when jobId is blank
 */
public ActionLogVO log(String jobId) {
    if (StringUtils.isBlank(jobId)) {
        throw new RdosDefineException("jobId is not allow null", ErrorCode.INVALID_PARAMETERS);
    }
    ActionLogVO result = new ActionLogVO();
    ScheduleJobExpand expand = scheduleJobExpandService.getByJobId(jobId);
    if (expand == null) {
        return result;
    }
    result.setEngineLog(expand.getEngineLog());
    result.setLogInfo(expand.getLogInfo());
    // No engine log persisted yet — fetch it from the engine on the fly.
    if (StringUtils.isBlank(expand.getEngineLog())) {
        result.setEngineLog(getEngineLog(jobId, scheduleJobService.getByJobId(jobId)));
    }
    return result;
}
Usage of com.dtstack.taier.scheduler.vo.action.ActionLogVO in the Taier project by DTStack.
From class BatchJobService, method getSyncTaskStatusInner:
/**
 * Queries the current status and accumulated log text for a sync (data integration) job.
 * <p>
 * Retries internally (up to 3 attempts with short sleeps) when the engine has not yet
 * published the failure cause or the final statistics. Never throws: unexpected errors
 * are logged and the best-effort result collected so far is returned.
 *
 * @param tenantId   tenant the job belongs to (used for tenant lookup and the download URL)
 * @param jobId      scheduler job id
 * @param retryTimes current retry depth; external callers start at 0
 * @return result VO holding the display status, assembled log message, and optional download URL
 */
private BatchGetSyncTaskStatusInnerResultVO getSyncTaskStatusInner(final Long tenantId, final String jobId, int retryTimes) {
final BatchGetSyncTaskStatusInnerResultVO resultVO = new BatchGetSyncTaskStatusInnerResultVO();
resultVO.setMsg(null);
resultVO.setStatus(TaskStatus.RUNNING.getStatus());
try {
ScheduleJob job = jobService.getScheduleJob(jobId);
if (Objects.isNull(job)) {
resultVO.setMsg("无法获取engine数据");
return resultVO;
}
Integer status = TaskStatus.getShowStatus(job.getStatus());
resultVO.setStatus(status);
if (TaskStatus.RUNNING.getStatus().equals(status)) {
resultVO.setMsg("运行中");
}
final JSONObject logsBody = new JSONObject(2);
logsBody.put("jobId", jobId);
logsBody.put("jobIds", Lists.newArrayList(jobId));
logsBody.put("computeType", ComputeType.BATCH.getType());
ActionLogVO actionLogVO = actionService.log(jobId);
String engineLogStr = actionLogVO.getEngineLog();
String logInfoStr = actionLogVO.getLogInfo();
if (StringUtils.isNotBlank(engineLogStr)) {
// Strip the incremental-sync config (increConf) from the engine log.
try {
JSONObject engineLogJson = JSON.parseObject(engineLogStr);
engineLogJson.remove("increConf");
engineLogStr = engineLogJson.toJSONString();
} catch (Exception e) {
LOGGER.error("", e);
// Non-JSON engine log: for terminal states expose it raw with a download link.
if (TaskStatus.FINISHED.getStatus().equals(status) || TaskStatus.CANCELED.getStatus().equals(status) || TaskStatus.FAILED.getStatus().equals(status)) {
resultVO.setMsg(engineLogStr);
resultVO.setDownload(String.format(BatchJobService.DOWNLOAD_URL, jobId, EScheduleJobType.SYNC.getVal(), tenantId));
}
return resultVO;
}
}
if (StringUtils.isEmpty(engineLogStr) && StringUtils.isEmpty(logInfoStr)) {
return resultVO;
}
try {
final JSONObject engineLog = JSON.parseObject(engineLogStr);
final JSONObject logIngo = JSON.parseObject(logInfoStr);
final StringBuilder logBuild = new StringBuilder();
// Read the prometheus-related performance info for this job.
Tenant tenantById = this.tenantService.getTenantById(tenantId);
if (Objects.isNull(tenantById)) {
LOGGER.info("can not find job tenant{}.", tenantId);
throw new RdosDefineException(ErrorCode.SERVER_EXCEPTION);
}
List<ActionJobEntityVO> engineEntities = actionService.entitys(Collections.singletonList(jobId));
String engineJobId = "";
if (CollectionUtils.isNotEmpty(engineEntities)) {
engineJobId = engineEntities.get(0).getEngineJobId();
}
final long startTime = Objects.isNull(job.getExecStartTime()) ? System.currentTimeMillis() : job.getExecStartTime().getTime();
final String perf = StringUtils.isBlank(engineJobId) ? null : this.batchServerLogService.formatPerfLogInfo(engineJobId, jobId, startTime, System.currentTimeMillis(), tenantById.getId());
if (StringUtils.isNotBlank(perf)) {
logBuild.append(perf.replace("\n", " "));
}
if (TaskStatus.FAILED.getStatus().equals(status)) {
// On failure, print the failure log.
logBuild.append("\n");
logBuild.append("====================Flink日志====================\n");
if (engineLog != null) {
if (StringUtils.isEmpty(engineLog.getString("root-exception")) && retryTimes < 3) {
// Failure cause not published yet — wait briefly and retry.
// NOTE(review): InterruptedException from sleep is swallowed by the outer
// catch block — consider re-interrupting the thread.
retryTimes++;
Thread.sleep(500);
return this.getSyncTaskStatusInner(tenantId, jobId, retryTimes);
} else {
if (engineLog.containsKey("engineLogErr")) {
// This key means the log could not be fetched. Currently the engine side only
// sets it for flink jobs; handled here in advance for other engines.
logBuild.append(engineLog.getString("engineLogErr"));
} else {
logBuild.append(engineLog.getString("root-exception"));
}
logBuild.append("\n");
}
}
if (logIngo != null) {
logBuild.append(logIngo.getString("msg_info"));
logBuild.append("\n");
}
final BatchSelectSql batchHiveSelectSql = this.batchSelectSqlService.getByJobId(jobId, tenantId, 0);
if (batchHiveSelectSql != null) {
logBuild.append("====================任务信息====================\n");
// Redact password values in the SQL text before display.
final String sqlLog = batchHiveSelectSql.getCorrectSqlText().replaceAll("(\"password\"[^\"]+\")([^\"]+)(\")", "$1**$3");
logBuild.append(JsonUtils.formatJSON(sqlLog));
logBuild.append("\n");
}
} else if (TaskStatus.FINISHED.getStatus().equals(status) && retryTimes < 3) {
// FIXME In perjob mode the statistics may not be collected yet right after the job
// finishes; wait 1 second and request the result once more (final attempt).
Thread.sleep(1000);
return this.getSyncTaskStatusInner(tenantId, jobId, 3);
}
if (TaskStatus.FINISHED.getStatus().equals(status) || TaskStatus.CANCELED.getStatus().equals(status) || TaskStatus.FAILED.getStatus().equals(status)) {
resultVO.setDownload(String.format(BatchJobService.DOWNLOAD_URL, jobId, EScheduleJobType.SYNC.getVal(), tenantId));
}
resultVO.setMsg(logBuild.toString());
} catch (Exception e) {
// Log parsing failed — the job may have failed and produced non-JSON log text.
LOGGER.error("", e);
resultVO.setMsg(StringUtils.isEmpty(engineLogStr) ? "engine调度失败" : engineLogStr);
}
} catch (Exception e) {
LOGGER.error("获取同步任务状态失败", e);
}
return resultVO;
}
Usage of com.dtstack.taier.scheduler.vo.action.ActionLogVO in the Taier project by DTStack.
From class HadoopDataDownloadService, method buildIDownLoad:
/**
 * Builds a log downloader for the given job.
 * <p>
 * For sync (data integration) tasks the log content is assembled in memory from the
 * action-service logs; for all other task types a YARN log downloader is created from
 * the job's applicationId.
 *
 * @param jobId    scheduler job id, must not be blank
 * @param taskType task type, compared against {@link EScheduleJobType} values
 * @param tenantId tenant used to resolve cluster/HDFS/YARN configuration
 * @param limitNum maximum number of log entries to download
 * @return a downloader, or null when logs are unavailable (standalone sync jobs,
 *         missing applicationId, or downloader construction failure)
 * @throws RdosDefineException when jobId is blank
 */
@Override
public IDownload buildIDownLoad(String jobId, Integer taskType, Long tenantId, Integer limitNum) {
    if (StringUtils.isBlank(jobId)) {
        throw new RdosDefineException("jobId 不能为空");
    }
    if (EScheduleJobType.SYNC.getVal().equals(taskType)) {
        // Standalone mode does not support log download; return null directly.
        // Boolean.TRUE.equals guards against a null Boolean from the service (avoids NPE on unboxing).
        Boolean isStandalone = clusterService.hasStandalone(tenantId, EComponentType.FLINK.getTypeCode());
        if (Boolean.TRUE.equals(isStandalone)) {
            return null;
        }
        // Sync task: build the log text in memory.
        StringBuilder syncLog = new StringBuilder();
        SyncDownload syncDownload = new SyncDownload();
        ActionLogVO log = actionService.log(jobId);
        if (Objects.nonNull(log)) {
            String engineLogStr = log.getEngineLog();
            String logInfoStr = log.getLogInfo();
            if (StringUtils.isNotBlank(engineLogStr)) {
                try {
                    // Strip the incremental-sync config before exposing the engine log.
                    JSONObject engineLogJson = JSON.parseObject(engineLogStr);
                    engineLogJson.remove("increConf");
                    engineLogStr = engineLogJson.toJSONString();
                } catch (Exception e) {
                    LOGGER.info("engineLog非json", e);
                }
                syncLog.append("engineLog:\n").append(engineLogStr).append("\n");
            }
            // Guard against a blank logInfo: JSON.parseObject(null) returns null and the
            // getString call would NPE, producing a misleading error log for a normal case.
            if (StringUtils.isNotBlank(logInfoStr)) {
                try {
                    JSONObject logInfo = JSON.parseObject(logInfoStr);
                    syncLog.append("logInfo:\n").append(logInfo.getString("msg_info"));
                } catch (Exception e) {
                    LOGGER.error("同步任务日志下载失败", e);
                }
            }
        }
        syncDownload.setLogInfo(syncLog.toString());
        return syncDownload;
    }
    String applicationId = batchJobService.getApplicationId(jobId);
    if (StringUtils.isBlank(applicationId)) {
        return null;
    }
    IDownload iDownload = null;
    try {
        iDownload = RetryUtil.executeWithRetry(() -> {
            final Map<String, Object> hadoopConf = Engine2DTOService.getHdfs(tenantId);
            final JSONObject yarnConf = Engine2DTOService.getComponentConfig(tenantId, EComponentType.YARN);
            String submitUserName = getSubmitUserNameByJobId(jobId);
            return new LogPluginDownload(applicationId, yarnConf, hadoopConf, submitUserName, limitNum);
        }, 3, 1000L, false);
    } catch (Exception e) {
        // Pass the exception as the trailing argument without a placeholder so SLF4J
        // logs the full stack trace instead of consuming it as a format argument.
        LOGGER.error("downloadJobLog {} 失败", jobId, e);
        return null;
    }
    return iDownload;
}
Aggregations