Usage of com.dtstack.taier.develop.dto.devlop.BatchServerLogVO in the Taier project by DTStack.
The following is the BatchServerLogService method getLogsByJobId.
/**
 * Builds the full log view for a single schedule job.
 * <p>
 * Looks up the job and its task shade, pulls the engine-side log via
 * {@code actionService}, enriches it per task type (SPARK_SQL: parameter-substituted SQL;
 * SYNC: decoded/filtered sync JSON plus incremental info), appends retry logs, and
 * finally populates download link, name, compute type and task type on the VO.
 *
 * @param jobId    schedule job id; blank input short-circuits to {@code null}
 * @param pageInfo page index used when building the retry log section
 * @return populated {@link BatchServerLogVO}, or {@code null} when {@code jobId} is blank
 * @throws RdosDefineException when the job or its task shade cannot be found
 */
public BatchServerLogVO getLogsByJobId(String jobId, Integer pageInfo) {
if (StringUtils.isBlank(jobId)) {
return null;
}
final ScheduleJob job = scheduleJobService.getByJobId(jobId);
if (Objects.isNull(job)) {
LOGGER.info("can not find job by id:{}.", jobId);
throw new RdosDefineException(ErrorCode.CAN_NOT_FIND_JOB);
}
final Long tenantId = job.getTenantId();
final ScheduleTaskShade scheduleTaskShade = this.taskService.findTaskByTaskId(job.getTaskId());
if (Objects.isNull(scheduleTaskShade)) {
LOGGER.info("can not find task shade by jobId:{}.", jobId);
throw new RdosDefineException(ErrorCode.SERVER_EXCEPTION);
}
final BatchServerLogVO batchServerLogVO = new BatchServerLogVO();
// Logs are fetched from the engine side.
final JSONObject logsBody = new JSONObject(2);
logsBody.put("jobId", jobId);
logsBody.put("computeType", ComputeType.BATCH.getType());
// NOTE(review): actionLogVO is dereferenced below without a null check —
// assumes actionService.log(jobId) never returns null; confirm with its contract.
ActionLogVO actionLogVO = actionService.log(jobId);
JSONObject info = new JSONObject();
if (!Strings.isNullOrEmpty(actionLogVO.getLogInfo())) {
try {
info = JSON.parseObject(actionLogVO.getLogInfo());
} catch (final Exception e) {
// Non-JSON log info is still surfaced to the caller under "msg_info".
LOGGER.error(String.format("parse jobId: %s logInfo:%s", jobId, actionLogVO.getLogInfo()), e);
info.put("msg_info", actionLogVO.getLogInfo());
}
}
if (Objects.nonNull(job.getVersionId())) {
// Need the SQL that corresponds to the task version used when this job executed,
// not the task's current SQL.
BatchTaskVersionDetailDTO taskVersion = this.batchTaskVersionService.getByVersionId((long) job.getVersionId());
if (Objects.nonNull(taskVersion)) {
if (StringUtils.isEmpty(taskVersion.getOriginSql())) {
// Fall back to the raw sqlText (or an empty JSON object) when no origin SQL was stored.
String jsonSql = StringUtils.isEmpty(taskVersion.getSqlText()) ? "{}" : taskVersion.getSqlText();
scheduleTaskShade.setSqlText(jsonSql);
} else {
scheduleTaskShade.setSqlText(taskVersion.getOriginSql());
}
}
}
info.put("status", job.getStatus());
if (EScheduleJobType.SPARK_SQL.getVal().equals(scheduleTaskShade.getTaskType())) {
// Handle SQL comments: base64-encode the comment sections first, then substitute
// custom parameters in the non-comment parts, then restore the comments.
String sql = SqlFormatterUtil.dealAnnotationBefore(scheduleTaskShade.getSqlText());
final List<BatchTaskParamShade> taskParamsToReplace = this.batchTaskParamShadeService.getTaskParam(scheduleTaskShade.getId());
sql = this.jobParamReplace.paramReplace(sql, taskParamsToReplace, job.getCycTime());
sql = SqlFormatterUtil.dealAnnotationAfter(sql);
info.put("sql", sql);
} else if (EScheduleJobType.SYNC.getVal().equals(scheduleTaskShade.getTaskType())) {
final JSONObject jobJson;
// The task shade's sqlText may be base64-encoded; try decoding first,
// then fall back to parsing it as plain JSON.
JSONObject sqlJson = null;
try {
sqlJson = JSON.parseObject(Base64Util.baseDecode(scheduleTaskShade.getSqlText()));
} catch (final Exception e) {
sqlJson = JSON.parseObject(scheduleTaskShade.getSqlText());
}
jobJson = sqlJson.getJSONObject("job");
// Mask passwords before the config is shown to the user.
DataFilter.passwordFilter(jobJson);
String jobStr = jobJson.toJSONString();
final List<BatchTaskParamShade> taskParamsToReplace = this.batchTaskParamShadeService.getTaskParam(scheduleTaskShade.getId());
jobStr = this.jobParamReplace.paramReplace(jobStr, taskParamsToReplace, job.getCycTime());
info.put("sql", JsonUtils.formatJSON(jobStr));
// Incremental-sync info can only be resolved for jobs that have both start and end times.
if (Objects.nonNull(job.getExecEndTime()) && Objects.nonNull(job.getExecStartTime())) {
List<ActionJobEntityVO> engineEntities = actionService.entitys(Collections.singletonList(logsBody.getString("jobId")));
String engineJobId = "";
if (CollectionUtils.isNotEmpty(engineEntities)) {
engineJobId = engineEntities.get(0).getEngineJobId();
}
this.parseIncreInfo(info, jobStr, tenantId, engineJobId, job.getExecStartTime().getTime(), job.getExecEndTime().getTime(), "");
}
}
if (job.getJobId() != null) {
try {
if (StringUtils.isNotBlank(actionLogVO.getEngineLog())) {
final Map<String, Object> engineLogMap = BatchServerLogService.objectMapper.readValue(actionLogVO.getEngineLog(), Map.class);
this.dealPerfLog(engineLogMap);
info.putAll(engineLogMap);
// Strip statistics: not shown in the UI, only used by the scheduler side.
info.remove("countInfo");
}
} catch (Exception e) {
// Non-JSON engine logs are still returned under "msg_info" rather than dropped.
info.put("msg_info", actionLogVO.getEngineLog());
LOGGER.error("", e);
}
}
// Append retry logs for the job.
final String retryLog = this.buildRetryLog(jobId, pageInfo, batchServerLogVO);
this.formatForLogInfo(info, job.getType(), scheduleTaskShade.getTaskType(), retryLog, null, null, null, batchServerLogVO, tenantId, jobId);
// Download link only for non-SYNC/VIRTUAL/WORK_FLOW tasks that have reached a stopped state.
if (!scheduleTaskShade.getTaskType().equals(EScheduleJobType.SYNC.getVal()) && !scheduleTaskShade.getTaskType().equals(EScheduleJobType.VIRTUAL.getVal()) && !scheduleTaskShade.getTaskType().equals(EScheduleJobType.WORK_FLOW.getVal()) && TaskStatus.getStoppedStatus().contains(job.getStatus())) {
batchServerLogVO.setDownloadLog(String.format(DOWNLOAD_LOG, jobId, scheduleTaskShade.getTaskType(), 0L));
}
batchServerLogVO.setName(scheduleTaskShade.getName());
batchServerLogVO.setComputeType(scheduleTaskShade.getComputeType());
batchServerLogVO.setTaskType(scheduleTaskShade.getTaskType());
return batchServerLogVO;
}
Usage of com.dtstack.taier.develop.dto.devlop.BatchServerLogVO in the Taier project by DTStack.
The following is the BatchServerLogService method formatForSyncLogInfo.
/**
 * Reshapes a data-sync job's raw log JSON into the structure the front end expects
 * and stores it on the VO.
 * <p>
 * Extracts the sync config ("job"/"parser"/"createModel") from the "sql" field,
 * carries over message/exception/status fields, optionally replaces the "perf"
 * section with Prometheus-derived metrics, and parses the perf log into
 * {@link BatchServerLogVO.SyncJobInfo}. On any parse failure the raw
 * {@code jobInfo} JSON is stored as the log info instead.
 *
 * @param jobInfo   merged log JSON produced by the caller
 * @param jobType   schedule type; only NORMAL_SCHEDULE jobs carry "increInfo"
 * @param retryLog  retry log text appended to message/exception fields
 * @param startTime job execution start; with endTime, enables Prometheus perf lookup
 * @param endTime   job execution end
 * @param execTime  execution duration passed through to the sync-info parser
 * @param batchServerLogVO target VO mutated with the formatted log info
 * @param tenantId  tenant owning the job
 * @param jobId     schedule job id
 */
private void formatForSyncLogInfo(final JSONObject jobInfo, final Integer jobType, final String retryLog, final Timestamp startTime, final Timestamp endTime, final Long execTime, final BatchServerLogVO batchServerLogVO, final Long tenantId, final String jobId) {
try {
final Map<String, Object> sqlInfoMap = (Map<String, Object>) BatchServerLogService.objectMapper.readValue(jobInfo.getString("sql"), Object.class);
final JSONObject res = new JSONObject();
res.put("job", sqlInfoMap.get("job"));
res.put("parser", sqlInfoMap.get("parser"));
res.put("createModel", sqlInfoMap.get("createModel"));
final Map<String, Object> jobInfoMap = (Map<String, Object>) BatchServerLogService.objectMapper.readValue(jobInfo.toString(), Object.class);
final JSONObject logInfoJson = new JSONObject();
logInfoJson.put("jobid", jobInfoMap.get("jobid"));
logInfoJson.put("msg_info", jobInfoMap.get("msg_info") + retryLog);
// NOTE(review): "turncated" (sic) is an existing wire-format key consumed elsewhere — do not rename.
logInfoJson.put("turncated", jobInfoMap.get("turncated"));
if (jobInfoMap.get("ruleLogList") != null) {
logInfoJson.put("ruleLogList", jobInfoMap.get("ruleLogList"));
}
String perfLogInfo = jobInfoMap.getOrDefault("perf", StringUtils.EMPTY).toString();
// Prefer Prometheus-based perf metrics when the feature is on and we have the
// engine job id plus an execution time window.
final boolean parsePerfLog = startTime != null && endTime != null && jobInfoMap.get("jobid") != null && this.environmentContext.getSyncLogPromethues();
if (parsePerfLog) {
perfLogInfo = this.formatPerfLogInfo(jobInfoMap.get("jobid").toString(), jobId, startTime.getTime(), endTime.getTime(), tenantId);
}
logInfoJson.put("perf", perfLogInfo);
// Data-fill (supplement) jobs carry no incremental marker info, so only
// regularly scheduled jobs copy "increInfo" through.
if (EScheduleType.NORMAL_SCHEDULE.getType().equals(jobType)) {
logInfoJson.put("increInfo", jobInfo.getString("increInfo"));
}
logInfoJson.put("sql", res);
String allExceptions = "";
if (jobInfoMap.get("root-exception") != null) {
allExceptions = jobInfoMap.get("root-exception").toString();
if (!Strings.isNullOrEmpty(retryLog)) {
allExceptions += retryLog;
}
}
// If no exception log was captured but "engineLogErr" exists, Flink itself may have
// gone down; expose that error unless the job actually finished successfully.
if (StringUtils.isEmpty(allExceptions.trim()) && jobInfoMap.containsKey("engineLogErr")) {
if (!TaskStatus.FINISHED.getStatus().equals(Integer.valueOf(jobInfoMap.get("status").toString()))) {
// Successful jobs default to an empty exception section.
allExceptions = jobInfoMap.get("engineLogErr").toString();
} else {
allExceptions = "";
}
}
logInfoJson.put("all-exceptions", allExceptions);
logInfoJson.put("status", jobInfoMap.get("status"));
batchServerLogVO.setLogInfo(logInfoJson.toString());
// Parse the perf log into the structured data-sync summary shown in the UI.
final BatchServerLogVO.SyncJobInfo syncJobInfo = this.parseExecLog(perfLogInfo, execTime);
batchServerLogVO.setSyncJobInfo(syncJobInfo);
} catch (final Exception e) {
// Fall back to the unformatted JSON so the user still sees something.
LOGGER.error("logInfo 解析失败", e);
batchServerLogVO.setLogInfo(jobInfo.toString());
}
}
Aggregations