Use of com.dtstack.taier.dao.domain.ScheduleTaskShade in project Taier by DTStack.
In class DependencyManager, method getDependencyHandler.
/**
* Builds the dependency handler chain.
*
* @param keyPreStr key prefix
* @param currentTaskShade the current task
* @param corn the schedule cycle
* @return the dependency handler chain
*/
public DependencyHandler getDependencyHandler(String keyPreStr, ScheduleTaskShade currentTaskShade, ScheduleCorn corn) {
// query upstream tasks
List<ScheduleTaskTaskShade> scheduleTaskTaskShadeList = scheduleTaskTaskService.lambdaQuery().eq(ScheduleTaskTaskShade::getTaskId, currentTaskShade.getTaskId()).eq(ScheduleTaskTaskShade::getIsDeleted, Deleted.NORMAL.getStatus()).list();
DependencyHandler dependencyHandler = null;
List<Long> parentTaskIds = scheduleTaskTaskShadeList.stream().map(ScheduleTaskTaskShade::getParentTaskId).collect(Collectors.toList());
// if there are no upstream tasks, an UpstreamDependencyHandler is not needed
List<ScheduleTaskShade> taskShadeList = null;
if (CollectionUtils.isNotEmpty(parentTaskIds)) {
// query the upstream tasks
taskShadeList = scheduleTaskService.lambdaQuery().in(ScheduleTaskShade::getTaskId, parentTaskIds).eq(ScheduleTaskShade::getIsDeleted, Deleted.NORMAL.getStatus()).list();
if (CollectionUtils.isNotEmpty(taskShadeList)) {
dependencyHandler = new UpstreamDependencyHandler(keyPreStr, currentTaskShade, taskShadeList, scheduleJobService);
}
}
// check whether self-dependency is configured
ScheduleConf scheduleConf = corn.getScheduleConf();
if (DependencyType.SELF_DEPENDENCY_SUCCESS.getType().equals(scheduleConf.getSelfReliance()) || DependencyType.SELF_DEPENDENCY_END.getType().equals(scheduleConf.getSelfReliance())) {
if (dependencyHandler == null) {
dependencyHandler = new SelfRelianceDependencyHandler(keyPreStr, currentTaskShade, scheduleJobService);
} else {
dependencyHandler.setNext(new SelfRelianceDependencyHandler(keyPreStr, currentTaskShade, scheduleJobService));
}
} else if (DependencyType.PRE_PERIOD_CHILD_DEPENDENCY_SUCCESS.getType().equals(scheduleConf.getSelfReliance()) || DependencyType.PRE_PERIOD_CHILD_DEPENDENCY_END.getType().equals(scheduleConf.getSelfReliance())) {
if (CollectionUtils.isNotEmpty(taskShadeList)) {
if (dependencyHandler == null) {
dependencyHandler = new UpstreamNextJobDependencyHandler(keyPreStr, currentTaskShade, taskShadeList, scheduleJobService);
} else {
dependencyHandler.setNext(new UpstreamNextJobDependencyHandler(keyPreStr, currentTaskShade, taskShadeList, scheduleJobService));
}
}
}
return dependencyHandler;
}
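For orientation, a minimal caller-side sketch of walking the resulting chain is shown below. It is illustrative rather than Taier source: the generationJobJobForTask signature matches the handler shown in the next snippet, but the getNext() accessor and the surrounding variables (dependencyManager, currentDate, currentJobKey) are assumptions.
// Illustrative sketch (not Taier source): collect job-job edges from every handler in the chain.
List<ScheduleJobJob> edges = new ArrayList<>();
DependencyHandler handler = dependencyManager.getDependencyHandler(keyPreStr, currentTaskShade, corn);
while (handler != null) {
    edges.addAll(handler.generationJobJobForTask(corn, currentDate, currentJobKey));
    handler = handler.getNext(); // assumed accessor paired with setNext above
}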
Use of com.dtstack.taier.dao.domain.ScheduleTaskShade in project Taier by DTStack.
In class UpstreamNextJobDependencyHandler, method generationJobJobForTask.
@Override
public List<ScheduleJobJob> generationJobJobForTask(ScheduleCorn corn, Date currentDate, String currentJobKey) {
List<ScheduleJobJob> jobJobList = Lists.newArrayList();
for (ScheduleTaskShade taskShade : taskShadeList) {
try {
String jobKey = getJobKey(taskShade, currentDate);
// if no job key can be resolved, instances are being generated for the first day, so this edge is not created
if (StringUtils.isBlank(jobKey)) {
continue;
}
ScheduleJobJob scheduleJobJob = new ScheduleJobJob();
scheduleJobJob.setTenantId(currentTaskShade.getTenantId());
scheduleJobJob.setJobKey(currentJobKey);
scheduleJobJob.setParentJobKey(jobKey);
scheduleJobJob.setJobKeyType(RelyType.UPSTREAM_NEXT_JOB.getType());
scheduleJobJob.setRule(getRule(corn.getScheduleConf()));
scheduleJobJob.setIsDeleted(Deleted.NORMAL.getStatus());
jobJobList.add(scheduleJobJob);
} catch (Exception e) {
LOGGER.error("", e);
}
}
return jobJobList;
}
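A hedged usage sketch of this handler on its own, assuming the constructor arguments shown in getDependencyHandler above; currentDate and currentJobKey are illustrative names.
// Illustrative only: build the handler directly and generate the UPSTREAM_NEXT_JOB edges.
UpstreamNextJobDependencyHandler handler =
        new UpstreamNextJobDependencyHandler(keyPreStr, currentTaskShade, taskShadeList, scheduleJobService);
List<ScheduleJobJob> edges = handler.generationJobJobForTask(corn, currentDate, currentJobKey);
// each edge links currentJobKey to a job key computed from the upstream task and currentDate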
Use of com.dtstack.taier.dao.domain.ScheduleTaskShade in project Taier by DTStack.
In class CycleJobBuilder, method buildTaskJobGraph.
public void buildTaskJobGraph(String triggerDay) {
if (environmentContext.getJobGraphBuilderSwitch()) {
return;
}
lock.lock();
try {
String triggerTimeStr = triggerDay + " 00:00:00";
Timestamp triggerTime = Timestamp.valueOf(triggerTimeStr);
boolean hasBuild = jobGraphTriggerService.checkHasBuildJobGraph(triggerTime);
if (hasBuild) {
LOGGER.info("trigger Day {} has build so break", triggerDay);
return;
}
// 1. count all cycle instances expected to be generated today
Integer totalTask = getTotalTask();
LOGGER.info("{} need build job : {}", triggerTimeStr, totalTask);
if (totalTask <= 0) {
saveJobGraph(triggerDay);
return;
}
clearInterruptJob(triggerTime);
// 2. split the total into batches to limit thread concurrency
int totalBatch = totalTask / environmentContext.getJobLimitSize();
if (totalTask % environmentContext.getJobLimitSize() != 0) {
totalBatch++;
}
Semaphore sph = new Semaphore(environmentContext.getMaxTaskBuildThread());
CountDownLatch ctl = new CountDownLatch(totalBatch);
AtomicJobSortWorker sortWorker = new AtomicJobSortWorker();
// 3. query the db and generate cycle instances with multiple threads
Long startId = 0L;
for (int i = 0; i < totalBatch; i++) {
// fetch the next batch of tasks
final List<ScheduleTaskShade> batchTaskShades = scheduleTaskService.listRunnableTask(startId, Lists.newArrayList(EScheduleStatus.NORMAL.getVal(), EScheduleStatus.FREEZE.getVal()), environmentContext.getJobLimitSize());
// skip if the fetched task list is empty
if (CollectionUtils.isEmpty(batchTaskShades)) {
continue;
}
startId = batchTaskShades.get(batchTaskShades.size() - 1).getId();
LOGGER.info("job-number:{} startId:{}", i, startId);
try {
sph.acquire();
jobGraphBuildPool.submit(() -> {
try {
for (ScheduleTaskShade batchTaskShade : batchTaskShades) {
try {
List<ScheduleJobDetails> scheduleJobDetails = RetryUtil.executeWithRetry(() -> buildJob(batchTaskShade, triggerDay, sortWorker), environmentContext.getBuildJobErrorRetry(), 200, false);
// insert the cycle instances
savaJobList(scheduleJobDetails);
} catch (Throwable e) {
LOGGER.error("build task failure taskId:{} apptype:{}", batchTaskShade.getTaskId(), null, e);
}
}
} catch (Throwable e) {
LOGGER.error("!!! buildTaskJobGraph build job error !!!", e);
} finally {
sph.release();
ctl.countDown();
}
});
} catch (Throwable e) {
LOGGER.error("[acquire pool error]:", e);
throw new RdosDefineException(e);
}
}
ctl.await();
// the loop has finished, so all cycle instances have been generated
saveJobGraph(triggerDay);
} catch (Exception e) {
LOGGER.error("buildTaskJobGraph !!!", e);
} finally {
LOGGER.info("buildTaskJobGraph exit & unlock ...");
lock.unlock();
}
}
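The batch split above is a ceiling division, with concurrency bounded by a Semaphore and a CountDownLatch; below is a self-contained sketch of the same arithmetic with illustrative numbers (not Taier configuration values).
// Ceiling division: split totalTask into batches of jobLimitSize.
int totalTask = 1230;        // illustrative value
int jobLimitSize = 50;       // illustrative value
int totalBatch = totalTask / jobLimitSize;
if (totalTask % jobLimitSize != 0) {
    totalBatch++;            // 1230 = 24 * 50 + 30, so 25 batches
}
// Each batch acquires a Semaphore permit (maxTaskBuildThread) before being submitted,
// and a CountDownLatch of totalBatch is awaited before saveJobGraph(triggerDay) runs.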
Use of com.dtstack.taier.dao.domain.ScheduleTaskShade in project Taier by DTStack.
In class BatchServerLogService, method getLogsByJobId.
public BatchServerLogVO getLogsByJobId(String jobId, Integer pageInfo) {
if (StringUtils.isBlank(jobId)) {
return null;
}
final ScheduleJob job = scheduleJobService.getByJobId(jobId);
if (Objects.isNull(job)) {
LOGGER.info("can not find job by id:{}.", jobId);
throw new RdosDefineException(ErrorCode.CAN_NOT_FIND_JOB);
}
final Long tenantId = job.getTenantId();
final ScheduleTaskShade scheduleTaskShade = this.taskService.findTaskByTaskId(job.getTaskId());
if (Objects.isNull(scheduleTaskShade)) {
LOGGER.info("can not find task shade by jobId:{}.", jobId);
throw new RdosDefineException(ErrorCode.SERVER_EXCEPTION);
}
final BatchServerLogVO batchServerLogVO = new BatchServerLogVO();
// logs are fetched from the engine
final JSONObject logsBody = new JSONObject(2);
logsBody.put("jobId", jobId);
logsBody.put("computeType", ComputeType.BATCH.getType());
ActionLogVO actionLogVO = actionService.log(jobId);
JSONObject info = new JSONObject();
if (!Strings.isNullOrEmpty(actionLogVO.getLogInfo())) {
try {
info = JSON.parseObject(actionLogVO.getLogInfo());
} catch (final Exception e) {
LOGGER.error(String.format("parse jobId: %s logInfo:%s", jobId, actionLogVO.getLogInfo()), e);
info.put("msg_info", actionLogVO.getLogInfo());
}
}
if (Objects.nonNull(job.getVersionId())) {
// fetch the sql of the version the task was executed with
BatchTaskVersionDetailDTO taskVersion = this.batchTaskVersionService.getByVersionId((long) job.getVersionId());
if (Objects.nonNull(taskVersion)) {
if (StringUtils.isEmpty(taskVersion.getOriginSql())) {
String jsonSql = StringUtils.isEmpty(taskVersion.getSqlText()) ? "{}" : taskVersion.getSqlText();
scheduleTaskShade.setSqlText(jsonSql);
} else {
scheduleTaskShade.setSqlText(taskVersion.getOriginSql());
}
}
}
info.put("status", job.getStatus());
if (EScheduleJobType.SPARK_SQL.getVal().equals(scheduleTaskShade.getTaskType())) {
// handle sql comments: base64-encode the comments first, then replace custom parameters outside the comments
String sql = SqlFormatterUtil.dealAnnotationBefore(scheduleTaskShade.getSqlText());
final List<BatchTaskParamShade> taskParamsToReplace = this.batchTaskParamShadeService.getTaskParam(scheduleTaskShade.getId());
sql = this.jobParamReplace.paramReplace(sql, taskParamsToReplace, job.getCycTime());
sql = SqlFormatterUtil.dealAnnotationAfter(sql);
info.put("sql", sql);
} else if (EScheduleJobType.SYNC.getVal().equals(scheduleTaskShade.getTaskType())) {
final JSONObject jobJson;
// the taskShade sql needs to be decoded
JSONObject sqlJson = null;
try {
sqlJson = JSON.parseObject(Base64Util.baseDecode(scheduleTaskShade.getSqlText()));
} catch (final Exception e) {
sqlJson = JSON.parseObject(scheduleTaskShade.getSqlText());
}
jobJson = sqlJson.getJSONObject("job");
// mask passwords
DataFilter.passwordFilter(jobJson);
String jobStr = jobJson.toJSONString();
final List<BatchTaskParamShade> taskParamsToReplace = this.batchTaskParamShadeService.getTaskParam(scheduleTaskShade.getId());
jobStr = this.jobParamReplace.paramReplace(jobStr, taskParamsToReplace, job.getCycTime());
info.put("sql", JsonUtils.formatJSON(jobStr));
if (Objects.nonNull(job.getExecEndTime()) && Objects.nonNull(job.getExecStartTime())) {
List<ActionJobEntityVO> engineEntities = actionService.entitys(Collections.singletonList(logsBody.getString("jobId")));
String engineJobId = "";
if (CollectionUtils.isNotEmpty(engineEntities)) {
engineJobId = engineEntities.get(0).getEngineJobId();
}
this.parseIncreInfo(info, jobStr, tenantId, engineJobId, job.getExecStartTime().getTime(), job.getExecEndTime().getTime(), "");
}
}
if (job.getJobId() != null) {
try {
if (StringUtils.isNotBlank(actionLogVO.getEngineLog())) {
final Map<String, Object> engineLogMap = BatchServerLogService.objectMapper.readValue(actionLogVO.getEngineLog(), Map.class);
this.dealPerfLog(engineLogMap);
info.putAll(engineLogMap);
// drop the statistics info; it is not shown on the UI and is only used by the scheduler
info.remove("countInfo");
}
} catch (Exception e) {
// also return logs that are not in json format
info.put("msg_info", actionLogVO.getEngineLog());
LOGGER.error("", e);
}
}
// append the retry log
final String retryLog = this.buildRetryLog(jobId, pageInfo, batchServerLogVO);
this.formatForLogInfo(info, job.getType(), scheduleTaskShade.getTaskType(), retryLog, null, null, null, batchServerLogVO, tenantId, jobId);
if (!scheduleTaskShade.getTaskType().equals(EScheduleJobType.SYNC.getVal()) && !scheduleTaskShade.getTaskType().equals(EScheduleJobType.VIRTUAL.getVal()) && !scheduleTaskShade.getTaskType().equals(EScheduleJobType.WORK_FLOW.getVal()) && TaskStatus.getStoppedStatus().contains(job.getStatus())) {
batchServerLogVO.setDownloadLog(String.format(DOWNLOAD_LOG, jobId, scheduleTaskShade.getTaskType(), 0L));
}
batchServerLogVO.setName(scheduleTaskShade.getName());
batchServerLogVO.setComputeType(scheduleTaskShade.getComputeType());
batchServerLogVO.setTaskType(scheduleTaskShade.getTaskType());
return batchServerLogVO;
}
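The try/catch around Base64Util.baseDecode above is a decode-with-fallback for SYNC task sql; here is a minimal sketch of the same idea using only the JDK (Base64Util is Taier's own helper, so its exact behavior is assumed).
// Minimal sketch: decode sqlText when it is Base64-encoded, otherwise fall back to the raw string.
static String decodeOrRaw(String sqlText) {
    try {
        byte[] decoded = java.util.Base64.getDecoder().decode(sqlText);
        return new String(decoded, java.nio.charset.StandardCharsets.UTF_8);
    } catch (IllegalArgumentException e) {
        return sqlText; // not valid Base64, treat as plain JSON text
    }
}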
Use of com.dtstack.taier.dao.domain.ScheduleTaskShade in project Taier by DTStack.
In class ActionService, method queryJobLog.
/**
* Views the logs of a cycle instance.
*
* @param jobId instance id
* @param pageInfo which retry attempt's log to fetch
* @return log information
*/
public ReturnJobLogVO queryJobLog(String jobId, Integer pageInfo) {
if (pageInfo == null) {
pageInfo = 1;
}
// query the cycle instance
ScheduleJob scheduleJob = jobService.lambdaQuery().eq(ScheduleJob::getJobId, jobId).eq(ScheduleJob::getIsDeleted, Deleted.NORMAL.getStatus()).one();
if (scheduleJob == null) {
throw new RdosDefineException("not find job,please contact the administrator");
}
// take the latest retry
if (0 == pageInfo) {
pageInfo = scheduleJob.getRetryNum();
}
ReturnJobLogVO jobLogVO = new ReturnJobLogVO();
jobLogVO.setPageIndex(pageInfo);
jobLogVO.setPageSize(scheduleJob.getRetryNum());
// if retryNum > 1, the instance has already been retried, so query the retry logs
if (scheduleJob.getRetryNum() > 1) {
// query the retry logs
ScheduleEngineJobRetry scheduleEngineJobRetry = jobRetryService.lambdaQuery().eq(ScheduleEngineJobRetry::getJobId, jobId).eq(ScheduleEngineJobRetry::getRetryNum, pageInfo).eq(ScheduleEngineJobRetry::getIsDeleted, Deleted.NORMAL.getStatus()).orderBy(true, false, ScheduleEngineJobRetry::getId).one();
if (scheduleEngineJobRetry != null) {
jobLogVO.setLogInfo(scheduleEngineJobRetry.getLogInfo());
jobLogVO.setEngineLog(scheduleEngineJobRetry.getEngineLog());
}
jobLogVO.setPageIndex(pageInfo);
jobLogVO.setPageSize(scheduleJob.getMaxRetryNum());
} else {
// query the current logs
ScheduleJobExpand scheduleJobExpand = jobExpandService.lambdaQuery().eq(ScheduleJobExpand::getIsDeleted, Deleted.NORMAL.getStatus()).eq(ScheduleJobExpand::getJobId, jobId).one();
if (scheduleJobExpand != null) {
jobLogVO.setLogInfo(scheduleJobExpand.getLogInfo());
jobLogVO.setEngineLog(scheduleJobExpand.getEngineLog());
}
}
// assemble the sql info
ScheduleTaskShade scheduleTaskShade = taskService.lambdaQuery().eq(ScheduleTaskShade::getTaskId, scheduleJob.getTaskId()).eq(ScheduleTaskShade::getIsDeleted, Deleted.NORMAL.getStatus()).one();
if (null != scheduleTaskShade) {
JSONObject shadeInfo = scheduleTaskShadeInfoService.getInfoJSON(scheduleTaskShade.getTaskId());
String taskParams = shadeInfo.getString("taskParamsToReplace");
List<ScheduleTaskParamShade> taskParamsToReplace = JSONObject.parseArray(taskParams, ScheduleTaskParamShade.class);
String sqlText = scheduleTaskShade.getSqlText();
if (EScheduleJobType.SYNC.getType().equals(scheduleTaskShade.getTaskType())) {
sqlText = Base64Util.baseDecode(sqlText);
}
sqlText = JobParamReplace.paramReplace(sqlText, taskParamsToReplace, scheduleJob.getCycTime());
jobLogVO.setSqlText(sqlText);
Timestamp execStartTime = scheduleJob.getExecStartTime();
Timestamp execEndTime = scheduleJob.getExecEndTime();
if (EScheduleJobType.SYNC.getType().equals(scheduleTaskShade.getTaskType())) {
String syncLog = null;
try {
syncLog = batchServerLogService.formatPerfLogInfo(scheduleJob.getEngineJobId(), scheduleJob.getJobId(), Optional.ofNullable(execStartTime).orElse(Timestamp.valueOf(LocalDateTime.now())).getTime(), Optional.ofNullable(execEndTime).orElse(Timestamp.valueOf(LocalDateTime.now())).getTime(), scheduleJob.getTenantId());
} catch (Exception e) {
LOGGER.error("queryJobLog {} sync log error", jobId, e);
}
jobLogVO.setSyncLog(syncLog);
}
if (EScheduleJobType.SPARK_SQL.getType().equals(scheduleTaskShade.getTaskType())) {
jobLogVO.setDownLoadUrl(String.format(CommonConstant.DOWNLOAD_LOG, scheduleJob.getJobId(), scheduleJob.getTaskType(), scheduleJob.getTenantId()));
}
}
return jobLogVO;
}
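A hedged caller-side sketch: fetch the latest log, then page through earlier retry attempts using the page size the VO reports; the actionService reference and the getter names (standard bean accessors for the setters used above) are assumptions.
// Illustrative only: page through the retry logs of one cycle instance.
ReturnJobLogVO latest = actionService.queryJobLog(jobId, 1);
for (int page = 2; page <= latest.getPageSize(); page++) {
    ReturnJobLogVO retry = actionService.queryJobLog(jobId, page);
    // retry.getLogInfo() / retry.getEngineLog() hold that attempt's logs
}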