use of com.flink.platform.dao.entity.JobRunInfo in project flink-platform-backend by itinycheng.
Example: method waitForComplete of class JobExecuteThread.
/**
 * Polls the status of a submitted job run until it reaches a terminal state.
 *
 * <p>The status is fetched either from the remote worker that owns the run (via REST) or from
 * the local {@code processJobStatusService}, then — for streaming jobs — refined by
 * {@code updateAndGetStreamJobStatus}. Polling continues while the application is running,
 * sleeping with an increasing retry counter between attempts.
 *
 * @param routeUrl base url of the worker owning the job run; a remote url triggers a REST call
 * @param jobRunInfo the job run whose status is polled; {@code createTime} may be filled in here
 * @return the terminal {@link StatusInfo}; an ERROR status after too many consecutive poll
 *     failures; or {@code null} if the application shut down before the run finished
 */
public StatusInfo waitForComplete(String routeUrl, JobRunInfo jobRunInfo) {
    int retryTimes = 0;
    int errorTimes = 0;
    boolean isRemote = isRemoteUrl(routeUrl);
    while (AppRunner.isRunning()) {
        try {
            StatusInfo statusInfo;
            if (isRemote) {
                // Ask the remote worker that owns this run for its current status.
                HttpHeaders headers = new HttpHeaders();
                headers.setContentType(MediaType.APPLICATION_JSON);
                HttpEntity<JobRunInfo> requestEntity = new HttpEntity<>(jobRunInfo, headers);
                statusInfo = restTemplate.postForObject(routeUrl + REST_GET_STATUS, requestEntity, StatusInfo.class);
            } else {
                statusInfo = processJobStatusService.getStatus(jobRunInfo);
            }
            if (jobRunInfo.getExecMode() == STREAMING) {
                // Streaming status evaluation needs a reference start time; backfill if absent.
                if (jobRunInfo.getCreateTime() == null) {
                    jobRunInfo.setCreateTime(LocalDateTime.now());
                }
                statusInfo = updateAndGetStreamJobStatus(statusInfo, jobRunInfo.getCreateTime());
            }
            if (statusInfo != null) {
                log.info("Job runId: {}, name: {} Status: {}", jobRunInfo.getJobId(), jobRunInfo.getName(), statusInfo.getStatus());
                if (statusInfo.getStatus().isTerminalState()) {
                    return statusInfo;
                }
            }
            // FIX: a successful poll resets the error counter, so only *consecutive* failures
            // (not transient errors accumulated over a long-running job) can trigger ERROR below.
            errorTimes = 0;
        } catch (Exception e) {
            // FIX: log the failure instead of swallowing it silently — otherwise the eventual
            // ERROR status carries no clue about its cause.
            log.error("Failed to get status of jobRun: {}", jobRunInfo.getId(), e);
            if (++errorTimes > errorRetries) {
                return new CustomizeStatusInfo(ERROR, LocalDateTime.now(), LocalDateTime.now());
            }
        }
        sleep(++retryTimes);
    }
    // Application is shutting down; the run's final status is unknown.
    return null;
}
use of com.flink.platform.dao.entity.JobRunInfo in project flink-platform-backend by itinycheng.
Example: method call of class JobExecuteThread.
/**
 * Executes one job vertex: loads the job definition, submits it (or reuses an already
 * submitted run), waits for completion, and reports the final status.
 *
 * @return a {@link JobResponse} carrying the job id, run id and final execution status;
 *     NOT_EXIST when the job or its run can no longer be found, ERROR on failure
 */
@Override
public JobResponse call() {
    Long jobId = jobVertex.getJobId();
    Long jobRunId = jobVertex.getJobRunId();
    try {
        // Load the job definition; only ONLINE jobs are eligible to run.
        JobInfo job =
                jobInfoService.getOne(
                        new QueryWrapper<JobInfo>()
                                .lambda()
                                .eq(JobInfo::getId, jobId)
                                .eq(JobInfo::getStatus, JobStatus.ONLINE));
        if (job == null) {
            log.warn("The job:{} is no longer exists or not in ready/scheduled status.", jobId);
            return new JobResponse(jobId, jobRunId, NOT_EXIST);
        }

        // Resolve the worker url, falling back to the local host when none is configured.
        String routeUrl = HttpUtil.getUrlOrDefault(job.getRouteUrl());

        // Reuse the existing run when the vertex already carries a run id; otherwise submit.
        final JobRunInfo jobRun;
        if (jobRunId == null) {
            jobRun = processRemoteJob(routeUrl, jobId);
        } else {
            jobRun = jobRunInfoService.getById(jobRunId);
            log.info("Job:{} already submitted, runId = {}.", jobId, jobRunId);
        }
        if (jobRun == null) {
            log.warn("The jobRun:{} is no longer exists.", jobRunId);
            return new JobResponse(jobId, jobRunId, NOT_EXIST);
        }

        // Keep the in-memory run id in sync with the persisted run.
        jobRunId = jobRun.getId();

        // Wait for a terminal state (unless the run already finished) and persist the outcome.
        ExecutionStatus status = jobRun.getStatus();
        if (status == null || !status.isTerminalState()) {
            StatusInfo finalState = waitForComplete(routeUrl, jobRun);
            if (finalState != null) {
                status = finalState.getStatus();
                updateJobRunInfo(jobRunId, finalState.getStatus(), finalState.getEndTime());
            }
        }
        return new JobResponse(jobId, jobRunId, status);
    } catch (Exception e) {
        log.error("Submit job and wait for complete failed.", e);
        updateJobRunInfo(jobRunId, ERROR, LocalDateTime.now());
        return new JobResponse(jobId, jobRunId, ERROR);
    }
}
use of com.flink.platform.dao.entity.JobRunInfo in project flink-platform-backend by itinycheng.
Example: method appendExistedJobFlowRunToScheduler of class InitJobFlowScheduler.
/**
 * Re-registers unfinished job-flow runs owned by this host with the scheduler, restoring
 * each flow vertex's run id and status from the persisted job runs first.
 */
public void appendExistedJobFlowRunToScheduler() {
    // Flow runs bound to this host that have not yet reached a terminal status.
    List<JobFlowRun> unfinished =
            jobFlowRunService.list(
                    new QueryWrapper<JobFlowRun>()
                            .lambda()
                            .eq(JobFlowRun::getHost, Constant.HOST_IP)
                            .in(JobFlowRun::getStatus, getNonTerminals()));
    for (JobFlowRun flowRun : unfinished) {
        DAG<Long, JobVertex, JobEdge> dag = flowRun.getFlow();
        // Rehydrate each vertex with the run id / status of its persisted job run.
        List<JobRunInfo> jobRuns =
                jobRunInfoService.list(
                        new QueryWrapper<JobRunInfo>()
                                .lambda()
                                .eq(JobRunInfo::getFlowRunId, flowRun.getId()));
        for (JobRunInfo jobRun : jobRuns) {
            JobVertex vertex = dag.getVertex(jobRun.getJobId());
            vertex.setJobRunId(jobRun.getId());
            vertex.setJobRunStatus(jobRun.getStatus());
        }
        jobFlowScheduleService.registerToScheduler(flowRun);
    }
}
use of com.flink.platform.dao.entity.JobRunInfo in project flink-platform-backend by itinycheng.
Example: method processJob of class ProcessJobService.
/**
 * Builds the command for a job, executes it, and persists the resulting run record.
 *
 * <p>Pipeline: load the ONLINE job definition, substitute SQL variables into its subject,
 * build a type-specific {@code JobCommand}, execute it, then save a {@code JobRunInfo}
 * capturing the outcome. For FLINK_SQL jobs the temporary sql-context file referenced by
 * the command's main args is deleted in the {@code finally} block.
 *
 * @param jobId id of the job definition to run; must reference an ONLINE job
 * @param flowRunId id of the enclosing job-flow run this execution belongs to
 * @return the saved {@link JobRunInfo} describing this execution
 * @throws Exception if the job does not exist, no suitable command builder/executor is
 *     found, or command building/execution fails
 */
public JobRunInfo processJob(final long jobId, final long flowRunId) throws Exception {
    JobCommand jobCommand = null;
    JobInfo jobInfo = null;
    try {
        // step 1: get job info
        jobInfo = jobInfoService.getOne(new QueryWrapper<JobInfo>().lambda().eq(JobInfo::getId, jobId).eq(JobInfo::getStatus, JobStatus.ONLINE));
        if (jobInfo == null) {
            throw new JobCommandGenException(String.format("The job: %s is no longer exists or in delete status.", jobId));
        }
        // step 2: replace variables in the sql statement
        // Effectively-final copy so the lambdas below may capture it.
        JobInfo finalJobInfo = jobInfo;
        // Built-in variables: only those actually present in the subject are resolved.
        Map<String, Object> variableMap = Arrays.stream(SqlVar.values()).filter(sqlVar -> sqlVar.type == SqlVar.VarType.VARIABLE).filter(sqlVar -> finalJobInfo.getSubject().contains(sqlVar.variable)).map(sqlVar -> Pair.of(sqlVar.variable, sqlVar.valueProvider.apply(finalJobInfo))).collect(toMap(Pair::getLeft, Pair::getRight));
        // User-defined variables from the job config; may override built-ins with the same name.
        MapUtils.emptyIfNull(finalJobInfo.getVariables()).forEach((name, value) -> {
            SqlVar sqlVar = SqlVar.matchPrefix(name);
            variableMap.put(name, sqlVar.valueProvider.apply(value));
        });
        // replace variable with actual value
        // NOTE(review): substitution is sequential, so a value containing another variable's
        // name would itself be substituted on a later iteration — confirm this is intended.
        for (Map.Entry<String, Object> entry : variableMap.entrySet()) {
            String originSubject = jobInfo.getSubject();
            String distSubject = originSubject.replace(entry.getKey(), entry.getValue().toString());
            jobInfo.setSubject(distSubject);
        }
        JobType jobType = jobInfo.getType();
        String version = jobInfo.getVersion();
        // step 3: build job command, create a SqlContext if needed
        jobCommand = jobCommandBuilders.stream().filter(builder -> builder.isSupported(jobType, version)).findFirst().orElseThrow(() -> new JobCommandGenException("No available job command builder")).buildCommand(jobInfo);
        // step 4: submit job
        LocalDateTime submitTime = LocalDateTime.now();
        String commandString = jobCommand.toCommandString();
        JobCallback callback = jobCommandExecutors.stream().filter(executor -> executor.isSupported(jobType)).findFirst().orElseThrow(() -> new JobCommandGenException("No available job command executor")).execCommand(commandString);
        // step 5: write job run info to db
        ExecutionStatus executionStatus = getExecutionStatus(jobType, callback);
        JobRunInfo jobRunInfo = new JobRunInfo();
        jobRunInfo.setName(jobInfo.getName() + "-" + System.currentTimeMillis());
        jobRunInfo.setJobId(jobInfo.getId());
        jobRunInfo.setFlowRunId(flowRunId);
        jobRunInfo.setDeployMode(jobInfo.getDeployMode());
        jobRunInfo.setExecMode(jobInfo.getExecMode());
        // Subject has already been variable-substituted above.
        jobRunInfo.setSubject(jobInfo.getSubject());
        jobRunInfo.setStatus(executionStatus);
        jobRunInfo.setVariables(JsonUtil.toJsonString(variableMap));
        jobRunInfo.setBackInfo(JsonUtil.toJsonString(callback));
        jobRunInfo.setSubmitTime(submitTime);
        // A run that finished immediately (e.g. failed submission) gets its stop time now.
        if (executionStatus.isTerminalState()) {
            jobRunInfo.setStopTime(LocalDateTime.now());
        }
        jobRunInfoService.save(jobRunInfo);
        // step 6: print job command info
        log.info("Job: {} submitted, time: {}", jobId, System.currentTimeMillis());
        return jobRunInfo;
    } finally {
        // Best-effort cleanup of the temporary sql-context file created for FLINK_SQL jobs.
        if (jobInfo != null && jobInfo.getType() == JobType.FLINK_SQL && jobCommand != null) {
            try {
                FlinkCommand flinkCommand = (FlinkCommand) jobCommand;
                if (flinkCommand.getMainArgs() != null) {
                    Files.deleteIfExists(Paths.get(flinkCommand.getMainArgs()));
                }
            } catch (Exception e) {
                log.warn("Delete sql context file failed", e);
            }
        }
    }
}
use of com.flink.platform.dao.entity.JobRunInfo in project flink-platform-backend by itinycheng.
Example: method execute of class StatusRunner.
/**
 * Quartz tick that refreshes the status of all non-terminal job runs.
 *
 * <p>Runs are grouped by their route url so each owning worker receives a single batched
 * REST call with the ids it is responsible for.
 *
 * @param context the Quartz execution context (unused)
 */
@Override
public void execute(JobExecutionContext context) {
    // All job runs that have not yet reached a terminal status.
    List<JobRunInfo> jobRunList = jobRunInfoService.list(new QueryWrapper<JobRunInfo>().lambda().in(JobRunInfo::getStatus, NON_TERMINAL_STATUS_LIST));
    // Group by route url (empty string for runs with no url) — one REST call per worker.
    Map<String, List<JobRunInfo>> groupedJobRunList = jobRunList.stream().collect(groupingBy(jobRunInfo -> StringUtils.defaultString(jobRunInfo.getRouteUrl())));
    for (Entry<String, List<JobRunInfo>> entry : groupedJobRunList.entrySet()) {
        String routeUrl = HttpUtil.getUrlOrDefault(entry.getKey());
        List<Long> ids = entry.getValue().stream().map(JobRunInfo::getId).collect(toList());
        try {
            HttpHeaders headers = new HttpHeaders();
            headers.setContentType(MediaType.APPLICATION_JSON);
            HttpEntity<List<Long>> requestEntity = new HttpEntity<>(ids, headers);
            ResultInfo<Object> response = restTemplate.exchange(routeUrl + REST_UPDATE_STATUS, HttpMethod.POST, requestEntity, new ParameterizedTypeReference<ResultInfo<Object>>() {
            }).getBody();
            log.info("The job run id in : {} are processed, result: {}", ids, response);
        } catch (Exception e) {
            // FIX: one unreachable worker must not abort the whole tick — log and continue
            // with the remaining route-url groups instead of propagating out of execute().
            log.error("Failed to update status of job runs: {} via: {}", ids, routeUrl, e);
        }
    }
}
Aggregations