Use of com.flink.platform.common.exception.JobCommandGenException in the project flink-platform-backend by itinycheng: class SqlContextHelper, method toSqls.
/**
 * Splits the given SQL text into individual statements.
 *
 * <p>The subject is trimmed and a trailing semicolon is appended when missing, so the
 * final statement is also matched by {@code SQL_PATTERN}.
 *
 * @param subject raw SQL text, possibly containing multiple semicolon-terminated statements
 * @return the parsed statements, never empty
 * @throws JobCommandGenException if no statement could be extracted from the subject
 */
public List<Sql> toSqls(String subject) {
    subject = subject.trim();
    // Ensure the last statement is terminated; the pattern relies on the semicolon.
    if (!subject.endsWith(SEMICOLON)) {
        subject = subject + SEMICOLON;
    }
    List<Sql> sqlList = new ArrayList<>();
    Matcher matcher = SQL_PATTERN.matcher(subject);
    while (matcher.find()) {
        sqlList.add(SqlType.parse(matcher.group()));
    }
    // isEmpty() over size() == 0: idiomatic and intention-revealing.
    if (sqlList.isEmpty()) {
        throw new JobCommandGenException(String.format("no sql found or parsing failed, subject: %s", subject));
    }
    return sqlList;
}
Use of com.flink.platform.common.exception.JobCommandGenException in the project flink-platform-backend by itinycheng: class SqlContextHelper, method saveToFile.
/**
 * Serializes the given sql context to JSON and persists it under the configured sql directory.
 *
 * @param fileName   name of the target file on local disk
 * @param sqlContext sql context to serialize
 * @return the full path of the written file
 * @throws JobCommandGenException if serialization or the disk write fails
 */
public String saveToFile(String fileName, SqlContext sqlContext) {
    try {
        String serialized = JsonUtil.toJsonString(sqlContext);
        String targetPath = String.join(SLASH, ROOT_DIR, sqlDir, fileName);
        // Write as UTF-8 so the file is readable regardless of the platform charset.
        FileUtils.write(new File(targetPath), serialized, StandardCharsets.UTF_8);
        log.info("serial sql context to local disk successfully, path: {}, data: {}", targetPath, serialized);
        return targetPath;
    } catch (Exception e) {
        throw new JobCommandGenException("serde sql context to local disk failed", e);
    }
}
Use of com.flink.platform.common.exception.JobCommandGenException in the project flink-platform-backend by itinycheng: class FlinkCommandBuilder, method buildCommand.
/**
 * Builds the Flink submit command for the given job.
 *
 * <p>Assembles the command prefix from the configured flink binary path and the job's
 * deploy mode, merges job-level configurations, sets the YARN application name and
 * provided lib dirs, and fills in the jar/args/main-class depending on the job type.
 *
 * @param jobInfo the job definition to build a command for
 * @return the fully populated {@link FlinkCommand}
 * @throws JobCommandGenException if the job type is not a Flink jar or Flink SQL job
 * @throws Exception if classpath resolution or sql-context serialization fails
 */
@Override
public JobCommand buildCommand(JobInfo jobInfo) throws Exception {
    FlinkCommand command = new FlinkCommand();
    DeployMode deployMode = jobInfo.getDeployMode();
    String execMode = String.format(EXEC_MODE, deployMode.mode, deployMode.target);
    command.setPrefix(flinkConfig.getCommandPath() + execMode);
    // add configurations (job-level configs override nothing yet; they seed the map)
    Map<String, Object> configs = command.getConfigs();
    if (jobInfo.getConfigs() != null) {
        configs.putAll(jobInfo.getConfigs());
    }
    // add yarn application name, e.g. "RUN_MODE-jobCode"
    String appName = String.join("-", jobInfo.getExecMode().name(), jobInfo.getCode());
    configs.put(YARN_APPLICATION_NAME, appName);
    // add lib dirs and user classpaths; ext jars are optional
    List<String> extJarList = ListUtils.defaultIfNull(jobInfo.getExtJars(), Collections.emptyList());
    configs.put(YARN_PROVIDED_LIB_DIRS, getMergedLibDirs(extJarList));
    List<URL> classpaths = getOrCreateClasspaths(jobInfo.getCode(), extJarList);
    command.setClasspaths(classpaths);
    switch (jobInfo.getType()) {
        case FLINK_JAR:
            // User-provided jar: subject holds the jar path.
            command.setMainJar(jobInfo.getSubject());
            command.setMainArgs(jobInfo.getMainArgs());
            command.setMainClass(jobInfo.getMainClass());
            break;
        case FLINK_SQL:
            // SQL job: run the platform's sql-runner jar against a serialized SqlContext file.
            String localJarPath = getLocalPathOfSqlJarFile();
            String filePath = sqlContextHelper.convertFromAndSaveToFile(jobInfo);
            command.setMainArgs(filePath);
            command.setMainJar(localJarPath);
            command.setMainClass(flinkConfig.getClassName());
            break;
        default:
            // Name the offending type so the failure is diagnosable from the message alone.
            throw new JobCommandGenException(
                    String.format("unsupported job type: %s", jobInfo.getType()));
    }
    return command;
}
Use of com.flink.platform.common.exception.JobCommandGenException in the project flink-platform-backend by itinycheng: class ProcessJobService, method processJob.
/**
 * Executes one job of a workflow run end to end: loads the ONLINE job definition,
 * substitutes SQL variables into the subject, builds and runs the submit command,
 * and persists a {@code JobRunInfo} row describing the submission.
 *
 * @param jobId     id of the job definition to run
 * @param flowRunId id of the enclosing workflow run
 * @return the saved run record, including execution status and callback info
 * @throws Exception if command building or execution fails
 */
public JobRunInfo processJob(final long jobId, final long flowRunId) throws Exception {
    JobCommand jobCommand = null;
    JobInfo jobInfo = null;
    try {
        // step 1: get job info (only ONLINE jobs are runnable)
        jobInfo = jobInfoService.getOne(new QueryWrapper<JobInfo>().lambda().eq(JobInfo::getId, jobId).eq(JobInfo::getStatus, JobStatus.ONLINE));
        if (jobInfo == null) {
            throw new JobCommandGenException(String.format("The job: %s is no longer exists or in delete status.", jobId));
        }
        // step 2: replace variables in the sql statement
        // Effectively-final copy required for use inside the lambdas below.
        JobInfo finalJobInfo = jobInfo;
        // Built-in variables: only those actually present in the subject are resolved.
        Map<String, Object> variableMap = Arrays.stream(SqlVar.values()).filter(sqlVar -> sqlVar.type == SqlVar.VarType.VARIABLE).filter(sqlVar -> finalJobInfo.getSubject().contains(sqlVar.variable)).map(sqlVar -> Pair.of(sqlVar.variable, sqlVar.valueProvider.apply(finalJobInfo))).collect(toMap(Pair::getLeft, Pair::getRight));
        // User-defined variables from the job config; these can shadow built-ins by name.
        MapUtils.emptyIfNull(finalJobInfo.getVariables()).forEach((name, value) -> {
            SqlVar sqlVar = SqlVar.matchPrefix(name);
            variableMap.put(name, sqlVar.valueProvider.apply(value));
        });
        // replace variable with actual value
        // Note: the subject is re-read each iteration because setSubject mutates it cumulatively.
        for (Map.Entry<String, Object> entry : variableMap.entrySet()) {
            String originSubject = jobInfo.getSubject();
            String distSubject = originSubject.replace(entry.getKey(), entry.getValue().toString());
            jobInfo.setSubject(distSubject);
        }
        JobType jobType = jobInfo.getType();
        String version = jobInfo.getVersion();
        // step 3: build job command, create a SqlContext if needed
        jobCommand = jobCommandBuilders.stream().filter(builder -> builder.isSupported(jobType, version)).findFirst().orElseThrow(() -> new JobCommandGenException("No available job command builder")).buildCommand(jobInfo);
        // step 4: submit job via the first executor that supports this job type
        LocalDateTime submitTime = LocalDateTime.now();
        String commandString = jobCommand.toCommandString();
        JobCallback callback = jobCommandExecutors.stream().filter(executor -> executor.isSupported(jobType)).findFirst().orElseThrow(() -> new JobCommandGenException("No available job command executor")).execCommand(commandString);
        // step 5: write job run info to db
        ExecutionStatus executionStatus = getExecutionStatus(jobType, callback);
        JobRunInfo jobRunInfo = new JobRunInfo();
        // Timestamp suffix keeps run names unique across repeated submissions of one job.
        jobRunInfo.setName(jobInfo.getName() + "-" + System.currentTimeMillis());
        jobRunInfo.setJobId(jobInfo.getId());
        jobRunInfo.setFlowRunId(flowRunId);
        jobRunInfo.setDeployMode(jobInfo.getDeployMode());
        jobRunInfo.setExecMode(jobInfo.getExecMode());
        // Subject stored post-substitution, so the run record shows the SQL actually executed.
        jobRunInfo.setSubject(jobInfo.getSubject());
        jobRunInfo.setStatus(executionStatus);
        jobRunInfo.setVariables(JsonUtil.toJsonString(variableMap));
        jobRunInfo.setBackInfo(JsonUtil.toJsonString(callback));
        jobRunInfo.setSubmitTime(submitTime);
        if (executionStatus.isTerminalState()) {
            jobRunInfo.setStopTime(LocalDateTime.now());
        }
        jobRunInfoService.save(jobRunInfo);
        // step 6: print job command info
        log.info("Job: {} submitted, time: {}", jobId, System.currentTimeMillis());
        return jobRunInfo;
    } finally {
        // Best-effort cleanup: FLINK_SQL jobs serialize a SqlContext file (mainArgs)
        // to local disk during command building; remove it whether or not submission succeeded.
        if (jobInfo != null && jobInfo.getType() == JobType.FLINK_SQL && jobCommand != null) {
            try {
                FlinkCommand flinkCommand = (FlinkCommand) jobCommand;
                if (flinkCommand.getMainArgs() != null) {
                    Files.deleteIfExists(Paths.get(flinkCommand.getMainArgs()));
                }
            } catch (Exception e) {
                // Cleanup failure must not mask the job result; log and continue.
                log.warn("Delete sql context file failed", e);
            }
        }
    }
}
Aggregations