
Example 1 with JobCommandGenException

Use of com.flink.platform.common.exception.JobCommandGenException in project flink-platform-backend by itinycheng.

In the class SqlContextHelper, the method toSqls:

public List<Sql> toSqls(String subject) {
    subject = subject.trim();
    // ensure the last statement is terminated so the regex can match it
    if (!subject.endsWith(SEMICOLON)) {
        subject = subject + SEMICOLON;
    }
    List<Sql> sqlList = new ArrayList<>();
    // split the subject into individual statements via SQL_PATTERN
    Matcher matcher = SQL_PATTERN.matcher(subject);
    while (matcher.find()) {
        String statement = matcher.group();
        sqlList.add(SqlType.parse(statement));
    }
    if (sqlList.size() == 0) {
        throw new JobCommandGenException(String.format("no sql found or parsing failed, subject: %s", subject));
    }
    return sqlList;
}
Also used : Matcher(java.util.regex.Matcher) ArrayList(java.util.ArrayList) JobCommandGenException(com.flink.platform.common.exception.JobCommandGenException) Sql(com.flink.platform.common.job.Sql)
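
For orientation, here is a minimal standalone sketch of the same split-and-validate idea, assuming a simple semicolon-delimited pattern. The project's actual SQL_PATTERN, Sql, and SqlType.parse are not reproduced, and JobCommandGenException is stood in for by IllegalStateException so the sketch compiles on its own.

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SqlSplitSketch {

    // Hypothetical stand-in for the project's SQL_PATTERN constant.
    private static final Pattern SQL_PATTERN = Pattern.compile("[^;]+;");

    public static List<String> toSqls(String subject) {
        subject = subject.trim();
        // mirror toSqls: guarantee a trailing semicolon so the last statement matches
        if (!subject.endsWith(";")) {
            subject = subject + ";";
        }
        List<String> sqlList = new ArrayList<>();
        Matcher matcher = SQL_PATTERN.matcher(subject);
        while (matcher.find()) {
            sqlList.add(matcher.group().trim());
        }
        if (sqlList.isEmpty()) {
            // the real method throws JobCommandGenException here
            throw new IllegalStateException("no sql found or parsing failed, subject: " + subject);
        }
        return sqlList;
    }

    public static void main(String[] args) {
        toSqls("CREATE TABLE t (id INT); INSERT INTO t SELECT 1").forEach(System.out::println);
    }
}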

Example 2 with JobCommandGenException

Use of com.flink.platform.common.exception.JobCommandGenException in project flink-platform-backend by itinycheng.

In the class SqlContextHelper, the method saveToFile:

public String saveToFile(String fileName, SqlContext sqlContext) {
    try {
        // serialize the SqlContext to JSON and write it under the configured sql directory
        String json = JsonUtil.toJsonString(sqlContext);
        String sqlFilePath = String.join(SLASH, ROOT_DIR, sqlDir, fileName);
        FileUtils.write(new File(sqlFilePath), json, StandardCharsets.UTF_8);
        log.info("serialized sql context to local disk successfully, path: {}, data: {}", sqlFilePath, json);
        return sqlFilePath;
    } catch (Exception e) {
        throw new JobCommandGenException("serde sql context to local disk failed", e);
    }
}
Also used : File(java.io.File) JobCommandGenException(com.flink.platform.common.exception.JobCommandGenException)
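
A hedged, JDK-only sketch of the same write-and-wrap-errors pattern. The real method uses JsonUtil, commons-io FileUtils, and the SLASH/ROOT_DIR/sqlDir constants, and rethrows as JobCommandGenException; the temp-directory path and JSON payload below are illustrative assumptions.

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class SaveSqlContextSketch {

    public static String saveToFile(String fileName, String json) {
        // hypothetical location; the real code joins ROOT_DIR, sqlDir and fileName
        Path sqlFilePath = Paths.get(System.getProperty("java.io.tmpdir"), "sql_dir", fileName);
        try {
            Files.createDirectories(sqlFilePath.getParent());
            Files.write(sqlFilePath, json.getBytes(StandardCharsets.UTF_8));
            return sqlFilePath.toString();
        } catch (IOException e) {
            // the real method rethrows as JobCommandGenException
            throw new UncheckedIOException("serialize sql context to local disk failed", e);
        }
    }

    public static void main(String[] args) {
        // hypothetical JSON payload; the real code serializes a SqlContext via JsonUtil
        System.out.println(saveToFile("job_1.json", "{\"sqls\":[\"SELECT 1;\"]}"));
    }
}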

Example 3 with JobCommandGenException

Use of com.flink.platform.common.exception.JobCommandGenException in project flink-platform-backend by itinycheng.

In the class FlinkCommandBuilder, the method buildCommand:

@Override
public JobCommand buildCommand(JobInfo jobInfo) throws Exception {
    FlinkCommand command = new FlinkCommand();
    DeployMode deployMode = jobInfo.getDeployMode();
    String execMode = String.format(EXEC_MODE, deployMode.mode, deployMode.target);
    command.setPrefix(flinkConfig.getCommandPath() + execMode);
    // add configurations
    Map<String, Object> configs = command.getConfigs();
    if (jobInfo.getConfigs() != null) {
        configs.putAll(jobInfo.getConfigs());
    }
    // add yarn application name
    String appName = String.join("-", jobInfo.getExecMode().name(), jobInfo.getCode());
    configs.put(YARN_APPLICATION_NAME, appName);
    // add lib dirs and user classpaths
    List<String> extJarList = ListUtils.defaultIfNull(jobInfo.getExtJars(), Collections.emptyList());
    configs.put(YARN_PROVIDED_LIB_DIRS, getMergedLibDirs(extJarList));
    List<URL> classpaths = getOrCreateClasspaths(jobInfo.getCode(), extJarList);
    command.setClasspaths(classpaths);
    switch(jobInfo.getType()) {
        case FLINK_JAR:
            command.setMainJar(jobInfo.getSubject());
            command.setMainArgs(jobInfo.getMainArgs());
            command.setMainClass(jobInfo.getMainClass());
            break;
        case FLINK_SQL:
            String localJarPath = getLocalPathOfSqlJarFile();
            String filePath = sqlContextHelper.convertFromAndSaveToFile(jobInfo);
            command.setMainArgs(filePath);
            command.setMainJar(localJarPath);
            command.setMainClass(flinkConfig.getClassName());
            break;
        default:
            throw new JobCommandGenException("unsupported job type");
    }
    return command;
}
Also used : DeployMode(com.flink.platform.common.enums.DeployMode) JobCommandGenException(com.flink.platform.common.exception.JobCommandGenException) URL(java.net.URL)
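
As a rough illustration of what the builder produces, the sketch below renders a command prefix plus a configs map as a single CLI string. The EXEC_MODE template, the -Dkey=value rendering, and all values are assumptions made for the example; the real formatting lives in FlinkCommand.toCommandString and FlinkConfig.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class FlinkCommandSketch {

    public static void main(String[] args) {
        // hypothetical values; the real builder takes them from FlinkConfig and JobInfo
        String commandPath = "/opt/flink/bin/flink";
        String execMode = " run-application -t yarn-application ";

        Map<String, Object> configs = new LinkedHashMap<>();
        configs.put("yarn.application.name", "BATCH-job_0001");
        configs.put("yarn.provided.lib.dirs", "hdfs:///flink/provided_libs");

        // render configs as -Dkey=value pairs, one plausible toCommandString strategy
        String confArgs = configs.entrySet().stream()
                .map(e -> "-D" + e.getKey() + "=" + e.getValue())
                .collect(Collectors.joining(" "));

        String command = commandPath + execMode + confArgs
                + " -c com.example.SqlJobMain /path/to/main.jar /path/to/sql_context.json";
        System.out.println(command);
    }
}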

Example 4 with JobCommandGenException

Use of com.flink.platform.common.exception.JobCommandGenException in project flink-platform-backend by itinycheng.

In the class ProcessJobService, the method processJob:

public JobRunInfo processJob(final long jobId, final long flowRunId) throws Exception {
    JobCommand jobCommand = null;
    JobInfo jobInfo = null;
    try {
        // step 1: get job info
        jobInfo = jobInfoService.getOne(new QueryWrapper<JobInfo>().lambda().eq(JobInfo::getId, jobId).eq(JobInfo::getStatus, JobStatus.ONLINE));
        if (jobInfo == null) {
            throw new JobCommandGenException(String.format("The job: %s no longer exists or is in deleted status.", jobId));
        }
        // step 2: replace variables in the sql statement
        JobInfo finalJobInfo = jobInfo;
        Map<String, Object> variableMap = Arrays.stream(SqlVar.values())
                .filter(sqlVar -> sqlVar.type == SqlVar.VarType.VARIABLE)
                .filter(sqlVar -> finalJobInfo.getSubject().contains(sqlVar.variable))
                .map(sqlVar -> Pair.of(sqlVar.variable, sqlVar.valueProvider.apply(finalJobInfo)))
                .collect(toMap(Pair::getLeft, Pair::getRight));
        MapUtils.emptyIfNull(finalJobInfo.getVariables()).forEach((name, value) -> {
            SqlVar sqlVar = SqlVar.matchPrefix(name);
            variableMap.put(name, sqlVar.valueProvider.apply(value));
        });
        // replace variable with actual value
        for (Map.Entry<String, Object> entry : variableMap.entrySet()) {
            String originSubject = jobInfo.getSubject();
            String distSubject = originSubject.replace(entry.getKey(), entry.getValue().toString());
            jobInfo.setSubject(distSubject);
        }
        JobType jobType = jobInfo.getType();
        String version = jobInfo.getVersion();
        // step 3: build job command, create a SqlContext if needed
        jobCommand = jobCommandBuilders.stream()
                .filter(builder -> builder.isSupported(jobType, version))
                .findFirst()
                .orElseThrow(() -> new JobCommandGenException("No available job command builder"))
                .buildCommand(jobInfo);
        // step 4: submit job
        LocalDateTime submitTime = LocalDateTime.now();
        String commandString = jobCommand.toCommandString();
        JobCallback callback = jobCommandExecutors.stream()
                .filter(executor -> executor.isSupported(jobType))
                .findFirst()
                .orElseThrow(() -> new JobCommandGenException("No available job command executor"))
                .execCommand(commandString);
        // step 5: write job run info to db
        ExecutionStatus executionStatus = getExecutionStatus(jobType, callback);
        JobRunInfo jobRunInfo = new JobRunInfo();
        jobRunInfo.setName(jobInfo.getName() + "-" + System.currentTimeMillis());
        jobRunInfo.setJobId(jobInfo.getId());
        jobRunInfo.setFlowRunId(flowRunId);
        jobRunInfo.setDeployMode(jobInfo.getDeployMode());
        jobRunInfo.setExecMode(jobInfo.getExecMode());
        jobRunInfo.setSubject(jobInfo.getSubject());
        jobRunInfo.setStatus(executionStatus);
        jobRunInfo.setVariables(JsonUtil.toJsonString(variableMap));
        jobRunInfo.setBackInfo(JsonUtil.toJsonString(callback));
        jobRunInfo.setSubmitTime(submitTime);
        if (executionStatus.isTerminalState()) {
            jobRunInfo.setStopTime(LocalDateTime.now());
        }
        jobRunInfoService.save(jobRunInfo);
        // step 6: print job command info
        log.info("Job: {} submitted, time: {}", jobId, System.currentTimeMillis());
        return jobRunInfo;
    } finally {
        if (jobInfo != null && jobInfo.getType() == JobType.FLINK_SQL && jobCommand != null) {
            try {
                FlinkCommand flinkCommand = (FlinkCommand) jobCommand;
                if (flinkCommand.getMainArgs() != null) {
                    Files.deleteIfExists(Paths.get(flinkCommand.getMainArgs()));
                }
            } catch (Exception e) {
                log.warn("Delete sql context file failed", e);
            }
        }
    }
}
Also used : Arrays(java.util.Arrays) JsonUtil(com.flink.platform.common.util.JsonUtil) FlinkCommand(com.flink.platform.web.command.FlinkCommand) JobCallback(com.flink.platform.web.command.JobCallback) LocalDateTime(java.time.LocalDateTime) Autowired(org.springframework.beans.factory.annotation.Autowired) JobInfoService(com.flink.platform.dao.service.JobInfoService) SqlVar(com.flink.platform.web.enums.SqlVar) Pair(org.apache.commons.lang3.tuple.Pair) Collectors.toMap(java.util.stream.Collectors.toMap) Service(org.springframework.stereotype.Service) Map(java.util.Map) SUCCESS(com.flink.platform.common.enums.ExecutionStatus.SUCCESS) JobStatus(com.flink.platform.common.enums.JobStatus) CommandBuilder(com.flink.platform.web.command.CommandBuilder) JobType(com.flink.platform.common.enums.JobType) MapUtils(org.apache.commons.collections4.MapUtils) QueryWrapper(com.baomidou.mybatisplus.core.conditions.query.QueryWrapper) JobInfo(com.flink.platform.dao.entity.JobInfo) Files(java.nio.file.Files) JobRunInfoService(com.flink.platform.dao.service.JobRunInfoService) JobRunInfo(com.flink.platform.dao.entity.JobRunInfo) CommandExecutor(com.flink.platform.web.command.CommandExecutor) Slf4j(lombok.extern.slf4j.Slf4j) List(java.util.List) ExecutionStatus(com.flink.platform.common.enums.ExecutionStatus) JobCommand(com.flink.platform.web.command.JobCommand) Paths(java.nio.file.Paths) JobCommandGenException(com.flink.platform.common.exception.JobCommandGenException)
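
The variable substitution in step 2 is the easiest piece to isolate. Below is a self-contained sketch of replacing placeholder tokens in a job subject; the placeholder names and values are hypothetical, whereas the real map is assembled from SqlVar and JobInfo.getVariables().

import java.util.LinkedHashMap;
import java.util.Map;

public class VariableReplaceSketch {

    public static void main(String[] args) {
        // hypothetical placeholders; the real map is built from SqlVar and the job's variables
        Map<String, Object> variableMap = new LinkedHashMap<>();
        variableMap.put("${job_code}", "job_0001");
        variableMap.put("${current_timestamp}", System.currentTimeMillis());

        String subject = "INSERT INTO sink SELECT '${job_code}', ${current_timestamp}";
        // replace each variable with its actual value, as in step 2 of processJob
        for (Map.Entry<String, Object> entry : variableMap.entrySet()) {
            subject = subject.replace(entry.getKey(), entry.getValue().toString());
        }
        System.out.println(subject);
    }
}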

Aggregations

JobCommandGenException (com.flink.platform.common.exception.JobCommandGenException) 4 usages
QueryWrapper (com.baomidou.mybatisplus.core.conditions.query.QueryWrapper) 1 usage
DeployMode (com.flink.platform.common.enums.DeployMode) 1 usage
ExecutionStatus (com.flink.platform.common.enums.ExecutionStatus) 1 usage
SUCCESS (com.flink.platform.common.enums.ExecutionStatus.SUCCESS) 1 usage
JobStatus (com.flink.platform.common.enums.JobStatus) 1 usage
JobType (com.flink.platform.common.enums.JobType) 1 usage
Sql (com.flink.platform.common.job.Sql) 1 usage
JsonUtil (com.flink.platform.common.util.JsonUtil) 1 usage
JobInfo (com.flink.platform.dao.entity.JobInfo) 1 usage
JobRunInfo (com.flink.platform.dao.entity.JobRunInfo) 1 usage
JobInfoService (com.flink.platform.dao.service.JobInfoService) 1 usage
JobRunInfoService (com.flink.platform.dao.service.JobRunInfoService) 1 usage
CommandBuilder (com.flink.platform.web.command.CommandBuilder) 1 usage
CommandExecutor (com.flink.platform.web.command.CommandExecutor) 1 usage
FlinkCommand (com.flink.platform.web.command.FlinkCommand) 1 usage
JobCallback (com.flink.platform.web.command.JobCallback) 1 usage
JobCommand (com.flink.platform.web.command.JobCommand) 1 usage
SqlVar (com.flink.platform.web.enums.SqlVar) 1 usage
File (java.io.File) 1 usage