Use of com.webank.wedatasphere.qualitis.exception.DataQualityTaskException in project Qualitis by WeBankFinTech.
The class ExecutionManagerImpl, method submitApplication.
/**
* Submit jobs to Linkis.
*/
@Override
public List<TaskSubmitResult> submitApplication(List<Rule> rules, String nodeName, String createTime, String user, String database, StringBuffer partition, Date date, Application application, String cluster, String startupParam, String setFlag, Map<String, String> execParams, StringBuffer runDate, Map<Long, Map> dataSourceMysqlConnect) throws ArgumentException, TaskTypeException, ConvertException, DataQualityTaskException, RuleVariableNotSupportException, RuleVariableNotFoundException, JobSubmitException, ClusterInfoNotConfigException, IOException, UnExpectedRequestException, MetaDataAcquireFailedException {
String csId = rules.iterator().next().getCsId();
// Check whether the rules' clusters are supported
LOGGER.info("Start to classify rules by cluster");
Map<String, List<Rule>> clusterNameMap = getRuleCluster(rules);
LOGGER.info("Succeeded to classify rules by cluster. Cluster map: {}", clusterNameMap);
if (StringUtils.isNotBlank(cluster)) {
LOGGER.info("When pick up a cluster, these datasources of rules must be from one cluster. Now start to put into the specify cluster.\n");
putAllRulesIntoSpecifyCluster(clusterNameMap, cluster);
LOGGER.info("Success to put into the specify cluster.\n");
}
List<TaskSubmitResult> taskSubmitResults = new ArrayList<>();
for (Map.Entry<String, List<Rule>> entry : clusterNameMap.entrySet()) {
List<Rule> clusterRules = entry.getValue();
String clusterName = StringUtils.isNotBlank(cluster) ? cluster : entry.getKey();
ClusterInfo clusterInfo = clusterInfoDao.findByClusterName(clusterName);
LOGGER.info("Start to check cluster config.");
if (clusterInfo == null) {
throw new ClusterInfoNotConfigException(clusterName + " {&DOES_NOT_EXIST}");
}
LOGGER.info("Succeed to pass the check of cluster config. All cluster of rules are configured");
// Divide rule into tasks
List<DataQualityTask> tasks = TaskDividerFactory.getDivider().divide(clusterRules, application.getId(), createTime, partition.toString(), date, database, user, taskExecuteLimitConfig.getTaskExecuteRuleSize());
LOGGER.info("Succeed to divide application into tasks. result: {}", tasks);
// Save divided tasks
saveDividedTask(tasks, clusterInfo, rules, application, createTime);
// Convert tasks into job
List<DataQualityJob> jobList = new ArrayList<>();
for (DataQualityTask task : tasks) {
DataQualityJob job = templateConverterFactory.getConverter(task).convert(task, date, setFlag, execParams, runDate.toString(), clusterInfo.getClusterType(), dataSourceMysqlConnect);
job.setUser(task.getUser());
jobList.add(job);
List<Long> ruleIdList = task.getRuleTaskDetails().stream().map(r -> r.getRule().getId()).collect(Collectors.toList());
LOGGER.info("Succeed to convert rule_id: {} into code. code: {}", ruleIdList, job.getJobCode());
}
LOGGER.info("Succeed to convert all template into codes. codes: {}", jobList);
// Submit job to linkis
List<JobSubmitResult> submitResults = new ArrayList<>();
for (DataQualityJob job : jobList) {
String code = String.join("\n", job.getJobCode());
String proxy = job.getUser();
Long taskId = job.getTaskId();
// Compatible with new and old submission interfaces: Linkis 1.x clusters go through submitJobNew, older clusters through submitJob.
JobSubmitResult result = null;
boolean engineReUse = false;
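// startupParam is a divider-joined list of key=value pairs. The engine_reuse flag is parsed out below and stripped, so only real engine startup parameters are forwarded to Linkis.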
if (StringUtils.isNotBlank(startupParam)) {
String[] startupParams = startupParam.split(SpecCharEnum.DIVIDER.getValue());
for (String param : startupParams) {
if (StringUtils.isEmpty(param)) {
continue;
}
String[] paramStrs = param.split("=");
if (paramStrs.length < 2) {
continue;
}
String key = paramStrs[0];
String value = paramStrs[1];
if ("engine_reuse".equals(key)) {
if ("true".equals(value)) {
engineReUse = true;
startupParam = startupParam.replace("engine_reuse=true", "");
} else {
engineReUse = false;
startupParam = startupParam.replace("engine_reuse=false", "");
}
break;
}
}
}
if (clusterInfo.getClusterType().endsWith(LINKIS_ONE_VERSION)) {
result = abstractJobSubmitter.submitJobNew(code, linkisConfig.getEngineName(), StringUtils.isNotBlank(proxy) ? proxy : user, clusterInfo.getLinkisAddress(), clusterName, taskId, csId, nodeName, StringUtils.isNotBlank(startupParam) ? startupParam : job.getStartupParam(), engineReUse);
} else {
result = abstractJobSubmitter.submitJob(code, linkisConfig.getEngineName(), StringUtils.isNotBlank(proxy) ? proxy : user, clusterInfo.getLinkisAddress(), clusterName, taskId, csId, nodeName, StringUtils.isNotBlank(startupParam) ? startupParam : job.getStartupParam());
}
if (result != null) {
submitResults.add(result);
} else {
Task taskInDb = taskDao.findById(taskId);
taskInDb.setStatus(TaskStatusEnum.TASK_NOT_EXIST.getCode());
taskDao.save(taskInDb);
taskSubmitResults.add(new TaskSubmitResult(application.getId(), null, clusterInfo.getClusterName()));
}
}
// Rewrite task remote ID.
rewriteTaskRemoteInfo(submitResults, taskSubmitResults, application.getId(), clusterInfo.getClusterName());
}
return taskSubmitResults;
}
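The engine_reuse handling above can be read in isolation. Below is a minimal, hypothetical sketch of that parsing (class name and helper are illustrative, not part of Qualitis), assuming the key=value pairs are joined by a literal "--"; the actual delimiter comes from SpecCharEnum.DIVIDER and may differ:

import org.apache.commons.lang3.StringUtils;

public class EngineReuseFlagSketch {
    // Hypothetical divider; Qualitis reads the real value from SpecCharEnum.DIVIDER.
    private static final String DIVIDER = "--";

    // Returns true only when an engine_reuse=true pair is present.
    static boolean extractEngineReuse(String startupParam) {
        if (StringUtils.isBlank(startupParam)) {
            return false;
        }
        for (String param : startupParam.split(DIVIDER)) {
            String[] kv = param.split("=");
            if (kv.length >= 2 && "engine_reuse".equals(kv[0])) {
                return "true".equals(kv[1]);
            }
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(extractEngineReuse("spark.executor.memory=4g--engine_reuse=true")); // true
        System.out.println(extractEngineReuse("spark.executor.memory=4g"));                    // false
    }
}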
Use of com.webank.wedatasphere.qualitis.exception.DataQualityTaskException in project Qualitis by WeBankFinTech.
The class SqlTemplateConverter, method convert.
/**
* Convert a task into actual Scala code.
* @param dataQualityTask the divided task to convert
* @param date the execution date
* @param setFlag divider-joined Spark "set" configuration string
* @param execParams execution parameters
* @param runDate the run date string
* @param clusterType the type of the target cluster
* @param dataSourceMysqlConnect MySQL connection information per data source
* @return the converted data quality job
* @throws ConvertException
* @throws DataQualityTaskException
* @throws RuleVariableNotSupportException
* @throws RuleVariableNotFoundException
*/
@Override
public DataQualityJob convert(DataQualityTask dataQualityTask, Date date, String setFlag, Map<String, String> execParams, String runDate, String clusterType, Map<Long, Map> dataSourceMysqlConnect) throws ConvertException, DataQualityTaskException, RuleVariableNotSupportException, RuleVariableNotFoundException, IOException, UnExpectedRequestException {
LOGGER.info("Start to convert template to actual code, task: " + dataQualityTask);
if (null == dataQualityTask || dataQualityTask.getRuleTaskDetails().isEmpty()) {
throw new DataQualityTaskException("Task cannot be null or empty");
}
DataQualityJob job = new DataQualityJob();
List<String> initSentence = abstractTranslator.getInitSentence();
job.getJobCode().addAll(initSentence);
LOGGER.info("Succeed to get init code. codes: " + initSentence);
if (StringUtils.isNotBlank(setFlag)) {
LOGGER.info("Start to solve with set flag. Spark set conf string: {}", setFlag);
String[] setStrs = setFlag.split(SpecCharEnum.DIVIDER.getValue());
for (String str : setStrs) {
job.getJobCode().add("spark.sql(\"set " + str + "\")");
}
LOGGER.info("Finish to solve with set flag.");
}
int count = 0;
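// count is the 1-based index of the rule within the task; it is passed into generateSparkSqlByTask for each rule.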
for (RuleTaskDetail ruleTaskDetail : dataQualityTask.getRuleTaskDetails()) {
count++;
List<String> codes = generateSparkSqlByTask(ruleTaskDetail.getRule(), date, dataQualityTask.getApplicationId(), ruleTaskDetail.getMidTableName(), dataQualityTask.getCreateTime(), new StringBuffer(dataQualityTask.getPartition()), execParams, count, runDate, dataSourceMysqlConnect);
job.getJobCode().addAll(codes);
LOGGER.info("Succeed to convert rule into code. rule_id: {}, rul_name: {}, codes: {}", ruleTaskDetail.getRule().getId(), ruleTaskDetail.getRule().getName(), codes);
}
LOGGER.info("Succeed to convert all rule into actual scala code.");
job.setTaskId(dataQualityTask.getTaskId());
job.setStartupParam(dataQualityTask.getStartupParam());
return job;
}
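To make the set-flag handling concrete: every divider-separated entry in setFlag becomes one spark.sql("set ...") line in the generated job code. A minimal sketch, again assuming a hypothetical "--" divider in place of SpecCharEnum.DIVIDER:

import java.util.ArrayList;
import java.util.List;

public class SetFlagSketch {
    public static void main(String[] args) {
        String setFlag = "spark.sql.shuffle.partitions=10--hive.exec.dynamic.partition=true";
        List<String> jobCode = new ArrayList<>();
        // One spark.sql("set ...") statement per configuration entry.
        for (String conf : setFlag.split("--")) {
            jobCode.add("spark.sql(\"set " + conf + "\")");
        }
        jobCode.forEach(System.out::println);
        // Prints:
        // spark.sql("set spark.sql.shuffle.partitions=10")
        // spark.sql("set hive.exec.dynamic.partition=true")
    }
}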