Use of com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException in project Qualitis by WeBankFinTech.
In the class ExecutionManagerImpl, method submitApplication:
/**
 * Submit jobs to Linkis.
 */
@Override
public List<TaskSubmitResult> submitApplication(List<Rule> rules, String nodeName, String createTime, String user, String database, StringBuffer partition, Date date, Application application, String cluster, String startupParam, String setFlag, Map<String, String> execParams, StringBuffer runDate, Map<Long, Map> dataSourceMysqlConnect) throws ArgumentException, TaskTypeException, ConvertException, DataQualityTaskException, RuleVariableNotSupportException, RuleVariableNotFoundException, JobSubmitException, ClusterInfoNotConfigException, IOException, UnExpectedRequestException, MetaDataAcquireFailedException {
    String csId = rules.iterator().next().getCsId();
    // Classify rules by cluster and check that each cluster is supported.
    LOGGER.info("Start to collect rules into clusters.");
    Map<String, List<Rule>> clusterNameMap = getRuleCluster(rules);
    LOGGER.info("Succeeded to classify rules by cluster, cluster map: {}", clusterNameMap);
    if (StringUtils.isNotBlank(cluster)) {
        LOGGER.info("When a cluster is specified, the data sources of all rules must come from that one cluster. Start to put rules into the specified cluster.");
        putAllRulesIntoSpecifyCluster(clusterNameMap, cluster);
        LOGGER.info("Succeeded to put rules into the specified cluster.");
    }
    List<TaskSubmitResult> taskSubmitResults = new ArrayList<>();
    for (String clusterName : clusterNameMap.keySet()) {
        List<Rule> clusterRules = clusterNameMap.get(clusterName);
        if (StringUtils.isNotBlank(cluster)) {
            clusterName = cluster;
        }
        ClusterInfo clusterInfo = clusterInfoDao.findByClusterName(clusterName);
        LOGGER.info("Start to check cluster config.");
        if (clusterInfo == null) {
            throw new ClusterInfoNotConfigException(clusterName + " {&DOES_NOT_EXIST}");
        }
        LOGGER.info("Succeeded to pass the cluster config check. Every cluster used by the rules is configured.");
        // Divide the rules into tasks.
        List<DataQualityTask> tasks = TaskDividerFactory.getDivider().divide(clusterRules, application.getId(), createTime, partition.toString(), date, database, user, taskExecuteLimitConfig.getTaskExecuteRuleSize());
        LOGGER.info("Succeeded to divide the application into tasks. Result: {}", tasks);
        // Save the divided tasks.
        saveDividedTask(tasks, clusterInfo, rules, application, createTime);
        // Convert the tasks into jobs.
        List<DataQualityJob> jobList = new ArrayList<>();
        for (DataQualityTask task : tasks) {
            DataQualityJob job = templateConverterFactory.getConverter(task).convert(task, date, setFlag, execParams, runDate.toString(), clusterInfo.getClusterType(), dataSourceMysqlConnect);
            job.setUser(task.getUser());
            jobList.add(job);
            List<Long> ruleIdList = task.getRuleTaskDetails().stream().map(r -> r.getRule().getId()).collect(Collectors.toList());
            LOGGER.info("Succeeded to convert rule_id: {} into code. Code: {}", ruleIdList, job.getJobCode());
        }
        LOGGER.info("Succeeded to convert all templates into code: {}", jobList);
        // Submit the jobs to Linkis.
        List<JobSubmitResult> submitResults = new ArrayList<>();
        for (DataQualityJob job : jobList) {
            String code = String.join("\n", job.getJobCode());
            String proxy = job.getUser();
            Long taskId = job.getTaskId();
            // Stay compatible with both the new and the old submission interfaces.
            JobSubmitResult result = null;
            boolean engineReUse = false;
            if (StringUtils.isNotBlank(startupParam)) {
                // Extract the engine_reuse flag from the startup parameters and strip it out.
                String[] startupParams = startupParam.split(SpecCharEnum.DIVIDER.getValue());
                for (String param : startupParams) {
                    if (StringUtils.isEmpty(param)) {
                        continue;
                    }
                    String[] paramStrs = param.split("=");
                    if (paramStrs.length < 2) {
                        continue;
                    }
                    String key = paramStrs[0];
                    String value = paramStrs[1];
                    if ("engine_reuse".equals(key)) {
                        if ("true".equals(value)) {
                            engineReUse = true;
                            startupParam = startupParam.replace("engine_reuse=true", "");
                        } else {
                            engineReUse = false;
                            startupParam = startupParam.replace("engine_reuse=false", "");
                        }
                        break;
                    }
                }
            }
            if (clusterInfo.getClusterType().endsWith(LINKIS_ONE_VERSION)) {
                result = abstractJobSubmitter.submitJobNew(code, linkisConfig.getEngineName(), StringUtils.isNotBlank(proxy) ? proxy : user, clusterInfo.getLinkisAddress(), clusterName, taskId, csId, nodeName, StringUtils.isNotBlank(startupParam) ? startupParam : job.getStartupParam(), engineReUse);
            } else {
                result = abstractJobSubmitter.submitJob(code, linkisConfig.getEngineName(), StringUtils.isNotBlank(proxy) ? proxy : user, clusterInfo.getLinkisAddress(), clusterName, taskId, csId, nodeName, StringUtils.isNotBlank(startupParam) ? startupParam : job.getStartupParam());
            }
            if (result != null) {
                submitResults.add(result);
            } else {
                // No result came back: mark the task as not existing and record an empty submit result.
                Task taskInDb = taskDao.findById(taskId);
                taskInDb.setStatus(TaskStatusEnum.TASK_NOT_EXIST.getCode());
                taskDao.save(taskInDb);
                taskSubmitResults.add(new TaskSubmitResult(application.getId(), null, clusterInfo.getClusterName()));
            }
        }
        // Rewrite the remote task IDs returned by Linkis.
        rewriteTaskRemoteInfo(submitResults, taskSubmitResults, application.getId(), clusterInfo.getClusterName());
    }
    return taskSubmitResults;
}
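The engine_reuse handling above is worth seeing in isolation: the startup parameter is a divider-separated list of key=value pairs, and the flag is consumed (stripped from the string) so it is not forwarded to Linkis as a real engine parameter. Below is a minimal, self-contained sketch of that parsing; the semicolon divider, the class name, and the ParseResult holder are illustrative assumptions, since the real code takes the divider from SpecCharEnum.DIVIDER and mutates the local startupParam instead.

// Sketch only: the ";" divider and these names are assumptions for illustration.
public final class StartupParamSketch {

    static final class ParseResult {
        final boolean engineReuse;
        final String remainingParams;
        ParseResult(boolean engineReuse, String remainingParams) {
            this.engineReuse = engineReuse;
            this.remainingParams = remainingParams;
        }
    }

    static ParseResult extractEngineReuse(String startupParam, String divider) {
        boolean engineReuse = false;
        for (String param : startupParam.split(divider)) {
            String[] kv = param.split("=");
            if (kv.length < 2) {
                continue;  // skip blank or malformed entries, as the original loop does
            }
            if ("engine_reuse".equals(kv[0])) {
                engineReuse = "true".equals(kv[1]);
                // Consume the flag so it is not passed through to Linkis.
                startupParam = startupParam.replace("engine_reuse=" + kv[1], "");
                break;
            }
        }
        return new ParseResult(engineReuse, startupParam);
    }

    public static void main(String[] args) {
        ParseResult result = extractEngineReuse("spark.executor.memory=4g;engine_reuse=true", ";");
        System.out.println(result.engineReuse + " / " + result.remainingParams);
        // prints: true / spark.executor.memory=4g;
    }
}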
Use of com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException in project Qualitis by WeBankFinTech.
In the class SqlTemplateConverter, method generateSparkSqlByTask:
/**
 * Convert a task into Scala (Spark SQL) code lines.
 * @param rule the rule to convert
 * @param date the schedule date of the task
 * @param applicationId the ID of the submitting application
 * @param midTableName the name of the middle table that stores intermediate results
 * @param createTime the creation time of the application
 * @param partition the partition filter; may be blank
 * @param execParams the execution parameters
 * @param count the counter used to number intermediate tables and statements
 * @param runDate the run date written with the saved results
 * @param dataSourceMysqlConnect MySQL connection parameters, keyed by rule data source ID
 * @return the generated code lines
 * @throws ConvertException
 * @throws RuleVariableNotSupportException
 * @throws RuleVariableNotFoundException
 * @throws UnExpectedRequestException
 */
private List<String> generateSparkSqlByTask(Rule rule, Date date, String applicationId, String midTableName, String createTime, StringBuffer partition, Map<String, String> execParams, int count, String runDate, Map<Long, Map> dataSourceMysqlConnect) throws ConvertException, RuleVariableNotSupportException, RuleVariableNotFoundException, UnExpectedRequestException {
    List<String> sqlList = new ArrayList<>();
    // Collect rule metrics and build them into the save-sentence SQL.
    List<RuleMetric> ruleMetrics = rule.getAlarmConfigs().stream().map(AlarmConfig::getRuleMetric).distinct().collect(Collectors.toList());
    Map<String, Long> ruleMetricMap = new HashMap<>(ruleMetrics.size());
    if (CollectionUtils.isNotEmpty(ruleMetrics)) {
        LOGGER.info("Start to get rule metrics for task result saving. Rule metrics: {}", Arrays.toString(ruleMetrics.toArray()));
        for (RuleMetric ruleMetric : ruleMetrics) {
            if (ruleMetric != null) {
                ruleMetricMap.put(ruleMetric.getName(), ruleMetric.getId());
            }
        }
        LOGGER.info("Finished getting rule metrics for task result saving.");
    }
    // Get the SQL from the template after removing '\n'.
    String templateMidTableAction = rule.getTemplate().getMidTableAction().replace("\n", " ");
    Map<String, String> filters = new HashMap<>(2);
    if (CUSTOM_RULE.intValue() == rule.getRuleType()) {
        templateMidTableAction = customMidTableActionUpdate(rule, templateMidTableAction, date, execParams, partition, ruleMetricMap);
    } else if (MUL_SOURCE_RULE.intValue() == rule.getRuleType()) {
        templateMidTableAction = multiMidTableActionUpdate(rule, templateMidTableAction, date, filters);
    }
    // Get input meta from the template.
    List<RuleVariable> inputMetaRuleVariables = rule.getRuleVariables().stream().filter(ruleVariable -> ruleVariable.getInputActionStep().equals(InputActionStepEnum.TEMPLATE_INPUT_META.getCode())).collect(Collectors.toList());
    // If no partition is specified, fall back to the filter from the rule configuration.
    if (StringUtils.isBlank(partition.toString())) {
        templateMidTableAction = fillPartitionWithRuleConfiguration(partition, rule, templateMidTableAction, inputMetaRuleVariables);
    }
    // Get databases and tables.
    Map<String, String> dbTableMap = new HashMap<>(4);
    // Get mappings.
    StringBuffer mappings = new StringBuffer();
    StringBuffer realFilter = new StringBuffer();
    // Get the SQL from the template and replace all placeholders.
    String midTableAction = replaceVariable(templateMidTableAction, inputMetaRuleVariables, partition.toString(), realFilter, dbTableMap, mappings, date);
    Set<TemplateStatisticsInputMeta> templateStatisticsAction = rule.getTemplate().getStatisticAction();
    Map sourceConnect = new HashMap(8);
    Map targetConnect = new HashMap(8);
    if (dataSourceMysqlConnect != null && dataSourceMysqlConnect.size() > 0) {
        for (RuleDataSource ruleDataSource : rule.getRuleDataSources()) {
            Map connectParams = dataSourceMysqlConnect.get(ruleDataSource.getId());
            if (connectParams == null) {
                continue;
            }
            if (ruleDataSource.getDatasourceIndex() != null && ruleDataSource.getDatasourceIndex().equals(0)) {
                // MySQL with security enabled: the connection parameters hold the decrypted password and user name.
                sourceConnect = connectParams;
            }
            if (ruleDataSource.getDatasourceIndex() != null && ruleDataSource.getDatasourceIndex().equals(1)) {
                targetConnect = connectParams;
            }
        }
    }
    // Generate a UUID shared by the statements of this task.
    sqlList.add("val UUID = java.util.UUID.randomUUID.toString");
    // Cross-table rule.
    if (RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode().equals(rule.getTemplate().getTemplateType()) && dbTableMap.size() > 0) {
        // Import SQL functions.
        sqlList.addAll(getImportSql());
        // Transform the original tables.
        Set<String> columns = new HashSet<>();
        if (rule.getTemplate().getId().longValue() == MUL_SOURCE_ACCURACY_TEMPLATE_ID.longValue()) {
            // Get accuracy columns.
            columns = rule.getRuleDataSourceMappings().stream().map(RuleDataSourceMapping::getLeftColumnNames).map(column -> column.replace("tmp1.", "").replace("tmp2.", "")).collect(Collectors.toSet());
        }
        if (rule.getTemplate().getId().longValue() == MUL_SOURCE_COMMON_TEMPLATE_ID.longValue()) {
            sqlList.addAll(getCommonTransformSql(dbTableMap, mappings, count, partition.toString(), filters, sourceConnect, targetConnect));
        } else {
            sqlList.addAll(getSpecialTransformSql(dbTableMap, count, partition.toString(), filters, Strings.join(columns, ','), sourceConnect, targetConnect));
            if (optimizationConfig.getLightweightQuery()) {
                count += 3;
            }
        }
        sqlList.addAll(getSaveMidTableSentenceSettings());
        sqlList.addAll(getSaveMidTableSentence(midTableName, count, runDate));
    } else {
        // Generate the select statement and save the result into the Hive database.
        RuleDataSource ruleDataSource = rule.getRuleDataSources().stream().filter(dataSource -> dataSource.getDatasourceIndex() == null).iterator().next();
        Map connParams = dataSourceMysqlConnect != null ? dataSourceMysqlConnect.get(ruleDataSource.getId()) : null;
        sqlList.addAll(generateSparkSqlAndSaveSentence(midTableAction, midTableName, rule.getTemplate(), count, connParams, runDate));
        count++;
    }
    // Generate the statistics statement and save the result into MySQL.
    List<RuleVariable> statisticsRuleVariables = rule.getRuleVariables().stream().filter(ruleVariable -> ruleVariable.getInputActionStep().equals(InputActionStepEnum.STATISTICS_ARG.getCode())).collect(Collectors.toList());
    sqlList.addAll(saveStatisticAndSaveMySqlSentence(rule.getId(), ruleMetricMap, templateStatisticsAction, applicationId, statisticsRuleVariables, createTime, count, runDate));
    return sqlList;
}
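For intuition about what this method returns: the caller joins the returned lines with "\n" (see submitApplication above, String.join("\n", job.getJobCode())) and submits the result to Linkis as one script. The sketch below is a hypothetical reconstruction of roughly what the single-source branch emits; every database, table, and variable name in it is invented for illustration and does not come from the Qualitis source.

import java.util.ArrayList;
import java.util.List;

// Hypothetical illustration of the kind of Scala lines generateSparkSqlByTask
// returns for a single-source rule; all names below are invented.
public final class GeneratedJobSketch {

    static List<String> sketchJobCode() {
        List<String> lines = new ArrayList<>();
        // Shared UUID, exactly as the method adds first.
        lines.add("val UUID = java.util.UUID.randomUUID.toString");
        // The mid-table action: the templated SELECT with placeholders already replaced.
        lines.add("val midTable_1 = spark.sql(\"SELECT * FROM db.fact_orders WHERE ds = '20240101' AND order_id IS NULL\")");
        // Save the intermediate result into the mid table for later inspection.
        lines.add("midTable_1.write.mode(\"overwrite\").saveAsTable(\"qualitis_mid.mid_table_1\")");
        // The statistics statement whose value is later persisted to MySQL.
        lines.add("val statisticsTable_1 = spark.sql(\"SELECT COUNT(1) FROM qualitis_mid.mid_table_1\")");
        return lines;
    }

    public static void main(String[] args) {
        // ExecutionManagerImpl.submitApplication joins the lines the same way
        // before handing the code to Linkis.
        String code = String.join("\n", sketchJobCode());
        System.out.println(code);
    }
}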