Usage of com.webank.wedatasphere.qualitis.entity.ClusterInfo in the Qualitis project by WeBankFinTech: method checkDataSourceClusterSupport of class RuleDataSourceServiceImpl.
/**
 * Checks that every submitted cluster name has a matching cluster configuration.
 * Returns silently when nothing was submitted, or when no cluster config exists
 * at all (in that case the limitation check is skipped by design).
 *
 * @param submittedClusterNames cluster names referenced by the rule data sources; may be null or empty
 * @throws UnExpectedRequestException if any submitted cluster name is not configured
 */
@Override
public void checkDataSourceClusterSupport(Set<String> submittedClusterNames) throws UnExpectedRequestException {
    if (submittedClusterNames == null || submittedClusterNames.isEmpty()) {
        return;
    }
    List<ClusterInfo> clusters = clusterInfoDao.findAllClusterInfo(0, Integer.MAX_VALUE);
    if (clusters == null || clusters.isEmpty()) {
        LOGGER.info("Failed to find cluster info config. End to check the limitation of cluster info.");
        return;
    }
    // Collect the names of all configured clusters.
    Set<String> supportClusterNames = new HashSet<>();
    for (ClusterInfo info : clusters) {
        supportClusterNames.add(info.getClusterName());
    }
    // Every submitted name that is not configured is unsupported.
    Set<String> unSupportClusterNameSet = new HashSet<>();
    for (String clusterName : submittedClusterNames) {
        if (!supportClusterNames.contains(clusterName)) {
            unSupportClusterNameSet.add(clusterName);
        }
    }
    if (!unSupportClusterNameSet.isEmpty()) {
        // BUG FIX: the message previously echoed the submitted names as the
        // "supported" list; report the actually configured cluster names instead.
        throw new UnExpectedRequestException(String.format("{&NOT_SUPPORT_CLUSTER_NAME}:%s,{&ONLY_SUPPORT_CLUSTER_NAME_ARE}:%s", unSupportClusterNameSet, supportClusterNames));
    }
}
Usage of com.webank.wedatasphere.qualitis.entity.ClusterInfo in the Qualitis project by WeBankFinTech: method submitApplication of class ExecutionManagerImpl.
/**
 * Submit job to linkis
 *
 * Groups the given rules by cluster, divides each cluster's rules into tasks,
 * converts each task into a data-quality job, and submits the jobs to Linkis.
 * Chooses the new or old Linkis submit interface based on the cluster type
 * suffix. Tasks whose submission returns null are marked TASK_NOT_EXIST.
 *
 * @return one TaskSubmitResult per submitted (or failed) task
 */
@Override
public List<TaskSubmitResult> submitApplication(List<Rule> rules, String nodeName, String createTime, String user, String database, StringBuffer partition, Date date, Application application, String cluster, String startupParam, String setFlag, Map<String, String> execParams, StringBuffer runDate, Map<Long, Map> dataSourceMysqlConnect) throws ArgumentException, TaskTypeException, ConvertException, DataQualityTaskException, RuleVariableNotSupportException, RuleVariableNotFoundException, JobSubmitException, ClusterInfoNotConfigException, IOException, UnExpectedRequestException, MetaDataAcquireFailedException {
// Context-service ID is taken from the first rule; assumes all rules share one csId — TODO confirm.
String csId = rules.iterator().next().getCsId();
// Check if cluster supported
LOGGER.info("Start to collect rule to clusters");
Map<String, List<Rule>> clusterNameMap = getRuleCluster(rules);
LOGGER.info("Succeed to classify rules by cluster, cluster map: {}", clusterNameMap);
if (StringUtils.isNotBlank(cluster)) {
// An explicitly requested cluster overrides the per-rule cluster grouping.
LOGGER.info("When pick up a cluster, these datasources of rules must be from one cluster. Now start to put into the specify cluster.\n");
putAllRulesIntoSpecifyCluster(clusterNameMap, cluster);
LOGGER.info("Success to put into the specify cluster.\n");
}
List<TaskSubmitResult> taskSubmitResults = new ArrayList<>();
for (String clusterName : clusterNameMap.keySet()) {
List<Rule> clusterRules = clusterNameMap.get(clusterName);
if (StringUtils.isNotBlank(cluster)) {
// NOTE(review): the loop variable is reassigned to the forced cluster name here.
clusterName = cluster;
}
ClusterInfo clusterInfo = clusterInfoDao.findByClusterName(clusterName);
LOGGER.info("Start to check cluster config.");
if (clusterInfo == null) {
throw new ClusterInfoNotConfigException(clusterName + " {&DOES_NOT_EXIST}");
}
LOGGER.info("Succeed to pass the check of cluster config. All cluster of rules are configured");
// Divide rule into tasks
List<DataQualityTask> tasks = TaskDividerFactory.getDivider().divide(clusterRules, application.getId(), createTime, partition.toString(), date, database, user, taskExecuteLimitConfig.getTaskExecuteRuleSize());
LOGGER.info("Succeed to divide application into tasks. result: {}", tasks);
// Save divided tasks
saveDividedTask(tasks, clusterInfo, rules, application, createTime);
// Convert tasks into job
List<DataQualityJob> jobList = new ArrayList<>();
for (DataQualityTask task : tasks) {
DataQualityJob job = templateConverterFactory.getConverter(task).convert(task, date, setFlag, execParams, runDate.toString(), clusterInfo.getClusterType(), dataSourceMysqlConnect);
job.setUser(task.getUser());
jobList.add(job);
List<Long> ruleIdList = task.getRuleTaskDetails().stream().map(r -> r.getRule().getId()).collect(Collectors.toList());
LOGGER.info("Succeed to convert rule_id: {} into code. code: {}", ruleIdList, job.getJobCode());
}
LOGGER.info("Succeed to convert all template into codes. codes: {}", jobList);
// Submit job to linkis
List<JobSubmitResult> submitResults = new ArrayList<>();
for (DataQualityJob job : jobList) {
String code = String.join("\n", job.getJobCode());
// The job may run under a proxy user; fall back to the submitting user below.
String proxy = job.getUser();
Long taskId = job.getTaskId();
// Compatible with new and old submission interfaces.
JobSubmitResult result = null;
boolean engineReUse = false;
if (StringUtils.isNotBlank(startupParam)) {
// Parse "key=value" pairs split by the divider; extract and strip the
// engine_reuse flag so it is not forwarded to Linkis as a startup param.
// NOTE(review): startupParam is mutated here and the change persists
// across loop iterations and clusters — confirm this is intended.
String[] startupParams = startupParam.split(SpecCharEnum.DIVIDER.getValue());
for (String param : startupParams) {
if (StringUtils.isEmpty(param)) {
continue;
}
String[] paramStrs = param.split("=");
if (paramStrs.length < 2) {
continue;
}
String key = paramStrs[0];
String value = paramStrs[1];
if ("engine_reuse".equals(key)) {
if ("true".equals(value)) {
engineReUse = true;
startupParam = startupParam.replace("engine_reuse=true", "");
} else {
engineReUse = false;
startupParam = startupParam.replace("engine_reuse=false", "");
}
break;
}
}
}
// Linkis 1.x clusters use the new submit API (supports engine reuse).
if (clusterInfo.getClusterType().endsWith(LINKIS_ONE_VERSION)) {
result = abstractJobSubmitter.submitJobNew(code, linkisConfig.getEngineName(), StringUtils.isNotBlank(proxy) ? proxy : user, clusterInfo.getLinkisAddress(), clusterName, taskId, csId, nodeName, StringUtils.isNotBlank(startupParam) ? startupParam : job.getStartupParam(), engineReUse);
} else {
result = abstractJobSubmitter.submitJob(code, linkisConfig.getEngineName(), StringUtils.isNotBlank(proxy) ? proxy : user, clusterInfo.getLinkisAddress(), clusterName, taskId, csId, nodeName, StringUtils.isNotBlank(startupParam) ? startupParam : job.getStartupParam());
}
if (result != null) {
submitResults.add(result);
} else {
// Null result means the job could not be submitted: mark the persisted
// task as non-existent and record a result with no remote task.
Task taskInDb = taskDao.findById(taskId);
taskInDb.setStatus(TaskStatusEnum.TASK_NOT_EXIST.getCode());
taskDao.save(taskInDb);
taskSubmitResults.add(new TaskSubmitResult(application.getId(), null, clusterInfo.getClusterName()));
}
}
// Rewrite task remote ID.
rewriteTaskRemoteInfo(submitResults, taskSubmitResults, application.getId(), clusterInfo.getClusterName());
}
return taskSubmitResults;
}
Usage of com.webank.wedatasphere.qualitis.entity.ClusterInfo in the Qualitis project by WeBankFinTech: method killApplication of class ExecutionManagerImpl.
/**
 * Kills every sub task of the given application on its configured cluster,
 * marks each task CANCELLED with an end time, and persists the change.
 *
 * @param applicationInDb the application whose tasks should be killed
 * @param user the user issuing the kill request
 * @return a success response carrying the number of killed tasks
 * @throws UnExpectedRequestException when the application has no sub tasks
 * @throws ClusterInfoNotConfigException when a task's cluster is not configured
 */
@Override
public GeneralResponse<?> killApplication(Application applicationInDb, String user) throws JobKillException, UnExpectedRequestException, ClusterInfoNotConfigException {
    List<Task> subTasks = taskDao.findByApplication(applicationInDb);
    // Guard: nothing to kill is treated as a bad request.
    if (subTasks == null || subTasks.isEmpty()) {
        throw new UnExpectedRequestException("Sub tasks {&CAN_NOT_BE_NULL_OR_EMPTY}");
    }
    List<JobKillResult> killResults = new ArrayList<>();
    for (Task subTask : subTasks) {
        String taskClusterName = subTask.getClusterName();
        ClusterInfo taskCluster = clusterInfoDao.findByClusterName(taskClusterName);
        if (taskCluster == null) {
            throw new ClusterInfoNotConfigException("Failed to find cluster id: " + taskClusterName + " configuration");
        }
        killResults.add(abstractJobSubmitter.killJob(user, taskCluster.getClusterName(), subTask));
        // Record cancellation and the moment it happened.
        subTask.setStatus(TaskStatusEnum.CANCELLED.getCode());
        subTask.setEndTime(ExecutionManagerImpl.PRINT_TIME_FORMAT.format(new Date()));
        taskDao.save(subTask);
    }
    return new GeneralResponse<>("200", "{&SUCCESS_TO_KILL_TASK}", killResults.size());
}
Usage of com.webank.wedatasphere.qualitis.entity.ClusterInfo in the Qualitis project by WeBankFinTech: method saveFullTree of class LinkisConfiguration.
/**
 * Saves a full configuration tree to Linkis for the given cluster.
 *
 * @param clusterName name of the cluster whose Linkis instance receives the config
 * @param creator creator identity forwarded to Linkis
 * @param fullTreeQueueName queue-name tree (currently unused here; kept for interface compatibility)
 * @param fullTree the configuration tree to save
 * @param userName user whose token authenticates the request
 * @return the "data" section of the Linkis response
 * @throws UnExpectedRequestException if the cluster is not configured or the save fails
 */
public Map saveFullTree(String clusterName, String creator, List<Map> fullTreeQueueName, List<Map> fullTree, String userName) throws UnExpectedRequestException {
    ClusterInfo clusterInfoInDb = clusterInfoDao.findByClusterName(clusterName);
    if (clusterInfoInDb == null) {
        // BUG FIX: previously said "{&ALREADY_EXIST}" for a missing cluster;
        // use the not-found message (consistent with getFullTree).
        throw new UnExpectedRequestException("cluster name {&DOES_NOT_EXIST}");
    }
    String url = UriBuilder.fromUri(clusterInfoInDb.getLinkisAddress()).path(linkisConfig.getPrefix()).path(linkisConfig.getSaveFullTree()).toString();
    Gson gson = new Gson();
    Map<String, Object> map = new HashMap<>(2);
    map.put("creator", creator);
    map.put("fullTree", fullTree);
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    // Linkis token-based auth headers.
    headers.add("Token-User", userName);
    headers.add("Token-Code", clusterInfoInDb.getLinkisToken());
    HttpEntity<String> entity = new HttpEntity<>(gson.toJson(map), headers);
    Map response = null;
    LOGGER.info("Start to save configuration to linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.POST, entity);
    try {
        response = restTemplate.exchange(url, HttpMethod.POST, entity, Map.class).getBody();
        LOGGER.info("Finish to save configuration to linkis. response: {}", response);
        // Linkis signals success with status == 0.
        Integer code = (Integer) response.get("status");
        if (code != 0) {
            throw new UnExpectedRequestException("Failed to get configuration from linkis.");
        }
    } catch (UnExpectedRequestException e) {
        // Do not re-wrap our own exception; propagate as-is.
        throw e;
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        LOGGER.info("Failed to get configuration from linkis.");
        throw new UnExpectedRequestException("Failed to get configuration from linkis.");
    }
    return (Map) response.get("data");
}
Usage of com.webank.wedatasphere.qualitis.entity.ClusterInfo in the Qualitis project by WeBankFinTech: method getFullTree of class LinkisConfiguration.
/**
 * Fetches the full Spark configuration tree and the queue-name tree from
 * Linkis for the given cluster.
 *
 * @param clusterName name of the cluster whose Linkis instance is queried
 * @param user user whose token authenticates the request
 * @return map with keys "fule_tree" (config tree) and "full_tree_queue_name" (queue tree)
 * @throws UnExpectedRequestException if the cluster is not configured or either request fails
 */
public Map getFullTree(String clusterName, String user) throws UnExpectedRequestException {
    ClusterInfo clusterInfoInDb = clusterInfoDao.findByClusterName(clusterName);
    if (clusterInfoInDb == null) {
        throw new UnExpectedRequestException("cluster name {&DOES_NOT_EXIST}");
    }
    // Engine-specific tree (spark 2.4.3) and the engine-agnostic queue-name tree.
    String url = UriBuilder.fromUri(clusterInfoInDb.getLinkisAddress()).path(linkisConfig.getPrefix()).path(linkisConfig.getGetFullTree()).queryParam("creator", linkisConfig.getAppName()).queryParam("engineType", "spark").queryParam("version", "2.4.3").toString();
    String urlQueueName = UriBuilder.fromUri(clusterInfoInDb.getLinkisAddress()).path(linkisConfig.getPrefix()).path(linkisConfig.getGetFullTree()).queryParam("creator", linkisConfig.getAppName()).toString();
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    // Linkis token-based auth headers.
    headers.add("Token-User", user);
    headers.add("Token-Code", clusterInfoInDb.getLinkisToken());
    HttpEntity entity = new HttpEntity<>(headers);
    HttpEntity entityQueueName = new HttpEntity<>(headers);
    Map response = null;
    Map responseQueueName = null;
    LOGGER.info("Start to get configuration from linkis. url: {}, method: {}, body: {}", url, javax.ws.rs.HttpMethod.GET, entity);
    try {
        response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class).getBody();
        responseQueueName = restTemplate.exchange(urlQueueName, HttpMethod.GET, entityQueueName, Map.class).getBody();
        LOGGER.info("Finish to get configuration from linkis. response: {}", response);
        // Linkis signals success with status == 0.
        Integer code = (Integer) response.get("status");
        if (code != 0) {
            throw new UnExpectedRequestException("Failed to get configuration from linkis.");
        }
        Integer codeQueueName = (Integer) responseQueueName.get("status");
        if (codeQueueName != 0) {
            throw new UnExpectedRequestException("Failed to get configuration from linkis.");
        }
    } catch (UnExpectedRequestException e) {
        throw e;
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        LOGGER.info("Failed to get configuration from linkis.");
        // BUG FIX: previously the exception was swallowed, so the reads below
        // dereferenced a null response and threw NPE. Rethrow instead
        // (consistent with saveFullTree).
        throw new UnExpectedRequestException("Failed to get configuration from linkis.");
    }
    Map<String, Map> responseMap = new HashMap<>(2);
    // NOTE(review): "fule_tree" looks like a typo of "full_tree", but the key is
    // part of the response contract — verify callers before renaming it.
    responseMap.put("fule_tree", (Map) response.get("data"));
    responseMap.put("full_tree_queue_name", (Map) responseQueueName.get("data"));
    return responseMap;
}
Aggregations