Use of com.dtstack.taier.scheduler.server.pipeline.IPipeline in project Taier by DTStack.
From the class ScheduleActionService, method dealActionParam.
private void dealActionParam(Map<String, Object> actionParam, ScheduleTaskShade batchTask, ScheduleJob scheduleJob) throws Exception {
    IPipeline pipeline = null;
    String pipelineConfig = null;
    // Prefer a pipeline explicitly configured in the action parameters,
    // otherwise fall back to a default pipeline chosen by task type.
    if (actionParam.containsKey(PipelineBuilder.pipelineKey)) {
        pipelineConfig = (String) actionParam.get(PipelineBuilder.pipelineKey);
        pipeline = PipelineBuilder.buildPipeline(pipelineConfig);
    } else if (EScheduleJobType.SPARK_SQL.getType().equals(batchTask.getTaskType())) {
        pipeline = PipelineBuilder.buildDefaultSqlPipeline();
    } else if (EScheduleJobType.SYNC.getType().equals(batchTask.getTaskType())) {
        pipeline = syncOperatorPipeline;
    }
    if (pipeline == null) {
        throw new RdosDefineException(ErrorCode.CONFIG_ERROR);
    }
    List<ScheduleTaskParamShade> taskParamsToReplace = JSONObject.parseArray((String) actionParam.get("taskParamsToReplace"), ScheduleTaskParamShade.class);
    Map<String, Object> pipelineInitMap = PipelineBuilder.getPipelineInitMap(pipelineConfig, scheduleJob, batchTask, taskParamsToReplace, (uploadPipelineMap) -> {
        // Fill in the file-upload information: plugin info plus the HDFS path for uploaded task files.
        JSONObject pluginInfo = clusterService.pluginInfoJSON(batchTask.getTenantId(), batchTask.getTaskType(), null, null);
        String hdfsTypeName = componentService.buildHdfsTypeName(batchTask.getTenantId(), null);
        pluginInfo.put(ConfigConstant.TYPE_NAME_KEY, hdfsTypeName);
        uploadPipelineMap.put(UploadParamPipeline.pluginInfoKey, pluginInfo);
        uploadPipelineMap.put(UploadParamPipeline.fileUploadPathKey, environmentContext.getHdfsTaskPath());
    });
    pipeline.execute(actionParam, pipelineInitMap);
}
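The snippet above only shows the consumer side of the pipeline contract: dealActionParam picks an IPipeline and calls pipeline.execute(actionParam, pipelineInitMap). The sketch below illustrates that contract with a local stand-in interface and a trivial logging pipeline; the Pipeline interface, the LOGGING_PIPELINE name, and the map contents are illustrative assumptions inferred from the call site, not the actual Taier IPipeline type.

import java.util.HashMap;
import java.util.Map;

// Stand-alone sketch of the two-map pipeline contract used by dealActionParam.
// The real com.dtstack.taier.scheduler.server.pipeline.IPipeline may differ; this local
// interface is only a stand-in inferred from pipeline.execute(actionParam, pipelineInitMap).
public class PipelineSketch {

    interface Pipeline {
        void execute(Map<String, Object> actionParam, Map<String, Object> pipelineInitMap) throws Exception;
    }

    // A trivial pipeline that just reports what it was given.
    static final Pipeline LOGGING_PIPELINE = (actionParam, pipelineInitMap) -> {
        System.out.println("actionParam keys: " + actionParam.keySet());
        System.out.println("pipelineInitMap keys: " + pipelineInitMap.keySet());
    };

    public static void main(String[] args) throws Exception {
        Map<String, Object> actionParam = new HashMap<>();
        actionParam.put("taskParamsToReplace", "[]"); // hypothetical value, mirrors the JSON array parsed above

        Map<String, Object> pipelineInitMap = new HashMap<>();
        pipelineInitMap.put("fileUploadPath", "/taier/tasks"); // hypothetical value

        LOGGING_PIPELINE.execute(actionParam, pipelineInitMap);
    }
}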