Search in sources :

Example 1 with TaskNode

use of org.apache.dolphinscheduler.common.model.TaskNode in project dolphinscheduler by apache.

From class ProcessDefinitionService, method graphHasCycle.

/**
 * whether the graph has a cycle
 *
 * @param taskNodeResponseList task node response list
 * @return if graph has cycle flag
 */
private boolean graphHasCycle(List<TaskNode> taskNodeResponseList) {
    DAG<String, TaskNode, String> graph = new DAG<>();
    // Fill the vertices
    for (TaskNode taskNodeResponse : taskNodeResponseList) {
        graph.addNode(taskNodeResponse.getName(), taskNodeResponse);
    }
    // Fill edge relations
    for (TaskNode taskNodeResponse : taskNodeResponseList) {
        List<String> preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(), String.class);
        if (CollectionUtils.isNotEmpty(preTasks)) {
            for (String preTask : preTasks) {
                if (!graph.addEdge(preTask, taskNodeResponse.getName())) {
                    return true;
                }
            }
        }
    }
    return graph.hasCycle();
}
Also used : TaskNode(org.apache.dolphinscheduler.common.model.TaskNode) DAG(org.apache.dolphinscheduler.common.graph.DAG)
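
The cycle check above leans on DAG.addEdge, which refuses an edge that would close a ring. A minimal sketch of that behaviour, under the assumption that TaskNode is a plain bean with a no-arg constructor and setName(String) (neither is shown in the listing):

import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;

public class CycleCheckSketch {
    public static void main(String[] args) {
        DAG<String, TaskNode, String> graph = new DAG<>();
        // assumption: TaskNode() and setName(String) exist as plain bean accessors
        TaskNode nodeA = new TaskNode();
        nodeA.setName("A");
        TaskNode nodeB = new TaskNode();
        nodeB.setName("B");
        graph.addNode("A", nodeA);
        graph.addNode("B", nodeB);
        // A -> B is fine; B -> A would close a ring, so addEdge is expected to reject it
        System.out.println(graph.addEdge("A", "B")); // expected: true
        System.out.println(graph.addEdge("B", "A")); // expected: false, graphHasCycle would return true at this point
        System.out.println(graph.hasCycle());        // expected: false, because the cyclic edge was never added
    }
}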

Example 2 with TaskNode

use of org.apache.dolphinscheduler.common.model.TaskNode in project dolphinscheduler by apache.

From class ProcessDefinitionService, method viewTree.

/**
 * Encapsulates the TreeView structure
 *
 * @param processId process definition id
 * @param limit limit
 * @return tree view json data
 * @throws Exception exception
 */
public Map<String, Object> viewTree(Integer processId, Integer limit) throws Exception {
    Map<String, Object> result = new HashMap<>();
    ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
    if (null == processDefinition) {
        logger.info("process define not exists");
        putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinition);
        return result;
    }
    DAG<String, TaskNode, TaskNodeRelation> dag = genDagGraph(processDefinition);
    /**
     * nodes that are running
     */
    Map<String, List<TreeViewDto>> runningNodeMap = new ConcurrentHashMap<>();
    /**
     * nodes that are waiting to run
     */
    Map<String, List<TreeViewDto>> waitingRunningNodeMap = new ConcurrentHashMap<>();
    /**
     * List of process instances
     */
    List<ProcessInstance> processInstanceList = processInstanceMapper.queryByProcessDefineId(processId, limit);
    for (ProcessInstance processInstance : processInstanceList) {
        processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(), processInstance.getEndTime()));
    }
    if (limit > processInstanceList.size()) {
        limit = processInstanceList.size();
    }
    TreeViewDto parentTreeViewDto = new TreeViewDto();
    parentTreeViewDto.setName("DAG");
    parentTreeViewDto.setType("");
    for (int i = limit - 1; i >= 0; i--) {
        ProcessInstance processInstance = processInstanceList.get(i);
        Date endTime = processInstance.getEndTime() == null ? new Date() : processInstance.getEndTime();
        parentTreeViewDto.getInstances().add(new Instance(processInstance.getId(), processInstance.getName(), "", processInstance.getState().toString(), processInstance.getStartTime(), endTime, processInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime())));
    }
    List<TreeViewDto> parentTreeViewDtoList = new ArrayList<>();
    parentTreeViewDtoList.add(parentTreeViewDto);
    // Here is the encapsulation task instance
    for (String startNode : dag.getBeginNode()) {
        runningNodeMap.put(startNode, parentTreeViewDtoList);
    }
    while (Stopper.isRunning()) {
        Set<String> postNodeList = null;
        Iterator<Map.Entry<String, List<TreeViewDto>>> iter = runningNodeMap.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry<String, List<TreeViewDto>> en = iter.next();
            String nodeName = en.getKey();
            parentTreeViewDtoList = en.getValue();
            TreeViewDto treeViewDto = new TreeViewDto();
            treeViewDto.setName(nodeName);
            TaskNode taskNode = dag.getNode(nodeName);
            treeViewDto.setType(taskNode.getType());
            // set treeViewDto instances
            for (int i = limit - 1; i >= 0; i--) {
                ProcessInstance processInstance = processInstanceList.get(i);
                TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), nodeName);
                if (taskInstance == null) {
                    treeViewDto.getInstances().add(new Instance(-1, "not running", null));
                } else {
                    Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
                    Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime();
                    int subProcessId = 0;
                    /**
                     * if the task is a sub-process, record its sub-process definition id; otherwise subProcessId stays 0
                     */
                    if (taskInstance.getTaskType().equals(TaskType.SUB_PROCESS.name())) {
                        String taskJson = taskInstance.getTaskJson();
                        taskNode = JSON.parseObject(taskJson, TaskNode.class);
                        subProcessId = Integer.parseInt(JSON.parseObject(taskNode.getParams()).getString(CMDPARAM_SUB_PROCESS_DEFINE_ID));
                    }
                    treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskType(), taskInstance.getState().toString(), taskInstance.getStartTime(), taskInstance.getEndTime(), taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessId));
                }
            }
            for (TreeViewDto pTreeViewDto : parentTreeViewDtoList) {
                pTreeViewDto.getChildren().add(treeViewDto);
            }
            postNodeList = dag.getSubsequentNodes(nodeName);
            if (CollectionUtils.isNotEmpty(postNodeList)) {
                for (String nextNodeName : postNodeList) {
                    List<TreeViewDto> treeViewDtoList = waitingRunningNodeMap.get(nextNodeName);
                    if (treeViewDtoList == null) {
                        treeViewDtoList = new ArrayList<>();
                    }
                    treeViewDtoList.add(treeViewDto);
                    waitingRunningNodeMap.put(nextNodeName, treeViewDtoList);
                }
            }
            runningNodeMap.remove(nodeName);
        }
        if (waitingRunningNodeMap.isEmpty()) {
            break;
        } else {
            runningNodeMap.putAll(waitingRunningNodeMap);
            waitingRunningNodeMap.clear();
        }
    }
    result.put(Constants.DATA_LIST, parentTreeViewDto);
    result.put(Constants.STATUS, Status.SUCCESS);
    result.put(Constants.MSG, Status.SUCCESS.getMsg());
    return result;
}
Also used : ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ProcessInstance(org.apache.dolphinscheduler.dao.entity.ProcessInstance) Instance(org.apache.dolphinscheduler.api.dto.treeview.Instance) TaskInstance(org.apache.dolphinscheduler.dao.entity.TaskInstance) ArrayList(java.util.ArrayList) ProcessDefinition(org.apache.dolphinscheduler.dao.entity.ProcessDefinition) TaskNodeRelation(org.apache.dolphinscheduler.common.model.TaskNodeRelation) List(java.util.List) TaskNode(org.apache.dolphinscheduler.common.model.TaskNode) Date(java.util.Date) JSONObject(com.alibaba.fastjson.JSONObject) TreeViewDto(org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto) Map(java.util.Map)
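
The two maps drive a level-order walk over the DAG: runningNodeMap holds the current level, waitingRunningNodeMap collects the successors, and the maps swap until nothing is left. A stripped-down sketch of that traversal pattern in plain JDK collections; the node names and successor map are made up for illustration and no DolphinScheduler classes are involved:

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class LevelOrderSketch {
    public static void main(String[] args) {
        // toy successor map standing in for dag.getSubsequentNodes(nodeName); names are made up
        Map<String, List<String>> successors = new HashMap<>();
        successors.put("start", Arrays.asList("taskA", "taskB"));
        successors.put("taskA", Arrays.asList("end"));
        successors.put("taskB", Arrays.asList("end"));

        // running holds the current level, waiting collects the next one
        // (mirroring runningNodeMap and waitingRunningNodeMap above)
        Map<String, Integer> running = new HashMap<>();
        Map<String, Integer> waiting = new HashMap<>();
        running.put("start", 0);

        while (!running.isEmpty()) {
            for (Map.Entry<String, Integer> entry : running.entrySet()) {
                String nodeName = entry.getKey();
                int level = entry.getValue();
                System.out.println("visit " + nodeName + " at level " + level);
                for (String next : successors.getOrDefault(nodeName, Collections.emptyList())) {
                    waiting.put(next, level + 1);
                }
            }
            running.clear();
            running.putAll(waiting);
            waiting.clear();
        }
    }
}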

Example 3 with TaskNode

use of org.apache.dolphinscheduler.common.model.TaskNode in project dolphinscheduler by apache.

From class ProcessDefinitionService, method getTaskNodeListByDefinitionIdList.

/**
 * get task node details based on a list of process definition ids
 *
 * @param defineIdList define id list
 * @return task node list
 * @throws Exception exception
 */
public Map<String, Object> getTaskNodeListByDefinitionIdList(String defineIdList) throws Exception {
    Map<String, Object> result = new HashMap<>();
    Map<Integer, List<TaskNode>> taskNodeMap = new HashMap<>();
    String[] idList = defineIdList.split(",");
    List<Integer> idIntList = new ArrayList<>();
    for (String definitionId : idList) {
        idIntList.add(Integer.parseInt(definitionId));
    }
    Integer[] idArray = idIntList.toArray(new Integer[idIntList.size()]);
    List<ProcessDefinition> processDefinitionList = processDefineMapper.queryDefinitionListByIdList(idArray);
    if (CollectionUtils.isEmpty(processDefinitionList)) {
        logger.info("process definition not exists");
        putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineIdList);
        return result;
    }
    for (ProcessDefinition processDefinition : processDefinitionList) {
        String processDefinitionJson = processDefinition.getProcessDefinitionJson();
        ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
        List<TaskNode> taskNodeList = (processData == null || processData.getTasks() == null) ? new ArrayList<>() : processData.getTasks();
        taskNodeMap.put(processDefinition.getId(), taskNodeList);
    }
    result.put(Constants.DATA_LIST, taskNodeMap);
    putMsg(result, Status.SUCCESS);
    return result;
}
Also used : TaskNode(org.apache.dolphinscheduler.common.model.TaskNode) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ProcessDefinition(org.apache.dolphinscheduler.dao.entity.ProcessDefinition) ProcessData(org.apache.dolphinscheduler.dao.entity.ProcessData) JSONObject(com.alibaba.fastjson.JSONObject) List(java.util.List)
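
Before the mapper is queried, the comma-separated defineIdList is turned into an Integer[]; a small self-contained sketch of that parsing step (plain JDK code; the trim() call is an extra hardening step not present in the listing):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class DefineIdListParsingSketch {
    public static void main(String[] args) {
        String defineIdList = "1,2,3";
        List<Integer> idIntList = new ArrayList<>();
        for (String definitionId : defineIdList.split(",")) {
            // a blank or non-numeric element makes parseInt throw NumberFormatException,
            // which the service method would propagate as its declared Exception
            idIntList.add(Integer.parseInt(definitionId.trim()));
        }
        Integer[] idArray = idIntList.toArray(new Integer[0]);
        System.out.println(Arrays.toString(idArray)); // [1, 2, 3]
    }
}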

Example 4 with TaskNode

use of org.apache.dolphinscheduler.common.model.TaskNode in project dolphinscheduler by apache.

From class ProcessDefinitionService, method genDagGraph.

/**
 * Generate the DAG graph from the process definition
 *
 * @param processDefinition process definition
 * @return dag graph
 * @throws Exception if exception happens
 */
private DAG<String, TaskNode, TaskNodeRelation> genDagGraph(ProcessDefinition processDefinition) throws Exception {
    String processDefinitionJson = processDefinition.getProcessDefinitionJson();
    ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
    // check process data
    if (null != processData) {
        List<TaskNode> taskNodeList = processData.getTasks();
        processDefinition.setGlobalParamList(processData.getGlobalParams());
        ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);
        // Generate concrete Dag to be executed
        return DagHelper.buildDagGraph(processDag);
    }
    return new DAG<>();
}
Also used : ProcessDag(org.apache.dolphinscheduler.common.process.ProcessDag) TaskNode(org.apache.dolphinscheduler.common.model.TaskNode) DAG(org.apache.dolphinscheduler.common.graph.DAG) ProcessData(org.apache.dolphinscheduler.dao.entity.ProcessData)
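
A rough usage sketch of the two DagHelper calls, assuming TaskNode is a plain bean with setName(String) and setPreTasks(String) and that DagHelper is importable from your version's package (both assumptions; neither appears in the listing):

import java.util.Arrays;
import java.util.List;

import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.dao.utils.DagHelper; // assumption: DagHelper's package varies between versions

public class GenDagGraphSketch {
    public static void main(String[] args) throws Exception {
        // assumptions: TaskNode has a no-arg constructor, setName(String) and setPreTasks(String)
        TaskNode first = new TaskNode();
        first.setName("A");
        first.setPreTasks("[]");          // no predecessors
        TaskNode second = new TaskNode();
        second.setName("B");
        second.setPreTasks("[\"A\"]");    // B depends on A
        List<TaskNode> taskNodeList = Arrays.asList(first, second);

        ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);
        DAG<String, TaskNode, TaskNodeRelation> dag = DagHelper.buildDagGraph(processDag);
        System.out.println(dag.getBeginNode()); // expected: [A]
    }
}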

Example 5 with TaskNode

use of org.apache.dolphinscheduler.common.model.TaskNode in project dolphinscheduler by apache.

From class DagHelper, method generateRelationListByFlowNodes.

/**
 * generate the flow node relation list from a task node list;
 * edges whose predecessor node is not in the task node list are not added to the result
 * @param taskNodeList taskNodeList
 * @return task node relation list
 */
public static List<TaskNodeRelation> generateRelationListByFlowNodes(List<TaskNode> taskNodeList) {
    List<TaskNodeRelation> nodeRelationList = new ArrayList<>();
    for (TaskNode taskNode : taskNodeList) {
        String preTasks = taskNode.getPreTasks();
        List<String> preTaskList = JSONUtils.toList(preTasks, String.class);
        if (preTaskList != null) {
            for (String depNodeName : preTaskList) {
                if (null != findNodeByName(taskNodeList, depNodeName)) {
                    nodeRelationList.add(new TaskNodeRelation(depNodeName, taskNode.getName()));
                }
            }
        }
    }
    return nodeRelationList;
}
Also used : TaskNode(org.apache.dolphinscheduler.common.model.TaskNode) TaskNodeRelation(org.apache.dolphinscheduler.common.model.TaskNodeRelation)
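
A short usage sketch: when node B lists A in its preTasks JSON, the helper emits a single A to B relation, while a predecessor that is not in the task node list is skipped. TaskNode's no-arg constructor and setters, DagHelper's package, and TaskNodeRelation's getStartNode()/getEndNode() accessors are all assumed here rather than taken from the listing:

import java.util.Arrays;
import java.util.List;

import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.dao.utils.DagHelper; // assumption: DagHelper's package varies between versions

public class RelationListSketch {
    public static void main(String[] args) throws Exception {
        TaskNode a = new TaskNode();            // assumption: plain bean-style TaskNode
        a.setName("A");
        a.setPreTasks("[]");
        TaskNode b = new TaskNode();
        b.setName("B");
        b.setPreTasks("[\"A\", \"missing\"]");  // "missing" is not in the list, so no edge is created for it
        List<TaskNode> taskNodeList = Arrays.asList(a, b);

        List<TaskNodeRelation> relations = DagHelper.generateRelationListByFlowNodes(taskNodeList);
        // expected output: a single relation, A -> B
        for (TaskNodeRelation relation : relations) {
            System.out.println(relation.getStartNode() + " -> " + relation.getEndNode());
        }
    }
}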

Aggregations

TaskNode (org.apache.dolphinscheduler.common.model.TaskNode): 42
TaskNodeRelation (org.apache.dolphinscheduler.common.model.TaskNodeRelation): 14
HashMap (java.util.HashMap): 10
TaskInstance (org.apache.dolphinscheduler.dao.entity.TaskInstance): 10
ProcessData (org.apache.dolphinscheduler.dao.entity.ProcessData): 9
Test (org.junit.Test): 9
JSONObject (com.alibaba.fastjson.JSONObject): 8
ProcessDag (org.apache.dolphinscheduler.common.process.ProcessDag): 8
ArrayList (java.util.ArrayList): 7
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 7
Date (java.util.Date): 6
ProcessDefinition (org.apache.dolphinscheduler.dao.entity.ProcessDefinition): 5
Logger (org.slf4j.Logger): 5
IOException (java.io.IOException): 4
List (java.util.List): 4
DAG (org.apache.dolphinscheduler.common.graph.DAG): 4
AbstractParameters (org.apache.dolphinscheduler.common.task.AbstractParameters): 4
ProcessInstance (org.apache.dolphinscheduler.dao.entity.ProcessInstance): 4
JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException): 3
HashSet (java.util.HashSet): 3