
Example 1 with RegistryException

Use of org.apache.airavata.registry.cpi.RegistryException in project airavata by apache.

Class GFacEngineImpl, method executeTaskListFrom:

private void executeTaskListFrom(ProcessContext processContext, String startingTaskId) throws GFacException {
    // checkpoint
    if (processContext.isInterrupted() && processContext.getProcessState() != ProcessState.MONITORING) {
        GFacUtils.handleProcessInterrupt(processContext);
        return;
    }
    List<TaskModel> taskList = processContext.getTaskList();
    Map<String, TaskModel> taskMap = processContext.getTaskMap();
    boolean fastForward = true;
    for (String taskId : processContext.getTaskExecutionOrder()) {
        if (fastForward) {
            if (taskId.equalsIgnoreCase(startingTaskId)) {
                fastForward = false;
            } else {
                continue;
            }
        }
        TaskModel taskModel = taskMap.get(taskId);
        processContext.setCurrentExecutingTaskModel(taskModel);
        TaskTypes taskType = taskModel.getTaskType();
        TaskContext taskContext = getTaskContext(processContext);
        taskContext.setTaskModel(taskModel);
        ProcessStatus status = null;
        switch(taskType) {
            case ENV_SETUP:
                status = new ProcessStatus(ProcessState.CONFIGURING_WORKSPACE);
                status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                processContext.setProcessStatus(status);
                GFacUtils.saveAndPublishProcessStatus(processContext);
                // checkpoint
                if (processContext.isInterrupted()) {
                    GFacUtils.handleProcessInterrupt(processContext);
                    return;
                }
                configureWorkspace(taskContext, processContext.isRecovery());
                // checkpoint
                if (processContext.isInterrupted()) {
                    GFacUtils.handleProcessInterrupt(processContext);
                    return;
                }
                break;
            case DATA_STAGING:
                try {
                    // checkpoint
                    if (processContext.isInterrupted()) {
                        GFacUtils.handleProcessInterrupt(processContext);
                        return;
                    }
                    DataStagingTaskModel subTaskModel = (DataStagingTaskModel) taskContext.getSubTaskModel();
                    DataStageType type = subTaskModel.getType();
                    switch(type) {
                        case INPUT:
                            status = new ProcessStatus(ProcessState.INPUT_DATA_STAGING);
                            status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                            processContext.setProcessStatus(status);
                            GFacUtils.saveAndPublishProcessStatus(processContext);
                            taskContext.setProcessInput(subTaskModel.getProcessInput());
                            inputDataStaging(taskContext, processContext.isRecovery());
                            break;
                        case OUPUT:
                            status = new ProcessStatus(ProcessState.OUTPUT_DATA_STAGING);
                            status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                            processContext.setProcessStatus(status);
                            GFacUtils.saveAndPublishProcessStatus(processContext);
                            taskContext.setProcessOutput(subTaskModel.getProcessOutput());
                            outputDataStaging(taskContext, processContext.isRecovery(), false);
                            break;
                        case ARCHIVE_OUTPUT:
                            status = new ProcessStatus(ProcessState.OUTPUT_DATA_STAGING);
                            status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                            processContext.setProcessStatus(status);
                            GFacUtils.saveAndPublishProcessStatus(processContext);
                            outputDataStaging(taskContext, processContext.isRecovery(), true);
                            break;
                    }
                    // checkpoint
                    if (processContext.isInterrupted()) {
                        GFacUtils.handleProcessInterrupt(processContext);
                        return;
                    }
                } catch (TException e) {
                    throw new GFacException(e);
                }
                break;
            case JOB_SUBMISSION:
                // checkpoint
                if (processContext.isInterrupted()) {
                    GFacUtils.handleProcessInterrupt(processContext);
                    return;
                }
                status = new ProcessStatus(ProcessState.EXECUTING);
                status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                processContext.setProcessStatus(status);
                GFacUtils.saveAndPublishProcessStatus(processContext);
                executeJobSubmission(taskContext, processContext.isRecovery());
                // Don't put any checkpoint in between JobSubmission and Monitoring tasks
                JobStatus jobStatus = processContext.getJobModel().getJobStatuses().get(0);
                if (jobStatus != null && (jobStatus.getJobState() == JobState.SUBMITTED || jobStatus.getJobState() == JobState.QUEUED || jobStatus.getJobState() == JobState.ACTIVE)) {
                    List<OutputDataObjectType> processOutputs = processContext.getProcessModel().getProcessOutputs();
                    if (processOutputs != null && !processOutputs.isEmpty()) {
                        for (OutputDataObjectType output : processOutputs) {
                            try {
                                if (output.isOutputStreaming()) {
                                    TaskModel streamingTaskModel = new TaskModel();
                                    streamingTaskModel.setTaskType(TaskTypes.OUTPUT_FETCHING);
                                    streamingTaskModel.setTaskStatuses(Arrays.asList(new TaskStatus(TaskState.CREATED)));
                                    streamingTaskModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
                                    streamingTaskModel.setParentProcessId(processContext.getProcessId());
                                    TaskContext streamingTaskContext = getTaskContext(processContext);
                                    DataStagingTaskModel submodel = new DataStagingTaskModel();
                                    submodel.setType(DataStageType.OUPUT);
                                    submodel.setProcessOutput(output);
                                    URI source = new URI(processContext.getDataMovementProtocol().name(), processContext.getComputeResourceLoginUserName(), processContext.getComputeResourceDescription().getHostName(), 22, processContext.getWorkingDir() + output.getValue(), null, null);
                                    submodel.setSource(source.getPath());
                                    submodel.setDestination("dummy://temp/file/location");
                                    streamingTaskModel.setSubTaskModel(ThriftUtils.serializeThriftObject(submodel));
                                    String streamTaskId = (String) processContext.getExperimentCatalog().add(ExpCatChildDataType.TASK, streamingTaskModel, processContext.getProcessId());
                                    streamingTaskModel.setTaskId(streamTaskId);
                                    streamingTaskContext.setTaskModel(streamingTaskModel);
                                    executeDataStreaming(streamingTaskContext, processContext.isRecovery());
                                }
                            } catch (URISyntaxException | TException | RegistryException e) {
                                log.error("Error while streaming output " + output.getValue());
                            }
                        }
                    }
                }
                break;
            case MONITORING:
                status = new ProcessStatus(ProcessState.MONITORING);
                status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                processContext.setProcessStatus(status);
                GFacUtils.saveAndPublishProcessStatus(processContext);
                executeJobMonitoring(taskContext, processContext.isRecovery());
                break;
            case ENV_CLEANUP:
                // TODO implement environment clean up task logic
                break;
            default:
                throw new GFacException("Unsupported Task type");
        }
        if (processContext.isPauseTaskExecution()) {
            // If any task put processContext to wait, the same task must continue processContext execution.
            return;
        }
    }
    processContext.setComplete(true);
}
Also used : TException(org.apache.thrift.TException) TaskContext(org.apache.airavata.gfac.core.context.TaskContext) ProcessStatus(org.apache.airavata.model.status.ProcessStatus) URISyntaxException(java.net.URISyntaxException) TaskStatus(org.apache.airavata.model.status.TaskStatus) URI(java.net.URI) RegistryException(org.apache.airavata.registry.cpi.RegistryException) DataStageType(org.apache.airavata.model.task.DataStageType) JobStatus(org.apache.airavata.model.status.JobStatus) GFacException(org.apache.airavata.gfac.core.GFacException) OutputDataObjectType(org.apache.airavata.model.application.io.OutputDataObjectType) TaskTypes(org.apache.airavata.model.task.TaskTypes) DataStagingTaskModel(org.apache.airavata.model.task.DataStagingTaskModel) EnvironmentSetupTaskModel(org.apache.airavata.model.task.EnvironmentSetupTaskModel) JobSubmissionTaskModel(org.apache.airavata.model.task.JobSubmissionTaskModel) MonitorTaskModel(org.apache.airavata.model.task.MonitorTaskModel) DataStagingTaskModel(org.apache.airavata.model.task.DataStagingTaskModel) TaskModel(org.apache.airavata.model.task.TaskModel)
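
The DATA_STAGING and JOB_SUBMISSION branches treat a failed registry write for the optional streaming task as non-fatal: RegistryException is caught together with URISyntaxException and TException, logged, and the job flow continues. Below is a minimal sketch of that catch-and-continue idiom factored into a helper; StreamingTaskRegistrar and registerStreamingTask are hypothetical names (not part of GFacEngineImpl), and the ExpCatChildDataType import location is assumed to sit in the same registry.cpi package as the other registry types on this page.

import org.apache.airavata.model.task.TaskModel;
import org.apache.airavata.registry.cpi.ExpCatChildDataType;
import org.apache.airavata.registry.cpi.ExperimentCatalog;
import org.apache.airavata.registry.cpi.RegistryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class StreamingTaskRegistrar {

    private static final Logger log = LoggerFactory.getLogger(StreamingTaskRegistrar.class);

    // Persist an optional streaming task and return its generated id, or null when the
    // registry write fails. This mirrors the best-effort handling around
    // executeDataStreaming(...) above: log the failure and keep the job running.
    static String registerStreamingTask(ExperimentCatalog catalog, TaskModel task, String processId) {
        try {
            return (String) catalog.add(ExpCatChildDataType.TASK, task, processId);
        } catch (RegistryException e) {
            log.error("Error while registering streaming task for process " + processId, e);
            return null; // non-fatal: output streaming is best effort
        }
    }
}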

Example 2 with RegistryException

Use of org.apache.airavata.registry.cpi.RegistryException in project airavata by apache.

Class BESJobSubmissionTask, method getSecurityConfig:

private DefaultClientConfiguration getSecurityConfig(ProcessContext pc) throws GFacException {
    DefaultClientConfiguration clientConfig = null;
    try {
        UNICORESecurityContext unicoreSecurityContext = SecurityUtils.getSecurityContext(pc);
        UserConfigurationDataModel userConfigDataModel = (UserConfigurationDataModel) pc.getExperimentCatalog().get(ExperimentCatalogModelType.USER_CONFIGURATION_DATA, pc.getExperimentId());
        // FIXME - remove following setter lines, and use original value comes with user configuration data model.
        userConfigDataModel.setGenerateCert(true);
        // userConfigDataModel.setUserDN("CN=swus3, O=Ultrascan Gateway, C=DE");
        if (userConfigDataModel.isGenerateCert()) {
            clientConfig = unicoreSecurityContext.getDefaultConfiguration(false, userConfigDataModel);
        } else {
            clientConfig = unicoreSecurityContext.getDefaultConfiguration(false);
        }
    } catch (RegistryException e) {
        throw new GFacException("Error! reading user configuration data from registry", e);
    } catch (ApplicationSettingsException e) {
        throw new GFacException("Error! retrieving default client configurations", e);
    }
    return clientConfig;
}
Also used : ApplicationSettingsException(org.apache.airavata.common.exception.ApplicationSettingsException) GFacException(org.apache.airavata.gfac.core.GFacException) UserConfigurationDataModel(org.apache.airavata.model.experiment.UserConfigurationDataModel) DefaultClientConfiguration(eu.unicore.util.httpclient.DefaultClientConfiguration) RegistryException(org.apache.airavata.registry.cpi.RegistryException)
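
Here the checked RegistryException (and ApplicationSettingsException) is wrapped into GFacException with a context message before it leaves the task. The sketch below generalizes the same ExperimentCatalog.get call into a read-and-cast helper; CatalogReads and readFromCatalog are hypothetical names, and the ExperimentCatalogModelType import location is assumed to be in the registry.cpi package alongside ExperimentCatalog.

import org.apache.airavata.gfac.core.GFacException;
import org.apache.airavata.registry.cpi.ExperimentCatalog;
import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
import org.apache.airavata.registry.cpi.RegistryException;

final class CatalogReads {

    // Fetch a catalog entry, cast it to the expected model class, and translate the
    // checked RegistryException into GFacException, as getSecurityConfig(...) does for
    // ExperimentCatalogModelType.USER_CONFIGURATION_DATA.
    static <T> T readFromCatalog(ExperimentCatalog catalog, ExperimentCatalogModelType type,
                                 Object identifier, Class<T> modelClass) throws GFacException {
        try {
            return modelClass.cast(catalog.get(type, identifier));
        } catch (RegistryException e) {
            throw new GFacException("Error reading " + type + " for " + identifier + " from the registry", e);
        }
    }
}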

Example 3 with RegistryException

Use of org.apache.airavata.registry.cpi.RegistryException in project airavata by apache.

Class SCPDataStageTask, method execute:

@Override
public TaskStatus execute(TaskContext taskContext) {
    TaskStatus status = new TaskStatus(TaskState.EXECUTING);
    AuthenticationInfo authenticationInfo = null;
    DataStagingTaskModel subTaskModel = null;
    String localDataDir = null;
    ProcessContext processContext = taskContext.getParentProcessContext();
    ProcessState processState = processContext.getProcessState();
    try {
        subTaskModel = ((DataStagingTaskModel) taskContext.getSubTaskModel());
        if (processState == ProcessState.OUTPUT_DATA_STAGING) {
            OutputDataObjectType processOutput = taskContext.getProcessOutput();
            if (processOutput != null && processOutput.getValue() == null) {
                log.error("expId: {}, processId:{}, taskId: {}:- Couldn't stage file {} , file name shouldn't be null", taskContext.getExperimentId(), taskContext.getProcessId(), taskContext.getTaskId(), processOutput.getName());
                status = new TaskStatus(TaskState.FAILED);
                if (processOutput.isIsRequired()) {
                    status.setReason("File name is null, but this output's isRequired bit is not set");
                } else {
                    status.setReason("File name is null");
                }
                return status;
            }
        } else if (processState == ProcessState.INPUT_DATA_STAGING) {
            InputDataObjectType processInput = taskContext.getProcessInput();
            if (processInput != null && processInput.getValue() == null) {
                log.error("expId: {}, processId:{}, taskId: {}:- Couldn't stage file {} , file name shouldn't be null", taskContext.getExperimentId(), taskContext.getProcessId(), taskContext.getTaskId(), processInput.getName());
                status = new TaskStatus(TaskState.FAILED);
                if (processInput.isIsRequired()) {
                    status.setReason("File name is null, but this input's isRequired bit is not set");
                } else {
                    status.setReason("File name is null");
                }
                return status;
            }
        } else {
            status.setState(TaskState.FAILED);
            status.setReason("Invalid task invocation, Support " + ProcessState.INPUT_DATA_STAGING.name() + " and " + "" + ProcessState.OUTPUT_DATA_STAGING.name() + " process phases. found " + processState.name());
            return status;
        }
        StorageResourceDescription storageResource = processContext.getStorageResource();
        // StoragePreference storagePreference = taskContext.getParentProcessContext().getStoragePreference();
        String hostName = null;
        if (storageResource != null) {
            hostName = storageResource.getHostName();
        } else {
            throw new GFacException("Storage Resource is null");
        }
        String inputPath = processContext.getStorageFileSystemRootLocation();
        inputPath = (inputPath.endsWith(File.separator) ? inputPath : inputPath + File.separator);
        // use rsync instead of scp if source and destination host and user name is same.
        URI sourceURI = new URI(subTaskModel.getSource());
        String fileName = sourceURI.getPath().substring(sourceURI.getPath().lastIndexOf(File.separator) + 1, sourceURI.getPath().length());
        Session remoteSession = Factory.getSSHSession(Factory.getComputerResourceSSHKeyAuthentication(processContext), processContext.getComputeResourceServerInfo());
        Session storageSession = Factory.getSSHSession(Factory.getStorageSSHKeyAuthentication(processContext), processContext.getStorageResourceServerInfo());
        URI destinationURI = null;
        if (subTaskModel.getDestination().startsWith("dummy")) {
            destinationURI = TaskUtils.getDestinationURI(taskContext, hostName, inputPath, fileName);
            subTaskModel.setDestination(destinationURI.toString());
        } else {
            destinationURI = new URI(subTaskModel.getDestination());
        }
        if (sourceURI.getHost().equalsIgnoreCase(destinationURI.getHost()) && sourceURI.getUserInfo().equalsIgnoreCase(destinationURI.getUserInfo())) {
            localDataCopy(taskContext, sourceURI, destinationURI);
            status.setState(TaskState.COMPLETED);
            status.setReason("Locally copied file using 'cp' command ");
            return status;
        }
        status = new TaskStatus(TaskState.COMPLETED);
        // Wildcard for file name. Has to find the correct name.
        if (fileName.contains("*")) {
            String destParentPath = (new File(destinationURI.getPath())).getParentFile().getPath();
            String sourceParentPath = (new File(sourceURI.getPath())).getParentFile().getPath();
            List<String> fileNames = taskContext.getParentProcessContext().getDataMovementRemoteCluster().getFileNameFromExtension(fileName, sourceParentPath, remoteSession);
            ExperimentCatalog experimentCatalog = processContext.getExperimentCatalog();
            String experimentId = processContext.getExperimentId();
            String processId = processContext.getProcessId();
            OutputDataObjectType processOutput = taskContext.getProcessOutput();
            for (int i = 0; i < fileNames.size(); i++) {
                String temp = fileNames.get(i);
                if (temp != null && temp != "") {
                    fileName = temp;
                }
                if (destParentPath.endsWith(File.separator)) {
                    destinationURI = new URI(destParentPath + fileName);
                } else {
                    destinationURI = new URI(destParentPath + File.separator + fileName);
                }
                // Wildcard support is only enabled for output data staging
                if (processState == ProcessState.OUTPUT_DATA_STAGING) {
                    processOutput.setName(fileName);
                    experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, Arrays.asList(processOutput), experimentId);
                    experimentCatalog.add(ExpCatChildDataType.PROCESS_OUTPUT, Arrays.asList(processOutput), processId);
                    taskContext.setProcessOutput(processOutput);
                    makeDir(taskContext, destinationURI);
                    // TODO - save updated subtask model with new destination
                    outputDataStaging(taskContext, remoteSession, sourceURI, storageSession, destinationURI);
                    status.setReason("Successfully staged output data");
                }
            }
            if (processState == ProcessState.OUTPUT_DATA_STAGING) {
                status.setReason("Successfully staged output data");
            } else {
                status.setReason("Wildcard support is only enabled for output data staging");
            }
        } else {
            if (processState == ProcessState.INPUT_DATA_STAGING) {
                inputDataStaging(taskContext, storageSession, sourceURI, remoteSession, destinationURI);
                status.setReason("Successfully staged input data");
            } else if (processState == ProcessState.OUTPUT_DATA_STAGING) {
                makeDir(taskContext, destinationURI);
                // TODO - save updated subtask model with new destination
                outputDataStaging(taskContext, remoteSession, sourceURI, storageSession, destinationURI);
                status.setReason("Successfully staged output data");
            }
        }
    } catch (TException e) {
        String msg = "Couldn't create subTask model thrift model";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
        return status;
    } catch (ApplicationSettingsException | FileNotFoundException e) {
        String msg = "Failed while reading credentials";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (URISyntaxException e) {
        String msg = "Source or destination uri is not correct source : " + subTaskModel.getSource() + ", " + "destination : " + subTaskModel.getDestination();
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (CredentialStoreException e) {
        String msg = "Storage authentication issue, could be invalid credential token";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (AiravataException e) {
        String msg = "Error while creating ssh session with client";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (JSchException | IOException e) {
        String msg = "Failed to do scp with client";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (RegistryException | GFacException e) {
        String msg = "Data staging failed";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    }
    return status;
}
Also used : TException(org.apache.thrift.TException) JSchException(com.jcraft.jsch.JSchException) ApplicationSettingsException(org.apache.airavata.common.exception.ApplicationSettingsException) ExperimentCatalog(org.apache.airavata.registry.cpi.ExperimentCatalog) FileNotFoundException(java.io.FileNotFoundException) URISyntaxException(java.net.URISyntaxException) CredentialStoreException(org.apache.airavata.credential.store.store.CredentialStoreException) URI(java.net.URI) AuthenticationInfo(org.apache.airavata.gfac.core.authentication.AuthenticationInfo) ProcessContext(org.apache.airavata.gfac.core.context.ProcessContext) OutputDataObjectType(org.apache.airavata.model.application.io.OutputDataObjectType) AiravataException(org.apache.airavata.common.exception.AiravataException) InputDataObjectType(org.apache.airavata.model.application.io.InputDataObjectType) IOException(java.io.IOException) TaskStatus(org.apache.airavata.model.status.TaskStatus) RegistryException(org.apache.airavata.registry.cpi.RegistryException) ProcessState(org.apache.airavata.model.status.ProcessState) StorageResourceDescription(org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription) GFacException(org.apache.airavata.gfac.core.GFacException) DataStagingTaskModel(org.apache.airavata.model.task.DataStagingTaskModel) ErrorModel(org.apache.airavata.model.commons.ErrorModel) File(java.io.File) Session(com.jcraft.jsch.Session)
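
Every catch clause in execute(...) repeats the same steps: log the error, set the status to FAILED, record a reason, build an ErrorModel, and attach it to the task model. A compact sketch of that boilerplate factored into a helper follows; StagingErrorHelper is a hypothetical name (not part of SCPDataStageTask), it uses only setters already shown in the example, and import locations are taken from the lists on this page.

import java.util.Arrays;
import org.apache.airavata.gfac.core.context.TaskContext;
import org.apache.airavata.model.commons.ErrorModel;
import org.apache.airavata.model.status.TaskState;
import org.apache.airavata.model.status.TaskStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class StagingErrorHelper {

    private static final Logger log = LoggerFactory.getLogger(StagingErrorHelper.class);

    // Mark the task FAILED, record the reason, and attach an ErrorModel,
    // exactly as each catch block in the example does by hand.
    static TaskStatus fail(TaskContext taskContext, TaskStatus status, String msg, Exception e) {
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
        return status;
    }
}

With this helper, the RegistryException | GFacException clause, for example, would reduce to a single call such as StagingErrorHelper.fail(taskContext, status, "Data staging failed", e).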

Example 4 with RegistryException

Use of org.apache.airavata.registry.cpi.RegistryException in project airavata by apache.

Class DataTransferrer, method publishFinalOutputs:

public void publishFinalOutputs() throws GFacException {
    try {
        if (!resultantOutputsLst.isEmpty()) {
            log.debug("Publishing the list of outputs to the registry instance..");
            ExperimentCatalog experimentCatalog = processContext.getExperimentCatalog();
            experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, resultantOutputsLst, processContext.getExperimentId());
        }
    } catch (RegistryException e) {
        throw new GFacException("Cannot publish outputs to the registry.");
    }
}
Also used : ExperimentCatalog(org.apache.airavata.registry.cpi.ExperimentCatalog) GFacException(org.apache.airavata.gfac.core.GFacException) RegistryException(org.apache.airavata.registry.cpi.RegistryException)
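
Note that the GFacException thrown above discards the caught RegistryException, so the root cause never reaches the caller's stack trace. A minimal variant that preserves it, assuming the two-argument GFacException(String, Throwable) constructor already used in Example 2 and the same fields (resultantOutputsLst, processContext, log) as the method above:

public void publishFinalOutputs() throws GFacException {
    try {
        if (!resultantOutputsLst.isEmpty()) {
            log.debug("Publishing the list of outputs to the registry instance..");
            ExperimentCatalog experimentCatalog = processContext.getExperimentCatalog();
            experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, resultantOutputsLst,
                    processContext.getExperimentId());
        }
    } catch (RegistryException e) {
        // keep the RegistryException as the cause instead of dropping it
        throw new GFacException("Cannot publish outputs to the registry.", e);
    }
}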

Example 5 with RegistryException

Use of org.apache.airavata.registry.cpi.RegistryException in project airavata by apache.

Class WorkflowInterpreter, method handleForEach:

private void handleForEach(Node node) throws WorkflowException {
    final ForEachNode forEachNode = (ForEachNode) node;
    EndForEachNode endForEachNode = null;
    Collection<Node> repeatNodes = node.getOutputPort(0).getToNodes();
    // we will support only one for now
    if (repeatNodes.size() != 1) {
        throw new WorkFlowInterpreterException("Only one node allowed inside foreach");
    }
    Iterator<Node> iterator = repeatNodes.iterator();
    if (iterator.hasNext()) {
        Node middleNode = iterator.next();
        // output
        if ((!(middleNode instanceof WSNode)) && (!(middleNode instanceof SubWorkflowNode))) {
            throw new WorkFlowInterpreterException("Encountered Node inside foreach that is not a WSNode" + middleNode);
        } else if (middleNode instanceof SubWorkflowNode) {
            /* Get the EndforEach Node of the Subworkflow */
            Iterator<Node> subWorkflowOut = middleNode.getOutputPort(0).getToNodes().iterator();
            while (subWorkflowOut.hasNext()) {
                Node node2 = subWorkflowOut.next();
                if (node2 instanceof EndForEachNode) {
                    endForEachNode = (EndForEachNode) node2;
                }
            }
            final LinkedList<String> listOfValues = new LinkedList<String>();
            InterpreterUtil.getInputsForForEachNode(forEachNode, listOfValues, this.invokerMap);
            final Integer[] inputNumbers = InterpreterUtil.getNumberOfInputsForForEachNode(forEachNode, this.invokerMap);
            Workflow workflow1 = ((SubWorkflowNode) middleNode).getWorkflow();
            List<NodeImpl> nodes = workflow1.getGraph().getNodes();
            List<Node> wsNodes = new ArrayList<Node>();
            /* Take the List of WSNodes in the subworkflow */
            for (NodeImpl subWorkflowNode : nodes) {
                if (subWorkflowNode instanceof WSNode) {
                    wsNodes.add(subWorkflowNode);
                }
            }
            for (int i = 0; i < wsNodes.size(); i++) {
                final WSNode node1 = (WSNode) wsNodes.get(i);
                SystemComponentInvoker systemInvoker = null;
                List<DataPort> outputPorts1 = node1.getOutputPorts();
                List<Node> endForEachNodes = new ArrayList<Node>();
                for (DataPort port : outputPorts1) {
                    Iterator<Node> endForEachNodeItr1 = port.getToNodes().iterator();
                    while (endForEachNodeItr1.hasNext()) {
                        Node node2 = endForEachNodeItr1.next();
                        if (node2 instanceof EndForEachNode) {
                            endForEachNodes.add(node2);
                        } else if (node2 instanceof OutputNode) {
                        // intentionally left noop
                        } else {
                            throw new WorkFlowInterpreterException("Found More than one node inside foreach");
                        }
                    }
                }
                final List<Node> finalEndForEachNodes = endForEachNodes;
                Iterator<Node> endForEachNodeItr1 = node1.getOutputPort(0).getToNodes().iterator();
                while (endForEachNodeItr1.hasNext()) {
                    Node node2 = endForEachNodeItr1.next();
                    // Start reading input came for foreach node
                    int parallelRuns = listOfValues.size() * node1.getOutputPorts().size();
                    if (listOfValues.size() > 0) {
                        forEachNode.setState(NodeExecutionState.EXECUTING);
                        node1.setState(NodeExecutionState.EXECUTING);
                        List<DataPort> outputPorts = node1.getOutputPorts();
                        final AtomicInteger counter = new AtomicInteger();
                        for (Node endFor : endForEachNodes) {
                            systemInvoker = new SystemComponentInvoker();
                            this.invokerMap.put(endFor, systemInvoker);
                        }
                        final Map<Node, Invoker> finalMap = this.invokerMap;
                        new Thread() {

                            @Override
                            public void run() {
                                try {
                                    runInThread(listOfValues, forEachNode, node1, finalEndForEachNodes, finalMap, counter, inputNumbers);
                                } catch (WorkflowException e) {
                                    log.error(e.getLocalizedMessage(), e);
                                } catch (RegistryException e) {
                                    log.error(e.getMessage(), e);
                                } catch (TException e) {
                                    log.error(e.getMessage(), e);
                                }
                            }
                        }.start();
                        while (counter.intValue() < parallelRuns) {
                            try {
                                Thread.sleep(100);
                            } catch (InterruptedException e) {
                                Thread.currentThread().interrupt();
                            }
                        }
                    // if (!(node2 instanceof OutputNode)) {
                    // listOfValues.removeAll(listOfValues);
                    // String output = (String) systemInvoker.getOutput(node1.getOutputPort(0).getName());
                    // XmlElement xmlElement = XMLUtil.stringToXmlElement("<result>" + output + "</result>");
                    // Iterator iterator1 = xmlElement.children().iterator();
                    // while (iterator1.hasNext()) {
                    // Object next1 = iterator1.next();
                    // if (next1 instanceof XmlElement) {
                    // listOfValues.add((String) ((XmlElement) next1).children().iterator().next());
                    // }
                    // }
                    // }
                    }
                }
            }
            // we have finished execution so end foreach is finished
            // todo this has to be done in a separate thread
            endForEachNode.setState(NodeExecutionState.FINISHED);
            middleNode.setState(NodeExecutionState.FINISHED);
            node.setState(NodeExecutionState.FINISHED);
        } else {
            // First node after foreach should end with EndForEachNode
            List<DataPort> outputPorts1 = middleNode.getOutputPorts();
            List<Node> endForEachNodes = new ArrayList<Node>();
            for (DataPort port : outputPorts1) {
                Iterator<Node> endForEachNodeItr1 = port.getToNodes().iterator();
                while (endForEachNodeItr1.hasNext()) {
                    Node node2 = endForEachNodeItr1.next();
                    if (node2 instanceof EndForEachNode) {
                        endForEachNodes.add(node2);
                    } else if (node2 instanceof OutputNode) {
                    // intentionally left noop
                    } else {
                        throw new WorkFlowInterpreterException("Found More than one node inside foreach");
                    }
                }
            }
            final List<Node> finalEndForEachNodes = endForEachNodes;
            final Node foreachWSNode = middleNode;
            final LinkedList<String> listOfValues = new LinkedList<String>();
            // Start reading input came for foreach node
            InterpreterUtil.getInputsForForEachNode(forEachNode, listOfValues, this.invokerMap);
            final Integer[] inputNumbers = InterpreterUtil.getNumberOfInputsForForEachNode(forEachNode, this.invokerMap);
            int parallelRuns = createInputValues(listOfValues, inputNumbers).size() * outputPorts1.size();
            if (listOfValues.size() > 0) {
                forEachNode.setState(NodeExecutionState.EXECUTING);
                foreachWSNode.setState(NodeExecutionState.EXECUTING);
                List<DataPort> outputPorts = middleNode.getOutputPorts();
                final AtomicInteger counter = new AtomicInteger();
                for (Node endFor : endForEachNodes) {
                    final SystemComponentInvoker systemInvoker = new SystemComponentInvoker();
                    this.invokerMap.put(endFor, systemInvoker);
                }
                final Map<Node, Invoker> finalInvokerMap = this.invokerMap;
                new Thread() {

                    @Override
                    public void run() {
                        try {
                            runInThread(listOfValues, forEachNode, foreachWSNode, finalEndForEachNodes, finalInvokerMap, counter, inputNumbers);
                        } catch (WorkflowException e) {
                            log.error(e.getLocalizedMessage(), e);
                        } catch (RegistryException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        } catch (TException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                }.start();
                while (counter.intValue() < parallelRuns) {
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }
                // we have finished execution so end foreach is finished
                // todo this has to be done in a separate thread
                middleNode.setState(NodeExecutionState.FINISHED);
                for (Node endForEach : endForEachNodes) {
                    endForEach.setState(NodeExecutionState.FINISHED);
                }
            } else {
                throw new WorkFlowInterpreterException("No array values found for foreach");
            }
        }
    }
}
Also used : TException(org.apache.thrift.TException) DynamicNode(org.apache.airavata.workflow.model.graph.dynamic.DynamicNode) Node(org.apache.airavata.workflow.model.graph.Node) SubWorkflowNode(org.apache.airavata.workflow.model.graph.subworkflow.SubWorkflowNode) WSNode(org.apache.airavata.workflow.model.graph.ws.WSNode) DataPort(org.apache.airavata.workflow.model.graph.DataPort) WSNode(org.apache.airavata.workflow.model.graph.ws.WSNode) SubWorkflowNode(org.apache.airavata.workflow.model.graph.subworkflow.SubWorkflowNode) NodeImpl(org.apache.airavata.workflow.model.graph.impl.NodeImpl) WorkflowException(org.apache.airavata.workflow.model.exceptions.WorkflowException) Workflow(org.apache.airavata.workflow.model.wf.Workflow) RegistryException(org.apache.airavata.registry.cpi.RegistryException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger)
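
Because runInThread(...) declares checked exceptions (WorkflowException, RegistryException, TException) that cannot escape Runnable.run(), the interpreter catches and logs them inside the anonymous Thread; the second thread in the example still carries printStackTrace() placeholders. The sketch below applies the logging consistently; LoggingForEachRunner and ForEachWork are hypothetical names, not part of WorkflowInterpreter.

import org.apache.airavata.registry.cpi.RegistryException;
import org.apache.airavata.workflow.model.exceptions.WorkflowException;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class LoggingForEachRunner {

    private static final Logger log = LoggerFactory.getLogger(LoggingForEachRunner.class);

    // A unit of foreach work that may throw the same checked exceptions as runInThread(...).
    interface ForEachWork {
        void run() throws WorkflowException, RegistryException, TException;
    }

    // Run the work on its own thread, logging checked exceptions instead of
    // calling printStackTrace(), as the first anonymous Thread in the example does.
    static Thread start(ForEachWork work) {
        Thread worker = new Thread(() -> {
            try {
                work.run();
            } catch (WorkflowException e) {
                log.error(e.getLocalizedMessage(), e);
            } catch (RegistryException | TException e) {
                log.error(e.getMessage(), e);
            }
        });
        worker.start();
        return worker;
    }
}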

Aggregations

RegistryException (org.apache.airavata.registry.cpi.RegistryException): 134 usages
EntityManager (javax.persistence.EntityManager): 54 usages
Query (javax.persistence.Query): 29 usages
QueryGenerator (org.apache.airavata.registry.core.experiment.catalog.utils.QueryGenerator): 29 usages
ArrayList (java.util.ArrayList): 15 usages
List (java.util.List): 12 usages
ExperimentCatResource (org.apache.airavata.registry.core.experiment.catalog.ExperimentCatResource): 12 usages
OutputDataObjectType (org.apache.airavata.model.application.io.OutputDataObjectType): 11 usages
ExperimentCatalogException (org.apache.airavata.registry.cpi.ExperimentCatalogException): 8 usages
AiravataException (org.apache.airavata.common.exception.AiravataException): 6 usages
GFacException (org.apache.airavata.gfac.core.GFacException): 6 usages
InputDataObjectType (org.apache.airavata.model.application.io.InputDataObjectType): 6 usages
Node (org.apache.airavata.workflow.model.graph.Node): 6 usages
DynamicNode (org.apache.airavata.workflow.model.graph.dynamic.DynamicNode): 6 usages
SubWorkflowNode (org.apache.airavata.workflow.model.graph.subworkflow.SubWorkflowNode): 6 usages
WSNode (org.apache.airavata.workflow.model.graph.ws.WSNode): 6 usages
Timestamp (java.sql.Timestamp): 5 usages
ApplicationSettingsException (org.apache.airavata.common.exception.ApplicationSettingsException): 4 usages
DataPort (org.apache.airavata.workflow.model.graph.DataPort): 4 usages
HashMap (java.util.HashMap): 3 usages