Search in sources:

Example 1 with OutputDataObjectType

Use of org.apache.airavata.model.application.io.OutputDataObjectType in project airavata by apache.

The class GFacEngineImpl, method executeTaskListFrom:

private void executeTaskListFrom(ProcessContext processContext, String startingTaskId) throws GFacException {
    // checkpoint
    if (processContext.isInterrupted() && processContext.getProcessState() != ProcessState.MONITORING) {
        GFacUtils.handleProcessInterrupt(processContext);
        return;
    }
    List<TaskModel> taskList = processContext.getTaskList();
    Map<String, TaskModel> taskMap = processContext.getTaskMap();
    boolean fastForward = true;
    for (String taskId : processContext.getTaskExecutionOrder()) {
        if (fastForward) {
            if (taskId.equalsIgnoreCase(startingTaskId)) {
                fastForward = false;
            } else {
                continue;
            }
        }
        TaskModel taskModel = taskMap.get(taskId);
        processContext.setCurrentExecutingTaskModel(taskModel);
        TaskTypes taskType = taskModel.getTaskType();
        TaskContext taskContext = getTaskContext(processContext);
        taskContext.setTaskModel(taskModel);
        ProcessStatus status = null;
        switch(taskType) {
            case ENV_SETUP:
                status = new ProcessStatus(ProcessState.CONFIGURING_WORKSPACE);
                status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                processContext.setProcessStatus(status);
                GFacUtils.saveAndPublishProcessStatus(processContext);
                // checkpoint
                if (processContext.isInterrupted()) {
                    GFacUtils.handleProcessInterrupt(processContext);
                    return;
                }
                configureWorkspace(taskContext, processContext.isRecovery());
                // checkpoint
                if (processContext.isInterrupted()) {
                    GFacUtils.handleProcessInterrupt(processContext);
                    return;
                }
                break;
            case DATA_STAGING:
                try {
                    // checkpoint
                    if (processContext.isInterrupted()) {
                        GFacUtils.handleProcessInterrupt(processContext);
                        return;
                    }
                    DataStagingTaskModel subTaskModel = (DataStagingTaskModel) taskContext.getSubTaskModel();
                    DataStageType type = subTaskModel.getType();
                    switch(type) {
                        case INPUT:
                            status = new ProcessStatus(ProcessState.INPUT_DATA_STAGING);
                            status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                            processContext.setProcessStatus(status);
                            GFacUtils.saveAndPublishProcessStatus(processContext);
                            taskContext.setProcessInput(subTaskModel.getProcessInput());
                            inputDataStaging(taskContext, processContext.isRecovery());
                            break;
                        // Note: OUPUT is the constant's actual spelling in the DataStageType Thrift enum.
                        case OUPUT:
                            status = new ProcessStatus(ProcessState.OUTPUT_DATA_STAGING);
                            status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                            processContext.setProcessStatus(status);
                            GFacUtils.saveAndPublishProcessStatus(processContext);
                            taskContext.setProcessOutput(subTaskModel.getProcessOutput());
                            outputDataStaging(taskContext, processContext.isRecovery(), false);
                            break;
                        case ARCHIVE_OUTPUT:
                            status = new ProcessStatus(ProcessState.OUTPUT_DATA_STAGING);
                            status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                            processContext.setProcessStatus(status);
                            GFacUtils.saveAndPublishProcessStatus(processContext);
                            outputDataStaging(taskContext, processContext.isRecovery(), true);
                            break;
                    }
                    // checkpoint
                    if (processContext.isInterrupted()) {
                        GFacUtils.handleProcessInterrupt(processContext);
                        return;
                    }
                } catch (TException e) {
                    throw new GFacException(e);
                }
                break;
            case JOB_SUBMISSION:
                // checkpoint
                if (processContext.isInterrupted()) {
                    GFacUtils.handleProcessInterrupt(processContext);
                    return;
                }
                status = new ProcessStatus(ProcessState.EXECUTING);
                status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                processContext.setProcessStatus(status);
                GFacUtils.saveAndPublishProcessStatus(processContext);
                executeJobSubmission(taskContext, processContext.isRecovery());
                // Don't put any checkpoint in between JobSubmission and Monitoring tasks
                JobStatus jobStatus = processContext.getJobModel().getJobStatuses().get(0);
                if (jobStatus != null && (jobStatus.getJobState() == JobState.SUBMITTED || jobStatus.getJobState() == JobState.QUEUED || jobStatus.getJobState() == JobState.ACTIVE)) {
                    List<OutputDataObjectType> processOutputs = processContext.getProcessModel().getProcessOutputs();
                    if (processOutputs != null && !processOutputs.isEmpty()) {
                        for (OutputDataObjectType output : processOutputs) {
                            try {
                                if (output.isOutputStreaming()) {
                                    TaskModel streamingTaskModel = new TaskModel();
                                    streamingTaskModel.setTaskType(TaskTypes.OUTPUT_FETCHING);
                                    streamingTaskModel.setTaskStatuses(Arrays.asList(new TaskStatus(TaskState.CREATED)));
                                    streamingTaskModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
                                    streamingTaskModel.setParentProcessId(processContext.getProcessId());
                                    TaskContext streamingTaskContext = getTaskContext(processContext);
                                    DataStagingTaskModel submodel = new DataStagingTaskModel();
                                    submodel.setType(DataStageType.OUPUT);
                                    submodel.setProcessOutput(output);
                                    URI source = new URI(processContext.getDataMovementProtocol().name(), processContext.getComputeResourceLoginUserName(), processContext.getComputeResourceDescription().getHostName(), 22, processContext.getWorkingDir() + output.getValue(), null, null);
                                    submodel.setSource(source.getPath());
                                    submodel.setDestination("dummy://temp/file/location");
                                    streamingTaskModel.setSubTaskModel(ThriftUtils.serializeThriftObject(submodel));
                                    String streamTaskId = (String) processContext.getExperimentCatalog().add(ExpCatChildDataType.TASK, streamingTaskModel, processContext.getProcessId());
                                    streamingTaskModel.setTaskId(streamTaskId);
                                    streamingTaskContext.setTaskModel(streamingTaskModel);
                                    executeDataStreaming(streamingTaskContext, processContext.isRecovery());
                                }
                            } catch (URISyntaxException | TException | RegistryException e) {
                                log.error("Error while streaming output " + output.getValue());
                            }
                        }
                    }
                }
                break;
            case MONITORING:
                status = new ProcessStatus(ProcessState.MONITORING);
                status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                processContext.setProcessStatus(status);
                GFacUtils.saveAndPublishProcessStatus(processContext);
                executeJobMonitoring(taskContext, processContext.isRecovery());
                break;
            case ENV_CLEANUP:
                // TODO implement environment clean up task logic
                break;
            default:
                throw new GFacException("Unsupported Task type");
        }
        if (processContext.isPauseTaskExecution()) {
            // If any task put processContext to wait, the same task must continue processContext execution.
            return;
        }
    }
    processContext.setComplete(true);
}
Also used:
TException (org.apache.thrift.TException)
TaskContext (org.apache.airavata.gfac.core.context.TaskContext)
ProcessStatus (org.apache.airavata.model.status.ProcessStatus)
URISyntaxException (java.net.URISyntaxException)
TaskStatus (org.apache.airavata.model.status.TaskStatus)
URI (java.net.URI)
RegistryException (org.apache.airavata.registry.cpi.RegistryException)
DataStageType (org.apache.airavata.model.task.DataStageType)
JobStatus (org.apache.airavata.model.status.JobStatus)
GFacException (org.apache.airavata.gfac.core.GFacException)
OutputDataObjectType (org.apache.airavata.model.application.io.OutputDataObjectType)
TaskTypes (org.apache.airavata.model.task.TaskTypes)
DataStagingTaskModel (org.apache.airavata.model.task.DataStagingTaskModel)
EnvironmentSetupTaskModel (org.apache.airavata.model.task.EnvironmentSetupTaskModel)
JobSubmissionTaskModel (org.apache.airavata.model.task.JobSubmissionTaskModel)
MonitorTaskModel (org.apache.airavata.model.task.MonitorTaskModel)
TaskModel (org.apache.airavata.model.task.TaskModel)
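The streaming branch in executeTaskListFrom is driven entirely by isOutputStreaming() on each process output. Below is a minimal sketch (not taken from the Airavata sources) of how such an output could be populated; the output name and file value are invented examples, and only the Thrift-generated setters already visible in the snippets on this page are assumed.

// Minimal sketch: flag a process output for streaming so that the JOB_SUBMISSION branch
// above creates an OUTPUT_FETCHING task for it. Names and values are hypothetical.
import org.apache.airavata.model.application.io.DataType;
import org.apache.airavata.model.application.io.OutputDataObjectType;

public class StreamingOutputExample {
    public static OutputDataObjectType buildStreamingOutput() {
        OutputDataObjectType output = new OutputDataObjectType();
        output.setName("trajectory");        // hypothetical output name
        output.setValue("trajectory.dcd");   // file name relative to the working directory
        output.setType(DataType.URI);
        output.setIsRequired(false);
        output.setOutputStreaming(true);     // isOutputStreaming() is what the branch above checks
        return output;
    }
}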

Example 2 with OutputDataObjectType

Use of org.apache.airavata.model.application.io.OutputDataObjectType in project airavata by apache.

The class BESJobSubmissionTask, method execute:

@Override
public TaskStatus execute(TaskContext taskContext) {
    TaskStatus taskStatus = new TaskStatus(TaskState.CREATED);
    StorageClient sc = null;
    ProcessContext processContext = taskContext.getParentProcessContext();
    // FIXME - use original output dir
    setInputOutputLocations(processContext);
    try {
        // can't reuse if UserDN has been changed.
        secProperties = getSecurityConfig(processContext);
    // try secProperties = secProperties.clone() if we can't use already initialized ClientConfigurations.
    } catch (GFacException e) {
        String msg = "Unicorn security context initialization error";
        log.error(msg, e);
        taskStatus.setState(TaskState.FAILED);
        taskStatus.setReason(msg);
        return taskStatus;
    }
    try {
        JobSubmissionProtocol protocol = processContext.getJobSubmissionProtocol();
        JobSubmissionInterface jobSubmissionInterface = GFacUtils.getPreferredJobSubmissionInterface(processContext);
        String factoryUrl = null;
        if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
            UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(jobSubmissionInterface.getJobSubmissionInterfaceId());
            factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
        }
        EndpointReferenceType eprt = EndpointReferenceType.Factory.newInstance();
        eprt.addNewAddress().setStringValue(factoryUrl);
        String userDN = processContext.getProcessModel().getUserDn();
        CreateActivityDocument cad = CreateActivityDocument.Factory.newInstance();
        // create storage
        StorageCreator storageCreator = new StorageCreator(secProperties, factoryUrl, 5, null);
        sc = storageCreator.createStorage();
        JobDefinitionType jobDefinition = JSDLGenerator.buildJSDLInstance(processContext, sc.getUrl()).getJobDefinition();
        cad.addNewCreateActivity().addNewActivityDocument().setJobDefinition(jobDefinition);
        log.info("Submitted JSDL: " + jobDefinition.getJobDescription());
        // copy files to local
        copyInputFilesToLocal(taskContext);
        // upload files if any
        DataTransferrer dt = new DataTransferrer(processContext, sc);
        dt.uploadLocalFiles();
        JobModel jobDetails = new JobModel();
        jobDetails.setTaskId(taskContext.getTaskId());
        jobDetails.setProcessId(taskContext.getProcessId());
        FactoryClient factory = new FactoryClient(eprt, secProperties);
        log.info("Activity Submitting to {} ... \n", factoryUrl);
        CreateActivityResponseDocument response = factory.createActivity(cad);
        log.info("Activity Submitted to {} ... \n", factoryUrl);
        EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();
        log.info("Activity : " + activityEpr.getAddress().getStringValue() + " Submitted.");
        // factory.waitWhileActivityIsDone(activityEpr, 1000);
        jobId = WSUtilities.extractResourceID(activityEpr);
        if (jobId == null) {
            jobId = new Long(Calendar.getInstance().getTimeInMillis()).toString();
        }
        log.info("JobID: " + jobId);
        jobDetails.setJobId(jobId);
        jobDetails.setJobDescription(activityEpr.toString());
        jobDetails.setJobStatuses(Arrays.asList(new JobStatus(JobState.SUBMITTED)));
        processContext.setJobModel(jobDetails);
        GFacUtils.saveJobModel(processContext, jobDetails);
        GFacUtils.saveJobStatus(processContext, jobDetails);
        log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), factory.getActivityStatus(activityEpr).toString()));
        waitUntilDone(eprt, activityEpr, processContext, secProperties);
        ActivityStatusType activityStatus = null;
        activityStatus = getStatus(factory, activityEpr);
        log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), activityStatus.getState().toString()));
        ActivityClient activityClient;
        activityClient = new ActivityClient(activityEpr, secProperties);
        // now use the activity working directory property
        dt.setStorageClient(activityClient.getUspaceClient());
        List<OutputDataObjectType> copyOutput = null;
        if ((activityStatus.getState() == ActivityStateEnumeration.FAILED)) {
            String error = activityStatus.getFault().getFaultcode().getLocalPart() + "\n" + activityStatus.getFault().getFaultstring() + "\n EXITCODE: " + activityStatus.getExitCode();
            log.error(error);
            JobState applicationJobStatus = JobState.FAILED;
            jobDetails.setJobStatuses(Arrays.asList(new JobStatus(applicationJobStatus)));
            sendNotification(processContext, jobDetails);
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
            }
            // What if the job failed before execution and no stdout files were generated yet?
            log.debug("Downloading any standard output and error files, if they were produced.");
            copyOutput = dt.downloadRemoteFiles();
        } else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
            JobState applicationJobStatus = JobState.CANCELED;
            jobDetails.setJobStatuses(Arrays.asList(new JobStatus(applicationJobStatus)));
            GFacUtils.saveJobStatus(processContext, jobDetails);
            throw new GFacException(processContext.getExperimentId() + " Job Canceled");
        } else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
            try {
                Thread.sleep(5000);
            } catch (InterruptedException ignored) {
            }
            JobState applicationJobStatus = JobState.COMPLETE;
            jobDetails.setJobStatuses(Arrays.asList(new JobStatus(applicationJobStatus)));
            GFacUtils.saveJobStatus(processContext, jobDetails);
            log.info("Job Id: {}, exit code: {}, exit status: {}", jobDetails.getJobId(), activityStatus.getExitCode(), ActivityStateEnumeration.FINISHED.toString());
            // if (activityStatus.getExitCode() == 0) {
            // } else {
            // dt.downloadStdOuts();
            // }
            copyOutput = dt.downloadRemoteFiles();
        }
        if (copyOutput != null) {
            copyOutputFilesToStorage(taskContext, copyOutput);
            for (OutputDataObjectType outputDataObjectType : copyOutput) {
                GFacUtils.saveExperimentOutput(processContext, outputDataObjectType.getName(), outputDataObjectType.getValue());
            }
        }
        // dt.publishFinalOutputs();
        taskStatus.setState(TaskState.COMPLETED);
    } catch (AppCatalogException e) {
        log.error("Error while retrieving UNICORE job submission..", e);
        taskStatus.setState(TaskState.FAILED);
    } catch (Exception e) {
        log.error("BES task failed... ", e);
        taskStatus.setState(TaskState.FAILED);
    }
    return taskStatus;
}
Also used:
FactoryClient (de.fzj.unicore.bes.client.FactoryClient)
JobSubmissionInterface (org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface)
EndpointReferenceType (org.w3.x2005.x08.addressing.EndpointReferenceType)
ProcessContext (org.apache.airavata.gfac.core.context.ProcessContext)
JobStatus (org.apache.airavata.model.status.JobStatus)
UnicoreJobSubmission (org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission)
AppCatalogException (org.apache.airavata.registry.cpi.AppCatalogException)
OutputDataObjectType (org.apache.airavata.model.application.io.OutputDataObjectType)
JobDefinitionType (org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType)
JobState (org.apache.airavata.model.status.JobState)
JobModel (org.apache.airavata.model.job.JobModel)
ActivityClient (de.fzj.unicore.bes.client.ActivityClient)
JobSubmissionProtocol (org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol)
StorageClient (de.fzj.unicore.uas.client.StorageClient)
TaskStatus (org.apache.airavata.model.status.TaskStatus)
URISyntaxException (java.net.URISyntaxException)
CredentialStoreException (org.apache.airavata.credential.store.store.CredentialStoreException)
TaskException (org.apache.airavata.gfac.core.task.TaskException)
JSchException (com.jcraft.jsch.JSchException)
RegistryException (org.apache.airavata.registry.cpi.RegistryException)
AiravataException (org.apache.airavata.common.exception.AiravataException)
GFacException (org.apache.airavata.gfac.core.GFacException)
SSHApiException (org.apache.airavata.gfac.core.SSHApiException)
IOException (java.io.IOException)
ApplicationSettingsException (org.apache.airavata.common.exception.ApplicationSettingsException)

Example 3 with OutputDataObjectType

Use of org.apache.airavata.model.application.io.OutputDataObjectType in project airavata by apache.

The class DataStreamingTask, method execute:

@Override
public TaskStatus execute(TaskContext taskContext) {
    ProcessState processState = taskContext.getParentProcessContext().getProcessState();
    try {
        TaskStatus status = new TaskStatus(TaskState.EXECUTING);
        final DataStagingTaskModel subTaskModel = (DataStagingTaskModel) ThriftUtils.getSubTaskModel(taskContext.getTaskModel());
        if (processState == ProcessState.OUTPUT_DATA_STAGING) {
            OutputDataObjectType processOutput = taskContext.getProcessOutput();
            if (processOutput != null && processOutput.getValue() == null) {
                log.error("expId: {}, processId:{}, taskId: {}:- Couldn't stage file {} , file name shouldn't be null", taskContext.getExperimentId(), taskContext.getProcessId(), taskContext.getTaskId(), processOutput.getName());
                status = new TaskStatus(TaskState.FAILED);
                if (processOutput.isIsRequired()) {
                    status.setReason("File name is null, but this output's isRequired bit is not set");
                } else {
                    status.setReason("File name is null");
                }
                return status;
            }
            if (processOutput != null) {
                if (processOutput.isOutputStreaming()) {
                    // stream output periodically
                    ComputationalResourceSchedulingModel resourceSchedule = taskContext.getParentProcessContext().getProcessModel().getProcessResourceSchedule();
                    int wallTimeLimit = resourceSchedule.getWallTimeLimit();
                    if (wallTimeLimit > 10) {
                        int period = wallTimeLimit / 10;
                        Timer timer = new Timer();
                        StreamData streamData = new StreamData(userName, hostName, inputPath, taskContext, subTaskModel);
                        timer.schedule(streamData, 0, 1000 * 60 * period);
                        status.setState(TaskState.COMPLETED);
                    }
                }
            }
        }
        return status;
    } catch (TException e) {
        log.error("Error while creating data streaming task", e);
        // Return a FAILED status instead of null so the caller does not have to null-check.
        TaskStatus failedStatus = new TaskStatus(TaskState.FAILED);
        failedStatus.setReason("Error while creating data streaming task");
        return failedStatus;
    }
}
Also used:
ProcessState (org.apache.airavata.model.status.ProcessState)
TException (org.apache.thrift.TException)
OutputDataObjectType (org.apache.airavata.model.application.io.OutputDataObjectType)
Timer (java.util.Timer)
DataStagingTaskModel (org.apache.airavata.model.task.DataStagingTaskModel)
StreamData (org.apache.airavata.gfac.impl.task.utils.StreamData)
TaskStatus (org.apache.airavata.model.status.TaskStatus)
ComputationalResourceSchedulingModel (org.apache.airavata.model.scheduling.ComputationalResourceSchedulingModel)
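The streaming interval in DataStreamingTask is derived from the wall-time limit: for wallTimeLimit > 10 minutes, StreamData is scheduled immediately and then every wallTimeLimit / 10 minutes. The following is a minimal, self-contained sketch of that scheduling (illustrative only, not the Airavata StreamData class); with a 60-minute wall time the task fires every 6 minutes.

// Minimal sketch of the Timer scheduling used above. The TimerTask body is a placeholder
// for the transfer work that StreamData performs in the real code.
import java.util.Timer;
import java.util.TimerTask;

public class PeriodicStreamSketch {
    public static void schedule(int wallTimeLimitMinutes) {
        if (wallTimeLimitMinutes > 10) {
            int period = wallTimeLimitMinutes / 10;            // e.g. 60 / 10 = 6 minutes
            Timer timer = new Timer("output-streaming", true); // daemon timer
            timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    // placeholder for the periodic output transfer
                    System.out.println("streaming intermediate output ...");
                }
            }, 0, 1000L * 60 * period);                        // first run now, then every `period` minutes
        }
    }
}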

Example 4 with OutputDataObjectType

Use of org.apache.airavata.model.application.io.OutputDataObjectType in project airavata by apache.

The class SCPDataStageTask, method execute:

@Override
public TaskStatus execute(TaskContext taskContext) {
    TaskStatus status = new TaskStatus(TaskState.EXECUTING);
    AuthenticationInfo authenticationInfo = null;
    DataStagingTaskModel subTaskModel = null;
    String localDataDir = null;
    ProcessContext processContext = taskContext.getParentProcessContext();
    ProcessState processState = processContext.getProcessState();
    try {
        subTaskModel = ((DataStagingTaskModel) taskContext.getSubTaskModel());
        if (processState == ProcessState.OUTPUT_DATA_STAGING) {
            OutputDataObjectType processOutput = taskContext.getProcessOutput();
            if (processOutput != null && processOutput.getValue() == null) {
                log.error("expId: {}, processId:{}, taskId: {}:- Couldn't stage file {} , file name shouldn't be null", taskContext.getExperimentId(), taskContext.getProcessId(), taskContext.getTaskId(), processOutput.getName());
                status = new TaskStatus(TaskState.FAILED);
                if (processOutput.isIsRequired()) {
                    status.setReason("File name is null, but this output's isRequired bit is not set");
                } else {
                    status.setReason("File name is null");
                }
                return status;
            }
        } else if (processState == ProcessState.INPUT_DATA_STAGING) {
            InputDataObjectType processInput = taskContext.getProcessInput();
            if (processInput != null && processInput.getValue() == null) {
                log.error("expId: {}, processId:{}, taskId: {}:- Couldn't stage file {} , file name shouldn't be null", taskContext.getExperimentId(), taskContext.getProcessId(), taskContext.getTaskId(), processInput.getName());
                status = new TaskStatus(TaskState.FAILED);
                if (processInput.isIsRequired()) {
                    status.setReason("File name is null, but this input's isRequired bit is not set");
                } else {
                    status.setReason("File name is null");
                }
                return status;
            }
        } else {
            status.setState(TaskState.FAILED);
            status.setReason("Invalid task invocation, Support " + ProcessState.INPUT_DATA_STAGING.name() + " and " + "" + ProcessState.OUTPUT_DATA_STAGING.name() + " process phases. found " + processState.name());
            return status;
        }
        StorageResourceDescription storageResource = processContext.getStorageResource();
        // StoragePreference storagePreference = taskContext.getParentProcessContext().getStoragePreference();
        String hostName = null;
        if (storageResource != null) {
            hostName = storageResource.getHostName();
        } else {
            throw new GFacException("Storage Resource is null");
        }
        String inputPath = processContext.getStorageFileSystemRootLocation();
        inputPath = (inputPath.endsWith(File.separator) ? inputPath : inputPath + File.separator);
        // use rsync instead of scp if source and destination host and user name is same.
        URI sourceURI = new URI(subTaskModel.getSource());
        String fileName = sourceURI.getPath().substring(sourceURI.getPath().lastIndexOf(File.separator) + 1, sourceURI.getPath().length());
        Session remoteSession = Factory.getSSHSession(Factory.getComputerResourceSSHKeyAuthentication(processContext), processContext.getComputeResourceServerInfo());
        Session storageSession = Factory.getSSHSession(Factory.getStorageSSHKeyAuthentication(processContext), processContext.getStorageResourceServerInfo());
        URI destinationURI = null;
        if (subTaskModel.getDestination().startsWith("dummy")) {
            destinationURI = TaskUtils.getDestinationURI(taskContext, hostName, inputPath, fileName);
            subTaskModel.setDestination(destinationURI.toString());
        } else {
            destinationURI = new URI(subTaskModel.getDestination());
        }
        if (sourceURI.getHost().equalsIgnoreCase(destinationURI.getHost()) && sourceURI.getUserInfo().equalsIgnoreCase(destinationURI.getUserInfo())) {
            localDataCopy(taskContext, sourceURI, destinationURI);
            status.setState(TaskState.COMPLETED);
            status.setReason("Locally copied file using 'cp' command ");
            return status;
        }
        status = new TaskStatus(TaskState.COMPLETED);
        // Wildcard for file name. Has to find the correct name.
        if (fileName.contains("*")) {
            String destParentPath = (new File(destinationURI.getPath())).getParentFile().getPath();
            String sourceParentPath = (new File(sourceURI.getPath())).getParentFile().getPath();
            List<String> fileNames = taskContext.getParentProcessContext().getDataMovementRemoteCluster().getFileNameFromExtension(fileName, sourceParentPath, remoteSession);
            ExperimentCatalog experimentCatalog = processContext.getExperimentCatalog();
            String experimentId = processContext.getExperimentId();
            String processId = processContext.getProcessId();
            OutputDataObjectType processOutput = taskContext.getProcessOutput();
            for (int i = 0; i < fileNames.size(); i++) {
                String temp = fileNames.get(i);
                if (temp != null && !temp.isEmpty()) {
                    fileName = temp;
                }
                if (destParentPath.endsWith(File.separator)) {
                    destinationURI = new URI(destParentPath + fileName);
                } else {
                    destinationURI = new URI(destParentPath + File.separator + fileName);
                }
                // Wildcard support is only enabled for output data staging
                if (processState == ProcessState.OUTPUT_DATA_STAGING) {
                    processOutput.setName(fileName);
                    experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, Arrays.asList(processOutput), experimentId);
                    experimentCatalog.add(ExpCatChildDataType.PROCESS_OUTPUT, Arrays.asList(processOutput), processId);
                    taskContext.setProcessOutput(processOutput);
                    makeDir(taskContext, destinationURI);
                    // TODO - save updated subtask model with new destination
                    outputDataStaging(taskContext, remoteSession, sourceURI, storageSession, destinationURI);
                    status.setReason("Successfully staged output data");
                }
            }
            if (processState == ProcessState.OUTPUT_DATA_STAGING) {
                status.setReason("Successfully staged output data");
            } else {
                status.setReason("Wildcard support is only enabled for output data staging");
            }
        } else {
            if (processState == ProcessState.INPUT_DATA_STAGING) {
                inputDataStaging(taskContext, storageSession, sourceURI, remoteSession, destinationURI);
                status.setReason("Successfully staged input data");
            } else if (processState == ProcessState.OUTPUT_DATA_STAGING) {
                makeDir(taskContext, destinationURI);
                // TODO - save updated subtask model with new destination
                outputDataStaging(taskContext, remoteSession, sourceURI, storageSession, destinationURI);
                status.setReason("Successfully staged output data");
            }
        }
    } catch (TException e) {
        String msg = "Couldn't create subTask model thrift model";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
        return status;
    } catch (ApplicationSettingsException | FileNotFoundException e) {
        String msg = "Failed while reading credentials";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (URISyntaxException e) {
        String msg = "Source or destination uri is not correct source : " + subTaskModel.getSource() + ", " + "destination : " + subTaskModel.getDestination();
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (CredentialStoreException e) {
        String msg = "Storage authentication issue, could be invalid credential token";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (AiravataException e) {
        String msg = "Error while creating ssh session with client";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (JSchException | IOException e) {
        String msg = "Failed to do scp with client";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (RegistryException | GFacException e) {
        String msg = "Data staging failed";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    }
    return status;
}
Also used:
TException (org.apache.thrift.TException)
JSchException (com.jcraft.jsch.JSchException)
ApplicationSettingsException (org.apache.airavata.common.exception.ApplicationSettingsException)
ExperimentCatalog (org.apache.airavata.registry.cpi.ExperimentCatalog)
FileNotFoundException (java.io.FileNotFoundException)
URISyntaxException (java.net.URISyntaxException)
CredentialStoreException (org.apache.airavata.credential.store.store.CredentialStoreException)
URI (java.net.URI)
AuthenticationInfo (org.apache.airavata.gfac.core.authentication.AuthenticationInfo)
ProcessContext (org.apache.airavata.gfac.core.context.ProcessContext)
OutputDataObjectType (org.apache.airavata.model.application.io.OutputDataObjectType)
AiravataException (org.apache.airavata.common.exception.AiravataException)
InputDataObjectType (org.apache.airavata.model.application.io.InputDataObjectType)
IOException (java.io.IOException)
TaskStatus (org.apache.airavata.model.status.TaskStatus)
RegistryException (org.apache.airavata.registry.cpi.RegistryException)
ProcessState (org.apache.airavata.model.status.ProcessState)
StorageResourceDescription (org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription)
GFacException (org.apache.airavata.gfac.core.GFacException)
DataStagingTaskModel (org.apache.airavata.model.task.DataStagingTaskModel)
ErrorModel (org.apache.airavata.model.commons.ErrorModel)
File (java.io.File)
Session (com.jcraft.jsch.Session)

Example 5 with OutputDataObjectType

Use of org.apache.airavata.model.application.io.OutputDataObjectType in project airavata by apache.

The class DataTransferrer, method initStdoutsLocation:

private void initStdoutsLocation() {
    gatewayDownloadLocation = getDownloadLocation();
    String stdout = processContext.getStdoutLocation();
    String stderr = processContext.getStderrLocation();
    if (stdout != null) {
        stdout = stdout.substring(stdout.lastIndexOf('/') + 1);
    }
    if (stderr != null) {
        stderr = stderr.substring(stderr.lastIndexOf('/') + 1);
    }
    String stdoutFileName = (stdout == null || stdout.equals("")) ? "stdout" : stdout;
    String stderrFileName = (stderr == null || stderr.equals("")) ? "stderr" : stderr;
    stdoutLocation = gatewayDownloadLocation + File.separator + stdoutFileName;
    stderrLocation = gatewayDownloadLocation + File.separator + stderrFileName;
    List<OutputDataObjectType> processOutputs = processContext.getProcessModel().getProcessOutputs();
    if (processOutputs != null && !processOutputs.isEmpty()) {
        for (OutputDataObjectType processOutput : processOutputs) {
            if (processOutput.getType().equals(DataType.STDOUT)) {
                processOutput.setValue(stdoutLocation);
            }
            if (processOutput.getType().equals(DataType.STDERR)) {
                processOutput.setValue(stderrLocation);
            }
        }
    }
}
Also used: OutputDataObjectType (org.apache.airavata.model.application.io.OutputDataObjectType)
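To make the mapping concrete, here is a minimal, hypothetical harness (not part of DataTransferrer) that reproduces the loop above for a STDOUT/STDERR pair; the gateway download path and output names are invented examples.

// Minimal sketch: STDOUT/STDERR process outputs get their values pointed at the
// gateway download location, exactly as in the loop of initStdoutsLocation above.
import java.io.File;
import java.util.Arrays;
import java.util.List;
import org.apache.airavata.model.application.io.DataType;
import org.apache.airavata.model.application.io.OutputDataObjectType;

public class StdoutMappingExample {
    public static void main(String[] args) {
        String gatewayDownloadLocation = "/data/gateway/downloads/PROCESS_ID"; // example path
        OutputDataObjectType stdout = new OutputDataObjectType();
        stdout.setName("Standard-Out");
        stdout.setType(DataType.STDOUT);
        OutputDataObjectType stderr = new OutputDataObjectType();
        stderr.setName("Standard-Error");
        stderr.setType(DataType.STDERR);
        List<OutputDataObjectType> processOutputs = Arrays.asList(stdout, stderr);
        for (OutputDataObjectType processOutput : processOutputs) {
            if (processOutput.getType().equals(DataType.STDOUT)) {
                processOutput.setValue(gatewayDownloadLocation + File.separator + "stdout");
            }
            if (processOutput.getType().equals(DataType.STDERR)) {
                processOutput.setValue(gatewayDownloadLocation + File.separator + "stderr");
            }
        }
        processOutputs.forEach(o -> System.out.println(o.getName() + " -> " + o.getValue()));
    }
}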

Aggregations

OutputDataObjectType (org.apache.airavata.model.application.io.OutputDataObjectType): 93 usages
InputDataObjectType (org.apache.airavata.model.application.io.InputDataObjectType): 60 usages
TException (org.apache.thrift.TException): 51 usages
AuthzToken (org.apache.airavata.model.security.AuthzToken): 44 usages
ComputationalResourceSchedulingModel (org.apache.airavata.model.scheduling.ComputationalResourceSchedulingModel): 34 usages
ExperimentModel (org.apache.airavata.model.experiment.ExperimentModel): 33 usages
UserConfigurationDataModel (org.apache.airavata.model.experiment.UserConfigurationDataModel): 33 usages
ArrayList (java.util.ArrayList): 18 usages
Project (org.apache.airavata.model.workspace.Project): 16 usages
RegistryException (org.apache.airavata.registry.cpi.RegistryException): 14 usages
GFacException (org.apache.airavata.gfac.core.GFacException): 7 usages
AiravataClientException (org.apache.airavata.model.error.AiravataClientException): 7 usages
AiravataSystemException (org.apache.airavata.model.error.AiravataSystemException): 7 usages
InvalidRequestException (org.apache.airavata.model.error.InvalidRequestException): 7 usages
URISyntaxException (java.net.URISyntaxException): 6 usages
AiravataException (org.apache.airavata.common.exception.AiravataException): 5 usages
HashMap (java.util.HashMap): 4 usages
ApplicationSettingsException (org.apache.airavata.common.exception.ApplicationSettingsException): 4 usages
ProcessContext (org.apache.airavata.gfac.core.context.ProcessContext): 4 usages
JobStatus (org.apache.airavata.model.status.JobStatus): 4 usages