use of org.apache.airavata.model.status.TaskStatus in project airavata by apache.
the class DataStageTask method execute.
@Override
public TaskStatus execute(TaskContext taskContext) {
    TaskStatus status = new TaskStatus(TaskState.COMPLETED);
    if (taskContext.getTaskModel().getTaskType() != TaskTypes.DATA_STAGING) {
        status.setState(TaskState.FAILED);
        status.setReason("Invalid task call, expected " + TaskTypes.DATA_STAGING.toString() + " but found " + taskContext.getTaskModel().getTaskType().toString());
    } else {
        try {
            DataStagingTaskModel subTaskModel = (DataStagingTaskModel) taskContext.getSubTaskModel();
            URI sourceURI = new URI(subTaskModel.getSource());
            URI destinationURI = new URI(subTaskModel.getDestination());
            ProcessState processState = taskContext.getParentProcessContext().getProcessState();
            if (processState == ProcessState.INPUT_DATA_STAGING) {
                // copy local file to compute resource
                taskContext.getParentProcessContext().getDataMovementRemoteCluster().copyTo(sourceURI.getPath(), destinationURI.getPath());
            } else if (processState == ProcessState.OUTPUT_DATA_STAGING) {
                // copy remote file from compute resource
                taskContext.getParentProcessContext().getDataMovementRemoteCluster().copyFrom(sourceURI.getPath(), destinationURI.getPath());
            }
            status.setReason("Successfully staged data");
        } catch (GFacException e) {
            String msg = "Scp attempt failed";
            log.error(msg, e);
            status.setState(TaskState.FAILED);
            status.setReason(msg);
            ErrorModel errorModel = new ErrorModel();
            errorModel.setActualErrorMessage(e.getMessage());
            errorModel.setUserFriendlyMessage(msg);
            taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
        } catch (TException e) {
            String msg = "Invalid task invocation";
            log.error(msg, e);
            status.setState(TaskState.FAILED);
            status.setReason(msg);
            ErrorModel errorModel = new ErrorModel();
            errorModel.setActualErrorMessage(e.getMessage());
            errorModel.setUserFriendlyMessage(msg);
            taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
        } catch (URISyntaxException e) {
            String msg = "source or destination is not a valid URI";
            log.error(msg, e);
            status.setState(TaskState.FAILED);
            status.setReason(msg);
            ErrorModel errorModel = new ErrorModel();
            errorModel.setActualErrorMessage(e.getMessage());
            errorModel.setUserFriendlyMessage(msg);
            taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
        }
    }
    return status;
}
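The three catch blocks above repeat the same failure bookkeeping: log the error, flip the status to FAILED, and attach an ErrorModel to the task model. A minimal refactoring sketch, assuming the same imports as the snippet; the helper name markFailed is hypothetical and not part of the Airavata source:

private void markFailed(TaskContext taskContext, TaskStatus status, String msg, Exception e) {
    // identical to the bookkeeping done in each catch block above
    log.error(msg, e);
    status.setState(TaskState.FAILED);
    status.setReason(msg);
    ErrorModel errorModel = new ErrorModel();
    errorModel.setActualErrorMessage(e.getMessage());
    errorModel.setUserFriendlyMessage(msg);
    taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
}

Each catch block then collapses to a single call, e.g. markFailed(taskContext, status, "Scp attempt failed", e).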
use of org.apache.airavata.model.status.TaskStatus in project airavata by apache.
the class BESJobSubmissionTask method execute.
@Override
public TaskStatus execute(TaskContext taskContext) {
    TaskStatus taskStatus = new TaskStatus(TaskState.CREATED);
    StorageClient sc = null;
    ProcessContext processContext = taskContext.getParentProcessContext();
    // FIXME - use original output dir
    setInputOutputLocations(processContext);
    try {
        // can't reuse if UserDN has been changed.
        secProperties = getSecurityConfig(processContext);
        // try secProperties = secProperties.clone() if we can't use already initialized ClientConfigurations.
    } catch (GFacException e) {
        String msg = "Unicore security context initialization error";
        log.error(msg, e);
        taskStatus.setState(TaskState.FAILED);
        taskStatus.setReason(msg);
        return taskStatus;
    }
    try {
        JobSubmissionProtocol protocol = processContext.getJobSubmissionProtocol();
        JobSubmissionInterface jobSubmissionInterface = GFacUtils.getPreferredJobSubmissionInterface(processContext);
        String factoryUrl = null;
        if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
            UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(jobSubmissionInterface.getJobSubmissionInterfaceId());
            factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
        }
        EndpointReferenceType eprt = EndpointReferenceType.Factory.newInstance();
        eprt.addNewAddress().setStringValue(factoryUrl);
        String userDN = processContext.getProcessModel().getUserDn();
        CreateActivityDocument cad = CreateActivityDocument.Factory.newInstance();
        // create storage
        StorageCreator storageCreator = new StorageCreator(secProperties, factoryUrl, 5, null);
        sc = storageCreator.createStorage();
        JobDefinitionType jobDefinition = JSDLGenerator.buildJSDLInstance(processContext, sc.getUrl()).getJobDefinition();
        cad.addNewCreateActivity().addNewActivityDocument().setJobDefinition(jobDefinition);
        log.info("Submitted JSDL: " + jobDefinition.getJobDescription());
        // copy files to local
        copyInputFilesToLocal(taskContext);
        // upload files if any
        DataTransferrer dt = new DataTransferrer(processContext, sc);
        dt.uploadLocalFiles();
        JobModel jobDetails = new JobModel();
        jobDetails.setTaskId(taskContext.getTaskId());
        jobDetails.setProcessId(taskContext.getProcessId());
        FactoryClient factory = new FactoryClient(eprt, secProperties);
        log.info("Activity Submitting to {} ... \n", factoryUrl);
        CreateActivityResponseDocument response = factory.createActivity(cad);
        log.info("Activity Submitted to {} ... \n", factoryUrl);
        EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();
        log.info("Activity : " + activityEpr.getAddress().getStringValue() + " Submitted.");
        // factory.waitWhileActivityIsDone(activityEpr, 1000);
        jobId = WSUtilities.extractResourceID(activityEpr);
        if (jobId == null) {
            jobId = String.valueOf(Calendar.getInstance().getTimeInMillis());
        }
        log.info("JobID: " + jobId);
        jobDetails.setJobId(jobId);
        jobDetails.setJobDescription(activityEpr.toString());
        jobDetails.setJobStatuses(Arrays.asList(new JobStatus(JobState.SUBMITTED)));
        processContext.setJobModel(jobDetails);
        GFacUtils.saveJobModel(processContext, jobDetails);
        GFacUtils.saveJobStatus(processContext, jobDetails);
        log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), factory.getActivityStatus(activityEpr).toString()));
        waitUntilDone(eprt, activityEpr, processContext, secProperties);
        ActivityStatusType activityStatus = getStatus(factory, activityEpr);
        log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), activityStatus.getState().toString()));
        ActivityClient activityClient = new ActivityClient(activityEpr, secProperties);
        // now use the activity working directory property
        dt.setStorageClient(activityClient.getUspaceClient());
        List<OutputDataObjectType> copyOutput = null;
        if (activityStatus.getState() == ActivityStateEnumeration.FAILED) {
            String error = activityStatus.getFault().getFaultcode().getLocalPart() + "\n" + activityStatus.getFault().getFaultstring() + "\n EXITCODE: " + activityStatus.getExitCode();
            log.error(error);
            JobState applicationJobStatus = JobState.FAILED;
            jobDetails.setJobStatuses(Arrays.asList(new JobStatus(applicationJobStatus)));
            sendNotification(processContext, jobDetails);
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
            }
            // What if the job failed before execution and no stdout files were generated yet?
            log.debug("Downloading any standard output and error files, if they were produced.");
            copyOutput = dt.downloadRemoteFiles();
        } else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
            JobState applicationJobStatus = JobState.CANCELED;
            jobDetails.setJobStatuses(Arrays.asList(new JobStatus(applicationJobStatus)));
            GFacUtils.saveJobStatus(processContext, jobDetails);
            throw new GFacException(processContext.getExperimentId() + " Job Canceled");
        } else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
            try {
                Thread.sleep(5000);
            } catch (InterruptedException ignored) {
            }
            JobState applicationJobStatus = JobState.COMPLETE;
            jobDetails.setJobStatuses(Arrays.asList(new JobStatus(applicationJobStatus)));
            GFacUtils.saveJobStatus(processContext, jobDetails);
            log.info("Job Id: {}, exit code: {}, exit status: {}", jobDetails.getJobId(), activityStatus.getExitCode(), ActivityStateEnumeration.FINISHED.toString());
            // if (activityStatus.getExitCode() == 0) {
            // } else {
            //     dt.downloadStdOuts();
            // }
            copyOutput = dt.downloadRemoteFiles();
        }
        if (copyOutput != null) {
            copyOutputFilesToStorage(taskContext, copyOutput);
            for (OutputDataObjectType outputDataObjectType : copyOutput) {
                GFacUtils.saveExperimentOutput(processContext, outputDataObjectType.getName(), outputDataObjectType.getValue());
            }
        }
        // dt.publishFinalOutputs();
        taskStatus.setState(TaskState.COMPLETED);
    } catch (AppCatalogException e) {
        log.error("Error while retrieving UNICORE job submission..", e);
        taskStatus.setState(TaskState.FAILED);
    } catch (Exception e) {
        log.error("BES task failed... ", e);
        taskStatus.setState(TaskState.FAILED);
    }
    return taskStatus;
}
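The three branches on activityStatus.getState() above translate the UNICORE activity state into an Airavata JobState before saving it. A sketch of that mapping as a standalone helper, using only calls visible in the snippet; the method name mapToJobState and the JobState.ACTIVE fallback are assumptions, not part of the Airavata source:

private JobState mapToJobState(ActivityStatusType activityStatus) {
    if (activityStatus.getState() == ActivityStateEnumeration.FAILED) {
        return JobState.FAILED;
    } else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
        return JobState.CANCELED;
    } else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
        return JobState.COMPLETE;
    }
    // assumption: treat any other BES state as a still-running job
    return JobState.ACTIVE;
}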
use of org.apache.airavata.model.status.TaskStatus in project airavata by apache.
the class DataStreamingTask method execute.
@Override
public TaskStatus execute(TaskContext taskContext) {
    ProcessState processState = taskContext.getParentProcessContext().getProcessState();
    try {
        TaskStatus status = new TaskStatus(TaskState.EXECUTING);
        final DataStagingTaskModel subTaskModel = (DataStagingTaskModel) ThriftUtils.getSubTaskModel(taskContext.getTaskModel());
        if (processState == ProcessState.OUTPUT_DATA_STAGING) {
            OutputDataObjectType processOutput = taskContext.getProcessOutput();
            if (processOutput != null && processOutput.getValue() == null) {
                log.error("expId: {}, processId:{}, taskId: {}:- Couldn't stage file {} , file name shouldn't be null", taskContext.getExperimentId(), taskContext.getProcessId(), taskContext.getTaskId(), processOutput.getName());
                status = new TaskStatus(TaskState.FAILED);
                if (processOutput.isIsRequired()) {
                    status.setReason("File name is null, but this output is required");
                } else {
                    status.setReason("File name is null");
                }
                return status;
            }
            if (processOutput != null) {
                if (processOutput.isOutputStreaming()) {
                    // stream output periodically
                    ComputationalResourceSchedulingModel resourceSchedule = taskContext.getParentProcessContext().getProcessModel().getProcessResourceSchedule();
                    int wallTimeLimit = resourceSchedule.getWallTimeLimit();
                    if (wallTimeLimit > 10) {
                        int period = wallTimeLimit / 10;
                        Timer timer = new Timer();
                        StreamData streamData = new StreamData(userName, hostName, inputPath, taskContext, subTaskModel);
                        timer.schedule(streamData, 0, 1000 * 60 * period);
                        status.setState(TaskState.COMPLETED);
                    }
                }
            }
        }
        return null;
    } catch (TException e) {
        log.error("Error while creating data streaming task", e);
        return null;
    }
}
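The streaming branch above relies on java.util.Timer: the first run fires immediately and then repeats every wallTimeLimit / 10 minutes, with StreamData doing the actual staging. A JDK-only sketch of the same scheduling arithmetic, with a placeholder TimerTask standing in for StreamData:

import java.util.Timer;
import java.util.TimerTask;

public class StreamingTimerDemo {
    public static void main(String[] args) {
        int wallTimeLimit = 30;                 // minutes; example value, not taken from the snippet
        int periodMinutes = wallTimeLimit / 10; // same arithmetic as the task above
        Timer timer = new Timer("output-streaming");
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                // placeholder for the periodic output staging performed by StreamData
                System.out.println("streaming intermediate outputs...");
            }
        }, 0, 1000L * 60 * periodMinutes);
        // the non-daemon timer keeps firing until timer.cancel() is called
    }
}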
use of org.apache.airavata.model.status.TaskStatus in project airavata by apache.
the class EnvironmentSetupTask method execute.
@Override
public TaskStatus execute(TaskContext taskContext) {
    TaskStatus status = new TaskStatus(TaskState.COMPLETED);
    try {
        RemoteCluster remoteCluster = taskContext.getParentProcessContext().getJobSubmissionRemoteCluster();
        remoteCluster.makeDirectory(taskContext.getParentProcessContext().getWorkingDir());
        status.setReason("Successfully created environment");
    } catch (GFacException e) {
        String msg = "Error while environment setup";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    }
    return status;
}
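A hedged caller sketch showing how the TaskStatus returned by execute() might be consumed. The no-argument construction of EnvironmentSetupTask and the taskContext variable are assumptions; the TaskStatus, TaskState, and TaskContext accessors used here all appear in the snippets on this page:

EnvironmentSetupTask setupTask = new EnvironmentSetupTask(); // assumes the framework's usual no-arg construction
TaskStatus setupStatus = setupTask.execute(taskContext);
if (setupStatus.getState() == TaskState.FAILED) {
    log.error("Environment setup failed for task {}: {}", taskContext.getTaskId(), setupStatus.getReason());
} else {
    log.info("Working directory ready: {}", taskContext.getParentProcessContext().getWorkingDir());
}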
use of org.apache.airavata.model.status.TaskStatus in project airavata by apache.
the class ForkJobSubmissionTask method execute.
@Override
public TaskStatus execute(TaskContext taskContext) {
    TaskStatus taskStatus = new TaskStatus(TaskState.CREATED);
    try {
        ProcessContext processContext = taskContext.getParentProcessContext();
        JobModel jobModel = processContext.getJobModel();
        jobModel.setTaskId(taskContext.getTaskId());
        RemoteCluster remoteCluster = processContext.getJobSubmissionRemoteCluster();
        GroovyMap groovyMap = GFacUtils.createGroovyMap(processContext, taskContext);
        jobModel.setJobName(groovyMap.get(Script.JOB_NAME).toString());
        ResourceJobManager resourceJobManager = GFacUtils.getResourceJobManager(processContext);
        JobManagerConfiguration jConfig = null;
        if (resourceJobManager != null) {
            jConfig = Factory.getJobManagerConfiguration(resourceJobManager);
        }
        JobStatus jobStatus = new JobStatus();
        File jobFile = GFacUtils.createJobFile(groovyMap, taskContext, jConfig);
        if (jobFile != null && jobFile.exists()) {
            jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
            JobSubmissionOutput jobSubmissionOutput = remoteCluster.submitBatchJob(jobFile.getPath(), processContext.getWorkingDir());
            jobModel.setExitCode(jobSubmissionOutput.getExitCode());
            jobModel.setStdErr(jobSubmissionOutput.getStdErr());
            jobModel.setStdOut(jobSubmissionOutput.getStdOut());
            String jobId = jobSubmissionOutput.getJobId();
            if (jobId != null && !jobId.isEmpty()) {
                jobModel.setJobId(jobId);
                GFacUtils.saveJobModel(processContext, jobModel);
                jobStatus.setJobState(JobState.SUBMITTED);
                jobStatus.setReason("Successfully Submitted to " + taskContext.getParentProcessContext().getComputeResourceDescription().getHostName());
                jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                jobModel.setJobStatuses(Arrays.asList(jobStatus));
                GFacUtils.saveJobStatus(taskContext.getParentProcessContext(), jobModel);
                taskStatus = new TaskStatus(TaskState.COMPLETED);
                taskStatus.setReason("Submitted job to compute resource");
            }
            if (jobId == null || jobId.isEmpty()) {
                String msg = "expId: " + processContext.getProcessModel().getExperimentId() + " Couldn't find remote jobId for JobName: " + jobModel.getJobName() + "; neither the submit nor the verify step returned a valid JobId. Hence changing experiment state to Failed";
                log.error(msg);
                ErrorModel errorModel = new ErrorModel();
                errorModel.setActualErrorMessage(msg);
                errorModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
                GFacUtils.saveExperimentError(processContext, errorModel);
                GFacUtils.saveProcessError(processContext, errorModel);
                GFacUtils.saveTaskError(taskContext, errorModel);
                taskStatus.setState(TaskState.FAILED);
                taskStatus.setReason("Couldn't find a job id in either the submit or the verify step");
            } else {
                GFacUtils.saveJobModel(processContext, jobModel);
            }
        } else {
            taskStatus.setState(TaskState.FAILED);
            if (jobFile == null) {
                taskStatus.setReason("JobFile is null");
            } else {
                taskStatus.setReason("Job file doesn't exist");
            }
        }
    } catch (ApplicationSettingsException e) {
        String msg = "Error occurred while creating job descriptor";
        log.error(msg, e);
        taskStatus.setState(TaskState.FAILED);
        taskStatus.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (AppCatalogException e) {
        String msg = "Error while instantiating app catalog";
        log.error(msg, e);
        taskStatus.setState(TaskState.FAILED);
        taskStatus.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (GFacException e) {
        String msg = "Error occurred while submitting the job";
        log.error(msg, e);
        taskStatus.setState(TaskState.FAILED);
        taskStatus.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (IOException e) {
        String msg = "Error while reading the content of the job file";
        log.error(msg, e);
        taskStatus.setState(TaskState.FAILED);
        taskStatus.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    }
    return taskStatus;
}
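Every snippet on this page builds its TaskStatus objects by hand. A small factory sketch that keeps state, reason, and timestamp together, mirroring how the JobStatus above records its time of state change; the method name statusOf is hypothetical, and it assumes the Thrift-generated TaskStatus exposes setTimeOfStateChange in the same way JobStatus does:

private static TaskStatus statusOf(TaskState state, String reason) {
    TaskStatus status = new TaskStatus(state);
    status.setReason(reason);
    // assumption: TaskStatus, like JobStatus, carries a timeOfStateChange field
    status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
    return status;
}

For example, the success branch above could read taskStatus = statusOf(TaskState.COMPLETED, "Submitted job to compute resource").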