Use of org.apache.airavata.gfac.core.context.ProcessContext in project airavata by apache.
Class GFacEngineImpl, method outputDataStaging:
/**
 * @param taskContext
 * @param recovery
 * @param isArchive <code>true</code> to run the archive task instead of a data-movement task.
 * @return <code>true</code> if process execution was interrupted, <code>false</code> otherwise.
 * @throws GFacException
 */
private boolean outputDataStaging(TaskContext taskContext, boolean recovery, boolean isArchive) throws GFacException {
    TaskStatus taskStatus = new TaskStatus(TaskState.EXECUTING);
    taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
    taskContext.setTaskStatus(taskStatus);
    GFacUtils.saveAndPublishTaskStatus(taskContext);
    ProcessContext processContext = taskContext.getParentProcessContext();
    Task dMoveTask = null;
    if (isArchive) {
        dMoveTask = Factory.getArchiveTask();
    } else {
        dMoveTask = Factory.getDataMovementTask(processContext.getDataMovementProtocol());
    }
    if (null == dMoveTask) {
        throw new GFacException("Unsupported data movement protocol, Airavata doesn't support " + processContext.getDataMovementProtocol() + " protocol yet.");
    }
    taskStatus = executeTask(taskContext, dMoveTask, recovery);
    taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
    taskContext.setTaskStatus(taskStatus);
    GFacUtils.saveAndPublishTaskStatus(taskContext);
    if (taskStatus.getState() == TaskState.FAILED) {
        log.error("expId: {}, processId: {}, taskId: {}, type: {} :- output staging failed, reason: {}",
                processContext.getExperimentId(), processContext.getProcessId(), taskContext.getTaskId(),
                dMoveTask.getType().name(), taskStatus.getReason());
        String errorMsg = new StringBuilder("expId: ").append(processContext.getExperimentId())
                .append(", processId: ").append(processContext.getProcessId())
                .append(", taskId: ").append(taskContext.getTaskId())
                .append(", type: ").append(taskContext.getTaskType().name())
                .append(" :- Output staging failed. Reason: ").append(taskStatus.getReason()).toString();
        ErrorModel errorModel = new ErrorModel();
        errorModel.setUserFriendlyMessage("Error while staging output data");
        errorModel.setActualErrorMessage(errorMsg);
        GFacUtils.saveTaskError(taskContext, errorModel);
    }
    return false;
}
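The null check above relies on the factory returning null for protocols it cannot handle. Below is a minimal sketch of such a dispatch; the task classes ScpDataStageTask and LocalDataStageTask are placeholders for illustration and the real Airavata Factory may differ.

    // Hypothetical dispatch sketch: return a data-movement Task per protocol, or null
    // when the protocol is unsupported so the caller can raise a GFacException.
    public static Task getDataMovementTask(DataMovementProtocol protocol) {
        if (protocol == null) {
            return null;
        }
        switch (protocol) {
            case SCP:
                return new ScpDataStageTask();   // assumed task implementation, for illustration only
            case LOCAL:
                return new LocalDataStageTask(); // assumed task implementation, for illustration only
            default:
                return null; // unsupported protocol -> caller throws GFacException
        }
    }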
Use of org.apache.airavata.gfac.core.context.ProcessContext in project airavata by apache.
Class GFacEngineImpl, method executeCancel:
private void executeCancel(TaskContext taskContext, JobSubmissionTask jSTask) throws GFacException {
    try {
        JobStatus oldJobStatus = jSTask.cancel(taskContext);
        // If the job was still in the QUEUED state when the cancel command ran, there won't be any email from this job.
        ProcessContext pc = taskContext.getParentProcessContext();
        JobMonitor monitorService = Factory.getMonitorService(pc.getMonitorMode());
        monitorService.canceledJob(pc.getJobModel().getJobId());
    } catch (TaskException e) {
        throw new GFacException("Error while cancelling job", e);
    } catch (AiravataException e) {
        throw new GFacException("Error while getting monitoring service", e);
    }
}
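The canceledJob call tells the monitor that no further status email or callback should be expected for this job. A minimal sketch of what an implementation might do, assuming an internal map keyed by job id; the field and class names here are illustrative, not the actual Airavata monitor implementation.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // Illustrative monitor fragment: reacting to an explicit cancel notification.
    public class SketchJobMonitor {
        private final Map<String, TaskContext> monitoringJobs = new ConcurrentHashMap<>();

        public void canceledJob(String jobId) {
            // Drop the job from the monitoring map so the missing completion email
            // for a cancelled job is not treated as a monitoring failure.
            TaskContext removed = monitoringJobs.remove(jobId);
            if (removed != null) {
                // A real implementation would also publish a CANCELED job status here.
            }
        }
    }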
Use of org.apache.airavata.gfac.core.context.ProcessContext in project airavata by apache.
Class GFacEngineImpl, method inputDataStaging:
private boolean inputDataStaging(TaskContext taskContext, boolean recover) throws GFacException, TException {
    TaskStatus taskStatus = new TaskStatus(TaskState.EXECUTING);
    taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
    taskContext.setTaskStatus(taskStatus);
    GFacUtils.saveAndPublishTaskStatus(taskContext);
    ProcessContext processContext = taskContext.getParentProcessContext();
    Task dMoveTask = Factory.getDataMovementTask(processContext.getDataMovementProtocol());
    if (null == dMoveTask) {
        throw new GFacException("Unsupported data movement protocol, Airavata doesn't support " + processContext.getDataMovementProtocol() + " protocol yet.");
    }
    // Handle the URI_COLLECTION input data type: stage each URI in the collection separately.
    if (taskContext.getProcessInput().getType() == DataType.URI_COLLECTION) {
        String values = taskContext.getProcessInput().getValue();
        String[] multipleInputs = values.split(GFacConstants.MULTIPLE_INPUTS_SPLITTER);
        DataStagingTaskModel subTaskModel = (DataStagingTaskModel) taskContext.getSubTaskModel();
        for (String input : multipleInputs) {
            taskContext.getProcessInput().setValue(input);
            subTaskModel.setSource(input);
            taskStatus = executeTask(taskContext, dMoveTask, false);
        }
        // Restore the original collection value after staging the individual URIs.
        taskContext.getProcessInput().setValue(values);
    } else {
        taskStatus = executeTask(taskContext, dMoveTask, false);
    }
    taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
    taskContext.setTaskStatus(taskStatus);
    GFacUtils.saveAndPublishTaskStatus(taskContext);
    checkFailures(taskContext, taskStatus, dMoveTask);
    return false;
}
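For URI_COLLECTION inputs, the single input value holds several URIs joined by GFacConstants.MULTIPLE_INPUTS_SPLITTER, and the loop above stages each one with its own executeTask call. A small standalone sketch of that split, assuming the splitter is a comma purely for illustration (the real constant value may differ):

    // Sketch: how a URI_COLLECTION value is broken into individual staging operations.
    public class UriCollectionSplitExample {
        public static void main(String[] args) {
            String values = "scp://host/a.dat,scp://host/b.dat,scp://host/c.dat";
            for (String input : values.split(",")) {
                // In inputDataStaging, each of these would become one executeTask(...) call.
                System.out.println("staging input: " + input);
            }
        }
    }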
Use of org.apache.airavata.gfac.core.context.ProcessContext in project airavata by apache.
Class GFacEngineImpl, method executeJobMonitoring:
private void executeJobMonitoring(TaskContext taskContext, boolean recovery) throws GFacException {
    ProcessContext processContext = taskContext.getParentProcessContext();
    TaskStatus taskStatus;
    JobMonitor monitorService = null;
    try {
        taskStatus = new TaskStatus(TaskState.EXECUTING);
        taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
        taskContext.setTaskStatus(taskStatus);
        GFacUtils.saveAndPublishTaskStatus(taskContext);
        MonitorTaskModel monitorTaskModel = (MonitorTaskModel) taskContext.getSubTaskModel();
        monitorService = Factory.getMonitorService(monitorTaskModel.getMonitorMode());
        if (!monitorService.isMonitoring(processContext.getJobModel().getJobId())) {
            monitorService.monitor(processContext.getJobModel().getJobId(), taskContext);
        } else {
            log.warn("JobId: {} is already in the monitoring map", processContext.getJobModel().getJobId());
        }
    } catch (AiravataException | TException e) {
        taskStatus = new TaskStatus(TaskState.FAILED);
        taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
        taskStatus.setReason("Couldn't hand over the job to the monitor service: " + e.getMessage());
        taskContext.setTaskStatus(taskStatus);
        GFacUtils.saveAndPublishTaskStatus(taskContext);
        String errorMsg = new StringBuilder("expId: ").append(processContext.getExperimentId())
                .append(", processId: ").append(processContext.getProcessId())
                .append(", taskId: ").append(taskContext.getTaskId())
                .append(", type: ").append(taskContext.getTaskType().name())
                .append(" :- Job monitoring handover failed. Reason: ").append(taskStatus.getReason()).toString();
        ErrorModel errorModel = new ErrorModel();
        errorModel.setUserFriendlyMessage("Error while handing over the job to the monitor service");
        errorModel.setActualErrorMessage(errorMsg);
        GFacUtils.saveTaskError(taskContext, errorModel);
        throw new GFacException(e);
    }
    if (processContext.isPauseTaskExecution()) {
        // Don't mark the task complete here; the job monitor updates the task to COMPLETED once it finishes monitoring this job id.
        return;
    }
    taskStatus = new TaskStatus(TaskState.COMPLETED);
    taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
    taskStatus.setReason("Successfully handed over job id to the job monitor service.");
    taskContext.setTaskStatus(taskStatus);
    GFacUtils.saveAndPublishTaskStatus(taskContext);
}
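Together with executeCancel, the calls above imply a small monitor-service contract. The following interface is a sketch derived only from the calls shown in these snippets; the real org.apache.airavata JobMonitor interface may declare additional methods and checked exceptions.

    // Interface sketch inferred from the calls used in this class; not the authoritative definition.
    public interface JobMonitorSketch {
        // Begin monitoring the given job; the monitor updates the task status when the job completes.
        void monitor(String jobId, TaskContext taskContext);

        // True if the job id is already present in the monitoring map.
        boolean isMonitoring(String jobId);

        // Notification that the job was cancelled and no further status events should be expected.
        void canceledJob(String jobId);
    }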
Use of org.apache.airavata.gfac.core.context.ProcessContext in project airavata by apache.
Class BESJobSubmissionTask, method execute:
@Override
public TaskStatus execute(TaskContext taskContext) {
    TaskStatus taskStatus = new TaskStatus(TaskState.CREATED);
    StorageClient sc = null;
    ProcessContext processContext = taskContext.getParentProcessContext();
    // FIXME - use original output dir
    setInputOutputLocations(processContext);
    try {
        // Can't reuse the security properties if the UserDN has been changed.
        secProperties = getSecurityConfig(processContext);
        // try secProperties = secProperties.clone() if we can't use already initialized ClientConfigurations.
    } catch (GFacException e) {
        String msg = "UNICORE security context initialization error";
        log.error(msg, e);
        taskStatus.setState(TaskState.FAILED);
        taskStatus.setReason(msg);
        return taskStatus;
    }
    try {
        JobSubmissionProtocol protocol = processContext.getJobSubmissionProtocol();
        JobSubmissionInterface jobSubmissionInterface = GFacUtils.getPreferredJobSubmissionInterface(processContext);
        String factoryUrl = null;
        if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
            UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(jobSubmissionInterface.getJobSubmissionInterfaceId());
            factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
        }
        EndpointReferenceType eprt = EndpointReferenceType.Factory.newInstance();
        eprt.addNewAddress().setStringValue(factoryUrl);
        String userDN = processContext.getProcessModel().getUserDn();
        CreateActivityDocument cad = CreateActivityDocument.Factory.newInstance();
        // Create the remote storage and build the JSDL job definition against it.
        StorageCreator storageCreator = new StorageCreator(secProperties, factoryUrl, 5, null);
        sc = storageCreator.createStorage();
        JobDefinitionType jobDefinition = JSDLGenerator.buildJSDLInstance(processContext, sc.getUrl()).getJobDefinition();
        cad.addNewCreateActivity().addNewActivityDocument().setJobDefinition(jobDefinition);
        log.info("Submitted JSDL: " + jobDefinition.getJobDescription());
        // Copy input files to the local staging area, then upload any local files.
        copyInputFilesToLocal(taskContext);
        DataTransferrer dt = new DataTransferrer(processContext, sc);
        dt.uploadLocalFiles();
        JobModel jobDetails = new JobModel();
        jobDetails.setTaskId(taskContext.getTaskId());
        jobDetails.setProcessId(taskContext.getProcessId());
        FactoryClient factory = new FactoryClient(eprt, secProperties);
        log.info("Submitting activity to {} ...", factoryUrl);
        CreateActivityResponseDocument response = factory.createActivity(cad);
        log.info("Activity submitted to {}", factoryUrl);
        EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();
        log.info("Activity : " + activityEpr.getAddress().getStringValue() + " Submitted.");
        // factory.waitWhileActivityIsDone(activityEpr, 1000);
        jobId = WSUtilities.extractResourceID(activityEpr);
        if (jobId == null) {
            // Fall back to a timestamp-based id when the resource id can't be extracted.
            jobId = Long.toString(System.currentTimeMillis());
        }
        log.info("JobID: " + jobId);
        jobDetails.setJobId(jobId);
        jobDetails.setJobDescription(activityEpr.toString());
        jobDetails.setJobStatuses(Arrays.asList(new JobStatus(JobState.SUBMITTED)));
        processContext.setJobModel(jobDetails);
        GFacUtils.saveJobModel(processContext, jobDetails);
        GFacUtils.saveJobStatus(processContext, jobDetails);
        log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), factory.getActivityStatus(activityEpr).toString()));
        waitUntilDone(eprt, activityEpr, processContext, secProperties);
        ActivityStatusType activityStatus = getStatus(factory, activityEpr);
        log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), activityStatus.getState().toString()));
        ActivityClient activityClient = new ActivityClient(activityEpr, secProperties);
        // Now use the activity working directory for downloads.
        dt.setStorageClient(activityClient.getUspaceClient());
        List<OutputDataObjectType> copyOutput = null;
        if (activityStatus.getState() == ActivityStateEnumeration.FAILED) {
            String error = activityStatus.getFault().getFaultcode().getLocalPart() + "\n" + activityStatus.getFault().getFaultstring() + "\n EXITCODE: " + activityStatus.getExitCode();
            log.error(error);
            JobState applicationJobStatus = JobState.FAILED;
            jobDetails.setJobStatuses(Arrays.asList(new JobStatus(applicationJobStatus)));
            sendNotification(processContext, jobDetails);
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                // Restore the interrupt flag; the failure path still tries to fetch stdout/stderr below.
                Thread.currentThread().interrupt();
            }
            // The job may have failed before execution, so stdout/stderr may not exist yet.
            log.debug("Downloading any standard output and error files, if they were produced.");
            copyOutput = dt.downloadRemoteFiles();
        } else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
            JobState applicationJobStatus = JobState.CANCELED;
            jobDetails.setJobStatuses(Arrays.asList(new JobStatus(applicationJobStatus)));
            GFacUtils.saveJobStatus(processContext, jobDetails);
            throw new GFacException(processContext.getExperimentId() + ": Job Canceled");
        } else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
            try {
                Thread.sleep(5000);
            } catch (InterruptedException ignored) {
                Thread.currentThread().interrupt();
            }
            JobState applicationJobStatus = JobState.COMPLETE;
            jobDetails.setJobStatuses(Arrays.asList(new JobStatus(applicationJobStatus)));
            GFacUtils.saveJobStatus(processContext, jobDetails);
            log.info("Job Id: {}, exit code: {}, exit status: {}", jobDetails.getJobId(), activityStatus.getExitCode(), ActivityStateEnumeration.FINISHED.toString());
            // if (activityStatus.getExitCode() == 0) {
            // } else {
            //     dt.downloadStdOuts();
            // }
            copyOutput = dt.downloadRemoteFiles();
        }
        if (copyOutput != null) {
            copyOutputFilesToStorage(taskContext, copyOutput);
            for (OutputDataObjectType outputDataObjectType : copyOutput) {
                GFacUtils.saveExperimentOutput(processContext, outputDataObjectType.getName(), outputDataObjectType.getValue());
            }
        }
        // dt.publishFinalOutputs();
        taskStatus.setState(TaskState.COMPLETED);
    } catch (AppCatalogException e) {
        String msg = "Error while retrieving UNICORE job submission details";
        log.error(msg, e);
        taskStatus.setState(TaskState.FAILED);
        taskStatus.setReason(msg);
    } catch (Exception e) {
        log.error("BES task failed... ", e);
        taskStatus.setState(TaskState.FAILED);
        taskStatus.setReason("BES job submission task failed: " + e.getMessage());
    }
    return taskStatus;
}
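The three status branches above map UNICORE BES activity states onto Airavata job states (FAILED to FAILED, CANCELLED to CANCELED, FINISHED to COMPLETE). The helper below is a sketch of how that mapping could be factored out of execute(); it is not a method that exists in BESJobSubmissionTask, and the SUBMITTED fallback is an assumption for illustration.

    // Sketch: factoring the BES activity-state to Airavata JobState mapping out of execute().
    private static JobState toJobState(ActivityStateEnumeration.Enum besState) {
        if (besState == ActivityStateEnumeration.FAILED) {
            return JobState.FAILED;
        } else if (besState == ActivityStateEnumeration.CANCELLED) {
            return JobState.CANCELED;
        } else if (besState == ActivityStateEnumeration.FINISHED) {
            return JobState.COMPLETE;
        }
        // Anything else is treated as still submitted/running in this sketch.
        return JobState.SUBMITTED;
    }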