use of org.apache.airavata.model.task.DataStagingTaskModel in project airavata by apache.
the class ArchiveTask method execute.
@Override
public TaskStatus execute(TaskContext taskContext) {
    // implement archive logic with tar + scp over JSch sessions
    TaskStatus status = new TaskStatus(TaskState.EXECUTING);
    ProcessContext processContext = taskContext.getParentProcessContext();
    RemoteCluster remoteCluster = processContext.getJobSubmissionRemoteCluster();
    AuthenticationInfo authenticationInfo = null;
    DataStagingTaskModel subTaskModel = null;
    try {
        subTaskModel = (DataStagingTaskModel) ThriftUtils.getSubTaskModel(taskContext.getTaskModel());
    } catch (TException e) {
        String msg = "Error! Deserialization issue with SubTask Model";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
        return status;
    }
    try {
        StorageResourceDescription storageResource = taskContext.getParentProcessContext().getStorageResource();
        if (storageResource != null) {
            hostName = storageResource.getHostName();
        } else {
            throw new GFacException("Storage Resource is null");
        }
        userName = processContext.getStorageResourceLoginUserName();
        inputPath = processContext.getStorageFileSystemRootLocation();
        inputPath = (inputPath.endsWith(File.separator) ? inputPath : inputPath + File.separator);
        status = new TaskStatus(TaskState.COMPLETED);
        Session srcSession = Factory.getSSHSession(Factory.getComputerResourceSSHKeyAuthentication(processContext), processContext.getComputeResourceServerInfo());
        Session destSession = Factory.getSSHSession(Factory.getStorageSSHKeyAuthentication(processContext), processContext.getStorageResourceServerInfo());
        URI sourceURI = new URI(subTaskModel.getSource());
        URI destinationURI = null;
        String workingDirName = null, path = null;
        if (sourceURI.getPath().endsWith("/")) {
            path = sourceURI.getPath().substring(0, sourceURI.getPath().length() - 1);
        } else {
            path = sourceURI.getPath();
        }
        workingDirName = path.substring(path.lastIndexOf(File.separator) + 1, path.length());
        // tar the working dir on the compute resource,
        // e.g. cd /Users/syodage/Desktop/temp/.. && tar -cvf path/workingDir.tar temp
        String archiveTar = "archive.tar";
        String resourceAbsTarFilePath = path + "/" + archiveTar;
        CommandInfo commandInfo = new RawCommandInfo("cd " + path + " && tar -cvf " + resourceAbsTarFilePath + " ./* ");
        remoteCluster.execute(commandInfo);
        // move the tar to the storage resource
        destinationURI = TaskUtils.getDestinationURI(taskContext, hostName, inputPath, archiveTar);
        remoteCluster.scpThirdParty(resourceAbsTarFilePath, srcSession, destinationURI.getPath(), destSession, RemoteCluster.DIRECTION.FROM, true);
        // delete the tar on the remote compute resource
        commandInfo = new RawCommandInfo("rm " + resourceAbsTarFilePath);
        remoteCluster.execute(commandInfo);
        // untar the file and delete the tar on the storage resource
        String destPath = destinationURI.getPath();
        String destParent = destPath.substring(0, destPath.lastIndexOf("/"));
        String storageArchiveDir = "ARCHIVE";
        commandInfo = new RawCommandInfo("cd " + destParent + " && mkdir " + storageArchiveDir + " && tar -xvf " + archiveTar + " -C " + storageArchiveDir + " && rm " + archiveTar + " && chmod 755 -R " + storageArchiveDir + "/*");
        executeCommand(destSession, commandInfo, new StandardOutReader());
    } catch (CredentialStoreException e) {
        String msg = "Storage authentication issue, make sure you are passing valid credential token";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (URISyntaxException | GFacException e) {
        String msg = "Error! Archive task failed";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    }
    return status;
}
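
The three RawCommandInfo shell commands above carry most of the archive flow. A minimal standalone sketch (paths and directory names are made-up examples, not taken from the Airavata source) that just builds and prints the same tar, cleanup, and untar command strings:

public class ArchiveCommandSketch {
    public static void main(String[] args) {
        String path = "/scratch/experiment-123";          // assumed working dir on the compute resource
        String destParent = "/storage/gateway/exp-123";   // assumed parent of the scp destination
        String archiveTar = "archive.tar";
        String storageArchiveDir = "ARCHIVE";
        String resourceAbsTarFilePath = path + "/" + archiveTar;

        // 1. tar the working directory on the compute resource
        String tarCmd = "cd " + path + " && tar -cvf " + resourceAbsTarFilePath + " ./* ";
        // 2. after scpThirdParty copies the tar, remove it from the compute resource
        String cleanupCmd = "rm " + resourceAbsTarFilePath;
        // 3. on the storage resource, unpack into ARCHIVE/, drop the tar, open permissions
        String untarCmd = "cd " + destParent + " && mkdir " + storageArchiveDir
                + " && tar -xvf " + archiveTar + " -C " + storageArchiveDir
                + " && rm " + archiveTar + " && chmod 755 -R " + storageArchiveDir + "/*";

        System.out.println(tarCmd);
        System.out.println(cleanupCmd);
        System.out.println(untarCmd);
    }
}
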
use of org.apache.airavata.model.task.DataStagingTaskModel in project airavata by apache.
the class JobResourceTest method setUp.
@Before
public void setUp() throws Exception {
    super.setUp();
    Timestamp creationTime = new Timestamp(new Date().getTime());
    experimentResource = (ExperimentResource) getGatewayResource().create(ResourceType.EXPERIMENT);
    experimentResource.setExperimentId(experimentID);
    experimentResource.setExperimentName(experimentID);
    experimentResource.setUserName(getWorkerResource().getUser());
    experimentResource.setProjectId(getProjectResource().getId());
    experimentResource.setCreationTime(creationTime);
    experimentResource.save();
    processResource = (ProcessResource) experimentResource.create(ResourceType.PROCESS);
    processResource.setProcessId(processId);
    processResource.setExperimentId(experimentID);
    processResource.setCreationTime(creationTime);
    processResource.save();
    taskResource = (TaskResource) processResource.create(ResourceType.TASK);
    taskResource.setTaskId(taskId);
    taskResource.setParentProcessId(processId);
    taskResource.setTaskType(TaskTypes.DATA_STAGING.toString());
    taskResource.setTaskDetail("task detail");
    taskResource.setSubTaskModel(new DataStagingTaskModel("source", "destination", DataStageType.INPUT).toString().getBytes());
    taskResource.save();
    jobResource = (JobResource) processResource.create(ResourceType.JOB);
    jobResource.setJobId(jobId);
    jobResource.setProcessId(processId);
    jobResource.setTaskId(taskId);
    jobResource.setJobDescription("Job Description");
    jobResource.setComputeResourceConsumed("computer-resource-host");
    jobResource.setJobName("JobName");
    jobResource.setWorkingDir("WorkingDir");
    jobResource.save();
}
use of org.apache.airavata.model.task.DataStagingTaskModel in project airavata by apache.
the class TaskResourceTest method setUp.
@Before
public void setUp() throws Exception {
    super.setUp();
    Timestamp creationTime = new Timestamp(new Date().getTime());
    experimentResource = (ExperimentResource) getGatewayResource().create(ResourceType.EXPERIMENT);
    experimentResource.setExperimentId(experimentID);
    experimentResource.setExperimentName(experimentID);
    experimentResource.setUserName(getWorkerResource().getUser());
    experimentResource.setProjectId(getProjectResource().getId());
    experimentResource.setCreationTime(creationTime);
    experimentResource.save();
    processResource = (ProcessResource) experimentResource.create(ResourceType.PROCESS);
    processResource.setProcessId(processId);
    processResource.setExperimentId(experimentID);
    processResource.setCreationTime(creationTime);
    processResource.save();
    taskResource = (TaskResource) processResource.create(ResourceType.TASK);
    taskResource.setTaskId(taskId);
    taskResource.setParentProcessId(processId);
    taskResource.setTaskType(TaskTypes.DATA_STAGING.toString());
    taskResource.setTaskDetail("task detail");
    taskResource.setSubTaskModel(new DataStagingTaskModel("source", "destination", DataStageType.INPUT).toString().getBytes());
    taskResource.save();
}
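
The two test setUp() methods above store the sub-task model as toString().getBytes(). For comparison, a minimal sketch of round-tripping the same model through the ThriftUtils helpers that ArchiveTask and GFacEngineImpl use elsewhere on this page; the import paths and example class name are assumptions based on the usual Airavata module layout, not taken from the tests.

import org.apache.airavata.common.utils.ThriftUtils;
import org.apache.airavata.model.task.DataStageType;
import org.apache.airavata.model.task.DataStagingTaskModel;
import org.apache.airavata.model.task.TaskModel;
import org.apache.airavata.model.task.TaskTypes;
import org.apache.thrift.TException;

public class SubTaskModelRoundTripSketch {
    public static void main(String[] args) throws TException {
        // same constructor call as in the two setUp() methods above
        DataStagingTaskModel submodel = new DataStagingTaskModel("source", "destination", DataStageType.INPUT);

        TaskModel taskModel = new TaskModel();
        taskModel.setTaskType(TaskTypes.DATA_STAGING);
        // serialize to the byte[] form that TaskModel stores
        taskModel.setSubTaskModel(ThriftUtils.serializeThriftObject(submodel));

        // read it back the way ArchiveTask.execute() does
        DataStagingTaskModel readBack = (DataStagingTaskModel) ThriftUtils.getSubTaskModel(taskModel);
        System.out.println(readBack.getSource() + " -> " + readBack.getDestination());
    }
}
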
use of org.apache.airavata.model.task.DataStagingTaskModel in project airavata by apache.
the class GFacEngineImpl method getDataStagingTaskContext.
private TaskContext getDataStagingTaskContext(ProcessContext processContext, OutputDataObjectType processOutput) throws TException, TaskException, GFacException {
    TaskContext taskCtx = new TaskContext();
    taskCtx.setParentProcessContext(processContext);
    // create new task model for this task
    TaskModel taskModel = new TaskModel();
    taskModel.setParentProcessId(processContext.getProcessId());
    taskModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
    taskModel.setLastUpdateTime(taskModel.getCreationTime());
    TaskStatus taskStatus = new TaskStatus(TaskState.CREATED);
    taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
    taskModel.setTaskStatuses(Arrays.asList(taskStatus));
    taskModel.setTaskType(TaskTypes.DATA_STAGING);
    // create data staging sub task model
    String remoteOutputDir = processContext.getOutputDir();
    remoteOutputDir = remoteOutputDir.endsWith("/") ? remoteOutputDir : remoteOutputDir + "/";
    DataStagingTaskModel submodel = new DataStagingTaskModel();
    ServerInfo serverInfo = processContext.getComputeResourceServerInfo();
    URI source = null;
    try {
        // java.net.URI(scheme, userInfo, host, port, path, query, fragment)
        source = new URI(processContext.getDataMovementProtocol().name(), serverInfo.getUserName(), serverInfo.getHost(),
                serverInfo.getPort(), remoteOutputDir + processOutput.getValue(), null, null);
    } catch (URISyntaxException e) {
        throw new TaskException("Error while constructing source file URI");
    }
    submodel.setSource(source.toString());
    // We don't know the destination location at this time; the data staging task will set it.
    // Because destination is a required field, we set a dummy destination here.
    submodel.setDestination("dummy://temp/file/location");
    taskModel.setSubTaskModel(ThriftUtils.serializeThriftObject(submodel));
    taskCtx.setTaskModel(taskModel);
    taskCtx.setProcessOutput(processOutput);
    return taskCtx;
}
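
The source URI above is built with the seven-argument java.net.URI constructor. A small standalone sketch of that constructor's parameter order (scheme, userInfo, host, port, path, query, fragment), with made-up scheme, user, host, and path values; downstream code such as ArchiveTask only reads the path component.

import java.net.URI;
import java.net.URISyntaxException;

public class SourceUriSketch {
    public static void main(String[] args) throws URISyntaxException {
        // java.net.URI(scheme, userInfo, host, port, path, query, fragment)
        URI source = new URI("SCP", "airavata-user", "compute.example.org", 22,
                "/scratch/exp-123/output.dat", null, null);
        System.out.println(source);            // SCP://airavata-user@compute.example.org:22/scratch/exp-123/output.dat
        System.out.println(source.getPath());  // the tasks above only consume this path component
    }
}
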