Use of org.apache.airavata.gfac.core.authentication.AuthenticationInfo in project airavata by apache.
The class Factory, method getDataMovementRemoteCluster.
public static RemoteCluster getDataMovementRemoteCluster(ProcessContext processContext) throws GFacException, AiravataException, CredentialStoreException {
    String storageResourceId = processContext.getStorageResourceId();
    DataMovementProtocol dataMovementProtocol = processContext.getDataMovementProtocol();
    String key = new StringBuilder(processContext.getComputeResourceLoginUserName()).append(':').append(dataMovementProtocol.name()).append(':').append(storageResourceId).append(':').append(processContext.getStorageResourceCredentialToken()).toString();
    RemoteCluster remoteCluster = remoteClusterMap.get(key);
    if (remoteCluster == null) {
        JobManagerConfiguration jobManagerConfiguration = getJobManagerConfiguration(processContext.getResourceJobManager());
        if (dataMovementProtocol == DataMovementProtocol.LOCAL) {
            remoteCluster = new LocalRemoteCluster(processContext.getStorageResourceServerInfo(), jobManagerConfiguration, null);
        } else if (dataMovementProtocol == DataMovementProtocol.SCP) {
            remoteCluster = new HPCRemoteCluster(processContext.getStorageResourceServerInfo(), jobManagerConfiguration, Factory.getStorageSSHKeyAuthentication(processContext));
        } else {
            throw new GFacException("No remote cluster implementation mapped to data movement protocol " + dataMovementProtocol.name());
        }
        remoteClusterMap.put(key, remoteCluster);
    } else {
        AuthenticationInfo authentication = remoteCluster.getAuthentication();
        if (authentication instanceof SSHKeyAuthentication) {
            SSHKeyAuthentication sshKeyAuthentication = (SSHKeyAuthentication) authentication;
            if (!sshKeyAuthentication.getUserName().equals(processContext.getStorageResourceLoginUserName())) {
                JobManagerConfiguration jobManagerConfiguration = getJobManagerConfiguration(processContext.getResourceJobManager());
                dataMovementProtocol = processContext.getDataMovementProtocol();
                if (dataMovementProtocol == DataMovementProtocol.SCP) {
                    remoteCluster = new HPCRemoteCluster(processContext.getStorageResourceServerInfo(), jobManagerConfiguration, Factory.getStorageSSHKeyAuthentication(processContext));
                }
            }
        }
    }
    return remoteCluster;
}
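Both Factory methods shown on this page follow the same caching pattern: compose a key from the login user name, the protocol, the resource id, and the credential token, then reuse the cached RemoteCluster if one exists and only open a new connection otherwise. Below is a minimal, self-contained sketch of that keyed-cache idea, for illustration only; the Connection record and the openConnection-style helper are hypothetical stand-ins, not Airavata APIs.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative sketch of the keyed connection cache; not Airavata code.
public class ConnectionCacheSketch {

    private static final Map<String, Connection> cache = new ConcurrentHashMap<>();

    // The key combines login user, protocol, resource id and credential token,
    // so a change in any of them produces a distinct cached connection.
    static Connection get(String user, String protocol, String resourceId, String token) {
        String key = user + ':' + protocol + ':' + resourceId + ':' + token;
        return cache.computeIfAbsent(key, k -> new Connection(user, resourceId));
    }

    // Hypothetical stand-in for a RemoteCluster/SSH session.
    record Connection(String user, String resourceId) {
    }

    public static void main(String[] args) {
        Connection first = get("alice", "SCP", "storage-1", "token-1");
        Connection second = get("alice", "SCP", "storage-1", "token-1");
        System.out.println(first == second); // true: the second lookup reuses the cached connection
    }
}

The instanceof SSHKeyAuthentication check in the else branch above guards against reusing a cached connection that was opened for a different login user; in that case the factory builds a fresh HPCRemoteCluster for the current user instead.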
Use of org.apache.airavata.gfac.core.authentication.AuthenticationInfo in project airavata by apache.
The class Factory, method getJobSubmissionRemoteCluster.
/**
 * The Factory manages a map of remote clusters; reusing cached clusters avoids opening too many
 * connections/sessions for cluster communication.
 * @param processContext process context carrying resource, protocol and credential information
 * @return a cached or newly created RemoteCluster for job submission
 * @throws GFacException
 * @throws AppCatalogException
 * @throws AiravataException
 * @throws CredentialStoreException
 */
public static RemoteCluster getJobSubmissionRemoteCluster(ProcessContext processContext) throws GFacException, AppCatalogException, AiravataException, CredentialStoreException {
    String computeResourceId = processContext.getComputeResourceId();
    JobSubmissionProtocol jobSubmissionProtocol = processContext.getJobSubmissionProtocol();
    String key = new StringBuilder(processContext.getComputeResourceLoginUserName()).append(':').append(jobSubmissionProtocol.name()).append(':').append(computeResourceId).append(':').append(processContext.getComputeResourceCredentialToken()).toString();
    RemoteCluster remoteCluster = remoteClusterMap.get(key);
    if (remoteCluster == null) {
        JobManagerConfiguration jobManagerConfiguration = getJobManagerConfiguration(processContext.getResourceJobManager());
        if (jobSubmissionProtocol == JobSubmissionProtocol.LOCAL || jobSubmissionProtocol == JobSubmissionProtocol.LOCAL_FORK) {
            remoteCluster = new LocalRemoteCluster(processContext.getComputeResourceServerInfo(), jobManagerConfiguration, null);
        } else if (jobSubmissionProtocol == JobSubmissionProtocol.SSH || jobSubmissionProtocol == JobSubmissionProtocol.SSH_FORK || jobSubmissionProtocol == JobSubmissionProtocol.CLOUD) {
            remoteCluster = new HPCRemoteCluster(processContext.getComputeResourceServerInfo(), jobManagerConfiguration, Factory.getComputerResourceSSHKeyAuthentication(processContext));
        } else {
            throw new GFacException("No remote cluster implementation mapped to job submission protocol " + jobSubmissionProtocol.name());
        }
        remoteClusterMap.put(key, remoteCluster);
    } else {
        AuthenticationInfo authentication = remoteCluster.getAuthentication();
        if (authentication instanceof SSHKeyAuthentication) {
            SSHKeyAuthentication sshKeyAuthentication = (SSHKeyAuthentication) authentication;
            if (!sshKeyAuthentication.getUserName().equals(processContext.getComputeResourceLoginUserName())) {
                JobManagerConfiguration jobManagerConfiguration = getJobManagerConfiguration(processContext.getResourceJobManager());
                if (jobSubmissionProtocol == JobSubmissionProtocol.SSH || jobSubmissionProtocol == JobSubmissionProtocol.SSH_FORK) {
                    remoteCluster = new HPCRemoteCluster(processContext.getComputeResourceServerInfo(), jobManagerConfiguration, Factory.getComputerResourceSSHKeyAuthentication(processContext));
                }
            }
        }
    }
    return remoteCluster;
}
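From a caller's point of view the factory hides all of the protocol and caching logic; a task only supplies its ProcessContext, and can inspect the returned cluster's AuthenticationInfo if it needs to know which SSH user the connection was opened for. The helper below is a hypothetical, simplified caller written for illustration, not code from the Airavata tree; the SSHKeyAuthentication import assumes the class lives in the same authentication package named in the title above.

import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
import org.apache.airavata.gfac.core.authentication.SSHKeyAuthentication;

// Hypothetical helper (illustration only): fetch the job-submission cluster for a
// process and report which SSH user the cached connection authenticates as.
public static String describeSubmissionCluster(ProcessContext processContext) throws Exception {
    RemoteCluster cluster = Factory.getJobSubmissionRemoteCluster(processContext);
    AuthenticationInfo auth = cluster.getAuthentication();
    if (auth instanceof SSHKeyAuthentication) {
        // Same instanceof pattern the factory itself uses when it decides whether a
        // cached cluster was created for the right login user.
        return "SSH key authentication as user " + ((SSHKeyAuthentication) auth).getUserName();
    }
    return "local or non-SSH cluster (no SSH key authentication attached)";
}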
Use of org.apache.airavata.gfac.core.authentication.AuthenticationInfo in project airavata by apache.
The class SCPDataStageTask, method execute.
@Override
public TaskStatus execute(TaskContext taskContext) {
    TaskStatus status = new TaskStatus(TaskState.EXECUTING);
    AuthenticationInfo authenticationInfo = null;
    DataStagingTaskModel subTaskModel = null;
    String localDataDir = null;
    ProcessContext processContext = taskContext.getParentProcessContext();
    ProcessState processState = processContext.getProcessState();
    try {
        subTaskModel = ((DataStagingTaskModel) taskContext.getSubTaskModel());
        if (processState == ProcessState.OUTPUT_DATA_STAGING) {
            OutputDataObjectType processOutput = taskContext.getProcessOutput();
            if (processOutput != null && processOutput.getValue() == null) {
                log.error("expId: {}, processId: {}, taskId: {}:- Couldn't stage file {}, file name shouldn't be null", taskContext.getExperimentId(), taskContext.getProcessId(), taskContext.getTaskId(), processOutput.getName());
                status = new TaskStatus(TaskState.FAILED);
                if (processOutput.isIsRequired()) {
                    status.setReason("File name is null, but this output is marked as required");
                } else {
                    status.setReason("File name is null");
                }
                return status;
            }
        } else if (processState == ProcessState.INPUT_DATA_STAGING) {
            InputDataObjectType processInput = taskContext.getProcessInput();
            if (processInput != null && processInput.getValue() == null) {
                log.error("expId: {}, processId: {}, taskId: {}:- Couldn't stage file {}, file name shouldn't be null", taskContext.getExperimentId(), taskContext.getProcessId(), taskContext.getTaskId(), processInput.getName());
                status = new TaskStatus(TaskState.FAILED);
                if (processInput.isIsRequired()) {
                    status.setReason("File name is null, but this input is marked as required");
                } else {
                    status.setReason("File name is null");
                }
                return status;
            }
        } else {
            status.setState(TaskState.FAILED);
            status.setReason("Invalid task invocation; supported process phases are " + ProcessState.INPUT_DATA_STAGING.name() + " and " + ProcessState.OUTPUT_DATA_STAGING.name() + ", found " + processState.name());
            return status;
        }
        StorageResourceDescription storageResource = processContext.getStorageResource();
        // StoragePreference storagePreference = taskContext.getParentProcessContext().getStoragePreference();
        String hostName = null;
        if (storageResource != null) {
            hostName = storageResource.getHostName();
        } else {
            throw new GFacException("Storage Resource is null");
        }
        String inputPath = processContext.getStorageFileSystemRootLocation();
        inputPath = (inputPath.endsWith(File.separator) ? inputPath : inputPath + File.separator);
        // Use rsync instead of scp if the source and destination host and user name are the same.
        URI sourceURI = new URI(subTaskModel.getSource());
        String fileName = sourceURI.getPath().substring(sourceURI.getPath().lastIndexOf(File.separator) + 1, sourceURI.getPath().length());
        Session remoteSession = Factory.getSSHSession(Factory.getComputerResourceSSHKeyAuthentication(processContext), processContext.getComputeResourceServerInfo());
        Session storageSession = Factory.getSSHSession(Factory.getStorageSSHKeyAuthentication(processContext), processContext.getStorageResourceServerInfo());
        URI destinationURI = null;
        if (subTaskModel.getDestination().startsWith("dummy")) {
            destinationURI = TaskUtils.getDestinationURI(taskContext, hostName, inputPath, fileName);
            subTaskModel.setDestination(destinationURI.toString());
        } else {
            destinationURI = new URI(subTaskModel.getDestination());
        }
        if (sourceURI.getHost().equalsIgnoreCase(destinationURI.getHost()) && sourceURI.getUserInfo().equalsIgnoreCase(destinationURI.getUserInfo())) {
            localDataCopy(taskContext, sourceURI, destinationURI);
            status.setState(TaskState.COMPLETED);
            status.setReason("Locally copied file using 'cp' command");
            return status;
        }
        status = new TaskStatus(TaskState.COMPLETED);
        // The file name contains a wildcard; resolve the actual file name(s) on the remote side.
        if (fileName.contains("*")) {
            String destParentPath = (new File(destinationURI.getPath())).getParentFile().getPath();
            String sourceParentPath = (new File(sourceURI.getPath())).getParentFile().getPath();
            List<String> fileNames = taskContext.getParentProcessContext().getDataMovementRemoteCluster().getFileNameFromExtension(fileName, sourceParentPath, remoteSession);
            ExperimentCatalog experimentCatalog = processContext.getExperimentCatalog();
            String experimentId = processContext.getExperimentId();
            String processId = processContext.getProcessId();
            OutputDataObjectType processOutput = taskContext.getProcessOutput();
            for (int i = 0; i < fileNames.size(); i++) {
                String temp = fileNames.get(i);
                if (temp != null && !temp.isEmpty()) {
                    fileName = temp;
                }
                if (destParentPath.endsWith(File.separator)) {
                    destinationURI = new URI(destParentPath + fileName);
                } else {
                    destinationURI = new URI(destParentPath + File.separator + fileName);
                }
                // Wildcard support is only enabled for output data staging
                if (processState == ProcessState.OUTPUT_DATA_STAGING) {
                    processOutput.setName(fileName);
                    experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, Arrays.asList(processOutput), experimentId);
                    experimentCatalog.add(ExpCatChildDataType.PROCESS_OUTPUT, Arrays.asList(processOutput), processId);
                    taskContext.setProcessOutput(processOutput);
                    makeDir(taskContext, destinationURI);
                    // TODO - save updated subtask model with new destination
                    outputDataStaging(taskContext, remoteSession, sourceURI, storageSession, destinationURI);
                    status.setReason("Successfully staged output data");
                }
            }
            if (processState == ProcessState.OUTPUT_DATA_STAGING) {
                status.setReason("Successfully staged output data");
            } else {
                status.setReason("Wildcard support is only enabled for output data staging");
            }
        } else {
            if (processState == ProcessState.INPUT_DATA_STAGING) {
                inputDataStaging(taskContext, storageSession, sourceURI, remoteSession, destinationURI);
                status.setReason("Successfully staged input data");
            } else if (processState == ProcessState.OUTPUT_DATA_STAGING) {
                makeDir(taskContext, destinationURI);
                // TODO - save updated subtask model with new destination
                outputDataStaging(taskContext, remoteSession, sourceURI, storageSession, destinationURI);
                status.setReason("Successfully staged output data");
            }
        }
    } catch (TException e) {
        String msg = "Couldn't deserialize the subtask Thrift model";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
        return status;
    } catch (ApplicationSettingsException | FileNotFoundException e) {
        String msg = "Failed while reading credentials";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (URISyntaxException e) {
        String msg = "Source or destination URI is not valid. source: " + subTaskModel.getSource() + ", destination: " + subTaskModel.getDestination();
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (CredentialStoreException e) {
        String msg = "Storage authentication issue, could be an invalid credential token";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (AiravataException e) {
        String msg = "Error while creating an SSH session with the client";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (JSchException | IOException e) {
        String msg = "Failed to perform scp with the client";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (RegistryException | GFacException e) {
        String msg = "Data staging failed";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    }
    return status;
}
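The wildcard branch above delegates the actual name lookup to getDataMovementRemoteCluster().getFileNameFromExtension(fileName, sourceParentPath, remoteSession), which runs against the remote file system. As a rough, self-contained illustration of what resolving a shell-style wildcard such as *.out against a listing involves (this is not the Airavata implementation), a glob can be translated into a regular expression and used as a filter:

import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

// Hypothetical sketch of wildcard resolution; not the Airavata implementation.
public class WildcardResolveSketch {

    static List<String> resolve(String wildcard, List<String> remoteFileNames) {
        // Translate the glob into a regex: quote everything literally, then map '*' to '.*'.
        String regex = ("\\Q" + wildcard + "\\E").replace("*", "\\E.*\\Q");
        Pattern pattern = Pattern.compile(regex);
        return remoteFileNames.stream()
                .filter(name -> pattern.matcher(name).matches())
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<String> listing = List.of("run.log", "result-1.out", "result-2.out");
        System.out.println(resolve("*.out", listing)); // prints [result-1.out, result-2.out]
    }
}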
Use of org.apache.airavata.gfac.core.authentication.AuthenticationInfo in project airavata by apache.
The class ArchiveTask, method execute.
@Override
public TaskStatus execute(TaskContext taskContext) {
    // Archive logic is implemented with JSch-based scp.
    TaskStatus status = new TaskStatus(TaskState.EXECUTING);
    ProcessContext processContext = taskContext.getParentProcessContext();
    RemoteCluster remoteCluster = processContext.getJobSubmissionRemoteCluster();
    AuthenticationInfo authenticationInfo = null;
    DataStagingTaskModel subTaskModel = null;
    try {
        subTaskModel = (DataStagingTaskModel) ThriftUtils.getSubTaskModel(taskContext.getTaskModel());
    } catch (TException e) {
        String msg = "Error! Deserialization issue with SubTask Model";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
        return status;
    }
    try {
        StorageResourceDescription storageResource = taskContext.getParentProcessContext().getStorageResource();
        if (storageResource != null) {
            hostName = storageResource.getHostName();
        } else {
            throw new GFacException("Storage Resource is null");
        }
        userName = processContext.getStorageResourceLoginUserName();
        inputPath = processContext.getStorageFileSystemRootLocation();
        inputPath = (inputPath.endsWith(File.separator) ? inputPath : inputPath + File.separator);
        status = new TaskStatus(TaskState.COMPLETED);
        Session srcSession = Factory.getSSHSession(Factory.getComputerResourceSSHKeyAuthentication(processContext), processContext.getComputeResourceServerInfo());
        Session destSession = Factory.getSSHSession(Factory.getStorageSSHKeyAuthentication(processContext), processContext.getStorageResourceServerInfo());
        URI sourceURI = new URI(subTaskModel.getSource());
        URI destinationURI = null;
        String workingDirName = null, path = null;
        if (sourceURI.getPath().endsWith("/")) {
            path = sourceURI.getPath().substring(0, sourceURI.getPath().length() - 1);
        } else {
            path = sourceURI.getPath();
        }
        workingDirName = path.substring(path.lastIndexOf(File.separator) + 1, path.length());
        // tar the working dir
        // cd /Users/syodage/Desktop/temp/.. && tar -cvf path/workingDir.tar temp
        String archiveTar = "archive.tar";
        String resourceAbsTarFilePath = path + "/" + archiveTar;
        CommandInfo commandInfo = new RawCommandInfo("cd " + path + " && tar -cvf " + resourceAbsTarFilePath + " ./* ");
        remoteCluster.execute(commandInfo);
        // move the tar to the storage resource
        destinationURI = TaskUtils.getDestinationURI(taskContext, hostName, inputPath, archiveTar);
        remoteCluster.scpThirdParty(resourceAbsTarFilePath, srcSession, destinationURI.getPath(), destSession, RemoteCluster.DIRECTION.FROM, true);
        // delete the tar on the remote compute resource
        commandInfo = new RawCommandInfo("rm " + resourceAbsTarFilePath);
        remoteCluster.execute(commandInfo);
        // untar the file and delete the tar on the storage resource
        String destPath = destinationURI.getPath();
        String destParent = destPath.substring(0, destPath.lastIndexOf("/"));
        String storageArchiveDir = "ARCHIVE";
        commandInfo = new RawCommandInfo("cd " + destParent + " && mkdir " + storageArchiveDir + " && tar -xvf " + archiveTar + " -C " + storageArchiveDir + " && rm " + archiveTar + " && chmod 755 -R " + storageArchiveDir + "/*");
        executeCommand(destSession, commandInfo, new StandardOutReader());
    } catch (CredentialStoreException e) {
        String msg = "Storage authentication issue, make sure you are passing a valid credential token";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    } catch (URISyntaxException | GFacException e) {
        String msg = "Error! Archive task failed";
        log.error(msg, e);
        status.setState(TaskState.FAILED);
        status.setReason(msg);
        status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
        ErrorModel errorModel = new ErrorModel();
        errorModel.setActualErrorMessage(e.getMessage());
        errorModel.setUserFriendlyMessage(msg);
        taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
    }
    return status;
}
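The tar and untar steps on the storage side go through executeCommand(destSession, commandInfo, new StandardOutReader()), i.e. a raw command run over an already-authenticated JSch Session. As a rough sketch of what executing one command over such a session involves (a simplified stand-in, not the actual executeCommand/StandardOutReader implementation), using only standard JSch exec-channel calls:

import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

// Simplified stand-in for executeCommand/StandardOutReader: run one shell command
// over an established JSch session and return its standard output as a String.
public static String runRemote(Session session, String command) throws JSchException, IOException {
    ChannelExec channel = (ChannelExec) session.openChannel("exec");
    try {
        channel.setCommand(command);
        InputStream stdout = channel.getInputStream(); // must be obtained before connect()
        channel.connect();                             // starts the remote command
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] buffer = new byte[4096];
        int read;
        while ((read = stdout.read(buffer)) != -1) {
            out.write(buffer, 0, read);
        }
        // Exit status is valid once the channel has closed (after stdout reaches EOF).
        if (channel.getExitStatus() != 0) {
            throw new IOException("Remote command failed with exit status " + channel.getExitStatus());
        }
        return out.toString("UTF-8");
    } finally {
        channel.disconnect();
    }
}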