Use of org.apache.airavata.gfac.core.GFacException in project airavata by apache.
Class Factory, method getSSHSession.
public static synchronized Session getSSHSession(AuthenticationInfo authenticationInfo, ServerInfo serverInfo) throws GFacException {
    if (authenticationInfo == null || serverInfo == null) {
        throw new IllegalArgumentException("Can't create ssh session, argument should be valid (not null)");
    }
    SSHKeyAuthentication authentication;
    if (authenticationInfo instanceof SSHKeyAuthentication) {
        authentication = (SSHKeyAuthentication) authenticationInfo;
    } else {
        throw new GFacException("Support ssh key authentication only");
    }
    String key = buildKey(serverInfo);
    Session session = sessionCache.getIfPresent(key);
    boolean valid = isValidSession(session);
    // FIXME - move following info logs to debug
    if (valid) {
        log.info("SSH Session validation succeeded, key :" + key);
        valid = testChannelCreation(session);
        if (valid) {
            log.info("Channel creation test succeeded, key :" + key);
        } else {
            log.info("Channel creation test failed, key :" + key);
        }
    } else {
        log.info("Session validation failed, key :" + key);
    }
    if (!valid) {
        if (session != null) {
            log.info("Reinitialize a new SSH session for :" + key);
        } else {
            log.info("Initialize a new SSH session for :" + key);
        }
        try {
            JSch jSch = new JSch();
            jSch.addIdentity(UUID.randomUUID().toString(), authentication.getPrivateKey(), authentication.getPublicKey(), authentication.getPassphrase().getBytes());
            session = jSch.getSession(serverInfo.getUserName(), serverInfo.getHost(), serverInfo.getPort());
            session.setUserInfo(new DefaultUserInfo(serverInfo.getUserName(), null, authentication.getPassphrase()));
            if (authentication.getStrictHostKeyChecking().equals("yes")) {
                jSch.setKnownHosts(authentication.getKnownHostsFilePath());
            } else {
                session.setConfig("StrictHostKeyChecking", "no");
            }
            // 0 connection timeout
            session.connect();
            sessionCache.put(key, session);
        } catch (JSchException e) {
            throw new GFacException("JSch initialization error ", e);
        }
    } else {
        // FIXME - move following info log to debug
        log.info("Reuse SSH session for :" + key);
    }
    return session;
}
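The sessionCache above is read with getIfPresent and populated with put, which matches Guava's Cache API. A minimal sketch of how such a cache could be declared, assuming Guava is used (the actual Factory may configure size, expiry, and eviction differently; all values below are illustrative):

import java.util.concurrent.TimeUnit;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.jcraft.jsch.Session;

public class SessionCacheSketch {

    // Hypothetical declaration of the session cache assumed by getSSHSession;
    // the real Factory may use different settings entirely.
    private static final Cache<String, Session> sessionCache = CacheBuilder.newBuilder()
            // illustrative cap on the number of cached sessions
            .maximumSize(1000)
            // illustrative idle expiry for sessions that have not been used recently
            .expireAfterAccess(30, TimeUnit.MINUTES)
            // disconnect evicted sessions so the underlying connection is released
            .removalListener((RemovalListener<String, Session>) notification -> {
                Session removed = notification.getValue();
                if (removed != null && removed.isConnected()) {
                    removed.disconnect();
                }
            })
            .build();
}

Pairing the cache with the validation and channel-creation test shown in getSSHSession keeps stale entries from being reused even before they expire.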
Use of org.apache.airavata.gfac.core.GFacException in project airavata by apache.
Class GFacEngineImpl, method outputDataStaging.
/**
 * @param taskContext
 * @param recovery
 * @param isArchive
 * @return <code>true</code> if process execution interrupted, <code>false</code> otherwise.
 * @throws GFacException
 */
private boolean outputDataStaging(TaskContext taskContext, boolean recovery, boolean isArchive) throws GFacException {
    TaskStatus taskStatus = new TaskStatus(TaskState.EXECUTING);
    taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
    taskContext.setTaskStatus(taskStatus);
    GFacUtils.saveAndPublishTaskStatus(taskContext);
    ProcessContext processContext = taskContext.getParentProcessContext();
    Task dMoveTask = null;
    if (isArchive) {
        dMoveTask = Factory.getArchiveTask();
    } else {
        dMoveTask = Factory.getDataMovementTask(processContext.getDataMovementProtocol());
    }
    if (null == dMoveTask) {
        throw new GFacException("Unsupported security protocol, Airavata doesn't support " + processContext.getDataMovementProtocol() + " protocol yet.");
    }
    taskStatus = executeTask(taskContext, dMoveTask, recovery);
    taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
    taskContext.setTaskStatus(taskStatus);
    GFacUtils.saveAndPublishTaskStatus(taskContext);
    if (taskStatus.getState() == TaskState.FAILED) {
        log.error("expId: {}, processId: {}, taskId: {} type: {},:- output staging failed, reason: {}", taskContext.getParentProcessContext().getExperimentId(), taskContext.getParentProcessContext().getProcessId(), taskContext.getTaskId(), dMoveTask.getType().name(), taskStatus.getReason());
        String errorMsg = new StringBuilder("expId: ").append(processContext.getExperimentId()).append(", processId: ").append(processContext.getProcessId()).append(", taskId: ").append(taskContext.getTaskId()).append(", type: ").append(taskContext.getTaskType().name()).append(" :- Output staging failed. Reason: ").append(taskStatus.getReason()).toString();
        ErrorModel errorModel = new ErrorModel();
        errorModel.setUserFriendlyMessage("Error while staging output data");
        errorModel.setActualErrorMessage(errorMsg);
        GFacUtils.saveTaskError(taskContext, errorModel);
    }
    return false;
}
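The detailed error message above is assembled with a chained StringBuilder. Purely for illustration, an equivalent String.format formulation (not the project's code; the literal values below are hypothetical stand-ins for the context getters) would be:

public class ErrorMessageFormatExample {
    public static void main(String[] args) {
        // Same message layout as the StringBuilder chain in outputDataStaging,
        // expressed with String.format; values are hypothetical placeholders.
        String errorMsg = String.format(
                "expId: %s, processId: %s, taskId: %s, type: %s :- Output staging failed. Reason: %s",
                "EXP_123", "PROC_456", "TASK_789", "OUTPUT_FETCHING", "remote copy failed");
        System.out.println(errorMsg);
    }
}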
Use of org.apache.airavata.gfac.core.GFacException in project airavata by apache.
Class GFacEngineImpl, method executeCancel.
private void executeCancel(TaskContext taskContext, JobSubmissionTask jSTask) throws GFacException {
    try {
        JobStatus oldJobStatus = jSTask.cancel(taskContext);
        // If the job was in the QUEUED state when the cancel command ran, then there won't be any email from this job.
        ProcessContext pc = taskContext.getParentProcessContext();
        JobMonitor monitorService = Factory.getMonitorService(pc.getMonitorMode());
        monitorService.canceledJob(pc.getJobModel().getJobId());
    } catch (TaskException e) {
        throw new GFacException("Error while cancelling job", e);
    } catch (AiravataException e) {
        throw new GFacException("Error while getting monitoring service", e);
    }
}
Use of org.apache.airavata.gfac.core.GFacException in project airavata by apache.
Class GFacEngineImpl, method inputDataStaging.
private boolean inputDataStaging(TaskContext taskContext, boolean recover) throws GFacException, TException {
    TaskStatus taskStatus = new TaskStatus(TaskState.EXECUTING);
    taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
    taskContext.setTaskStatus(taskStatus);
    GFacUtils.saveAndPublishTaskStatus(taskContext);
    ProcessContext processContext = taskContext.getParentProcessContext();
    // handle URI_COLLECTION input data type
    Task dMoveTask = Factory.getDataMovementTask(processContext.getDataMovementProtocol());
    if (null == dMoveTask) {
        throw new GFacException("Unsupported security protocol, Airavata doesn't support " + processContext.getDataMovementProtocol() + " protocol yet.");
    }
    if (taskContext.getProcessInput().getType() == DataType.URI_COLLECTION) {
        String values = taskContext.getProcessInput().getValue();
        String[] multiple_inputs = values.split(GFacConstants.MULTIPLE_INPUTS_SPLITTER);
        DataStagingTaskModel subTaskModel = (DataStagingTaskModel) taskContext.getSubTaskModel();
        for (String input : multiple_inputs) {
            taskContext.getProcessInput().setValue(input);
            subTaskModel.setSource(input);
            taskStatus = executeTask(taskContext, dMoveTask, false);
        }
        taskContext.getProcessInput().setValue(values);
    } else {
        taskStatus = executeTask(taskContext, dMoveTask, false);
    }
    taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
    taskContext.setTaskStatus(taskStatus);
    GFacUtils.saveAndPublishTaskStatus(taskContext);
    checkFailures(taskContext, taskStatus, dMoveTask);
    return false;
}
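For URI_COLLECTION inputs, a single process input value carries several URIs joined by GFacConstants.MULTIPLE_INPUTS_SPLITTER, and each URI is staged by its own executeTask call. A small self-contained sketch of the splitting step, assuming for illustration that the splitter is a comma (the real constant's value may differ):

public class UriCollectionSplitExample {
    public static void main(String[] args) {
        // Hypothetical URI_COLLECTION value; the real separator is
        // GFacConstants.MULTIPLE_INPUTS_SPLITTER, assumed here to be ",".
        String values = "scp://host/data/a.txt,scp://host/data/b.txt";
        for (String input : values.split(",")) {
            // each URI would be staged by a separate executeTask(...) call,
            // mirroring the loop in inputDataStaging
            System.out.println("staging input: " + input);
        }
    }
}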
Use of org.apache.airavata.gfac.core.GFacException in project airavata by apache.
Class GFacEngineImpl, method executeTaskListFrom.
private void executeTaskListFrom(ProcessContext processContext, String startingTaskId) throws GFacException {
    // checkpoint
    if (processContext.isInterrupted() && processContext.getProcessState() != ProcessState.MONITORING) {
        GFacUtils.handleProcessInterrupt(processContext);
        return;
    }
    List<TaskModel> taskList = processContext.getTaskList();
    Map<String, TaskModel> taskMap = processContext.getTaskMap();
    boolean fastForward = true;
    for (String taskId : processContext.getTaskExecutionOrder()) {
        if (fastForward) {
            if (taskId.equalsIgnoreCase(startingTaskId)) {
                fastForward = false;
            } else {
                continue;
            }
        }
        TaskModel taskModel = taskMap.get(taskId);
        processContext.setCurrentExecutingTaskModel(taskModel);
        TaskTypes taskType = taskModel.getTaskType();
        TaskContext taskContext = getTaskContext(processContext);
        taskContext.setTaskModel(taskModel);
        ProcessStatus status = null;
        switch (taskType) {
            case ENV_SETUP:
                status = new ProcessStatus(ProcessState.CONFIGURING_WORKSPACE);
                status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                processContext.setProcessStatus(status);
                GFacUtils.saveAndPublishProcessStatus(processContext);
                // checkpoint
                if (processContext.isInterrupted()) {
                    GFacUtils.handleProcessInterrupt(processContext);
                    return;
                }
                configureWorkspace(taskContext, processContext.isRecovery());
                // checkpoint
                if (processContext.isInterrupted()) {
                    GFacUtils.handleProcessInterrupt(processContext);
                    return;
                }
                break;
            case DATA_STAGING:
                try {
                    // checkpoint
                    if (processContext.isInterrupted()) {
                        GFacUtils.handleProcessInterrupt(processContext);
                        return;
                    }
                    DataStagingTaskModel subTaskModel = (DataStagingTaskModel) taskContext.getSubTaskModel();
                    DataStageType type = subTaskModel.getType();
                    switch (type) {
                        case INPUT:
                            status = new ProcessStatus(ProcessState.INPUT_DATA_STAGING);
                            status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                            processContext.setProcessStatus(status);
                            GFacUtils.saveAndPublishProcessStatus(processContext);
                            taskContext.setProcessInput(subTaskModel.getProcessInput());
                            inputDataStaging(taskContext, processContext.isRecovery());
                            break;
                        case OUPUT:
                            status = new ProcessStatus(ProcessState.OUTPUT_DATA_STAGING);
                            status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                            processContext.setProcessStatus(status);
                            GFacUtils.saveAndPublishProcessStatus(processContext);
                            taskContext.setProcessOutput(subTaskModel.getProcessOutput());
                            outputDataStaging(taskContext, processContext.isRecovery(), false);
                            break;
                        case ARCHIVE_OUTPUT:
                            status = new ProcessStatus(ProcessState.OUTPUT_DATA_STAGING);
                            status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                            processContext.setProcessStatus(status);
                            GFacUtils.saveAndPublishProcessStatus(processContext);
                            outputDataStaging(taskContext, processContext.isRecovery(), true);
                            break;
                    }
                    // checkpoint
                    if (processContext.isInterrupted()) {
                        GFacUtils.handleProcessInterrupt(processContext);
                        return;
                    }
                } catch (TException e) {
                    throw new GFacException(e);
                }
                break;
            case JOB_SUBMISSION:
                // checkpoint
                if (processContext.isInterrupted()) {
                    GFacUtils.handleProcessInterrupt(processContext);
                    return;
                }
                status = new ProcessStatus(ProcessState.EXECUTING);
                status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                processContext.setProcessStatus(status);
                GFacUtils.saveAndPublishProcessStatus(processContext);
                executeJobSubmission(taskContext, processContext.isRecovery());
                // Don't put any checkpoint in between JobSubmission and Monitoring tasks
                JobStatus jobStatus = processContext.getJobModel().getJobStatuses().get(0);
                if (jobStatus != null && (jobStatus.getJobState() == JobState.SUBMITTED || jobStatus.getJobState() == JobState.QUEUED || jobStatus.getJobState() == JobState.ACTIVE)) {
                    List<OutputDataObjectType> processOutputs = processContext.getProcessModel().getProcessOutputs();
                    if (processOutputs != null && !processOutputs.isEmpty()) {
                        for (OutputDataObjectType output : processOutputs) {
                            try {
                                if (output.isOutputStreaming()) {
                                    TaskModel streamingTaskModel = new TaskModel();
                                    streamingTaskModel.setTaskType(TaskTypes.OUTPUT_FETCHING);
                                    streamingTaskModel.setTaskStatuses(Arrays.asList(new TaskStatus(TaskState.CREATED)));
                                    streamingTaskModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
                                    streamingTaskModel.setParentProcessId(processContext.getProcessId());
                                    TaskContext streamingTaskContext = getTaskContext(processContext);
                                    DataStagingTaskModel submodel = new DataStagingTaskModel();
                                    submodel.setType(DataStageType.OUPUT);
                                    submodel.setProcessOutput(output);
                                    URI source = new URI(processContext.getDataMovementProtocol().name(), processContext.getComputeResourceLoginUserName(), processContext.getComputeResourceDescription().getHostName(), 22, processContext.getWorkingDir() + output.getValue(), null, null);
                                    submodel.setSource(source.getPath());
                                    submodel.setDestination("dummy://temp/file/location");
                                    streamingTaskModel.setSubTaskModel(ThriftUtils.serializeThriftObject(submodel));
                                    String streamTaskId = (String) processContext.getExperimentCatalog().add(ExpCatChildDataType.TASK, streamingTaskModel, processContext.getProcessId());
                                    streamingTaskModel.setTaskId(streamTaskId);
                                    streamingTaskContext.setTaskModel(streamingTaskModel);
                                    executeDataStreaming(streamingTaskContext, processContext.isRecovery());
                                }
                            } catch (URISyntaxException | TException | RegistryException e) {
                                log.error("Error while streaming output " + output.getValue());
                            }
                        }
                    }
                }
                break;
            case MONITORING:
                status = new ProcessStatus(ProcessState.MONITORING);
                status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                processContext.setProcessStatus(status);
                GFacUtils.saveAndPublishProcessStatus(processContext);
                executeJobMonitoring(taskContext, processContext.isRecovery());
                break;
            case ENV_CLEANUP:
                // TODO implement environment clean up task logic
                break;
            default:
                throw new GFacException("Unsupported Task type");
        }
        if (processContext.isPauseTaskExecution()) {
            // If any task put processContext to wait, the same task must continue processContext execution.
            return;
        }
    }
    processContext.setComplete(true);
}
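The fastForward flag above implements a simple resume pattern: tasks in the ordered list are skipped until the starting task is reached, and execution proceeds from that point on. A stripped-down, self-contained sketch of that pattern, using hypothetical task IDs rather than the Airavata API:

import java.util.Arrays;
import java.util.List;

public class ResumeFromTaskExample {
    public static void main(String[] args) {
        // Hypothetical task execution order and resume point.
        List<String> taskExecutionOrder = Arrays.asList("env-setup", "input-staging", "job-submission", "monitoring");
        String startingTaskId = "job-submission";

        boolean fastForward = true;
        for (String taskId : taskExecutionOrder) {
            if (fastForward) {
                if (taskId.equalsIgnoreCase(startingTaskId)) {
                    fastForward = false;   // found the resume point; execute from here on
                } else {
                    continue;              // skip tasks assumed to have completed already
                }
            }
            System.out.println("executing task: " + taskId);
        }
    }
}

Running this prints only "job-submission" and "monitoring", mirroring how executeTaskListFrom resumes a process from startingTaskId during recovery.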