Use of org.apache.airavata.gfac.impl.task.utils.StreamData in project airavata by apache.
The class DataStreamingTask, method execute:
@Override
public TaskStatus execute(TaskContext taskContext) {
    ProcessState processState = taskContext.getParentProcessContext().getProcessState();
    try {
        TaskStatus status = new TaskStatus(TaskState.EXECUTING);
        final DataStagingTaskModel subTaskModel = (DataStagingTaskModel) ThriftUtils.getSubTaskModel(taskContext.getTaskModel());
        if (processState == ProcessState.OUTPUT_DATA_STAGING) {
            OutputDataObjectType processOutput = taskContext.getProcessOutput();
            if (processOutput != null && processOutput.getValue() == null) {
                // An output with no file name cannot be staged; fail the task with a reason.
                log.error("expId: {}, processId:{}, taskId: {}:- Couldn't stage file {} , file name shouldn't be null",
                        taskContext.getExperimentId(), taskContext.getProcessId(), taskContext.getTaskId(),
                        processOutput.getName());
                status = new TaskStatus(TaskState.FAILED);
                if (processOutput.isIsRequired()) {
                    status.setReason("File name is null, but this output's isRequired bit is not set");
                } else {
                    status.setReason("File name is null");
                }
                return status;
            }
            if (processOutput != null) {
                if (processOutput.isOutputStreaming()) {
                    // Stream output periodically: derive the period from the job's wall time
                    // so output is pulled roughly ten times over the job's lifetime.
                    ComputationalResourceSchedulingModel resourceSchedule =
                            taskContext.getParentProcessContext().getProcessModel().getProcessResourceSchedule();
                    int wallTimeLimit = resourceSchedule.getWallTimeLimit();
                    if (wallTimeLimit > 10) {
                        int period = wallTimeLimit / 10;
                        Timer timer = new Timer();
                        // userName, hostName and inputPath are fields of DataStreamingTask (not shown in this excerpt).
                        StreamData streamData = new StreamData(userName, hostName, inputPath, taskContext, subTaskModel);
                        timer.schedule(streamData, 0, 1000 * 60 * period);
                        status.setState(TaskState.COMPLETED);
                    }
                }
            }
        }
        return null;
    } catch (TException e) {
        log.error("Error while creating data streaming task", e);
        return null;
    }
}
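For context, the streaming mechanism here is plain java.util.Timer scheduling: StreamData extends TimerTask, and the wall-time-derived period means the task fires roughly every wallTimeLimit/10 minutes. The sketch below is a minimal, self-contained illustration of that scheduling pattern only; OutputStreamer, the host name, and the paths are hypothetical stand-ins and not part of the Airavata API.

import java.util.Timer;
import java.util.TimerTask;

public class StreamingSketch {

    // Hypothetical TimerTask: the real StreamData would connect to the compute host
    // and stage intermediate output from inputPath, rather than just printing.
    static class OutputStreamer extends TimerTask {
        private final String hostName;
        private final String inputPath;

        OutputStreamer(String hostName, String inputPath) {
            this.hostName = hostName;
            this.inputPath = inputPath;
        }

        @Override
        public void run() {
            // Placeholder for the actual staging work performed by StreamData.
            System.out.println("Streaming intermediate output from " + hostName + ":" + inputPath);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        int wallTimeLimit = 60; // job wall time in minutes, as in the resource schedule

        // Same period rule as execute(): only stream for jobs longer than ten minutes,
        // and then roughly ten times over the job's lifetime.
        if (wallTimeLimit > 10) {
            int periodMinutes = wallTimeLimit / 10;
            Timer timer = new Timer("output-streaming", true); // daemon timer
            timer.schedule(new OutputStreamer("compute.example.org", "/scratch/job-output"),
                    0,                            // first run immediately
                    1000L * 60 * periodMinutes);  // then every periodMinutes minutes

            Thread.sleep(5000); // keep the demo JVM alive long enough for the first run
            timer.cancel();
        }
    }
}

Note that the period multiplication in the excerpt (1000 * 60 * period) uses int arithmetic; for very large wall times a long literal, as in the sketch, avoids any risk of overflow.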