Use of edu.pitt.dbmi.tetrad.db.entity.HpcAccount in project tetrad by cmu-phil.
The class HpcJobManager, method removePendingHpcJob.
public synchronized void removePendingHpcJob(final HpcJobInfo hpcJobInfo) {
HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
LOGGER.debug("removedPendingHpcJob: connection: " + hpcAccount.getConnectionName());
LOGGER.debug("removedPendingHpcJob: algorithm: " + hpcJobInfo.getAlgoId());
LOGGER.debug("removedPendingHpcJob: status: " + hpcJobInfo.getStatus());
LOGGER.debug("removedPendingHpcJob: pid: " + hpcJobInfo.getPid());
Set<HpcJobInfo> hpcJobInfos = pendingHpcJobInfoMap.get(hpcAccount);
if (hpcJobInfos != null) {
// Remove through an Iterator: calling remove() on the set inside a
// for-each loop would throw ConcurrentModificationException. The Long
// ids are compared by value, not by reference.
for (Iterator<HpcJobInfo> it = hpcJobInfos.iterator(); it.hasNext();) {
HpcJobInfo jobInfo = it.next();
if (jobInfo.getId().equals(hpcJobInfo.getId())) {
it.remove();
}
}
if (hpcJobInfos.isEmpty()) {
pendingHpcJobInfoMap.remove(hpcAccount);
} else {
pendingHpcJobInfoMap.put(hpcAccount, hpcJobInfos);
}
}
}
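On Java 8 and later the same removal can be expressed with Collection.removeIf, which handles the iterator bookkeeping internally. A minimal sketch, assuming getId() returns a Long as it does above:

// Sketch: removeIf-based equivalent of the loop in removePendingHpcJob.
Set<HpcJobInfo> jobs = pendingHpcJobInfoMap.get(hpcAccount);
if (jobs != null) {
    // The predicate compares the boxed Long ids by value.
    jobs.removeIf(jobInfo -> jobInfo.getId().equals(hpcJobInfo.getId()));
    if (jobs.isEmpty()) {
        pendingHpcJobInfoMap.remove(hpcAccount);
    }
}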
Use of edu.pitt.dbmi.tetrad.db.entity.HpcAccount in project tetrad by cmu-phil.
The class HpcJobPreProcessTask, method run.
@Override
public void run() {
TetradDesktop desktop = (TetradDesktop) DesktopController.getInstance();
while (desktop == null) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
// Re-query the controller; without this the loop would spin forever on a null desktop
desktop = (TetradDesktop) DesktopController.getInstance();
}
final HpcAccountManager hpcAccountManager = desktop.getHpcAccountManager();
final HpcJobManager hpcJobManager = desktop.getHpcJobManager();
HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
AlgorithmParamRequest algorParamReq = hpcJobInfo.getAlgorithmParamRequest();
String datasetPath = algorParamReq.getDatasetPath();
String priorKnowledgePath = algorParamReq.getPriorKnowledgePath();
try {
HpcAccountService hpcAccountService = hpcJobManager.getHpcAccountService(hpcAccount);
HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(hpcJobInfo);
String log = "Initiated connection to " + hpcAccount.getConnectionName();
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
log = "datasetPath: " + datasetPath;
LOGGER.debug(log);
Path file = Paths.get(datasetPath);
// Get file's MD5 hash and use it as its identifier
String md5 = algorParamReq.getDatasetMd5();
// Initiate data uploading progress
hpcJobManager.updateUploadFileProgress(datasetPath, 0);
Path prior = null;
if (priorKnowledgePath != null) {
log = "priorKnowledgePath: " + priorKnowledgePath;
LOGGER.debug(log);
prior = Paths.get(priorKnowledgePath);
// Initiate prior knowledge uploading progress
hpcJobManager.updateUploadFileProgress(priorKnowledgePath, 0);
}
// Check if this dataset already exists with this md5 hash
RemoteDataFileService remoteDataService = hpcAccountService.getRemoteDataService();
DataFile dataFile = HpcAccountUtils.getRemoteDataFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
DataUploadService dataUploadService = hpcAccountService.getDataUploadService();
// If not, upload the file
if (dataFile == null) {
log = "Started uploading " + file.getFileName().toString();
LOGGER.debug(log);
dataUploadService.startUpload(file, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
int progress;
while ((progress = dataUploadService.getUploadJobStatus(file.toAbsolutePath().toString())) < 100) {
// System.out.println("Uploading "
// + file.toAbsolutePath().toString() + " Progress: "
// + progress + "%");
hpcJobManager.updateUploadFileProgress(datasetPath, progress);
Thread.sleep(10);
}
hpcJobManager.updateUploadFileProgress(datasetPath, progress);
log = "Finished uploading " + file.getFileName().toString();
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
// Get remote datafile
dataFile = HpcAccountUtils.getRemoteDataFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
HpcAccountUtils.summarizeDataset(remoteDataService, algorParamReq, dataFile.getId(), HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
log = "Summarized " + file.getFileName().toString();
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
} else {
log = "Skipped uploading " + file.getFileName().toString();
LOGGER.debug(log);
hpcJobManager.updateUploadFileProgress(datasetPath, -1);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
if (dataFile.getFileSummary().getVariableType() == null) {
HpcAccountUtils.summarizeDataset(remoteDataService, algorParamReq, dataFile.getId(), HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
log = "Summarized " + file.getFileName().toString();
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, "Summarized " + file.getFileName().toString());
}
}
DataFile priorKnowledgeFile = null;
// Prior Knowledge File
if (prior != null) {
// Get prior knowledge file Id
md5 = algorParamReq.getPriorKnowledgeMd5();
priorKnowledgeFile = HpcAccountUtils.getRemotePriorKnowledgeFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
if (priorKnowledgeFile == null) {
// Upload prior knowledge file
dataUploadService.startUpload(prior, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
log = "Started uploading Prior Knowledge File";
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
int progress;
while ((progress = dataUploadService.getUploadJobStatus(prior.toAbsolutePath().toString())) < 100) {
hpcJobManager.updateUploadFileProgress(priorKnowledgePath, progress);
Thread.sleep(10);
}
hpcJobManager.updateUploadFileProgress(priorKnowledgePath, progress);
priorKnowledgeFile = HpcAccountUtils.getRemotePriorKnowledgeFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
log = "Finished uploading Prior Knowledge File";
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
}
}
// Algorithm Job Preparation
edu.pitt.dbmi.ccd.rest.client.dto.algo.AlgorithmParamRequest paramRequest = new edu.pitt.dbmi.ccd.rest.client.dto.algo.AlgorithmParamRequest();
String algoId = hpcJobInfo.getAlgoId();
paramRequest.setAlgoId(algoId);
paramRequest.setDatasetFileId(dataFile.getId());
// Test
if (algorParamReq.getTestId() != null) {
paramRequest.setTestId(algorParamReq.getTestId());
}
// Score
if (algorParamReq.getScoreId() != null) {
paramRequest.setScoreId(algorParamReq.getScoreId());
}
Set<AlgoParameter> algorithmParameters = new HashSet<>();
for (AlgorithmParameter param : algorParamReq.getAlgorithmParameters()) {
algorithmParameters.add(new AlgoParameter(param.getParameter(), param.getValue()));
LOGGER.debug("AlgorithmParameter: " + param.getParameter() + " : " + param.getValue());
}
if (priorKnowledgeFile != null) {
paramRequest.setPriorKnowledgeFileId(priorKnowledgeFile.getId());
LOGGER.debug("priorKnowledgeFileId: " + priorKnowledgeFile.getId());
}
paramRequest.setAlgoParameters(algorithmParameters);
if (algorParamReq.getJvmOptions() != null) {
JvmOptions jvmOptions = new JvmOptions();
jvmOptions.setMaxHeapSize(algorParamReq.getJvmOptions().getMaxHeapSize());
paramRequest.setJvmOptions(jvmOptions);
}
Set<HpcParameter> hpcParameters = algorParamReq.getHpcParameters();
if (hpcParameters != null) {
Set<edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter> hpcParams = new HashSet<>();
for (HpcParameter param : hpcParameters) {
edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter hpcParam = new edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter();
hpcParam.setKey(param.getKey());
hpcParam.setValue(param.getValue());
hpcParams.add(hpcParam);
LOGGER.debug("HpcParameter: " + hpcParam.getKey() + " : " + hpcParam.getValue());
}
paramRequest.setHpcParameters(hpcParams);
}
// Submit a job
JobQueueService jobQueueService = hpcAccountService.getJobQueueService();
JobInfo jobInfo = jobQueueService.addToRemoteQueue(paramRequest, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
// Log the job submission
hpcJobInfo.setSubmittedTime(new Date(System.currentTimeMillis()));
// Submitted
hpcJobInfo.setStatus(0);
hpcJobInfo.setPid(jobInfo.getId());
hpcJobInfo.setResultFileName(jobInfo.getResultFileName());
hpcJobInfo.setResultJsonFileName(jobInfo.getResultJsonFileName());
hpcJobInfo.setErrorResultFileName(jobInfo.getErrorResultFileName());
hpcJobManager.updateHpcJobInfo(hpcJobInfo);
log = "Submitted job to " + hpcAccount.getConnectionName();
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, 0, log);
LOGGER.debug("HpcJobPreProcessTask: HpcJobInfo: id : " + hpcJobInfo.getId() + " : pid : " + hpcJobInfo.getPid() + " : " + hpcJobInfo.getAlgoId() + hpcJobInfo.getAlgorithmParamRequest().getTestId() == null ? "" : " : " + hpcJobInfo.getAlgorithmParamRequest().getTestId() + hpcJobInfo.getAlgorithmParamRequest().getScoreId() == null ? "" : " : " + hpcJobInfo.getAlgorithmParamRequest().getScoreId() + " : " + hpcJobInfo.getResultFileName());
hpcJobManager.addNewSubmittedHpcJob(hpcJobInfo);
} catch (Exception e) {
LOGGER.error("HpcJobPreProcessTask failed", e);
}
}
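The dataset upload and the prior-knowledge upload above run the same poll-sleep-report loop twice. A hypothetical helper (waitForUpload is not part of the Tetrad codebase; the 10 ms poll interval is carried over from the original loops) could factor it out:

// Sketch: poll an upload until the service reports 100%, mirroring the
// progress into the HpcJobManager under the given progress key.
private static void waitForUpload(DataUploadService dataUploadService, HpcJobManager hpcJobManager,
        Path file, String progressKey) throws InterruptedException {
    int progress;
    while ((progress = dataUploadService.getUploadJobStatus(file.toAbsolutePath().toString())) < 100) {
        hpcJobManager.updateUploadFileProgress(progressKey, progress);
        Thread.sleep(10);
    }
    hpcJobManager.updateUploadFileProgress(progressKey, progress);
}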
Use of edu.pitt.dbmi.tetrad.db.entity.HpcAccount in project tetrad by cmu-phil.
The class LoadHpcGraphJsonAction, method buildHpcJsonChooserComponent.
private JComponent buildHpcJsonChooserComponent(final TetradDesktop desktop) {
final HpcAccountManager hpcAccountManager = desktop.getHpcAccountManager();
final HpcJobManager hpcJobManager = desktop.getHpcJobManager();
// Get ComputingAccount from DB
final DefaultListModel<HpcAccount> listModel = new DefaultListModel<HpcAccount>();
for (HpcAccount account : hpcAccountManager.getHpcAccounts()) {
listModel.addElement(account);
}
// JSplitPane
final JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT);
// Left pane -> JList (parent pane)
JPanel leftPanel = new JPanel(new BorderLayout());
// Right pane -> ComputingAccountResultList
final JPanel jsonResultListPanel = new JPanel(new BorderLayout());
int minWidth = 800;
int minHeight = 600;
int screenWidth = Toolkit.getDefaultToolkit().getScreenSize().width;
int screenHeight = Toolkit.getDefaultToolkit().getScreenSize().height;
int frameWidth = screenWidth * 3 / 4;
int frameHeight = screenHeight * 3 / 4;
final int paneWidth = Math.max(minWidth, frameWidth);
final int paneHeight = Math.max(minHeight, frameHeight);
// JTable
final Vector<String> columnNames = new Vector<>();
columnNames.addElement("Name");
columnNames.addElement("Created");
columnNames.addElement("Last Modified");
columnNames.addElement("Size");
Vector<Vector<String>> rowData = new Vector<>();
final DefaultTableModel tableModel = new LoadHpcGraphJsonTableModel(rowData, columnNames);
final JTable jsonResultTable = new JTable(tableModel);
jsonResultTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
// Resize table's column width
jsonResultTable.getColumnModel().getColumn(0).setPreferredWidth(paneWidth * 2 / 5);
jsonResultTable.getColumnModel().getColumn(1).setPreferredWidth(paneWidth * 2 / 15);
jsonResultTable.getColumnModel().getColumn(2).setPreferredWidth(paneWidth * 2 / 15);
jsonResultTable.getColumnModel().getColumn(3).setPreferredWidth(paneWidth * 2 / 15);
ListSelectionModel selectionModel = jsonResultTable.getSelectionModel();
selectionModel.addListSelectionListener(new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
int row = jsonResultTable.getSelectedRow();
if (row >= 0) {
DefaultTableModel model = (DefaultTableModel) jsonResultTable.getModel();
jsonFileName = (String) model.getValueAt(row, 0);
}
}
});
final JScrollPane scrollTablePane = new JScrollPane(jsonResultTable);
jsonResultListPanel.add(scrollTablePane, BorderLayout.CENTER);
splitPane.setLeftComponent(leftPanel);
splitPane.setRightComponent(jsonResultListPanel);
// Center Panel
final JList<HpcAccount> accountList = new JList<>(listModel);
accountList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
accountList.setLayoutOrientation(JList.VERTICAL);
accountList.setSelectedIndex(-1);
accountList.addListSelectionListener(new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
if (e.getValueIsAdjusting()) {
return;
}
int selectedIndex = ((JList<?>) e.getSource()).getSelectedIndex();
// Show or remove the json list
if (selectedIndex > -1) {
jsonFileName = null;
hpcAccount = listModel.get(selectedIndex);
TableColumnModel columnModel = jsonResultTable.getColumnModel();
List<Integer> columnWidthList = new ArrayList<>();
for (int i = 0; i < columnModel.getColumnCount(); i++) {
int width = columnModel.getColumn(i).getPreferredWidth();
columnWidthList.add(width);
}
jsonResultTable.clearSelection();
try {
HpcAccountService hpcAccountService = hpcJobManager.getHpcAccountService(hpcAccount);
ResultService resultService = hpcAccountService.getResultService();
Set<ResultFile> results = resultService.listAlgorithmResultFiles(HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
Vector<Vector<String>> jsonFiles = new Vector<>();
for (ResultFile resultFile : results) {
if (resultFile.getName().endsWith(".json")) {
Vector<String> rowData = new Vector<>();
rowData.addElement(resultFile.getName());
rowData.addElement(FilePrint.fileTimestamp(resultFile.getCreationTime().getTime()));
rowData.addElement(FilePrint.fileTimestamp(resultFile.getLastModifiedTime().getTime()));
rowData.addElement(FilePrint.humanReadableSize(resultFile.getFileSize(), false));
jsonFiles.add(rowData);
}
}
tableModel.setDataVector(jsonFiles, columnNames);
} catch (Exception e1) {
e1.printStackTrace();
}
// Resize table's column width
for (int i = 0; i < columnModel.getColumnCount(); i++) {
jsonResultTable.getColumnModel().getColumn(i).setPreferredWidth(columnWidthList.get(i).intValue());
}
}
}
});
// Left Panel
JScrollPane accountListScroller = new JScrollPane(accountList);
leftPanel.add(accountListScroller, BorderLayout.CENTER);
splitPane.setDividerLocation(paneWidth / 5);
accountListScroller.setPreferredSize(new Dimension(paneWidth / 5, paneHeight));
jsonResultListPanel.setPreferredSize(new Dimension(paneWidth * 4 / 5, paneHeight));
return splitPane;
}
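listAlgorithmResultFiles is a remote call, but valueChanged runs on the Swing event dispatch thread, so a slow HPC connection freezes the chooser while the table refreshes. A sketch of moving the fetch off the EDT with a SwingWorker (a suggested restructuring, not how the Tetrad action is currently written):

// Sketch: fetch the result files in the background, update the table on the EDT.
new SwingWorker<Set<ResultFile>, Void>() {
    @Override
    protected Set<ResultFile> doInBackground() throws Exception {
        HpcAccountService service = hpcJobManager.getHpcAccountService(hpcAccount);
        return service.getResultService()
                .listAlgorithmResultFiles(HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
    }

    @Override
    protected void done() {
        try {
            Set<ResultFile> results = get(); // done() runs on the EDT
            // ...build the row vectors as above and call tableModel.setDataVector(...)
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}.execute();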
Use of edu.pitt.dbmi.tetrad.db.entity.HpcAccount in project tetrad by cmu-phil.
The class PendingHpcJobUpdaterTask, method monitorDataUploadProgress.
private synchronized Set<HpcJobInfo> monitorDataUploadProgress(final Set<HpcJobInfo> pendingDisplayHpcJobInfoSet, final DefaultTableModel model) {
Map<Long, Integer> rowMap = new HashMap<>();
for (int row = 0; row < model.getRowCount(); row++) {
rowMap.put(Long.valueOf(model.getValueAt(row, HpcJobActivityEditor.ID_COLUMN).toString()), row);
}
Set<HpcJobInfo> notPendingJobAnymoreSet = new HashSet<>();
for (HpcJobInfo hpcJobInfo : pendingDisplayHpcJobInfoSet) {
int status = hpcJobInfo.getStatus();
if (!rowMap.containsKey(hpcJobInfo.getId())) {
continue;
}
int modelRow = rowMap.get(hpcJobInfo.getId());
// A status other than -1 means the submission request has already been issued
if (status != -1) {
notPendingJobAnymoreSet.add(hpcJobInfo);
} else {
// Dataset uploading progress
AlgorithmParamRequest algorParamReq = hpcJobInfo.getAlgorithmParamRequest();
String datasetPath = algorParamReq.getDatasetPath();
int dataUploadProgress = hpcJobManager.getUploadFileProgress(datasetPath);
if (dataUploadProgress > -1 && dataUploadProgress < 100) {
model.setValueAt("" + dataUploadProgress + "%", modelRow, HpcJobActivityEditor.DATA_UPLOAD_COLUMN);
} else if (dataUploadProgress == -1) {
model.setValueAt("Skipped", modelRow, HpcJobActivityEditor.DATA_UPLOAD_COLUMN);
} else {
model.setValueAt("Done", modelRow, HpcJobActivityEditor.DATA_UPLOAD_COLUMN);
}
// Prior Knowledge uploading progress
String priorKnowledgePath = algorParamReq.getPriorKnowledgePath();
int priorKnowledgeUploadProgress = -1;
if (priorKnowledgePath != null) {
LOGGER.debug("priorKnowledgePath: " + priorKnowledgePath);
priorKnowledgeUploadProgress = hpcJobManager.getUploadFileProgress(priorKnowledgePath);
if (priorKnowledgeUploadProgress > -1 && priorKnowledgeUploadProgress < 100) {
model.setValueAt("" + priorKnowledgeUploadProgress + "%", modelRow, HpcJobActivityEditor.KNOWLEDGE_UPLOAD_COLUMN);
} else {
model.setValueAt("Done", modelRow, HpcJobActivityEditor.KNOWLEDGE_UPLOAD_COLUMN);
}
} else {
model.setValueAt("Skipped", modelRow, HpcJobActivityEditor.KNOWLEDGE_UPLOAD_COLUMN);
}
if (dataUploadProgress == 100 && (priorKnowledgeUploadProgress == -1 || priorKnowledgeUploadProgress == 100)) {
LOGGER.debug("HpcJobInfo Id: " + hpcJobInfo.getId() + " done with both uploading");
Map<HpcAccount, Set<HpcJobInfo>> pendingHpcJobInfoMap = hpcJobManager.getPendingHpcJobInfoMap();
Map<HpcAccount, Set<HpcJobInfo>> submittedHpcJobInfoMap = hpcJobManager.getSubmittedHpcJobInfoMap();
if (pendingHpcJobInfoMap != null) {
Set<HpcJobInfo> pendingJobSet = pendingHpcJobInfoMap.get(hpcJobInfo.getHpcAccount());
// Is the job still stuck in the pre-processed schedule task?
long id = -1;
if (pendingJobSet != null) {
for (HpcJobInfo pendingJob : pendingJobSet) {
if (pendingJob.getId().equals(hpcJobInfo.getId())) {
id = pendingJob.getId();
break;
}
}
}
// The job is not in the pre-processed schedule task
if (id == -1 && submittedHpcJobInfoMap != null) {
Set<HpcJobInfo> submittedJobSet = submittedHpcJobInfoMap.get(hpcJobInfo.getHpcAccount());
if (submittedJobSet == null) {
// No submitted jobs for this account yet
submittedJobSet = Collections.emptySet();
}
// Is the job in the submitted schedule task?
for (HpcJobInfo submittedJob : submittedJobSet) {
if (submittedJob.getId().equals(hpcJobInfo.getId())) {
// Status
switch(submittedJob.getStatus()) {
case -1:
model.setValueAt("Pending", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
case 0:
model.setValueAt("Submitted", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
case 1:
model.setValueAt("Running", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
case 2:
model.setValueAt("Kill Request", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
}
// Submitted time
if (submittedJob.getSubmittedTime() != null) {
model.setValueAt(FilePrint.fileTimestamp(submittedJob.getSubmittedTime().getTime()), modelRow, HpcJobActivityEditor.ACTIVE_SUBMITTED_COLUMN);
}
// Hpc Pid
model.setValueAt(submittedJob.getPid(), modelRow, HpcJobActivityEditor.ACTIVE_HPC_JOB_ID_COLUMN);
// last update
HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(submittedJob);
model.setValueAt(FilePrint.fileTimestamp(hpcJobLog.getLastUpdatedTime().getTime()), modelRow, HpcJobActivityEditor.ACTIVE_LAST_UPDATED_COLUMN);
// Remove from the pending queue
notPendingJobAnymoreSet.add(submittedJob);
break;
}
}
}
}
}
}
}
return notPendingJobAnymoreSet;
}
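The numeric status codes (-1 pending, 0 submitted, 1 running, 2 kill request) are mapped to display labels by identical switch statements both here and in SubmittedHpcJobUpdaterTask below. A hypothetical shared helper (statusLabel is not in the Tetrad codebase) would keep the two tables consistent:

// Sketch: one place to translate a job status code into its table label.
private static String statusLabel(int status) {
    switch (status) {
        case -1: return "Pending";
        case 0: return "Submitted";
        case 1: return "Running";
        case 2: return "Kill Request";
        default: return "Unknown (" + status + ")";
    }
}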
Use of edu.pitt.dbmi.tetrad.db.entity.HpcAccount in project tetrad by cmu-phil.
The class SubmittedHpcJobUpdaterTask, method monitorSubmittedJobStatus.
private synchronized Set<HpcJobInfo> monitorSubmittedJobStatus(final Set<HpcJobInfo> submittedDisplayHpcJobInfoSet, final DefaultTableModel model) {
Map<Long, Integer> rowMap = new HashMap<>();
for (int row = 0; row < model.getRowCount(); row++) {
rowMap.put(Long.valueOf(model.getValueAt(row, HpcJobActivityEditor.ID_COLUMN).toString()), row);
}
Set<HpcJobInfo> finishedJobSet = new HashSet<>();
for (HpcJobInfo hpcJobInfo : submittedDisplayHpcJobInfoSet) {
Long id = hpcJobInfo.getId();
if (!rowMap.containsKey(id)) {
// System.out.println("hpcJobInfo not found in rowMap");
continue;
}
int modelRow = rowMap.get(id);
Map<HpcAccount, Set<HpcJobInfo>> submittedHpcJobInfoMap = hpcJobManager.getSubmittedHpcJobInfoMap();
Set<HpcJobInfo> submittedJobSet = submittedHpcJobInfoMap.get(hpcJobInfo.getHpcAccount());
if (submittedJobSet != null) {
for (HpcJobInfo submittedJob : submittedJobSet) {
if (submittedJob.getId().equals(hpcJobInfo.getId())) {
// Work with the up-to-date record from the submitted map
hpcJobInfo = submittedJob;
break;
}
}
}
int status = hpcJobInfo.getStatus();
// Status
switch (status) {
case -1:
model.setValueAt("Pending", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
case 0:
model.setValueAt("Submitted", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
case 1:
model.setValueAt("Running", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
case 2:
model.setValueAt("Kill Request", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
}
// last update
HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(hpcJobInfo);
model.setValueAt(FilePrint.fileTimestamp(hpcJobLog.getLastUpdatedTime().getTime()), modelRow, HpcJobActivityEditor.ACTIVE_LAST_UPDATED_COLUMN);
// A status above 2 means the job has finished and can leave the active table.
if (status > 2) {
finishedJobSet.add(hpcJobInfo);
}
}
return finishedJobSet;
}
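Both updater tasks are written to be re-run periodically, which is why the monitor methods are synchronized. A minimal sketch of driving such a task on a timer, assuming updaterTask is a Runnable that wraps the monitor call (the 10-second period is an arbitrary choice for illustration):

// Sketch: poll the job tables every 10 seconds on a daemon thread.
ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
    Thread t = new Thread(r, "hpc-job-updater");
    t.setDaemon(true);
    return t;
});
scheduler.scheduleAtFixedRate(updaterTask, 0, 10, TimeUnit.SECONDS);

Note that the DefaultTableModel updates performed inside the monitor methods would still need to reach the Swing event dispatch thread (for example via SwingUtilities.invokeLater); synchronized only serializes the polling, it does not make the Swing calls thread-safe.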