Usage of edu.pitt.dbmi.tetrad.db.entity.HpcAccount in the tetrad project (cmu-phil): the HpcJobActivityEditor.getActiveRowData method.
/**
 * Builds the table rows for all currently active (pending or submitted) HPC jobs,
 * optionally restricted to a set of accounts.
 *
 * <p>Side effects: clears and repopulates {@code pendingDisplayHpcJobInfoSet} and
 * {@code submittedDisplayHpcJobInfoSet}, which are used elsewhere for monitoring.
 *
 * @param exclusiveHpcAccounts if non-null, only jobs belonging to these accounts
 *                             are included; if null, all accounts are included
 * @return one row per active job, newest (highest id) first; each row holds the
 *         display cells for the activity table
 * @throws Exception propagated from the job manager lookups
 */
private Vector<Vector<String>> getActiveRowData(final TetradDesktop desktop,
        final List<HpcAccount> exclusiveHpcAccounts) throws Exception {
    final Vector<Vector<String>> activeRowData = new Vector<>();
    final HpcJobManager hpcJobManager = desktop.getHpcJobManager();

    // All active jobs keyed by local job id, so they can be ordered below.
    final Map<Long, HpcJobInfo> activeHpcJobInfoMap = new HashMap<>();

    // Pending jobs
    pendingDisplayHpcJobInfoSet.clear();
    for (Map.Entry<HpcAccount, Set<HpcJobInfo>> entry : hpcJobManager.getPendingHpcJobInfoMap().entrySet()) {
        if (exclusiveHpcAccounts != null && !exclusiveHpcAccounts.contains(entry.getKey())) {
            continue;
        }
        for (HpcJobInfo hpcJobInfo : entry.getValue()) {
            // For monitoring purposes
            pendingDisplayHpcJobInfoSet.add(hpcJobInfo);
            activeHpcJobInfoMap.put(hpcJobInfo.getId(), hpcJobInfo);
        }
    }

    // Submitted jobs
    submittedDisplayHpcJobInfoSet.clear();
    for (Map.Entry<HpcAccount, Set<HpcJobInfo>> entry : hpcJobManager.getSubmittedHpcJobInfoMap().entrySet()) {
        if (exclusiveHpcAccounts != null && !exclusiveHpcAccounts.contains(entry.getKey())) {
            continue;
        }
        for (HpcJobInfo hpcJobInfo : entry.getValue()) {
            // For monitoring purposes
            submittedDisplayHpcJobInfoSet.add(hpcJobInfo);
            activeHpcJobInfoMap.put(hpcJobInfo.getId(), hpcJobInfo);
        }
    }

    // Newest jobs (highest local id) first.
    final List<Long> activeJobIds = new ArrayList<>(activeHpcJobInfoMap.keySet());
    activeJobIds.sort(Collections.reverseOrder());

    for (Long jobId : activeJobIds) {
        final HpcJobInfo hpcJobInfo = activeHpcJobInfoMap.get(jobId);
        final HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(hpcJobInfo);
        final Vector<String> rowData = new Vector<>();

        // Local job id
        rowData.add(hpcJobInfo.getId().toString());

        // Status. A default case is required: without it an unrecognized status
        // adds no cell and shifts every subsequent column in this row.
        final int status = hpcJobInfo.getStatus();
        switch (status) {
            case -1:
                rowData.add("Pending");
                break;
            case 0:
                rowData.add("Submitted");
                break;
            case 1:
                rowData.add("Running");
                break;
            case 2:
                rowData.add("Kill Request");
                break;
            default:
                rowData.add("Unknown");
                break;
        }

        // Locally added time
        rowData.add(FilePrint.fileTimestamp(hpcJobLog.getAddedTime().getTime()));

        // HPC node name
        rowData.add(hpcJobInfo.getHpcAccount().getConnectionName());

        // Algorithm
        rowData.add(hpcJobInfo.getAlgoId());

        // Dataset uploading progress (percentage while in [0, 100), else done)
        final AlgorithmParamRequest algorParamReq = hpcJobInfo.getAlgorithmParamRequest();
        int progress = hpcJobManager.getUploadFileProgress(algorParamReq.getDatasetPath());
        if (progress > -1 && progress < 100) {
            rowData.add("" + progress + "%");
        } else {
            rowData.add("Done");
        }

        // Prior knowledge uploading progress ("Skipped" when no file was given)
        final String priorKnowledgePath = algorParamReq.getPriorKnowledgePath();
        if (priorKnowledgePath != null) {
            progress = hpcJobManager.getUploadFileProgress(priorKnowledgePath);
            if (progress > -1 && progress < 100) {
                rowData.add("" + progress + "%");
            } else {
                rowData.add("Done");
            }
        } else {
            rowData.add("Skipped");
        }

        // Submitted time and remote HPC job id are only meaningful once the
        // job has left the pending state (status > -1).
        if (status > -1) {
            rowData.add(FilePrint.fileTimestamp(hpcJobInfo.getSubmittedTime().getTime()));
            rowData.add(hpcJobInfo.getPid() != null ? "" + hpcJobInfo.getPid() : "");
        } else {
            rowData.add("");
            rowData.add("");
        }

        // Last update time
        rowData.add(FilePrint.fileTimestamp(hpcJobLog.getLastUpdatedTime().getTime()));

        // Cancel-job action cell
        rowData.add("Cancel");

        activeRowData.add(rowData);
    }

    return activeRowData;
}
Usage of edu.pitt.dbmi.tetrad.db.entity.HpcAccount in the tetrad project (cmu-phil): the HpcJobManager.submitNewHpcJobToQueue method.
/**
 * Persists a new HPC job, records its initial "Pending" log entries, registers
 * the editor waiting for the result graph, and queues a pre-process task.
 *
 * @param hpcJobInfo             the job to submit; persisted and added to the
 *                               pending map under its HPC account
 * @param generalAlgorithmEditor the editor to notify when the result graph arrives
 */
public synchronized void submitNewHpcJobToQueue(final HpcJobInfo hpcJobInfo,
        final GeneralAlgorithmEditor generalAlgorithmEditor) {
    hpcJobInfoService.add(hpcJobInfo);
    LOGGER.debug("hpcJobInfo: id: " + hpcJobInfo.getId());

    final HpcJobLog hpcJobLog = new HpcJobLog();
    hpcJobLog.setAddedTime(new Date());
    hpcJobLog.setHpcJobInfo(hpcJobInfo);
    hpcJobLogService.update(hpcJobLog);
    LOGGER.debug("HpcJobLog: id: " + hpcJobLog.getId());

    // Initial log detail: job state -1 means "Pending".
    final HpcJobLogDetail hpcJobLogDetail = new HpcJobLogDetail();
    hpcJobLogDetail.setAddedTime(new Date());
    hpcJobLogDetail.setHpcJobLog(hpcJobLog);
    hpcJobLogDetail.setJobState(-1);
    hpcJobLogDetail.setProgress("Pending");
    hpcJobLogDetailService.add(hpcJobLogDetail);
    LOGGER.debug("HpcJobLogDetail: id: " + hpcJobLogDetail.getId());

    // Remember which editor should receive the result graph for this job.
    hpcGraphResultMap.put(hpcJobInfo, generalAlgorithmEditor);

    // Register the job in the pending map, creating the per-account set on demand.
    pendingHpcJobInfoMap
            .computeIfAbsent(hpcJobInfo.getHpcAccount(), k -> new LinkedHashSet<>())
            .add(hpcJobInfo);

    // Queue a pre-process task (e.g. uploads) for this job.
    executorService.execute(new HpcJobPreProcessTask(hpcJobInfo));
}
Usage of edu.pitt.dbmi.tetrad.db.entity.HpcAccount in the tetrad project (cmu-phil): the HpcJobManager.requestHpcJobKilled method.
/**
 * Asks the remote HPC system to kill the given job, then refreshes the job's
 * status from the remote queue.
 *
 * @param hpcJobInfo the job to kill; its status is updated in place when the
 *                   remote queue reports one
 * @return the same job info with the refreshed status, or null when the remote
 *         queue returned no status for the job
 * @throws Exception propagated from the remote service calls
 */
public HpcJobInfo requestHpcJobKilled(final HpcJobInfo hpcJobInfo) throws Exception {
    final HpcAccount account = hpcJobInfo.getHpcAccount();
    final JobQueueService queueService = getHpcAccountService(account).getJobQueueService();

    final TetradDesktop desktop = (TetradDesktop) DesktopController.getInstance();
    final JsonWebTokenManager tokenManager = desktop.getHpcAccountManager().getJsonWebTokenManager();

    queueService.requestJobKilled(hpcJobInfo.getPid(), tokenManager.getJsonWebToken(account));

    final JobInfo remoteStatus =
            queueService.getJobStatus(hpcJobInfo.getPid(), tokenManager.getJsonWebToken(account));
    if (remoteStatus == null) {
        return null;
    }
    hpcJobInfo.setStatus(remoteStatus.getStatus());
    return hpcJobInfo;
}
Usage of edu.pitt.dbmi.tetrad.db.entity.HpcAccount in the tetrad project (cmu-phil): the HpcJobManager.addNewSubmittedHpcJob method.
/**
 * Moves a job into the submitted set for its HPC account and removes it from
 * the pending set.
 *
 * @param hpcJobInfo the job that has just been submitted to the remote queue
 */
public synchronized void addNewSubmittedHpcJob(final HpcJobInfo hpcJobInfo) {
    final HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
    LOGGER.debug("addNewSubmittedHpcJob: connection: " + hpcAccount.getConnectionName());
    LOGGER.debug("addNewSubmittedHpcJob: algorithm: " + hpcJobInfo.getAlgoId());
    LOGGER.debug("addNewSubmittedHpcJob: status: " + hpcJobInfo.getStatus());
    LOGGER.debug("addNewSubmittedHpcJob: " + "pid: " + hpcJobInfo.getPid());

    // Track the job under its account, creating the per-account set on first use.
    submittedHpcJobInfoMap
            .computeIfAbsent(hpcAccount, k -> new LinkedHashSet<>())
            .add(hpcJobInfo);

    // A submitted job is no longer pending.
    removePendingHpcJob(hpcJobInfo);
}
Usage of edu.pitt.dbmi.tetrad.db.entity.HpcAccount in the tetrad project (cmu-phil): the GeneralAlgorithmEditor.doSearch method.
/**
 * Runs the selected algorithm inside a watched background process. When the
 * algorithm is eligible for remote computing and the user picks an HPC account,
 * the search is delegated to the remote service; otherwise it runs locally and
 * the resulting graph is displayed.
 *
 * @param runner the configured algorithm runner to execute
 */
private void doSearch(final GeneralAlgorithmRunner runner) {
    new WatchedProcess((Window) getTopLevelAncestor()) {
        @Override
        public void watch() {
            final AlgorithmModel algoModel = algorithmList.getSelectedValue();
            if (algoModel == null) {
                return;
            }

            algorithmGraphTitle.setText(
                    String.format("Algorithm: %s", algoModel.getAlgorithm().getAnnotation().name()));

            // Remote computing is only offered for non-pairwise algorithms
            // running on a single dataset.
            HpcAccount hpcAccount = null;
            final boolean remoteEligible =
                    algoModel.getAlgorithm().getAnnotation().algoType() != AlgType.orient_pairwise
                            && runner.getDataModelList().getModelList().size() == 1;
            if (remoteEligible) {
                hpcAccount = showRemoteComputingOptions(algoModel.getAlgorithm().getAnnotation().name());
            }

            if (hpcAccount != null) {
                // User chose a remote account: hand the search off to the HPC service.
                try {
                    doRemoteCompute(runner, hpcAccount);
                } catch (Exception exception) {
                    LOGGER.error("Unable to run algorithm.", exception);
                }
                return;
            }

            // Local execution: run the search and show the resulting graph.
            graphEditor.saveLayout();
            runner.execute();
            graphEditor.replace(runner.getGraphs());
            graphEditor.validate();
            firePropertyChange("modelChanged", null, null);
            graphContainer.add(graphEditor);
            changeCard(GRAPH_CARD);
        }
    };
}
End of aggregated HpcAccount usages.