Use of edu.pitt.dbmi.tetrad.db.entity.AlgorithmParamRequest in project tetrad by cmu-phil.
The class HpcJobActivityEditor, method getActiveRowData.
private Vector<Vector<String>> getActiveRowData(final TetradDesktop desktop, final List<HpcAccount> exclusiveHpcAccounts) throws Exception {
final Vector<Vector<String>> activeRowData = new Vector<>();
final HpcJobManager hpcJobManager = desktop.getHpcJobManager();
Map<Long, HpcJobInfo> activeHpcJobInfoMap = null;
// Pending
Map<HpcAccount, Set<HpcJobInfo>> pendingHpcJobInfoMap = hpcJobManager.getPendingHpcJobInfoMap();
pendingDisplayHpcJobInfoSet.clear();
for (Map.Entry<HpcAccount, Set<HpcJobInfo>> entry : pendingHpcJobInfoMap.entrySet()) {
if (exclusiveHpcAccounts != null && !exclusiveHpcAccounts.contains(entry.getKey())) {
continue;
}
for (HpcJobInfo hpcJobInfo : entry.getValue()) {
// For monitoring purposes
pendingDisplayHpcJobInfoSet.add(hpcJobInfo);
if (activeHpcJobInfoMap == null) {
activeHpcJobInfoMap = new HashMap<>();
}
activeHpcJobInfoMap.put(hpcJobInfo.getId(), hpcJobInfo);
}
}
// Submitted
Map<HpcAccount, Set<HpcJobInfo>> submittedHpcJobInfoMap = hpcJobManager.getSubmittedHpcJobInfoMap();
submittedDisplayHpcJobInfoSet.clear();
for (Map.Entry<HpcAccount, Set<HpcJobInfo>> entry : submittedHpcJobInfoMap.entrySet()) {
if (exclusiveHpcAccounts != null && !exclusiveHpcAccounts.contains(entry.getKey())) {
continue;
}
for (HpcJobInfo hpcJobInfo : entry.getValue()) {
// For monitoring purposes
submittedDisplayHpcJobInfoSet.add(hpcJobInfo);
if (activeHpcJobInfoMap == null) {
activeHpcJobInfoMap = new HashMap<>();
}
activeHpcJobInfoMap.put(hpcJobInfo.getId(), hpcJobInfo);
}
}
if (activeHpcJobInfoMap != null) {
List<Long> activeJobIds = new ArrayList<>(activeHpcJobInfoMap.keySet());
activeJobIds.sort(Collections.reverseOrder());
for (Long jobId : activeJobIds) {
final HpcJobInfo hpcJobInfo = activeHpcJobInfoMap.get(jobId);
Vector<String> rowData = new Vector<>();
HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(hpcJobInfo);
// Local job id
rowData.add(hpcJobInfo.getId().toString());
int status = hpcJobInfo.getStatus();
switch(status) {
case -1:
rowData.add("Pending");
break;
case 0:
rowData.add("Submitted");
break;
case 1:
rowData.add("Running");
break;
case 2:
rowData.add("Kill Request");
break;
default:
// Keep the column count consistent even for unexpected status codes
rowData.add("Unknown");
break;
}
// Locally added time
rowData.add(FilePrint.fileTimestamp(hpcJobLog.getAddedTime().getTime()));
// HPC node name
HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
rowData.add(hpcAccount.getConnectionName());
// Algorithm
rowData.add(hpcJobInfo.getAlgoId());
// Dataset uploading progress
AlgorithmParamRequest algorParamReq = hpcJobInfo.getAlgorithmParamRequest();
String datasetPath = algorParamReq.getDatasetPath();
int progress = hpcJobManager.getUploadFileProgress(datasetPath);
if (progress > -1 && progress < 100) {
rowData.add("" + progress + "%");
} else {
rowData.add("Done");
}
// Prior Knowledge uploading progress
String priorKnowledgePath = algorParamReq.getPriorKnowledgePath();
if (priorKnowledgePath != null) {
progress = hpcJobManager.getUploadFileProgress(priorKnowledgePath);
if (progress > -1 && progress < 100) {
rowData.add("" + progress + "%");
} else {
rowData.add("Done");
}
} else {
rowData.add("Skipped");
}
if (status > -1) {
// Submitted time
rowData.add(FilePrint.fileTimestamp(hpcJobInfo.getSubmittedTime().getTime()));
// HPC job id
rowData.add(hpcJobInfo.getPid() != null ? "" + hpcJobInfo.getPid() : "");
} else {
rowData.add("");
rowData.add("");
}
// Last update time
rowData.add(FilePrint.fileTimestamp(hpcJobLog.getLastUpdatedTime().getTime()));
// Cancel job
rowData.add("Cancel");
activeRowData.add(rowData);
}
}
return activeRowData;
}
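The integer status codes decoded by the switch above (-1 Pending, 0 Submitted, 1 Running, 2 Kill Request) recur in several of the snippets below. A hypothetical helper enum, not part of the Tetrad codebase, could centralize that mapping:

public enum HpcJobStatus {
    PENDING(-1, "Pending"),
    SUBMITTED(0, "Submitted"),
    RUNNING(1, "Running"),
    KILL_REQUEST(2, "Kill Request");

    private final int code;
    private final String label;

    HpcJobStatus(int code, String label) {
        this.code = code;
        this.label = label;
    }

    // Returns the display label for a raw status integer.
    public static String labelFor(int code) {
        for (HpcJobStatus status : values()) {
            if (status.code == code) {
                return status.label;
            }
        }
        return "Unknown";
    }
}

With such a helper, the switch above reduces to rowData.add(HpcJobStatus.labelFor(status)).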
Use of edu.pitt.dbmi.tetrad.db.entity.AlgorithmParamRequest in project tetrad by cmu-phil.
The class GeneralAlgorithmEditor, method doRemoteCompute.
private void doRemoteCompute(final GeneralAlgorithmRunner runner, final HpcAccount hpcAccount) throws Exception {
// **********************
// Show progress panel *
// **********************
Frame ancestor = (Frame) JOptionUtils.centeringComp().getTopLevelAncestor();
final JDialog progressDialog = new JDialog(ancestor, "HPC Job Submission Progress...", false);
Dimension progressDim = new Dimension(500, 150);
JTextArea progressTextArea = new JTextArea();
progressTextArea.setPreferredSize(progressDim);
progressTextArea.setEditable(false);
JScrollPane progressScroller = new JScrollPane(progressTextArea);
progressScroller.setAlignmentX(LEFT_ALIGNMENT);
progressDialog.setLayout(new BorderLayout());
progressDialog.getContentPane().add(progressScroller, BorderLayout.CENTER);
progressDialog.pack();
Dimension screenDim = Toolkit.getDefaultToolkit().getScreenSize();
progressDialog.setLocation((screenDim.width - progressDim.width) / 2, (screenDim.height - progressDim.height) / 2);
progressDialog.setVisible(true);
int totalProcesses = 4;
String newline = "\n";
String tab = "\t";
int progressTextLength = 0;
DataModel dataModel = runner.getDataModel();
// 1. Generate temp file
Path file = null;
Path prior = null;
try {
// ****************************
// Data Preparation Progress *
// ****************************
String dataMessage = String.format("1/%1$d Data Preparation", totalProcesses);
progressTextArea.append(dataMessage);
progressTextArea.append(tab);
progressTextLength = progressTextArea.getText().length();
progressTextArea.append("Preparing...");
progressTextArea.updateUI();
file = Files.createTempFile("Tetrad-data-", ".txt");
// LOGGER.info(file.toAbsolutePath().toString());
List<String> tempLine = new ArrayList<>();
// Header
List<Node> variables = dataModel.getVariables();
if ((variables == null || variables.isEmpty()) && runner.getSourceGraph() != null) {
variables = runner.getSourceGraph().getNodes();
}
String vars = StringUtils.join(variables.toArray(), tab);
tempLine.add(vars);
// Data
DataSet dataSet = (DataSet) dataModel;
for (int i = 0; i < dataSet.getNumRows(); i++) {
StringBuilder line = new StringBuilder();
for (int j = 0; j < dataSet.getNumColumns(); j++) {
if (j > 0) {
line.append(tab);
}
if (dataSet.isContinuous()) {
line.append(dataSet.getDouble(i, j));
} else {
line.append(dataSet.getInt(i, j));
}
}
tempLine.add(line.toString());
}
// for (String line : tempLine) {
// LOGGER.info(line);
// }
Files.write(file, tempLine);
// Get file's MD5 hash and use it as its identifier
String datasetMd5 = MessageDigestHash.computeMD5Hash(file);
progressTextArea.replaceRange("Done", progressTextLength, progressTextArea.getText().length());
progressTextArea.append(newline);
progressTextArea.updateUI();
// ***************************************
// Prior Knowledge Preparation Progress *
// ***************************************
String priorMessage = String.format("2/%1$d Prior Knowledge Preparation", totalProcesses);
progressTextArea.append(priorMessage);
progressTextArea.append(tab);
progressTextLength = progressTextArea.getText().length();
progressTextArea.append("Preparing...");
progressTextArea.updateUI();
// 2. Generate temp prior knowledge file
Knowledge2 knowledge = (Knowledge2) dataModel.getKnowledge();
if (knowledge != null && !knowledge.isEmpty()) {
prior = Files.createTempFile(file.getFileName().toString(), ".prior");
knowledge.saveKnowledge(Files.newBufferedWriter(prior));
progressTextArea.replaceRange("Done", progressTextLength, progressTextArea.getText().length());
progressTextArea.append(newline);
progressTextArea.updateUI();
} else {
progressTextArea.replaceRange("Skipped", progressTextLength, progressTextArea.getText().length());
progressTextArea.append(newline);
progressTextArea.updateUI();
}
// Get knowledge file's MD5 hash and use it as its identifier
String priorKnowledgeMd5 = null;
if (prior != null) {
priorKnowledgeMd5 = MessageDigestHash.computeMD5Hash(prior);
}
// *******************************************
// Algorithm Parameter Preparation Progress *
// *******************************************
String algorMessage = String.format("3/%1$d Algorithm Preparation", totalProcesses);
progressTextArea.append(algorMessage);
progressTextArea.append(tab);
progressTextLength = progressTextArea.getText().length();
progressTextArea.append("Preparing...");
progressTextArea.updateUI();
// 3.1 Algorithm Id, Independent Test Id, Score Id
AlgorithmModel algoModel = algorithmList.getSelectedValue();
String algoId = algoModel.getAlgorithm().getAnnotation().command();
// Test
String testId = null;
if (indTestComboBox.isEnabled()) {
IndependenceTestModel indTestModel = indTestComboBox.getItemAt(indTestComboBox.getSelectedIndex());
testId = indTestModel.getIndependenceTest().getAnnotation().command();
}
// Score
String scoreId = null;
if (scoreComboBox.isEnabled()) {
ScoreModel scoreModel = scoreComboBox.getItemAt(scoreComboBox.getSelectedIndex());
scoreId = scoreModel.getScore().getAnnotation().command();
}
// 3.2 Parameters
AlgorithmParamRequest algorithmParamRequest = new AlgorithmParamRequest();
// Test and score
algorithmParamRequest.setTestId(testId);
algorithmParamRequest.setScoreId(scoreId);
// Dataset and Prior paths
String datasetPath = file.toAbsolutePath().toString();
LOGGER.info(datasetPath);
algorithmParamRequest.setDatasetPath(datasetPath);
algorithmParamRequest.setDatasetMd5(datasetMd5);
if (prior != null) {
String priorKnowledgePath = prior.toAbsolutePath().toString();
LOGGER.info(priorKnowledgePath);
algorithmParamRequest.setPriorKnowledgePath(priorKnowledgePath);
algorithmParamRequest.setPriorKnowledgeMd5(priorKnowledgeMd5);
}
// VariableType
if (dataModel.isContinuous()) {
algorithmParamRequest.setVariableType("continuous");
} else if (dataModel.isDiscrete()) {
algorithmParamRequest.setVariableType("discrete");
} else {
algorithmParamRequest.setVariableType("mixed");
}
// FileDelimiter
// Pre-determined
String fileDelimiter = "tab";
algorithmParamRequest.setFileDelimiter(fileDelimiter);
Set<AlgorithmParameter> algorithmParameters = new HashSet<>();
Parameters parameters = runner.getParameters();
List<String> parameterNames = runner.getAlgorithm().getParameters();
for (String parameter : parameterNames) {
// Guard against a missing parameter before calling toString()
Object paramValue = parameters.get(parameter);
LOGGER.info("parameter: " + parameter + "\tvalue: " + paramValue);
if (paramValue != null) {
AlgorithmParameter algorParam = new AlgorithmParameter();
algorParam.setParameter(parameter);
algorParam.setValue(paramValue.toString());
algorithmParameters.add(algorParam);
}
}
algorithmParamRequest.setAlgorithmParameters(algorithmParameters);
String maxHeapSize = null;
do {
maxHeapSize = JOptionPane.showInputDialog(progressDialog, "Enter Your Requested Java Max Heap Size (GB):", "5");
} while (maxHeapSize != null && !StringUtils.isNumeric(maxHeapSize));
if (maxHeapSize != null) {
JvmOptions jvmOptions = new JvmOptions();
jvmOptions.setMaxHeapSize(Integer.parseInt(maxHeapSize));
algorithmParamRequest.setJvmOptions(jvmOptions);
}
// Hpc parameters
final HpcAccountManager hpcAccountManager = desktop.getHpcAccountManager();
JsonWebToken jsonWebToken = HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount);
if (jsonWebToken.getWallTime() != null) {
// User allowed to customize the job's wall time
String[] wallTime = jsonWebToken.getWallTime();
Object userwallTime = JOptionPane.showInputDialog(progressDialog, "Wall Time:", "Choose Your Wall Time (in Hours)", JOptionPane.QUESTION_MESSAGE, null, wallTime, wallTime[0]);
if (userwallTime != null) {
HpcParameter hpcParameter = new HpcParameter();
hpcParameter.setKey("walltime");
hpcParameter.setValue(userwallTime.toString());
LOGGER.info("walltime: " + userwallTime.toString());
Set<HpcParameter> hpcParameters = new HashSet<>();
hpcParameters.add(hpcParameter);
algorithmParamRequest.setHpcParameters(hpcParameters);
}
}
progressTextArea.replaceRange("Done", progressTextLength, progressTextArea.getText().length());
progressTextArea.append(newline);
progressTextArea.updateUI();
// ********************************
// Adding HPC Job Queue Progress *
// ********************************
String dbMessage = String.format("4/%1$d HPC Job Queue Submission", totalProcesses);
progressTextArea.append(dbMessage);
progressTextArea.append(tab);
progressTextLength = progressTextArea.getText().length();
progressTextArea.append("Preparing...");
progressTextArea.updateUI();
HpcJobManager hpcJobManager = desktop.getHpcJobManager();
// 4.1 Save HpcJobInfo
hpcJobInfo = new HpcJobInfo();
hpcJobInfo.setAlgoId(algoId);
hpcJobInfo.setAlgorithmParamRequest(algorithmParamRequest);
hpcJobInfo.setStatus(-1);
hpcJobInfo.setHpcAccount(hpcAccount);
hpcJobManager.submitNewHpcJobToQueue(hpcJobInfo, this);
progressTextArea.replaceRange("Done", progressTextLength, progressTextArea.getText().length());
progressTextArea.append(newline);
progressTextArea.updateUI();
this.jsonResult = null;
JOptionPane.showMessageDialog(ancestor, "The " + hpcJobInfo.getAlgoId() + " job was successfully queued on the " + hpcJobInfo.getHpcAccount().getConnectionName() + " node!");
} catch (IOException exception) {
LOGGER.error("", exception);
} finally {
progressDialog.setVisible(false);
progressDialog.dispose();
}
(new HpcJobActivityAction("")).actionPerformed(null);
}
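MessageDigestHash is a Tetrad utility class; for readers without the codebase, the sketch below shows the same MD5-as-identifier idea using only the JDK. The class name Md5Example is invented for illustration, and a production version would stream the file instead of reading it fully into memory.

import java.io.IOException;
import java.math.BigInteger;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public final class Md5Example {

    // MD5 hex digest of a file's bytes, used above as a content-based
    // identifier so identical datasets are uploaded only once.
    public static String computeMD5Hash(Path file) throws IOException, NoSuchAlgorithmException {
        MessageDigest md = MessageDigest.getInstance("MD5");
        byte[] digest = md.digest(Files.readAllBytes(file));
        // %032x left-pads with zeros so the digest is always 32 hex characters
        return String.format("%032x", new BigInteger(1, digest));
    }
}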
Use of edu.pitt.dbmi.tetrad.db.entity.AlgorithmParamRequest in project tetrad by cmu-phil.
The class HpcJobPreProcessTask, method run.
@Override
public void run() {
TetradDesktop desktop = (TetradDesktop) DesktopController.getInstance();
while (desktop == null) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
// Re-fetch the desktop; without this the loop would never terminate
desktop = (TetradDesktop) DesktopController.getInstance();
}
final HpcAccountManager hpcAccountManager = desktop.getHpcAccountManager();
final HpcJobManager hpcJobManager = desktop.getHpcJobManager();
HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
AlgorithmParamRequest algorParamReq = hpcJobInfo.getAlgorithmParamRequest();
String datasetPath = algorParamReq.getDatasetPath();
String priorKnowledgePath = algorParamReq.getPriorKnowledgePath();
try {
HpcAccountService hpcAccountService = hpcJobManager.getHpcAccountService(hpcAccount);
HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(hpcJobInfo);
String log = "Initiated connection to " + hpcAccount.getConnectionName();
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
log = "datasetPath: " + datasetPath;
LOGGER.debug(log);
Path file = Paths.get(datasetPath);
// Get file's MD5 hash and use it as its identifier
String md5 = algorParamReq.getDatasetMd5();
// Initiate data uploading progress
hpcJobManager.updateUploadFileProgress(datasetPath, 0);
Path prior = null;
if (priorKnowledgePath != null) {
log = "priorKnowledgePath: " + priorKnowledgePath;
LOGGER.debug(log);
prior = Paths.get(priorKnowledgePath);
// Initiate prior knowledge uploading progress
hpcJobManager.updateUploadFileProgress(priorKnowledgePath, 0);
}
// Check if this dataset already exists with this md5 hash
RemoteDataFileService remoteDataService = hpcAccountService.getRemoteDataService();
DataFile dataFile = HpcAccountUtils.getRemoteDataFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
DataUploadService dataUploadService = hpcAccountService.getDataUploadService();
// If not, upload the file
if (dataFile == null) {
log = "Started uploading " + file.getFileName().toString();
LOGGER.debug(log);
dataUploadService.startUpload(file, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
int progress;
while ((progress = dataUploadService.getUploadJobStatus(file.toAbsolutePath().toString())) < 100) {
// System.out.println("Uploading "
// + file.toAbsolutePath().toString() + " Progress: "
// + progress + "%");
hpcJobManager.updateUploadFileProgress(datasetPath, progress);
Thread.sleep(10);
}
hpcJobManager.updateUploadFileProgress(datasetPath, progress);
log = "Finished uploading " + file.getFileName().toString();
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
// Get remote datafile
dataFile = HpcAccountUtils.getRemoteDataFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
HpcAccountUtils.summarizeDataset(remoteDataService, algorParamReq, dataFile.getId(), HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
log = "Summarized " + file.getFileName().toString();
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
} else {
log = "Skipped uploading " + file.getFileName().toString();
LOGGER.debug(log);
hpcJobManager.updateUploadFileProgress(datasetPath, -1);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
if (dataFile.getFileSummary().getVariableType() == null) {
HpcAccountUtils.summarizeDataset(remoteDataService, algorParamReq, dataFile.getId(), HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
log = "Summarized " + file.getFileName().toString();
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
}
}
DataFile priorKnowledgeFile = null;
// Prior Knowledge File
if (prior != null) {
// Get prior knowledge file Id
md5 = algorParamReq.getPriorKnowledgeMd5();
priorKnowledgeFile = HpcAccountUtils.getRemotePriorKnowledgeFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
if (priorKnowledgeFile == null) {
// Upload prior knowledge file
dataUploadService.startUpload(prior, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
log = "Started uploading Prior Knowledge File";
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
int progress;
while ((progress = dataUploadService.getUploadJobStatus(prior.toAbsolutePath().toString())) < 100) {
hpcJobManager.updateUploadFileProgress(priorKnowledgePath, progress);
Thread.sleep(10);
}
hpcJobManager.updateUploadFileProgress(priorKnowledgePath, progress);
priorKnowledgeFile = HpcAccountUtils.getRemotePriorKnowledgeFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
log = "Finished uploading Prior Knowledge File";
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
}
}
// Algorithm Job Preparation
edu.pitt.dbmi.ccd.rest.client.dto.algo.AlgorithmParamRequest paramRequest = new edu.pitt.dbmi.ccd.rest.client.dto.algo.AlgorithmParamRequest();
String algoId = hpcJobInfo.getAlgoId();
paramRequest.setAlgoId(algoId);
paramRequest.setDatasetFileId(dataFile.getId());
// Test
if (algorParamReq.getTestId() != null) {
paramRequest.setTestId(algorParamReq.getTestId());
}
// Score
if (algorParamReq.getScoreId() != null) {
paramRequest.setScoreId(algorParamReq.getScoreId());
}
Set<AlgoParameter> algorithmParameters = new HashSet<>();
for (AlgorithmParameter param : algorParamReq.getAlgorithmParameters()) {
algorithmParameters.add(new AlgoParameter(param.getParameter(), param.getValue()));
LOGGER.debug("AlgorithmParameter: " + param.getParameter() + " : " + param.getValue());
}
if (priorKnowledgeFile != null) {
paramRequest.setPriorKnowledgeFileId(priorKnowledgeFile.getId());
LOGGER.debug("priorKnowledgeFileId: " + priorKnowledgeFile.getId());
}
paramRequest.setAlgoParameters(algorithmParameters);
if (algorParamReq.getJvmOptions() != null) {
JvmOptions jvmOptions = new JvmOptions();
jvmOptions.setMaxHeapSize(algorParamReq.getJvmOptions().getMaxHeapSize());
paramRequest.setJvmOptions(jvmOptions);
}
Set<HpcParameter> hpcParameters = algorParamReq.getHpcParameters();
if (hpcParameters != null) {
Set<edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter> hpcParams = new HashSet<>();
for (HpcParameter param : hpcParameters) {
edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter hpcParam = new edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter();
hpcParam.setKey(param.getKey());
hpcParam.setValue(param.getValue());
hpcParams.add(hpcParam);
LOGGER.debug("HpcParameter: " + hpcParam.getKey() + " : " + hpcParam.getValue());
}
paramRequest.setHpcParameters(hpcParams);
}
// Submit a job
JobQueueService jobQueueService = hpcAccountService.getJobQueueService();
JobInfo jobInfo = jobQueueService.addToRemoteQueue(paramRequest, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
// Log the job submission
hpcJobInfo.setSubmittedTime(new Date(System.currentTimeMillis()));
// Submitted
hpcJobInfo.setStatus(0);
hpcJobInfo.setPid(jobInfo.getId());
hpcJobInfo.setResultFileName(jobInfo.getResultFileName());
hpcJobInfo.setResultJsonFileName(jobInfo.getResultJsonFileName());
hpcJobInfo.setErrorResultFileName(jobInfo.getErrorResultFileName());
hpcJobManager.updateHpcJobInfo(hpcJobInfo);
log = "Submitted job to " + hpcAccount.getConnectionName();
LOGGER.debug(log);
hpcJobManager.logHpcJobLogDetail(hpcJobLog, 0, log);
LOGGER.debug("HpcJobPreProcessTask: HpcJobInfo: id : " + hpcJobInfo.getId() + " : pid : " + hpcJobInfo.getPid() + " : " + hpcJobInfo.getAlgoId() + (hpcJobInfo.getAlgorithmParamRequest().getTestId() == null ? "" : " : " + hpcJobInfo.getAlgorithmParamRequest().getTestId()) + (hpcJobInfo.getAlgorithmParamRequest().getScoreId() == null ? "" : " : " + hpcJobInfo.getAlgorithmParamRequest().getScoreId()) + " : " + hpcJobInfo.getResultFileName());
hpcJobManager.addNewSubmittedHpcJob(hpcJobInfo);
} catch (Exception e) {
LOGGER.error("HPC job pre-processing failed", e);
}
}
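The poll-sleep-report loop appears twice in run above (dataset upload and prior-knowledge upload). Below is a minimal JDK-only generalization of that pattern, where IntSupplier stands in for dataUploadService.getUploadJobStatus and IntConsumer for hpcJobManager.updateUploadFileProgress; both stand-ins and the class name are assumptions for illustration.

import java.util.function.IntConsumer;
import java.util.function.IntSupplier;

final class UploadPoller {

    // Poll a progress source until it reports 100, forwarding each reading;
    // the 10 ms interval matches the loop in HpcJobPreProcessTask.run.
    static void pollUntilDone(IntSupplier progressSource, IntConsumer onProgress) throws InterruptedException {
        int progress;
        while ((progress = progressSource.getAsInt()) < 100) {
            onProgress.accept(progress);
            Thread.sleep(10);
        }
        onProgress.accept(progress); // report the final 100%
    }
}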
Use of edu.pitt.dbmi.tetrad.db.entity.AlgorithmParamRequest in project tetrad by cmu-phil.
The class PendingHpcJobUpdaterTask, method monitorDataUploadProgress.
private synchronized Set<HpcJobInfo> monitorDataUploadProgress(final Set<HpcJobInfo> pendingDisplayHpcJobInfoSet, final DefaultTableModel model) {
Map<Long, Integer> rowMap = new HashMap<>();
for (int row = 0; row < model.getRowCount(); row++) {
rowMap.put(Long.valueOf(model.getValueAt(row, HpcJobActivityEditor.ID_COLUMN).toString()), row);
}
Set<HpcJobInfo> notPendingJobAnymoreSet = new HashSet<>();
for (HpcJobInfo hpcJobInfo : pendingDisplayHpcJobInfoSet) {
int status = hpcJobInfo.getStatus();
if (!rowMap.containsKey(hpcJobInfo.getId())) {
continue;
}
int modelRow = rowMap.get(hpcJobInfo.getId());
// request was issued
if (status != -1) {
notPendingJobAnymoreSet.add(hpcJobInfo);
} else {
// Dataset uploading progress
AlgorithmParamRequest algorParamReq = hpcJobInfo.getAlgorithmParamRequest();
String datasetPath = algorParamReq.getDatasetPath();
int dataUploadProgress = hpcJobManager.getUploadFileProgress(datasetPath);
if (dataUploadProgress > -1 && dataUploadProgress < 100) {
model.setValueAt("" + dataUploadProgress + "%", modelRow, HpcJobActivityEditor.DATA_UPLOAD_COLUMN);
} else if (dataUploadProgress == -1) {
model.setValueAt("Skipped", modelRow, HpcJobActivityEditor.DATA_UPLOAD_COLUMN);
} else {
model.setValueAt("Done", modelRow, HpcJobActivityEditor.DATA_UPLOAD_COLUMN);
}
// Prior Knowledge uploading progress
String priorKnowledgePath = algorParamReq.getPriorKnowledgePath();
int priorKnowledgeUploadProgress = -1;
if (priorKnowledgePath != null) {
LOGGER.debug("priorKnowledgePath: " + priorKnowledgePath);
priorKnowledgeUploadProgress = hpcJobManager.getUploadFileProgress(priorKnowledgePath);
if (priorKnowledgeUploadProgress > -1 && priorKnowledgeUploadProgress < 100) {
model.setValueAt("" + priorKnowledgeUploadProgress + "%", modelRow, HpcJobActivityEditor.KNOWLEDGE_UPLOAD_COLUMN);
} else {
model.setValueAt("Done", modelRow, HpcJobActivityEditor.KNOWLEDGE_UPLOAD_COLUMN);
}
} else {
model.setValueAt("Skipped", modelRow, HpcJobActivityEditor.KNOWLEDGE_UPLOAD_COLUMN);
}
if (dataUploadProgress == 100 && (priorKnowledgeUploadProgress == -1 || priorKnowledgeUploadProgress == 100)) {
LOGGER.debug("HpcJobInfo Id: " + hpcJobInfo.getId() + " done with both uploading");
Map<HpcAccount, Set<HpcJobInfo>> pendingHpcJobInfoMap = hpcJobManager.getPendingHpcJobInfoMap();
Map<HpcAccount, Set<HpcJobInfo>> submittedHpcJobInfoMap = hpcJobManager.getSubmittedHpcJobInfoMap();
if (pendingHpcJobInfoMap != null) {
Set<HpcJobInfo> pendingJobSet = pendingHpcJobInfoMap.get(hpcJobInfo.getHpcAccount());
// Is the job still stuck in the pre-processing scheduled task?
long id = -1;
for (HpcJobInfo pendingJob : pendingJobSet) {
if (pendingJob.getId().equals(hpcJobInfo.getId())) {
id = pendingJob.getId();
break;
}
}
// The job is not in the pre-processed schedule task
if (id == -1 && submittedHpcJobInfoMap != null) {
Set<HpcJobInfo> submittedJobSet = submittedHpcJobInfoMap.get(hpcJobInfo.getHpcAccount());
// Is the job in the submitted schedule task?
for (HpcJobInfo submittedJob : submittedJobSet) {
if (submittedJob.getId().equals(hpcJobInfo.getId())) {
// Status
switch(submittedJob.getStatus()) {
case -1:
model.setValueAt("Pending", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
case 0:
model.setValueAt("Submitted", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
case 1:
model.setValueAt("Running", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
case 2:
model.setValueAt("Kill Request", modelRow, HpcJobActivityEditor.STATUS_COLUMN);
break;
}
// Submitted time
if (submittedJob.getSubmittedTime() != null) {
model.setValueAt(FilePrint.fileTimestamp(submittedJob.getSubmittedTime().getTime()), modelRow, HpcJobActivityEditor.ACTIVE_SUBMITTED_COLUMN);
}
// Hpc Pid
model.setValueAt(submittedJob.getPid(), modelRow, HpcJobActivityEditor.ACTIVE_HPC_JOB_ID_COLUMN);
// last update
HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(submittedJob);
model.setValueAt(FilePrint.fileTimestamp(hpcJobLog.getLastUpdatedTime().getTime()), modelRow, HpcJobActivityEditor.ACTIVE_LAST_UPDATED_COLUMN);
// Remove from the pending queue
notPendingJobAnymoreSet.add(submittedJob);
break;
}
}
}
}
}
}
}
return notPendingJobAnymoreSet;
}
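One caveat worth noting: DefaultTableModel.setValueAt fires table-model events, which Swing expects to run on the event dispatch thread. If the updater task runs on a background thread (its scheduling is not shown in this snippet), the updates are normally marshalled onto the EDT; a sketch with invented class and method names:

import javax.swing.SwingUtilities;
import javax.swing.table.DefaultTableModel;

final class EdtTableUpdater {

    // Run a table-model update on the Swing event dispatch thread.
    static void setValueOnEdt(DefaultTableModel model, Object value, int row, int column) {
        SwingUtilities.invokeLater(() -> model.setValueAt(value, row, column));
    }
}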