Use of edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo in project tetrad by cmu-phil.
The class HpcJobManager, method addNewSubmittedHpcJob.
public synchronized void addNewSubmittedHpcJob(final HpcJobInfo hpcJobInfo) {
    HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
    LOGGER.debug("addNewSubmittedHpcJob: connection: " + hpcAccount.getConnectionName());
    LOGGER.debug("addNewSubmittedHpcJob: algorithm: " + hpcJobInfo.getAlgoId());
    LOGGER.debug("addNewSubmittedHpcJob: status: " + hpcJobInfo.getStatus());
    LOGGER.debug("addNewSubmittedHpcJob: pid: " + hpcJobInfo.getPid());
    Set<HpcJobInfo> hpcJobInfos = submittedHpcJobInfoMap.get(hpcAccount);
    if (hpcJobInfos == null) {
        hpcJobInfos = new LinkedHashSet<>();
    }
    hpcJobInfos.add(hpcJobInfo);
    submittedHpcJobInfoMap.put(hpcAccount, hpcJobInfos);
    removePendingHpcJob(hpcJobInfo);
}
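For orientation, a minimal caller sketch, not taken from the Tetrad sources: it registers a job with the manager using only setters that appear elsewhere on this page (setAlgoId, setStatus, setHpcAccount). The algorithm id and status value are placeholders, HpcAccount is assumed to live in the same entity package as HpcJobInfo, and the import for HpcJobManager is omitted because its package is not shown here.
import edu.pitt.dbmi.tetrad.db.entity.HpcAccount;
import edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo;

public final class SubmittedJobSketch {

    public static void recordSubmission(HpcJobManager hpcJobManager, HpcAccount hpcAccount) {
        HpcJobInfo hpcJobInfo = new HpcJobInfo();
        hpcJobInfo.setHpcAccount(hpcAccount);
        hpcJobInfo.setAlgoId("fges"); // placeholder algorithm id
        hpcJobInfo.setStatus(-1);     // placeholder; -1 is the queued value used in doRemoteCompute below
        // Adds the job to the per-account submitted set and removes it from the pending queue.
        hpcJobManager.addNewSubmittedHpcJob(hpcJobInfo);
    }
}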
Use of edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo in project tetrad by cmu-phil.
The class GeneralAlgorithmEditor, method doRemoteCompute.
private void doRemoteCompute(final GeneralAlgorithmRunner runner, final HpcAccount hpcAccount) throws Exception {
    // **********************
    // Show progress panel *
    // **********************
    Frame ancestor = (Frame) JOptionUtils.centeringComp().getTopLevelAncestor();
    final JDialog progressDialog = new JDialog(ancestor, "HPC Job Submission Progress...", false);
    Dimension progressDim = new Dimension(500, 150);
    JTextArea progressTextArea = new JTextArea();
    progressTextArea.setPreferredSize(progressDim);
    progressTextArea.setEditable(false);
    JScrollPane progressScroller = new JScrollPane(progressTextArea);
    progressScroller.setAlignmentX(LEFT_ALIGNMENT);
    progressDialog.setLayout(new BorderLayout());
    progressDialog.getContentPane().add(progressScroller, BorderLayout.CENTER);
    progressDialog.pack();
    Dimension screenDim = Toolkit.getDefaultToolkit().getScreenSize();
    progressDialog.setLocation((screenDim.width - progressDim.width) / 2, (screenDim.height - progressDim.height) / 2);
    progressDialog.setVisible(true);
    int totalProcesses = 4;
    String newline = "\n";
    String tab = "\t";
    int progressTextLength = 0;
    DataModel dataModel = runner.getDataModel();
    // 1. Generate temp file
    Path file = null;
    Path prior = null;
    try {
        // ****************************
        // Data Preparation Progress *
        // ****************************
        String dataMessage = String.format("1/%1$d Data Preparation", totalProcesses);
        progressTextArea.append(dataMessage);
        progressTextArea.append(tab);
        progressTextLength = progressTextArea.getText().length();
        progressTextArea.append("Preparing...");
        progressTextArea.updateUI();
        file = Files.createTempFile("Tetrad-data-", ".txt");
        // LOGGER.info(file.toAbsolutePath().toString());
        List<String> tempLine = new ArrayList<>();
        // Header
        List<Node> variables = dataModel.getVariables();
        if ((variables == null || variables.isEmpty()) && runner.getSourceGraph() != null) {
            variables = runner.getSourceGraph().getNodes();
        }
        String vars = StringUtils.join(variables.toArray(), tab);
        tempLine.add(vars);
        // Data
        DataSet dataSet = (DataSet) dataModel;
        for (int i = 0; i < dataSet.getNumRows(); i++) {
            String line = null;
            for (int j = 0; j < dataSet.getNumColumns(); j++) {
                String cell = null;
                if (dataSet.isContinuous()) {
                    cell = String.valueOf(dataSet.getDouble(i, j));
                } else {
                    cell = String.valueOf(dataSet.getInt(i, j));
                }
                if (line == null) {
                    line = cell;
                } else {
                    line = line + "\t" + cell;
                }
            }
            tempLine.add(line);
        }
        // for (String line : tempLine) {
        //     LOGGER.info(line);
        // }
        Files.write(file, tempLine);
        // Get file's MD5 hash and use it as its identifier
        String datasetMd5 = MessageDigestHash.computeMD5Hash(file);
        progressTextArea.replaceRange("Done", progressTextLength, progressTextArea.getText().length());
        progressTextArea.append(newline);
        progressTextArea.updateUI();
        // ***************************************
        // Prior Knowledge Preparation Progress *
        // ***************************************
        String priorMessage = String.format("2/%1$d Prior Knowledge Preparation", totalProcesses);
        progressTextArea.append(priorMessage);
        progressTextArea.append(tab);
        progressTextLength = progressTextArea.getText().length();
        progressTextArea.append("Preparing...");
        progressTextArea.updateUI();
        // 2. Generate temp prior knowledge file
        Knowledge2 knowledge = (Knowledge2) dataModel.getKnowledge();
        if (knowledge != null && !knowledge.isEmpty()) {
            prior = Files.createTempFile(file.getFileName().toString(), ".prior");
            knowledge.saveKnowledge(Files.newBufferedWriter(prior));
            progressTextArea.replaceRange("Done", progressTextLength, progressTextArea.getText().length());
            progressTextArea.append(newline);
            progressTextArea.updateUI();
        } else {
            progressTextArea.replaceRange("Skipped", progressTextLength, progressTextArea.getText().length());
            progressTextArea.append(newline);
            progressTextArea.updateUI();
        }
        // Get knowledge file's MD5 hash and use it as its identifier
        String priorKnowledgeMd5 = null;
        if (prior != null) {
            priorKnowledgeMd5 = MessageDigestHash.computeMD5Hash(prior);
        }
        // *******************************************
        // Algorithm Parameter Preparation Progress *
        // *******************************************
        String algorMessage = String.format("3/%1$d Algorithm Preparation", totalProcesses);
        progressTextArea.append(algorMessage);
        progressTextArea.append(tab);
        progressTextLength = progressTextArea.getText().length();
        progressTextArea.append("Preparing...");
        progressTextArea.updateUI();
        // 3.1 Algorithm Id, Independence Test Id, Score Id
        AlgorithmModel algoModel = algorithmList.getSelectedValue();
        String algoId = algoModel.getAlgorithm().getAnnotation().command();
        // Test
        String testId = null;
        if (indTestComboBox.isEnabled()) {
            IndependenceTestModel indTestModel = indTestComboBox.getItemAt(indTestComboBox.getSelectedIndex());
            testId = indTestModel.getIndependenceTest().getAnnotation().command();
        }
        // Score
        String scoreId = null;
        if (scoreComboBox.isEnabled()) {
            ScoreModel scoreModel = scoreComboBox.getItemAt(scoreComboBox.getSelectedIndex());
            scoreId = scoreModel.getScore().getAnnotation().command();
        }
        // 3.2 Parameters
        AlgorithmParamRequest algorithmParamRequest = new AlgorithmParamRequest();
        // Test and score
        algorithmParamRequest.setTestId(testId);
        algorithmParamRequest.setScoreId(scoreId);
        // Dataset and prior knowledge paths
        String datasetPath = file.toAbsolutePath().toString();
        LOGGER.info(datasetPath);
        algorithmParamRequest.setDatasetPath(datasetPath);
        algorithmParamRequest.setDatasetMd5(datasetMd5);
        if (prior != null) {
            String priorKnowledgePath = prior.toAbsolutePath().toString();
            LOGGER.info(priorKnowledgePath);
            algorithmParamRequest.setPriorKnowledgePath(priorKnowledgePath);
            algorithmParamRequest.setPriorKnowledgeMd5(priorKnowledgeMd5);
        }
        // Variable type
        if (dataModel.isContinuous()) {
            algorithmParamRequest.setVariableType("continuous");
        } else if (dataModel.isDiscrete()) {
            algorithmParamRequest.setVariableType("discrete");
        } else {
            algorithmParamRequest.setVariableType("mixed");
        }
        // File delimiter (pre-determined)
        String fileDelimiter = "tab";
        algorithmParamRequest.setFileDelimiter(fileDelimiter);
        Set<AlgorithmParameter> algorithmParameters = new HashSet<>();
        Parameters parameters = runner.getParameters();
        List<String> parameterNames = runner.getAlgorithm().getParameters();
        for (String parameter : parameterNames) {
            Object paramValue = parameters.get(parameter);
            // Check for null before calling toString() so that a missing
            // parameter is skipped instead of throwing a NullPointerException.
            if (paramValue != null) {
                String value = paramValue.toString();
                LOGGER.info("parameter: " + parameter + "\tvalue: " + value);
                AlgorithmParameter algorParam = new AlgorithmParameter();
                algorParam.setParameter(parameter);
                algorParam.setValue(value);
                algorithmParameters.add(algorParam);
            }
        }
        algorithmParamRequest.setAlgorithmParameters(algorithmParameters);
        String maxHeapSize = null;
        do {
            maxHeapSize = JOptionPane.showInputDialog(progressDialog, "Enter Your Requested Java Max Heap Size (GB):", "5");
        } while (maxHeapSize != null && !StringUtils.isNumeric(maxHeapSize));
        if (maxHeapSize != null) {
            JvmOptions jvmOptions = new JvmOptions();
            jvmOptions.setMaxHeapSize(Integer.parseInt(maxHeapSize));
            algorithmParamRequest.setJvmOptions(jvmOptions);
        }
        // HPC parameters
        final HpcAccountManager hpcAccountManager = desktop.getHpcAccountManager();
        JsonWebToken jsonWebToken = HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount);
        if (jsonWebToken.getWallTime() != null) {
            // User allowed to customize the job's wall time
            String[] wallTime = jsonWebToken.getWallTime();
            Object userwallTime = JOptionPane.showInputDialog(progressDialog, "Wall Time:", "Choose Your Wall Time (in Hour)", JOptionPane.QUESTION_MESSAGE, null, wallTime, wallTime[0]);
            if (wallTime != null && userwallTime != null) {
                HpcParameter hpcParameter = new HpcParameter();
                hpcParameter.setKey("walltime");
                hpcParameter.setValue(userwallTime.toString());
                LOGGER.info("walltime: " + userwallTime.toString());
                Set<HpcParameter> hpcParameters = new HashSet<>();
                hpcParameters.add(hpcParameter);
                algorithmParamRequest.setHpcParameters(hpcParameters);
            }
        }
        progressTextArea.replaceRange("Done", progressTextLength, progressTextArea.getText().length());
        progressTextArea.append(newline);
        progressTextArea.updateUI();
        // ********************************
        // Adding HPC Job Queue Progress *
        // ********************************
        String dbMessage = String.format("4/%1$d HPC Job Queue Submission", totalProcesses);
        progressTextArea.append(dbMessage);
        progressTextArea.append(tab);
        progressTextLength = progressTextArea.getText().length();
        progressTextArea.append("Preparing...");
        progressTextArea.updateUI();
        HpcJobManager hpcJobManager = desktop.getHpcJobManager();
        // 4.1 Save HpcJobInfo
        hpcJobInfo = new HpcJobInfo();
        hpcJobInfo.setAlgoId(algoId);
        hpcJobInfo.setAlgorithmParamRequest(algorithmParamRequest);
        hpcJobInfo.setStatus(-1);
        hpcJobInfo.setHpcAccount(hpcAccount);
        hpcJobManager.submitNewHpcJobToQueue(hpcJobInfo, this);
        progressTextArea.replaceRange("Done", progressTextLength, progressTextArea.getText().length());
        progressTextArea.append(newline);
        progressTextArea.updateUI();
        this.jsonResult = null;
        JOptionPane.showMessageDialog(ancestor, "The " + hpcJobInfo.getAlgoId() + " job was successfully queued on the " + hpcJobInfo.getHpcAccount().getConnectionName() + " node!");
    } catch (IOException exception) {
        LOGGER.error("", exception);
    } finally {
        progressDialog.setVisible(false);
        progressDialog.dispose();
    }
    (new HpcJobActivityAction("")).actionPerformed(null);
}
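The data-preparation step above boils down to writing the dataset as tab-delimited text and fingerprinting the file so the server can recognize duplicate uploads. Below is a minimal, JDK-only sketch of that idea; the sample rows are invented, and the real code reads its values from the Tetrad DataSet and hashes with the project's MessageDigestHash helper rather than java.security directly.
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.util.Arrays;
import java.util.List;

public final class DataPrepSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical header row plus two tab-delimited data rows.
        List<String> lines = Arrays.asList(
                "X1\tX2\tX3",
                "0.12\t1.30\t-0.70",
                "0.55\t0.02\t0.91");
        Path file = Files.createTempFile("Tetrad-data-", ".txt");
        Files.write(file, lines);
        // Hex-encoded MD5 of the file contents, used as the dataset identifier.
        byte[] digest = MessageDigest.getInstance("MD5").digest(Files.readAllBytes(file));
        StringBuilder md5 = new StringBuilder();
        for (byte b : digest) {
            md5.append(String.format("%02x", b));
        }
        System.out.println(file + " -> " + md5);
    }
}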
Use of edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo in project tetrad by cmu-phil.
The class HpcJobActivityEditor, method removeSubmittedDisplayJobFromActiveTableModel.
public synchronized void removeSubmittedDisplayJobFromActiveTableModel(final Set<HpcJobInfo> finishedJobSet) {
    DefaultTableModel model = (DefaultTableModel) jobsTable.getModel();
    Map<Long, Integer> rowMap = new HashMap<>();
    for (int row = 0; row < model.getRowCount(); row++) {
        rowMap.put(Long.valueOf(model.getValueAt(row, ID_COLUMN).toString()), row);
    }
    // Collect the matching rows first and delete them from the bottom up, so
    // removing one row does not shift the indexes of rows still to be removed.
    List<Integer> rowsToRemove = new ArrayList<>();
    for (final HpcJobInfo hpcJobInfo : finishedJobSet) {
        if (rowMap.containsKey(hpcJobInfo.getId())) {
            rowsToRemove.add(rowMap.get(hpcJobInfo.getId()));
        }
    }
    rowsToRemove.sort(Collections.reverseOrder());
    for (int row : rowsToRemove) {
        model.removeRow(row);
    }
}
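A self-contained Swing sketch of the same pattern, runnable on its own; the column layout and job ids are invented for the demo, and it scans the model from the bottom up for the same index-stability reason noted above.
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import javax.swing.table.DefaultTableModel;

public final class RemoveRowsDemo {

    public static void main(String[] args) {
        DefaultTableModel model = new DefaultTableModel(new Object[] {"Job ID", "Status"}, 0);
        model.addRow(new Object[] {"101", "Running"});
        model.addRow(new Object[] {"102", "Running"});
        model.addRow(new Object[] {"103", "Running"});

        Set<Long> finishedIds = new HashSet<>(Arrays.asList(101L, 103L));

        // Scan from the bottom up so removals do not shift pending row indexes.
        for (int row = model.getRowCount() - 1; row >= 0; row--) {
            Long id = Long.valueOf(model.getValueAt(row, 0).toString());
            if (finishedIds.contains(id)) {
                model.removeRow(row);
            }
        }
        System.out.println("Rows left: " + model.getRowCount()); // prints 1
    }
}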
Use of edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo in project tetrad by cmu-phil.
The class HpcJobActivityEditor, method getFinishedRowData.
private Vector<Vector<String>> getFinishedRowData(final TetradDesktop desktop, final List<HpcAccount> exclusiveHpcAccounts) throws Exception {
    final Vector<Vector<String>> finishedRowData = new Vector<>();
    HpcJobManager hpcJobManager = desktop.getHpcJobManager();
    Map<Long, HpcJobInfo> finishedHpcJobIdMap = null;
    // Finished jobs
    Map<HpcAccount, Set<HpcJobInfo>> finishedHpcJobInfoMap = hpcJobManager.getFinishedHpcJobInfoMap();
    for (HpcAccount hpcAccount : finishedHpcJobInfoMap.keySet()) {
        if (exclusiveHpcAccounts != null && !exclusiveHpcAccounts.contains(hpcAccount)) {
            continue;
        }
        Set<HpcJobInfo> finishedHpcJobSet = finishedHpcJobInfoMap.get(hpcAccount);
        for (HpcJobInfo hpcJobInfo : finishedHpcJobSet) {
            if (finishedHpcJobIdMap == null) {
                finishedHpcJobIdMap = new HashMap<>();
            }
            finishedHpcJobIdMap.put(hpcJobInfo.getId(), hpcJobInfo);
        }
    }
    if (finishedHpcJobIdMap != null) {
        List<Long> finishedJobIds = new ArrayList<>(finishedHpcJobIdMap.keySet());
        Collections.sort(finishedJobIds);
        Collections.reverse(finishedJobIds);
        for (Long jobId : finishedJobIds) {
            final HpcJobInfo hpcJobInfo = finishedHpcJobIdMap.get(jobId);
            Vector<String> rowData = new Vector<>();
            HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(hpcJobInfo);
            // Local job id
            rowData.add(hpcJobInfo.getId().toString());
            int status = hpcJobInfo.getStatus();
            switch (status) {
                case 3:
                    rowData.add("Finished");
                    break;
                case 4:
                    rowData.add("Canceled");
                    break;
                case 5:
                    rowData.add("Finished");
                    break;
                case 6:
                    rowData.add("Error");
                    break;
            }
            // Locally added time
            rowData.add(FilePrint.fileTimestamp(hpcJobLog.getAddedTime().getTime()));
            // HPC node name
            HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
            rowData.add(hpcAccount.getConnectionName());
            // Algorithm
            rowData.add(hpcJobInfo.getAlgoId());
            // Submitted time
            rowData.add(hpcJobInfo.getSubmittedTime() != null ? FilePrint.fileTimestamp(hpcJobInfo.getSubmittedTime().getTime()) : "");
            // HPC job id
            rowData.add("" + hpcJobInfo.getPid());
            // Result Name
            switch (status) {
                case 3:
                    rowData.add(hpcJobInfo.getResultFileName());
                    break;
                case 4:
                    rowData.add("");
                    break;
                case 5:
                    rowData.add(hpcJobInfo.getResultFileName());
                    break;
                case 6:
                    rowData.add(hpcJobInfo.getErrorResultFileName());
                    break;
            }
            // Finished time
            if (status != 4) {
                rowData.add(FilePrint.fileTimestamp(hpcJobLog.getEndedTime().getTime()));
            } else {
                rowData.add("");
            }
            // Canceled time
            if (status == 4) {
                rowData.add(hpcJobLog.getCanceledTime() != null ? FilePrint.fileTimestamp(hpcJobLog.getCanceledTime().getTime()) : "");
            } else {
                rowData.add("");
            }
            // Last update time
            rowData.add(FilePrint.fileTimestamp(hpcJobLog.getLastUpdatedTime().getTime()));
            // Delete job from db
            rowData.add("Delete");
            finishedRowData.add(rowData);
        }
    }
    return finishedRowData;
}
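The two switch statements above map the same numeric status codes (3 and 5 = finished, 4 = canceled, 6 = error) to display text. A small, JDK-only helper like the following, which is a sketch rather than anything in the Tetrad sources, captures that mapping in one place:
public final class JobStatusLabel {

    // Status codes as used by the switches in getFinishedRowData above.
    public static String describe(int status) {
        switch (status) {
            case 3:
            case 5:
                return "Finished";
            case 4:
                return "Canceled";
            case 6:
                return "Error";
            default:
                return "Unknown (" + status + ")";
        }
    }

    public static void main(String[] args) {
        for (int code : new int[] {3, 4, 5, 6, 7}) {
            System.out.println(code + " -> " + describe(code));
        }
    }
}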
Use of edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo in project tetrad by cmu-phil.
The class HpcJobManager, method updateSubmittedHpcJobInfo.
public synchronized void updateSubmittedHpcJobInfo(final HpcJobInfo hpcJobInfo) {
    final HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
    Set<HpcJobInfo> hpcJobInfos = submittedHpcJobInfoMap.get(hpcAccount);
    if (hpcJobInfos != null) {
        // Remove any stale entry with the same id through the iterator (removing
        // inside a for-each loop risks a ConcurrentModificationException), and
        // compare the Long ids with equals() rather than ==.
        for (Iterator<HpcJobInfo> it = hpcJobInfos.iterator(); it.hasNext(); ) {
            HpcJobInfo jobInfo = it.next();
            if (jobInfo.getId().equals(hpcJobInfo.getId())) {
                it.remove();
            }
        }
        hpcJobInfos.add(hpcJobInfo);
        submittedHpcJobInfoMap.put(hpcAccount, hpcJobInfos);
    }
}
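For context, a hypothetical sketch of how a status-polling step might push a refreshed entity back through this method so the stale entry keyed by the same id is replaced in the per-account submitted set. Only updateSubmittedHpcJobInfo and the setters shown elsewhere on this page are taken as given; the status value follows the convention in getFinishedRowData (3 = finished), and the import for HpcJobManager is omitted because its package is not shown here.
import edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo;

public final class JobStatusRefresher {

    // Marks a previously submitted job as finished and re-registers it with the manager.
    public static void markFinished(HpcJobManager hpcJobManager, HpcJobInfo hpcJobInfo) {
        hpcJobInfo.setStatus(3); // 3 = finished, per the status switch above
        hpcJobManager.updateSubmittedHpcJobInfo(hpcJobInfo);
    }
}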