Example 6 with TetradDesktop

Use of edu.cmu.tetradapp.app.TetradDesktop in project tetrad by cmu-phil.

The class HpcJobPreProcessTask, method run(). The task waits for the Tetrad desktop, uploads the dataset (and optional prior-knowledge file) to the selected HPC account, summarizes the dataset when needed, builds the remote algorithm parameter request, and submits the job to the HPC queue.

@Override
public void run() {
    // Wait until the Tetrad desktop is available, re-checking the controller on each
    // pass so the loop can terminate once the desktop has been created.
    TetradDesktop desktop = (TetradDesktop) DesktopController.getInstance();
    while (desktop == null) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        desktop = (TetradDesktop) DesktopController.getInstance();
    }
    final HpcAccountManager hpcAccountManager = desktop.getHpcAccountManager();
    final HpcJobManager hpcJobManager = desktop.getHpcJobManager();
    HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
    AlgorithmParamRequest algorParamReq = hpcJobInfo.getAlgorithmParamRequest();
    String datasetPath = algorParamReq.getDatasetPath();
    String priorKnowledgePath = algorParamReq.getPriorKnowledgePath();
    try {
        HpcAccountService hpcAccountService = hpcJobManager.getHpcAccountService(hpcAccount);
        HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(hpcJobInfo);
        String log = "Initiated connection to " + hpcAccount.getConnectionName();
        LOGGER.debug(log);
        hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
        log = "datasetPath: " + datasetPath;
        System.out.println(log);
        Path file = Paths.get(datasetPath);
        // Use the dataset's precomputed MD5 hash as its remote identifier
        String md5 = algorParamReq.getDatasetMd5();
        // Initiate data uploading progress
        hpcJobManager.updateUploadFileProgress(datasetPath, 0);
        Path prior = null;
        if (priorKnowledgePath != null) {
            log = "priorKnowledgePath: " + priorKnowledgePath;
            LOGGER.debug(log);
            prior = Paths.get(priorKnowledgePath);
            // Initiate prior knowledge uploading progress
            hpcJobManager.updateUploadFileProgress(priorKnowledgePath, 0);
        }
        // Check if this dataset already exists with this md5 hash
        RemoteDataFileService remoteDataService = hpcAccountService.getRemoteDataService();
        DataFile dataFile = HpcAccountUtils.getRemoteDataFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
        DataUploadService dataUploadService = hpcAccountService.getDataUploadService();
        // If not, upload the file
        if (dataFile == null) {
            log = "Started uploading " + file.getFileName().toString();
            LOGGER.debug(log);
            dataUploadService.startUpload(file, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
            hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
            int progress;
            // Poll the remote upload status and push progress updates to the UI until done
            while ((progress = dataUploadService.getUploadJobStatus(file.toAbsolutePath().toString())) < 100) {
                hpcJobManager.updateUploadFileProgress(datasetPath, progress);
                Thread.sleep(10);
            }
            hpcJobManager.updateUploadFileProgress(datasetPath, progress);
            log = "Finished uploading " + file.getFileName().toString();
            LOGGER.debug(log);
            hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
            // Get remote datafile
            dataFile = HpcAccountUtils.getRemoteDataFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
            HpcAccountUtils.summarizeDataset(remoteDataService, algorParamReq, dataFile.getId(), HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
            log = "Summarized " + file.getFileName().toString();
            LOGGER.debug(log);
            hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
        } else {
            log = "Skipped uploading " + file.getFileName().toString();
            LOGGER.debug(log);
            hpcJobManager.updateUploadFileProgress(datasetPath, -1);
            hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
            if (dataFile.getFileSummary().getVariableType() == null) {
                HpcAccountUtils.summarizeDataset(remoteDataService, algorParamReq, dataFile.getId(), HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
                log = "Summarized " + file.getFileName().toString();
                LOGGER.debug(log);
                hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, "Summarized " + file.getFileName().toString());
            }
        }
        DataFile priorKnowledgeFile = null;
        // Prior Knowledge File
        if (prior != null) {
            // Get prior knowledge file Id
            md5 = algorParamReq.getPriorKnowledgeMd5();
            priorKnowledgeFile = HpcAccountUtils.getRemotePriorKnowledgeFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
            if (priorKnowledgeFile == null) {
                // Upload prior knowledge file
                dataUploadService.startUpload(prior, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
                log = "Started uploading Prior Knowledge File";
                LOGGER.debug(log);
                hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
                int progress;
                while ((progress = dataUploadService.getUploadJobStatus(prior.toAbsolutePath().toString())) < 100) {
                    hpcJobManager.updateUploadFileProgress(priorKnowledgePath, progress);
                    Thread.sleep(10);
                }
                hpcJobManager.updateUploadFileProgress(priorKnowledgePath, progress);
                priorKnowledgeFile = HpcAccountUtils.getRemotePriorKnowledgeFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
                log = "Finished uploading Prior Knowledge File";
                LOGGER.debug(log);
                hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
            }
        }
        // Algorithm Job Preparation
        edu.pitt.dbmi.ccd.rest.client.dto.algo.AlgorithmParamRequest paramRequest = new edu.pitt.dbmi.ccd.rest.client.dto.algo.AlgorithmParamRequest();
        String algoId = hpcJobInfo.getAlgoId();
        paramRequest.setAlgoId(algoId);
        paramRequest.setDatasetFileId(dataFile.getId());
        // Test
        if (algorParamReq.getTestId() != null) {
            paramRequest.setTestId(algorParamReq.getTestId());
        }
        // Score
        if (algorParamReq.getScoreId() != null) {
            paramRequest.setScoreId(algorParamReq.getScoreId());
        }
        Set<AlgoParameter> algorithmParameters = new HashSet<>();
        for (AlgorithmParameter param : algorParamReq.getAlgorithmParameters()) {
            algorithmParameters.add(new AlgoParameter(param.getParameter(), param.getValue()));
            LOGGER.debug("AlgorithmParameter: " + param.getParameter() + " : " + param.getValue());
        }
        if (priorKnowledgeFile != null) {
            paramRequest.setPriorKnowledgeFileId(priorKnowledgeFile.getId());
            LOGGER.debug("priorKnowledgeFileId: " + priorKnowledgeFile.getId());
        }
        paramRequest.setAlgoParameters(algorithmParameters);
        if (algorParamReq.getJvmOptions() != null) {
            JvmOptions jvmOptions = new JvmOptions();
            jvmOptions.setMaxHeapSize(algorParamReq.getJvmOptions().getMaxHeapSize());
            paramRequest.setJvmOptions(jvmOptions);
        }
        Set<HpcParameter> hpcParameters = algorParamReq.getHpcParameters();
        if (hpcParameters != null) {
            Set<edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter> hpcParams = new HashSet<>();
            for (HpcParameter param : hpcParameters) {
                edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter hpcParam = new edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter();
                hpcParam.setKey(param.getKey());
                hpcParam.setValue(param.getValue());
                hpcParams.add(hpcParam);
                LOGGER.debug("HpcParameter: " + hpcParam.getKey() + " : " + hpcParam.getValue());
            }
            paramRequest.setHpcParameters(hpcParams);
        }
        // Submit a job
        JobQueueService jobQueueService = hpcAccountService.getJobQueueService();
        JobInfo jobInfo = jobQueueService.addToRemoteQueue(paramRequest, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
        // Log the job submission
        hpcJobInfo.setSubmittedTime(new Date(System.currentTimeMillis()));
        // Submitted
        hpcJobInfo.setStatus(0);
        hpcJobInfo.setPid(jobInfo.getId());
        hpcJobInfo.setResultFileName(jobInfo.getResultFileName());
        hpcJobInfo.setResultJsonFileName(jobInfo.getResultJsonFileName());
        hpcJobInfo.setErrorResultFileName(jobInfo.getErrorResultFileName());
        hpcJobManager.updateHpcJobInfo(hpcJobInfo);
        log = "Submitted job to " + hpcAccount.getConnectionName();
        LOGGER.debug(log);
        hpcJobManager.logHpcJobLogDetail(hpcJobLog, 0, log);
        LOGGER.debug("HpcJobPreProcessTask: HpcJobInfo: id : " + hpcJobInfo.getId() + " : pid : " + hpcJobInfo.getPid() + " : " + hpcJobInfo.getAlgoId() + hpcJobInfo.getAlgorithmParamRequest().getTestId() == null ? "" : " : " + hpcJobInfo.getAlgorithmParamRequest().getTestId() + hpcJobInfo.getAlgorithmParamRequest().getScoreId() == null ? "" : " : " + hpcJobInfo.getAlgorithmParamRequest().getScoreId() + " : " + hpcJobInfo.getResultFileName());
        hpcJobManager.addNewSubmittedHpcJob(hpcJobInfo);
    } catch (Exception e) {
        // Submission failed; report the stack trace (no retry is attempted here)
        e.printStackTrace();
    }
}
Also used : HpcAccount(edu.pitt.dbmi.tetrad.db.entity.HpcAccount) DataUploadService(edu.pitt.dbmi.ccd.rest.client.service.data.DataUploadService) RemoteDataFileService(edu.pitt.dbmi.ccd.rest.client.service.data.RemoteDataFileService) DataFile(edu.pitt.dbmi.ccd.rest.client.dto.data.DataFile) HpcJobManager(edu.cmu.tetradapp.app.hpc.manager.HpcJobManager) JobInfo(edu.pitt.dbmi.ccd.rest.client.dto.algo.JobInfo) HpcJobInfo(edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo) HpcAccountManager(edu.cmu.tetradapp.app.hpc.manager.HpcAccountManager) AlgorithmParamRequest(edu.pitt.dbmi.tetrad.db.entity.AlgorithmParamRequest) TetradDesktop(edu.cmu.tetradapp.app.TetradDesktop) HashSet(java.util.HashSet) Path(java.nio.file.Path) HpcAccountService(edu.cmu.tetradapp.app.hpc.manager.HpcAccountService) Date(java.util.Date) JobQueueService(edu.pitt.dbmi.ccd.rest.client.service.jobqueue.JobQueueService) HpcParameter(edu.pitt.dbmi.tetrad.db.entity.HpcParameter) AlgoParameter(edu.pitt.dbmi.ccd.rest.client.dto.algo.AlgoParameter) JvmOptions(edu.pitt.dbmi.ccd.rest.client.dto.algo.JvmOptions) AlgorithmParameter(edu.pitt.dbmi.tetrad.db.entity.AlgorithmParameter) HpcJobLog(edu.pitt.dbmi.tetrad.db.entity.HpcJobLog)
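A minimal usage sketch, not taken from the Tetrad sources: handing a pre-process task like this to a background executor so the polling upload loop never blocks the Swing Event Dispatch Thread. The single-argument constructor and the assumption that HpcJobPreProcessTask implements Runnable (suggested by the @Override on run()) are both illustrative assumptions.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo;
// import for HpcJobPreProcessTask omitted; its package depends on the Tetrad build

public class HpcJobSubmissionSketch {

    public static void submitInBackground(HpcJobInfo hpcJobInfo) {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // run() uploads the dataset (and prior knowledge, if any), then queues the job
        // on the remote HPC account; doing this off the EDT keeps the desktop responsive.
        executor.submit(new HpcJobPreProcessTask(hpcJobInfo)); // constructor assumed
        executor.shutdown(); // accept no further tasks; the submitted one still completes
    }
}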

Example 7 with TetradDesktop

Use of edu.cmu.tetradapp.app.TetradDesktop in project tetrad by cmu-phil.

The class KillHpcJobAction, method actionPerformed(). After the user confirms, the action either requests a kill for a job that already has a remote PID or removes a still-pending job from the local queue, updating the job log in both cases.

@Override
public void actionPerformed(ActionEvent e) {
    JTable table = (JTable) e.getSource();
    int modelRow = Integer.parseInt(e.getActionCommand());
    DefaultTableModel activeJobTableModel = (DefaultTableModel) table.getModel();
    long jobId = Long.parseLong(activeJobTableModel.getValueAt(modelRow, HpcJobActivityEditor.ID_COLUMN).toString());
    int answer = JOptionPane.showConfirmDialog(parentComp, "Would you like to cancel this HPC job id: " + jobId + "?", "Cancel HPC job", JOptionPane.YES_NO_OPTION);
    if (answer == JOptionPane.NO_OPTION)
        return;
    TetradDesktop desktop = (TetradDesktop) DesktopController.getInstance();
    final HpcJobManager hpcJobManager = desktop.getHpcJobManager();
    HpcJobInfo hpcJobInfo = hpcJobManager.findHpcJobInfoById(jobId);
    if (hpcJobInfo != null) {
        try {
            if (hpcJobInfo.getPid() != null) {
                // Update table
                activeJobTableModel.setValueAt("Kill Request", modelRow, 1);
                table.updateUI();
                hpcJobInfo = hpcJobManager.requestHpcJobKilled(hpcJobInfo);
                // Update hpcJobInfo instance
                hpcJobManager.updateHpcJobInfo(hpcJobInfo);
                // Update hpcJobLog instance
                HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(hpcJobInfo);
                if (hpcJobLog != null) {
                    hpcJobLog.setLastUpdatedTime(new Date(System.currentTimeMillis()));
                    hpcJobManager.updateHpcJobLog(hpcJobLog);
                    // Update hpcJobLogDetail instance
                    String log = "Requested job id " + hpcJobLog.getId() + " killed";
                    hpcJobManager.logHpcJobLogDetail(hpcJobLog, 2, log);
                }
            } else {
                // Update table
                activeJobTableModel.removeRow(modelRow);
                table.updateUI();
                hpcJobManager.removePendingHpcJob(hpcJobInfo);
                // Killed
                hpcJobInfo.setStatus(4);
                // Update hpcJobInfo instance
                hpcJobManager.updateHpcJobInfo(hpcJobInfo);
                // Update hpcJobLog instance
                HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(hpcJobInfo);
                if (hpcJobLog != null) {
                    hpcJobLog.setCanceledTime(new Date(System.currentTimeMillis()));
                    hpcJobLog.setLastUpdatedTime(new Date(System.currentTimeMillis()));
                    hpcJobManager.updateHpcJobLog(hpcJobLog);
                    // Update hpcJobLogDetail instance
                    String log = "Killed job id " + hpcJobLog.getId();
                    hpcJobManager.logHpcJobLogDetail(hpcJobLog, 4, log);
                }
            }
        } catch (Exception e1) {
            // The kill request failed; report the stack trace and leave the job as-is
            e1.printStackTrace();
        }
    }
}
Also used : HpcJobManager(edu.cmu.tetradapp.app.hpc.manager.HpcJobManager) JTable(javax.swing.JTable) DefaultTableModel(javax.swing.table.DefaultTableModel) HpcJobInfo(edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo) HpcJobLog(edu.pitt.dbmi.tetrad.db.entity.HpcJobLog) Date(java.util.Date) TetradDesktop(edu.cmu.tetradapp.app.TetradDesktop)
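The integer codes passed to setStatus and logHpcJobLogDetail above are documented only by inline comments (0 for a submitted job, 4 for a killed one). A hypothetical constants holder like the sketch below, which is not part of the Tetrad sources, would make those call sites self-describing.

// Hypothetical helper, not part of the Tetrad sources: names for the status codes
// that the snippets above pass as bare integers.
public final class HpcJobStatusCodes {

    public static final int SUBMITTED = 0; // hpcJobInfo.setStatus(0) in Example 6
    public static final int KILLED = 4;    // hpcJobInfo.setStatus(4) in Example 7

    private HpcJobStatusCodes() {
        // constants only; never instantiated
    }
}

With such a helper, the kill path would read hpcJobInfo.setStatus(HpcJobStatusCodes.KILLED) instead of a bare 4.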

Example 8 with TetradDesktop

Use of edu.cmu.tetradapp.app.TetradDesktop in project tetrad by cmu-phil.

The class LoadHpcGraphJsonAction, method actionPerformed(). The action lets the user choose a JSON result file from an HPC account, downloads it, parses it into a Tetrad Graph, applies a circle layout, and shows it in the graph editor.

@Override
public void actionPerformed(ActionEvent e) {
    TetradDesktop desktop = (TetradDesktop) DesktopController.getInstance();
    final HpcAccountManager hpcAccountManager = desktop.getHpcAccountManager();
    final HpcJobManager hpcJobManager = desktop.getHpcJobManager();
    JComponent comp = buildHpcJsonChooserComponent(desktop);
    int option = JOptionPane.showConfirmDialog(JOptionUtils.centeringComp(), comp, "High-Performance Computing Account Json Results Chooser", JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE);
    if (option == JOptionPane.OK_OPTION && jsonFileName != null && hpcAccount != null) {
        try {
            HpcAccountService hpcAccountService = hpcJobManager.getHpcAccountService(hpcAccount);
            ResultService resultService = hpcAccountService.getResultService();
            String json = resultService.downloadAlgorithmResultFile(jsonFileName, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
            Graph graph = JsonUtils.parseJSONObjectToTetradGraph(json);
            GraphUtils.circleLayout(graph, 300, 300, 150);
            graphEditable.setGraph(graph);
            graphEditable.setName(jsonFileName);
        } catch (Exception e1) {
            // Downloading or parsing the JSON result failed
            e1.printStackTrace();
        }
    } else {
        LOGGER.debug("Option: OK " + (option == JOptionPane.OK_OPTION));
        LOGGER.debug("Option: jsonFileName " + (jsonFileName != null));
        LOGGER.debug("Option: computingAccount " + (hpcAccount != null));
    }
}
Also used : Graph(edu.cmu.tetrad.graph.Graph) HpcJobManager(edu.cmu.tetradapp.app.hpc.manager.HpcJobManager) HpcAccountService(edu.cmu.tetradapp.app.hpc.manager.HpcAccountService) ResultService(edu.pitt.dbmi.ccd.rest.client.service.result.ResultService) HpcAccountManager(edu.cmu.tetradapp.app.hpc.manager.HpcAccountManager) JComponent(javax.swing.JComponent) FilePrint(edu.pitt.dbmi.ccd.commons.file.FilePrint) TetradDesktop(edu.cmu.tetradapp.app.TetradDesktop)
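A minimal sketch, not from the Tetrad sources, reusing the same parse-and-layout calls on a JSON result that has already been saved to disk. The package of JsonUtils in the import below is an assumption and may need adjusting to match the Tetrad build.

import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

import edu.cmu.tetrad.graph.Graph;
import edu.cmu.tetrad.graph.GraphUtils;
import edu.cmu.tetrad.util.JsonUtils; // package assumed

public class LocalJsonGraphLoader {

    public static Graph load(String jsonPath) throws Exception {
        String json = new String(Files.readAllBytes(Paths.get(jsonPath)), StandardCharsets.UTF_8);
        // Same parsing call used by LoadHpcGraphJsonAction above.
        Graph graph = JsonUtils.parseJSONObjectToTetradGraph(json);
        // Arrange the nodes on a circle, as the action does before handing the graph to the editor.
        GraphUtils.circleLayout(graph, 300, 300, 150);
        return graph;
    }
}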

Aggregations

TetradDesktop (edu.cmu.tetradapp.app.TetradDesktop): 8 usages
HpcJobManager (edu.cmu.tetradapp.app.hpc.manager.HpcJobManager): 5 usages
HpcJobInfo (edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo): 5 usages
HpcAccountManager (edu.cmu.tetradapp.app.hpc.manager.HpcAccountManager): 4 usages
JobInfo (edu.pitt.dbmi.ccd.rest.client.dto.algo.JobInfo): 3 usages
HpcAccount (edu.pitt.dbmi.tetrad.db.entity.HpcAccount): 3 usages
HpcJobLog (edu.pitt.dbmi.tetrad.db.entity.HpcJobLog): 3 usages
Date (java.util.Date): 3 usages
HpcAccountService (edu.cmu.tetradapp.app.hpc.manager.HpcAccountService): 2 usages
JobQueueService (edu.pitt.dbmi.ccd.rest.client.service.jobqueue.JobQueueService): 2 usages
HashSet (java.util.HashSet): 2 usages
JComponent (javax.swing.JComponent): 2 usages
JTable (javax.swing.JTable): 2 usages
DefaultTableModel (javax.swing.table.DefaultTableModel): 2 usages
Graph (edu.cmu.tetrad.graph.Graph): 1 usage
GeneralAlgorithmEditor (edu.cmu.tetradapp.editor.GeneralAlgorithmEditor): 1 usage
FilePrint (edu.pitt.dbmi.ccd.commons.file.FilePrint): 1 usage
AlgoParameter (edu.pitt.dbmi.ccd.rest.client.dto.algo.AlgoParameter): 1 usage
JvmOptions (edu.pitt.dbmi.ccd.rest.client.dto.algo.JvmOptions): 1 usage
ResultFile (edu.pitt.dbmi.ccd.rest.client.dto.algo.ResultFile): 1 usage