Search in sources:

Example 11 with HpcJobInfo

Use of edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo in the project tetrad by cmu-phil.

From the class HpcJobManager, the method removeFinishedHpcJob:

/**
 * Removes a finished job from the submitted-jobs map and discards its cached
 * graph result. Jobs are matched by id; if the account's job set becomes
 * empty, the account entry itself is dropped from the map.
 *
 * @param hpcJobInfo the finished job to remove
 */
public synchronized void removeFinishedHpcJob(final HpcJobInfo hpcJobInfo) {
    final HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
    LOGGER.debug("removedFinishedHpcJob: connection: " + hpcAccount.getConnectionName());
    LOGGER.debug("removedFinishedHpcJob: algorithm: " + hpcJobInfo.getAlgoId());
    LOGGER.debug("removedFinishedHpcJob: status: " + hpcJobInfo.getStatus());
    LOGGER.debug("removedFinishedHpcJob: pid: " + hpcJobInfo.getPid());
    final Set<HpcJobInfo> hpcJobInfos = submittedHpcJobInfoMap.get(hpcAccount);
    if (hpcJobInfos != null) {
        // removeIf avoids the ConcurrentModificationException that the
        // previous remove-while-iterating for-each loop could throw.
        // NOTE(review): the == id comparison assumes a primitive id (or
        // always-interned boxed value) — confirm HpcJobInfo.getId()'s type.
        hpcJobInfos.removeIf(jobInfo -> jobInfo.getId() == hpcJobInfo.getId());
        if (hpcJobInfos.isEmpty()) {
            submittedHpcJobInfoMap.remove(hpcAccount);
        }
        // No re-put needed: the set was mutated in place.
    }
    hpcGraphResultMap.remove(hpcJobInfo);
}
Also used : HpcAccount(edu.pitt.dbmi.tetrad.db.entity.HpcAccount) HpcJobInfo(edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo)

Example 12 with HpcJobInfo

Use of edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo in the project tetrad by cmu-phil.

From the class HpcJobManager, the method resumePreProcessJobs:

/**
 * Resumes jobs persisted with status -1 (Pending): registers each one in the
 * pending-jobs map under its HPC account and re-submits its pre-processing
 * task to the executor service.
 */
private synchronized void resumePreProcessJobs() {
    // Look up the DB for HpcJobInfo rows with status -1 (Pending).
    final List<HpcJobInfo> pendingHpcJobInfo = hpcJobInfoService.findByStatus(-1);
    if (pendingHpcJobInfo == null || pendingHpcJobInfo.isEmpty()) {
        // Previously only a null result was logged; an empty list means the
        // same thing ("nothing to resume"), so report it too.
        LOGGER.debug("resumePreProcessJobs: no pending jobs to be resumed");
        return;
    }
    for (HpcJobInfo hpcJobInfo : pendingHpcJobInfo) {
        LOGGER.debug("resumePreProcessJobs: " + hpcJobInfo.getAlgoId() + " : " + hpcJobInfo.getHpcAccount().getConnectionName() + " : " + hpcJobInfo.getAlgorithmParamRequest().getDatasetPath());
        final HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
        // computeIfAbsent replaces the get / null-check / put sequence.
        pendingHpcJobInfoMap
                .computeIfAbsent(hpcAccount, account -> new LinkedHashSet<>())
                .add(hpcJobInfo);
        executorService.submit(new HpcJobPreProcessTask(hpcJobInfo));
    }
}
Also used : HpcJobPreProcessTask(edu.cmu.tetradapp.app.hpc.task.HpcJobPreProcessTask) HpcAccount(edu.pitt.dbmi.tetrad.db.entity.HpcAccount) HpcJobInfo(edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo)

Example 13 with HpcJobInfo

Use of edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo in the project tetrad by cmu-phil.

From the class HpcJobManager, the method getFinishedHpcJobInfoMap:

/**
 * Builds a fresh snapshot of all finished jobs, grouped by HPC account.
 * Queries the DB for each finished status code 3 through 6 — 5 is
 * "Result Downloaded" and 6 is "Error Result Downloaded"; 3 and 4 are
 * presumably the remaining finished states (confirm against the status enum).
 *
 * @return a new map from account to its finished jobs (insertion-ordered sets);
 *         never null, possibly empty
 */
public synchronized Map<HpcAccount, Set<HpcJobInfo>> getFinishedHpcJobInfoMap() {
    final Map<HpcAccount, Set<HpcJobInfo>> finishedHpcJobInfoMap = new HashMap<>();
    for (int status = 3; status <= 6; status++) {
        final List<HpcJobInfo> finishedHpcJobInfo = hpcJobInfoService.findByStatus(status);
        if (finishedHpcJobInfo == null) {
            continue;
        }
        for (HpcJobInfo hpcJobInfo : finishedHpcJobInfo) {
            // computeIfAbsent replaces the get / null-check / put sequence.
            finishedHpcJobInfoMap
                    .computeIfAbsent(hpcJobInfo.getHpcAccount(), account -> new LinkedHashSet<>())
                    .add(hpcJobInfo);
        }
    }
    return finishedHpcJobInfoMap;
}
Also used : LinkedHashSet(java.util.LinkedHashSet) Set(java.util.Set) HashMap(java.util.HashMap) HpcAccount(edu.pitt.dbmi.tetrad.db.entity.HpcAccount) HpcJobInfo(edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo)

Example 14 with HpcJobInfo

Use of edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo in the project tetrad by cmu-phil.

From the class HpcJobManager, the method removePendingHpcJob:

/**
 * Removes a pending job from the pending-jobs map. Jobs are matched by id;
 * if the account's job set becomes empty, the account entry itself is
 * dropped from the map.
 *
 * @param hpcJobInfo the pending job to remove
 */
public synchronized void removePendingHpcJob(final HpcJobInfo hpcJobInfo) {
    final HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
    LOGGER.debug("removedPendingHpcJob: connection: " + hpcAccount.getConnectionName());
    LOGGER.debug("removedPendingHpcJob: algorithm: " + hpcJobInfo.getAlgoId());
    LOGGER.debug("removedPendingHpcJob: status: " + hpcJobInfo.getStatus());
    LOGGER.debug("removedPendingHpcJob: pid: " + hpcJobInfo.getPid());
    final Set<HpcJobInfo> hpcJobInfos = pendingHpcJobInfoMap.get(hpcAccount);
    if (hpcJobInfos != null) {
        // removeIf avoids the ConcurrentModificationException that the
        // previous remove-while-iterating for-each loop could throw.
        // NOTE(review): the == id comparison assumes a primitive id (or
        // always-interned boxed value) — confirm HpcJobInfo.getId()'s type.
        hpcJobInfos.removeIf(jobInfo -> jobInfo.getId() == hpcJobInfo.getId());
        if (hpcJobInfos.isEmpty()) {
            pendingHpcJobInfoMap.remove(hpcAccount);
        }
        // No re-put needed: the set was mutated in place.
    }
}
Also used : HpcAccount(edu.pitt.dbmi.tetrad.db.entity.HpcAccount) HpcJobInfo(edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo)

Example 15 with HpcJobInfo

Use of edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo in the project tetrad by cmu-phil.

From the class HpcJobPreProcessTask, the method run:

/**
 * Pre-processes one HPC job: waits for the Tetrad desktop to come up, uploads
 * the dataset (and optional prior-knowledge file) to the remote account if not
 * already present there, builds the remote algorithm request, submits it to
 * the remote job queue, and records the submission via the HpcJobManager.
 */
@Override
public void run() {
    TetradDesktop desktop = (TetradDesktop) DesktopController.getInstance();
    // Wait for the desktop singleton. The instance must be re-read on each
    // iteration; the original loop polled the initial null reference and
    // could spin forever.
    while (desktop == null) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // Restore the interrupt status and stop waiting.
            Thread.currentThread().interrupt();
            return;
        }
        desktop = (TetradDesktop) DesktopController.getInstance();
    }
    final HpcAccountManager hpcAccountManager = desktop.getHpcAccountManager();
    final HpcJobManager hpcJobManager = desktop.getHpcJobManager();
    final HpcAccount hpcAccount = hpcJobInfo.getHpcAccount();
    final AlgorithmParamRequest algorParamReq = hpcJobInfo.getAlgorithmParamRequest();
    final String datasetPath = algorParamReq.getDatasetPath();
    final String priorKnowledgePath = algorParamReq.getPriorKnowledgePath();
    try {
        HpcAccountService hpcAccountService = hpcJobManager.getHpcAccountService(hpcAccount);
        HpcJobLog hpcJobLog = hpcJobManager.getHpcJobLog(hpcJobInfo);
        String log = "Initiated connection to " + hpcAccount.getConnectionName();
        LOGGER.debug(log);
        hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
        log = "datasetPath: " + datasetPath;
        // Route through the logger instead of System.out for consistency.
        LOGGER.debug(log);
        Path file = Paths.get(datasetPath);
        // The dataset's MD5 hash is its remote identifier.
        String md5 = algorParamReq.getDatasetMd5();
        // Initiate data uploading progress.
        hpcJobManager.updateUploadFileProgress(datasetPath, 0);
        Path prior = null;
        if (priorKnowledgePath != null) {
            log = "priorKnowledgePath: " + priorKnowledgePath;
            LOGGER.debug(log);
            prior = Paths.get(priorKnowledgePath);
            // Initiate prior-knowledge uploading progress.
            hpcJobManager.updateUploadFileProgress(priorKnowledgePath, 0);
        }
        // Check whether this dataset already exists remotely (by MD5 hash).
        RemoteDataFileService remoteDataService = hpcAccountService.getRemoteDataService();
        DataFile dataFile = HpcAccountUtils.getRemoteDataFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
        DataUploadService dataUploadService = hpcAccountService.getDataUploadService();
        if (dataFile == null) {
            // Not on the server yet: upload it and poll progress to 100%.
            log = "Started uploading " + file.getFileName().toString();
            LOGGER.debug(log);
            dataUploadService.startUpload(file, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
            hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
            int progress;
            while ((progress = dataUploadService.getUploadJobStatus(file.toAbsolutePath().toString())) < 100) {
                hpcJobManager.updateUploadFileProgress(datasetPath, progress);
                Thread.sleep(10);
            }
            hpcJobManager.updateUploadFileProgress(datasetPath, progress);
            log = "Finished uploading " + file.getFileName().toString();
            LOGGER.debug(log);
            hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
            // Re-fetch the now-uploaded remote data file, then summarize it.
            dataFile = HpcAccountUtils.getRemoteDataFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
            HpcAccountUtils.summarizeDataset(remoteDataService, algorParamReq, dataFile.getId(), HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
            log = "Summarized " + file.getFileName().toString();
            LOGGER.debug(log);
            hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
        } else {
            // Already uploaded: skip the transfer (-1 marks "skipped").
            log = "Skipped uploading " + file.getFileName().toString();
            LOGGER.debug(log);
            hpcJobManager.updateUploadFileProgress(datasetPath, -1);
            hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
            if (dataFile.getFileSummary().getVariableType() == null) {
                // Null variable type means the file was never summarized.
                HpcAccountUtils.summarizeDataset(remoteDataService, algorParamReq, dataFile.getId(), HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
                log = "Summarized " + file.getFileName().toString();
                LOGGER.debug(log);
                hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
            }
        }
        DataFile priorKnowledgeFile = null;
        if (prior != null) {
            // Upload the prior-knowledge file unless already present (by MD5).
            md5 = algorParamReq.getPriorKnowledgeMd5();
            priorKnowledgeFile = HpcAccountUtils.getRemotePriorKnowledgeFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
            if (priorKnowledgeFile == null) {
                dataUploadService.startUpload(prior, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
                log = "Started uploading Prior Knowledge File";
                LOGGER.debug(log);
                hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
                int progress;
                while ((progress = dataUploadService.getUploadJobStatus(prior.toAbsolutePath().toString())) < 100) {
                    hpcJobManager.updateUploadFileProgress(priorKnowledgePath, progress);
                    Thread.sleep(10);
                }
                hpcJobManager.updateUploadFileProgress(priorKnowledgePath, progress);
                priorKnowledgeFile = HpcAccountUtils.getRemotePriorKnowledgeFile(hpcAccountManager, remoteDataService, hpcAccount, md5);
                log = "Finished uploading Prior Knowledge File";
                LOGGER.debug(log);
                hpcJobManager.logHpcJobLogDetail(hpcJobLog, -1, log);
            }
        }
        // Build the remote algorithm request.
        edu.pitt.dbmi.ccd.rest.client.dto.algo.AlgorithmParamRequest paramRequest = new edu.pitt.dbmi.ccd.rest.client.dto.algo.AlgorithmParamRequest();
        paramRequest.setAlgoId(hpcJobInfo.getAlgoId());
        paramRequest.setDatasetFileId(dataFile.getId());
        if (algorParamReq.getTestId() != null) {
            paramRequest.setTestId(algorParamReq.getTestId());
        }
        if (algorParamReq.getScoreId() != null) {
            paramRequest.setScoreId(algorParamReq.getScoreId());
        }
        Set<AlgoParameter> algorithmParameters = new HashSet<>();
        for (AlgorithmParameter param : algorParamReq.getAlgorithmParameters()) {
            algorithmParameters.add(new AlgoParameter(param.getParameter(), param.getValue()));
            LOGGER.debug("AlgorithmParameter: " + param.getParameter() + " : " + param.getValue());
        }
        if (priorKnowledgeFile != null) {
            paramRequest.setPriorKnowledgeFileId(priorKnowledgeFile.getId());
            LOGGER.debug("priorKnowledgeFileId: " + priorKnowledgeFile.getId());
        }
        paramRequest.setAlgoParameters(algorithmParameters);
        if (algorParamReq.getJvmOptions() != null) {
            JvmOptions jvmOptions = new JvmOptions();
            jvmOptions.setMaxHeapSize(algorParamReq.getJvmOptions().getMaxHeapSize());
            paramRequest.setJvmOptions(jvmOptions);
        }
        Set<HpcParameter> hpcParameters = algorParamReq.getHpcParameters();
        if (hpcParameters != null) {
            Set<edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter> hpcParams = new HashSet<>();
            for (HpcParameter param : hpcParameters) {
                edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter hpcParam = new edu.pitt.dbmi.ccd.rest.client.dto.algo.HpcParameter();
                hpcParam.setKey(param.getKey());
                hpcParam.setValue(param.getValue());
                hpcParams.add(hpcParam);
                LOGGER.debug("HpcParameter: " + hpcParam.getKey() + " : " + hpcParam.getValue());
            }
            paramRequest.setHpcParameters(hpcParams);
        }
        // Submit the job to the remote queue.
        JobQueueService jobQueueService = hpcAccountService.getJobQueueService();
        JobInfo jobInfo = jobQueueService.addToRemoteQueue(paramRequest, HpcAccountUtils.getJsonWebToken(hpcAccountManager, hpcAccount));
        // Record the submission; status 0 means "Submitted".
        hpcJobInfo.setSubmittedTime(new Date(System.currentTimeMillis()));
        hpcJobInfo.setStatus(0);
        hpcJobInfo.setPid(jobInfo.getId());
        hpcJobInfo.setResultFileName(jobInfo.getResultFileName());
        hpcJobInfo.setResultJsonFileName(jobInfo.getResultJsonFileName());
        hpcJobInfo.setErrorResultFileName(jobInfo.getErrorResultFileName());
        hpcJobManager.updateHpcJobInfo(hpcJobInfo);
        log = "Submitted job to " + hpcAccount.getConnectionName();
        LOGGER.debug(log);
        hpcJobManager.logHpcJobLogDetail(hpcJobLog, 0, log);
        // Parenthesized null-guarded ternaries: the original expression
        // compared the whole concatenated string to null (precedence bug),
        // which silently dropped the id/pid prefix from the message.
        LOGGER.debug("HpcJobPreProcessTask: HpcJobInfo: id : " + hpcJobInfo.getId()
                + " : pid : " + hpcJobInfo.getPid()
                + " : " + hpcJobInfo.getAlgoId()
                + (algorParamReq.getTestId() == null ? "" : " : " + algorParamReq.getTestId())
                + (algorParamReq.getScoreId() == null ? "" : " : " + algorParamReq.getScoreId())
                + " : " + hpcJobInfo.getResultFileName());
        hpcJobManager.addNewSubmittedHpcJob(hpcJobInfo);
    } catch (InterruptedException e) {
        // Upload-progress polling was interrupted: restore the flag and bail.
        Thread.currentThread().interrupt();
    } catch (Exception e) {
        // Log with the cause attached instead of printing to stderr.
        LOGGER.error("HpcJobPreProcessTask failed for job " + hpcJobInfo.getId(), e);
    }
}
Also used : HpcAccount(edu.pitt.dbmi.tetrad.db.entity.HpcAccount) DataUploadService(edu.pitt.dbmi.ccd.rest.client.service.data.DataUploadService) RemoteDataFileService(edu.pitt.dbmi.ccd.rest.client.service.data.RemoteDataFileService) DataFile(edu.pitt.dbmi.ccd.rest.client.dto.data.DataFile) HpcJobManager(edu.cmu.tetradapp.app.hpc.manager.HpcJobManager) JobInfo(edu.pitt.dbmi.ccd.rest.client.dto.algo.JobInfo) HpcJobInfo(edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo) HpcAccountManager(edu.cmu.tetradapp.app.hpc.manager.HpcAccountManager) AlgorithmParamRequest(edu.pitt.dbmi.tetrad.db.entity.AlgorithmParamRequest) TetradDesktop(edu.cmu.tetradapp.app.TetradDesktop) HashSet(java.util.HashSet) Path(java.nio.file.Path) HpcAccountService(edu.cmu.tetradapp.app.hpc.manager.HpcAccountService) Date(java.util.Date) JobQueueService(edu.pitt.dbmi.ccd.rest.client.service.jobqueue.JobQueueService) HpcParameter(edu.pitt.dbmi.tetrad.db.entity.HpcParameter) AlgoParameter(edu.pitt.dbmi.ccd.rest.client.dto.algo.AlgoParameter) JvmOptions(edu.pitt.dbmi.ccd.rest.client.dto.algo.JvmOptions) AlgorithmParameter(edu.pitt.dbmi.tetrad.db.entity.AlgorithmParameter) HpcJobLog(edu.pitt.dbmi.tetrad.db.entity.HpcJobLog)

Aggregations

HpcJobInfo (edu.pitt.dbmi.tetrad.db.entity.HpcJobInfo)20 HpcAccount (edu.pitt.dbmi.tetrad.db.entity.HpcAccount)14 HpcJobLog (edu.pitt.dbmi.tetrad.db.entity.HpcJobLog)8 HpcJobManager (edu.cmu.tetradapp.app.hpc.manager.HpcJobManager)7 HashSet (java.util.HashSet)7 Set (java.util.Set)6 TetradDesktop (edu.cmu.tetradapp.app.TetradDesktop)5 FilePrint (edu.pitt.dbmi.ccd.commons.file.FilePrint)5 HashMap (java.util.HashMap)5 DefaultTableModel (javax.swing.table.DefaultTableModel)5 AlgorithmParamRequest (edu.pitt.dbmi.tetrad.db.entity.AlgorithmParamRequest)4 Date (java.util.Date)4 HpcAccountManager (edu.cmu.tetradapp.app.hpc.manager.HpcAccountManager)3 JobInfo (edu.pitt.dbmi.ccd.rest.client.dto.algo.JobInfo)3 ArrayList (java.util.ArrayList)3 HpcJobPreProcessTask (edu.cmu.tetradapp.app.hpc.task.HpcJobPreProcessTask)2 JobQueueService (edu.pitt.dbmi.ccd.rest.client.service.jobqueue.JobQueueService)2 AlgorithmParameter (edu.pitt.dbmi.tetrad.db.entity.AlgorithmParameter)2 HpcParameter (edu.pitt.dbmi.tetrad.db.entity.HpcParameter)2 Path (java.nio.file.Path)2