Example 1 with Table

Use of gov.sandia.n2a.ui.jobs.Table in project n2a by frothga.

From the class Main, method runHeadless:

/**
 *        Assumes this app was started solely for the purpose of running one specific job.
 *        This job operates outside the normal job management. The user is responsible
 *        for everything, including load balancing, directory and file management.
 *        Jobs can run remotely, but there is no support for retrieving results.
 */
public static void runHeadless(MNode record) {
    // See PanelEquations.launchJob()
    // Use current working directory, on assumption that's what the caller wants.
    Path jobDir = Paths.get(System.getProperty("user.dir")).toAbsolutePath();
    // This allows a remote job to run in the regular jobs directory there.
    String jobKey = new SimpleDateFormat("yyyy-MM-dd-HHmmss", Locale.ROOT).format(new Date());
    // Make this appear as if it is from the jobs collection.
    MDoc job = new MDoc(jobDir.resolve("job"), jobKey);
    String key = record.key();
    MNode doc = AppData.models.childOrEmpty(key);
    record.mergeUnder(doc);
    // TODO: the only reason to collate here is to ensure that host and backend are correctly identified if they are inherited. Need a more efficient method, such as lazy collation in MPart.
    MPart collated = new MPart(record);
    NodeJob.collectJobParameters(collated, key, job);
    NodeJob.saveSnapshot(record, job);
    // Handle remote host
    // If a remote host is used, it must be specified exactly, rather than a list of possibilities.
    Host host = Host.get(job);
    // Need to note the key so user can easily find the remote job directory.
    if (host instanceof Remote) {
        job.set(jobKey, "remoteKey");
        job.save();
    }
    // Start the job.
    Backend backend = Backend.getBackend(job.get("backend"));
    backend.start(job);
    // Wait for completion
    NodeJob node = new NodeJobHeadless(job);
    while (node.complete < 1) node.monitorProgress();
    // Convert to CSV, if requested.
    if (record.getFlag("$metadata", "csv")) {
        Table table = new Table(jobDir.resolve("out"), false);
        try {
            table.dumpCSV(jobDir.resolve("out.csv"));
        } catch (IOException e) {
            // Ignore failure; CSV conversion is optional output.
        }
    }
    // Extract results requested in ASV
    MNode ASV = record.child("$metadata", "dakota", "ASV");
    if (ASV == null) return;  // nothing more to do
    OutputParser output = new OutputParser();
    output.parse(jobDir.resolve("out"));
    try (BufferedWriter writer = Files.newBufferedWriter(jobDir.resolve("results"))) {
        for (MNode o : ASV) {
            String name = o.get();
            Column c = output.getColumn(name);
            float value = 0;
            if (c != null && !c.values.isEmpty())
                value = c.values.get(c.values.size() - 1);
            writer.write(value + " " + name);
            writer.newLine();  // one result per line
        }
    } catch (IOException e) {
        // Failure to write the results file is silently ignored.
    }
}
Also used : Path(java.nio.file.Path) MPart(gov.sandia.n2a.eqset.MPart) Table(gov.sandia.n2a.ui.jobs.Table) Remote(gov.sandia.n2a.host.Remote) Host(gov.sandia.n2a.host.Host) IOException(java.io.IOException) MNode(gov.sandia.n2a.db.MNode) Date(java.util.Date) MDoc(gov.sandia.n2a.db.MDoc) BufferedWriter(java.io.BufferedWriter) Backend(gov.sandia.n2a.plugins.extpoints.Backend) Column(gov.sandia.n2a.ui.jobs.OutputParser.Column) NodeJob(gov.sandia.n2a.ui.jobs.NodeJob) OutputParser(gov.sandia.n2a.ui.jobs.OutputParser) SimpleDateFormat(java.text.SimpleDateFormat)
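
The CSV step above comes down to two Table calls: construct it on the job's "out" file, then dumpCSV the result. Below is a minimal standalone sketch of just that step; only Table(Path, boolean) and dumpCSV(Path) are taken from the code above, while the class name, command-line handling, and the choice to print the stack trace are illustrative assumptions rather than part of the n2a code.

import gov.sandia.n2a.ui.jobs.Table;

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

public class OutToCsv {
    public static void main(String[] args) {
        // Assumed usage: first argument is the directory of a finished job; default to cwd.
        Path jobDir = Paths.get(args.length > 0 ? args[0] : ".").toAbsolutePath();
        try {
            // Same arguments as in runHeadless() above: the raw "out" file, second flag false.
            Table table = new Table(jobDir.resolve("out"), false);
            table.dumpCSV(jobDir.resolve("out.csv"));
        } catch (IOException e) {
            e.printStackTrace();  // unlike the examples above, report the failure
        }
    }
}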

Example 2 with Table

Use of gov.sandia.n2a.ui.jobs.Table in project n2a by frothga.

From the class Main, method studyHeadless:

/**
 *        Run a study from the command line.
 *        Unlike runHeadless(), this function uses all the usual job management machinery.
 */
public static void studyHeadless(MNode record) {
    String key = record.key();
    MNode doc = AppData.models.childOrEmpty(key);
    record.mergeUnder(doc);
    MPart collated = new MPart(record);
    if (!collated.containsKey("study"))
        return;
    // Start host monitor threads (see PanelRun constructor for non-headless procedure)
    Host.restartAssignmentThread();
    for (Host h : Host.getHosts()) h.restartMonitorThread();
    MNode studyNode = PanelEquations.createStudy(collated);
    Study study = new Study(studyNode);  // constructed in paused state
    study.togglePause();  // start
    study.waitForCompletion();
    // Output CSV files, if requested.
    if (record.getFlag("$metadata", "csv")) {
        Path studyDir = study.getDir();
        try (BufferedWriter parms = Files.newBufferedWriter(studyDir.resolve("study.csv"))) {
            SampleTableModel samples = new SampleTableModel();
            samples.update(study);
            int rows = samples.getRowCount();
            int cols = samples.getColumnCount();
            int lastCol = cols - 1;
            // Header for study.csv file
            // first column is job status, so skip it
            for (int c = 1; c < cols; c++) {
                parms.write(samples.getColumnName(c));
                if (c < lastCol)
                    parms.write(",");
            }
            parms.newLine();
            // Rows for study.csv file, along with converted output of each job.
            for (int r = 0; r < rows; r++) {
                for (int c = 1; c < cols; c++) {
                    parms.write(samples.getValueAt(r, c).toString());
                    if (c < lastCol)
                        parms.write(",");
                }
                parms.newLine();
                NodeJob jobNode = study.getJob(r);
                Path jobDir = Host.getJobDir(Host.getLocalResourceDir(), jobNode.getSource());
                try {
                    Table table = new Table(jobDir.resolve("out"), false);
                    table.dumpCSV(studyDir.resolve(r + ".csv"));
                } catch (IOException e) {
                    // Skip this job's CSV if its output cannot be read or written.
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // See MainFrame window close listener
    // Save any modified data, particularly the study record.
    AppData.quit();
    // Close down any ssh sessions.
    Host.quit();
}
Also used : Path(java.nio.file.Path) Study(gov.sandia.n2a.ui.studies.Study) MPart(gov.sandia.n2a.eqset.MPart) Table(gov.sandia.n2a.ui.jobs.Table) SampleTableModel(gov.sandia.n2a.ui.studies.PanelStudy.SampleTableModel) Host(gov.sandia.n2a.host.Host) NodeJob(gov.sandia.n2a.ui.jobs.NodeJob) IOException(java.io.IOException) MNode(gov.sandia.n2a.db.MNode) BufferedWriter(java.io.BufferedWriter)
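
For completeness, here is a rough sketch of how a headless caller might drive studyHeadless() with CSV export enabled. It is assumption-heavy: the real Main builds the record from parsed command-line arguments, so loadModelRecord() below is a hypothetical stand-in, the "1" flag value follows the value-first MNode.set convention visible above, and the launcher is assumed to sit in the same package as Main.

import gov.sandia.n2a.db.MNode;

public class HeadlessStudyLauncher {
    public static void main(String[] args) {
        // Hypothetical helper: builds the command-line record whose key names a stored
        // model containing a "study" section (studyHeadless() returns early without one).
        MNode record = loadModelRecord(args[0]);

        // Request study.csv plus per-job CSV files, matching the getFlag check above.
        record.set("1", "$metadata", "csv");

        Main.studyHeadless(record);  // blocks until study.waitForCompletion() returns
    }

    static MNode loadModelRecord(String modelName) {
        // Placeholder only; not part of the n2a API shown on this page.
        throw new UnsupportedOperationException("illustration only");
    }
}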

Aggregations

MNode (gov.sandia.n2a.db.MNode)2 MPart (gov.sandia.n2a.eqset.MPart)2 Host (gov.sandia.n2a.host.Host)2 NodeJob (gov.sandia.n2a.ui.jobs.NodeJob)2 Table (gov.sandia.n2a.ui.jobs.Table)2 BufferedWriter (java.io.BufferedWriter)2 IOException (java.io.IOException)2 Path (java.nio.file.Path)2 MDoc (gov.sandia.n2a.db.MDoc)1 Remote (gov.sandia.n2a.host.Remote)1 Backend (gov.sandia.n2a.plugins.extpoints.Backend)1 OutputParser (gov.sandia.n2a.ui.jobs.OutputParser)1 Column (gov.sandia.n2a.ui.jobs.OutputParser.Column)1 SampleTableModel (gov.sandia.n2a.ui.studies.PanelStudy.SampleTableModel)1 Study (gov.sandia.n2a.ui.studies.Study)1 SimpleDateFormat (java.text.SimpleDateFormat)1 Date (java.util.Date)1