use of gov.sandia.n2a.ui.jobs.NodeJob in project n2a by frothga.
In class Main, method runHeadless.
/**
* Assumes this app was started solely for the purpose of running one specific job.
* This job operates outside the normal job management. The user is responsible
* for everything, including load balancing, directory and file management.
* Jobs can run remotely, but there is no support for retrieving results.
*/
public static void runHeadless(MNode record) {
    // See PanelEquations.launchJob()

    // Use current working directory, on assumption that's what the caller wants.
    Path jobDir = Paths.get(System.getProperty("user.dir")).toAbsolutePath();
    // This allows a remote job to run in the regular jobs directory there.
    String jobKey = new SimpleDateFormat("yyyy-MM-dd-HHmmss", Locale.ROOT).format(new Date());
    // Make this appear as if it is from the jobs collection.
    MDoc job = new MDoc(jobDir.resolve("job"), jobKey);

    String key = record.key();
    MNode doc = AppData.models.childOrEmpty(key);
    record.mergeUnder(doc);
    // TODO: the only reason to collate here is to ensure that host and backend are correctly
    // identified if they are inherited. Need a more efficient method, such as lazy collation in MPart.
    MPart collated = new MPart(record);
    NodeJob.collectJobParameters(collated, key, job);
    NodeJob.saveSnapshot(record, job);

    // Handle remote host.
    // If a remote host is used, it must be specified exactly, rather than a list of possibilities.
    Host host = Host.get(job);
    if (host instanceof Remote) {
        // Need to note the key so user can easily find the remote job directory.
        job.set(jobKey, "remoteKey");
        job.save();
    }

    // Start the job.
    Backend backend = Backend.getBackend(job.get("backend"));
    backend.start(job);

    // Wait for completion.
    NodeJob node = new NodeJobHeadless(job);
    while (node.complete < 1) node.monitorProgress();

    // Convert to CSV, if requested.
    if (record.getFlag("$metadata", "csv")) {
        Table table = new Table(jobDir.resolve("out"), false);
        try {
            table.dumpCSV(jobDir.resolve("out.csv"));
        } catch (IOException e) {
        }
    }

    // Extract results requested in ASV.
    MNode ASV = record.child("$metadata", "dakota", "ASV");
    if (ASV == null) return;  // nothing more to do
    OutputParser output = new OutputParser();
    output.parse(jobDir.resolve("out"));
    try (BufferedWriter writer = Files.newBufferedWriter(jobDir.resolve("results"))) {
        for (MNode o : ASV) {
            String name = o.get();
            Column c = output.getColumn(name);
            float value = 0;
            if (c != null && !c.values.isEmpty()) value = c.values.get(c.values.size() - 1);
            writer.write(value + " " + name);
        }
    } catch (IOException e) {
    }
}
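For illustration only, a minimal sketch of how runHeadless might be driven from a small command-line entry point. The use of MVolatile as the scratch record, its constructor argument, and the package names in the imports are assumptions inferred from the snippet above, not a documented entry point. runHeadless itself merges in the stored model document, so the caller only needs a record whose key names the model, plus any overrides.

import gov.sandia.n2a.Main;           // assumed package for the Main class shown above
import gov.sandia.n2a.db.MNode;
import gov.sandia.n2a.db.MVolatile;   // assumed in-memory MNode implementation

public class HeadlessDriver {
    public static void main(String[] args) throws Exception {
        String modelName = args[0];                 // name of a model stored in AppData.models
        MNode record = new MVolatile(modelName);    // assumption: MVolatile(String) creates a node with that key
        record.set("1", "$metadata", "csv");        // request the out.csv conversion checked in runHeadless()
        Main.runHeadless(record);                   // blocks until node.complete >= 1
    }
}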
use of gov.sandia.n2a.ui.jobs.NodeJob in project n2a by frothga.
In class OptimizerLM, method getSeries.
public OutputParser.Column getSeries(int index) {
    NodeJob node = study.getJob(index);
    Path jobDir = node.getJobPath().getParent();
    OutputParser parser = new OutputParser();
    parser.parse(jobDir.resolve("study"));
    return parser.getColumn("goal");
}
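As a usage sketch, the column returned by getSeries can be reduced to the final goal value of each sample. The loop below is hypothetical; it assumes it runs inside OptimizerLM (so getSeries and study are in scope) and guards against jobs that have not produced output yet.

// Hypothetical: collect the last recorded "goal" value for every job in the study.
double[] goals = new double[study.getJobCount()];
for (int i = 0; i < goals.length; i++) {
    OutputParser.Column c = getSeries(i);
    if (c == null || c.values.isEmpty()) continue;   // column may be missing or still empty
    goals[i] = c.values.get(c.values.size() - 1);    // Column.values holds floats; the last entry is the most recent
}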
use of gov.sandia.n2a.ui.jobs.NodeJob in project n2a by frothga.
In class PanelEquations, method launchJob.
public void launchJob() {
    if (record == null) return;
    prepareForTabChange();

    String jobKey = new SimpleDateFormat("yyyy-MM-dd-HHmmss", Locale.ROOT).format(new Date());
    MDoc job = (MDoc) AppData.runs.childOrCreate(jobKey);
    NodeJob.collectJobParameters(root.source, record.key(), job);
    // Force directory (and job file) to exist, so Backends can work with the dir.
    job.save();
    NodeJob.saveSnapshot(record, job);

    MainTabbedPane mtp = (MainTabbedPane) MainFrame.instance.tabs;
    mtp.setPreferredFocus(PanelRun.instance, PanelRun.instance.tree);
    mtp.selectTab("Runs");
    NodeJob node = PanelRun.instance.addNewRun(job, true);

    // Hack to allow local jobs to bypass the wait-for-host queue.
    // It would be better for all jobs to check for resources before starting.
    // However, the time cost for the local check could be as long as the job itself
    // (for very simple models). There is some expectation that the user knows
    // the state of their own system when they choose to hit the play button.
    Backend backend = Backend.getBackend(job.get("backend"));
    String backendName = backend.getName().toLowerCase();
    Host h = Host.get(job);
    boolean internal = backend instanceof InternalBackend;
    boolean localhost = !(h instanceof Remote);
    boolean forbidden = h.config.get("backend", backendName).equals("0");
    if (internal || (localhost && !forbidden)) {  // use of Internal overrides host selection
        // In case it was "internal" but not "localhost", set host to correct value.
        job.set("localhost", "host");
        backend.start(job);
        h.monitor(node);
        return;
    }
    Host.waitForHost(node);
}
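The forbidden flag above comes from the host's own configuration. As a sketch of how that flag might be set (the backend name "c" and the writability of Host.config are assumptions), which would push matching jobs through Host.waitForHost instead of the immediate-start path:

// Hypothetical: mark the "c" backend as forbidden on the job's host, so
// launchJob() falls through to Host.waitForHost(node) for such jobs.
Host h = Host.get(job);
h.config.set("0", "backend", "c");   // "0" is the value launchJob() tests for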
use of gov.sandia.n2a.ui.jobs.NodeJob in project n2a by frothga.
In class PanelStudy, method delete.
/**
* Delete studies associated with currently selected items in list.
* Execute on EDT only.
*/
public void delete() {
    List<Study> studies = list.getSelectedValuesList();
    if (studies.size() < 1) return;
    int nextSelection = list.getSelectedIndex();

    displayStudy = null;
    for (Study study : studies) {
        // Stop the worker thread; does not stop individual jobs that are currently running.
        study.stop();
        model.removeElement(study);
    }

    int count = model.getSize();
    if (nextSelection < 0) nextSelection = 0;
    if (nextSelection >= count) nextSelection = count - 1;
    if (nextSelection >= 0) {  // make new selection and load display pane
        list.setSelectedIndex(nextSelection);
        displayStudy = list.getSelectedValue();
    }
    view();

    // Purge data
    Thread purgeThread = new Thread("Delete Studies") {
        public void run() {
            for (Study study : studies) {
                // It does no harm to clear the record out from under the worker thread.
                // Any further access will simply not be written to disk.
                String studyKey = study.source.key();
                AppData.studies.clear(studyKey);

                // Purge any jobs that were started directly by the study.
                List<TreePath> paths = new ArrayList<TreePath>();
                int jobCount = study.getJobCount();
                for (int index = 0; index < jobCount; index++) {
                    String jobKey = study.getJobKey(index);
                    // Test whether this job was started directly by the study.
                    if (!jobKey.startsWith(studyKey)) continue;

                    NodeJob jobNode;
                    synchronized (PanelRun.jobNodes) {
                        jobNode = PanelRun.jobNodes.get(jobKey);
                    }
                    if (jobNode != null) paths.add(new TreePath(jobNode.getPath()));
                }
                PanelRun.instance.delete(paths.toArray(new TreePath[paths.size()]));
            }
        }
    };
    purgeThread.setDaemon(true);
    purgeThread.start();
}
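The lookup inside the purge thread, synchronize on PanelRun.jobNodes and then build a TreePath from the tree node, also works for a single job. A small helper sketch using javax.swing.tree.TreePath as above; the helper name is hypothetical:

// Hypothetical helper: return the Runs-panel tree path for one job key,
// or null if that job is not currently shown in the panel.
static TreePath findJobPath(String jobKey) {
    NodeJob jobNode;
    synchronized (PanelRun.jobNodes) {
        jobNode = PanelRun.jobNodes.get(jobKey);
    }
    if (jobNode == null) return null;
    return new TreePath(jobNode.getPath());
}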
use of gov.sandia.n2a.ui.jobs.NodeJob in project n2a by frothga.
In class Study, method getJob.
public NodeJob getJob(int index) {
    String jobKey = getJobKey(index);
    NodeJob result;
    synchronized (PanelRun.jobNodes) {
        result = PanelRun.jobNodes.get(jobKey);
    }
    return result;
}
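A caller-side sketch: as with getSeries above, the returned node can be null when the Runs panel has not loaded (or has since deleted) that job, so callers should check before dereferencing. The completion test reuses the node.complete convention seen in runHeadless.

// Hypothetical: count how many of a study's jobs have finished.
int done = 0;
for (int i = 0; i < study.getJobCount(); i++) {
    NodeJob node = study.getJob(i);
    if (node != null && node.complete >= 1) done++;   // complete < 1 means still in progress
}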