Use of org.gridlab.gat.resources.ResourceDescription in project compss by bsc-wdc.
Class SshLsfResourceBrokerAdaptor, method createBsubScript:
private java.io.File createBsubScript(JobDescription description, String returnValueFile, int nproc) throws GATInvocationException {
    // Adding bsub options
    String Queue = null;
    long Time = -1;
    Integer cpus = null;
    String jobname = null;
    java.io.File temp;
    LSFScriptWriter job = null;
    HashMap<String, Object> rd_HashMap = null;
    SoftwareDescription sd = description.getSoftwareDescription();
    ResourceDescription rd = description.getResourceDescription();
    // Corrected initialization of rd_HashMap: rd may be null ... --Ceriel
    if (rd != null) {
        rd_HashMap = (HashMap<String, Object>) rd.getDescription();
    }
    if (rd_HashMap == null) {
        rd_HashMap = new HashMap<String, Object>();
    }
    // try {
    temp = new java.io.File("lsf" + Math.random());
    try {
        job = new LSFScriptWriter(new BufferedWriter(new FileWriter(temp)));
        String userScript = (String) gatContext.getPreferences().get(SSHLSF_SCRIPT);
        if (userScript != null) {
            // a specified job script overrides everything, except for
            // pre-staging, post-staging,
            // and exit status.
            BufferedReader f = new BufferedReader(new FileReader(userScript));
            for (; ; ) {
                String s = f.readLine();
                if (s == null) {
                    break;
                }
                job.print(s + "\n");
            }
        } else {
            job.print("#!/bin/sh\n");
            job.print("# bsub script automatically generated by GAT SshLsf adaptor\n");
            // Resources: queue, walltime, memory size, et cetera.
            Queue = (String) rd_HashMap.get("machine.queue");
            if (Queue == null) {
                Queue = sd.getStringAttribute(SoftwareDescription.JOB_QUEUE, null);
            }
            if (Queue != null) {
                job.addOption("q", Queue);
            }
            Time = sd.getLongAttribute(SoftwareDescription.WALLTIME_MAX, -1L);
            cpus = (Integer) rd_HashMap.get(HardwareResourceDescription.CPU_COUNT);
            if (cpus == null) {
                cpus = sd.getIntAttribute("coreCount", 1);
            }
            job.addOption("n", cpus);
            // In a single node
            job.addOption("R", "\"span[ptile=" + cpus + "]\"");
            if (Time > 0) {
                // Reformat time.
                int minutes = (int) (Time % 60);
                job.addOption("W", minutes);
            } else {
                job.addOption("W", 60);
            }
            String nativeFlags = null;
            Object o = rd == null ? null : rd.getResourceAttribute(SSHLSF_NATIVE_FLAGS);
            if (o != null && o instanceof String) {
                nativeFlags = (String) o;
            } else {
                String s = sd == null ? null : sd.getStringAttribute(SSHLSF_NATIVE_FLAGS, null);
                if (s != null) {
                    nativeFlags = s;
                } else {
                    o = gatContext.getPreferences().get(SSHLSF_NATIVE_FLAGS);
                    if (o != null && o instanceof String) {
                        nativeFlags = (String) o;
                    }
                }
            }
            if (nativeFlags != null) {
                String[] splits = nativeFlags.split("##");
                for (String s : splits) {
                    job.addString(s);
                }
            }
            String path = sd.getStringAttribute(SoftwareDescription.SANDBOX_ROOT, "");
            if (!path.isEmpty() && !path.endsWith(File.separator)) {
                path = path + File.separator;
            }
            // Set working dir.
            // job.addOption("cwd", path);
            // Name for the job.
            jobname = (String) rd_HashMap.get("Jobname");
            if (jobname == null) {
                jobname = brokerURI.getUserInfo();
                if (jobname == null || "".equals(jobname)) {
                    jobname = "compss_remotejob_" + System.getProperty("user.name");
                }
            }
            if (jobname != null)
                job.addOption("J", jobname);
            if (sd.getStdout() != null) {
                job.addOption("oo", path + sd.getStdout().getName());
            }
            if (sd.getStderr() != null) {
                job.addOption("eo", path + sd.getStderr().getName());
            }
            addScriptExecution(job, sd, rd);
        }
        job.print("echo retvalue = $? > " + returnValueFile + "\n");
    } catch (Throwable e) {
        throw new GATInvocationException("Cannot create temporary bsub file " + temp.getAbsolutePath(), e);
    } finally {
        if (job != null)
            job.close();
    }
    return temp;
}
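
createBsubScript delegates the actual formatting to LSFScriptWriter, which belongs to the adaptor and is not shown on this page. As a rough mental model only, the sketch below assumes the usual LSF convention that each addOption call becomes a #BSUB directive at the top of the generated script and that addString passes native flags through unchanged; the class name SimpleLsfScriptWriter, the exact output, and the mapping itself are assumptions, not the adaptor's real implementation.

import java.io.IOException;
import java.io.Writer;

// Hypothetical helper, named SimpleLsfScriptWriter to avoid confusion with the adaptor's
// real LSFScriptWriter class, whose source is not part of this snippet.
class SimpleLsfScriptWriter {

    private final Writer out;

    SimpleLsfScriptWriter(Writer out) {
        this.out = out;
    }

    // addOption("q", "default") is assumed to become the directive "#BSUB -q default".
    void addOption(String flag, Object value) throws IOException {
        out.write("#BSUB -" + flag + " " + value + "\n");
    }

    // Native flags split on "##" are assumed to be passed through as-is, e.g. "#BSUB -x".
    void addString(String s) throws IOException {
        out.write("#BSUB " + s + "\n");
    }

    // Raw lines such as "#!/bin/sh" or the final echo of the return value.
    void print(String s) throws IOException {
        out.write(s);
    }

    void close() throws IOException {
        out.close();
    }
}

Under that assumption, the script produced above would presumably start with directives such as #BSUB -q <queue>, #BSUB -n <cpus>, #BSUB -R "span[ptile=<cpus>]" and #BSUB -W <minutes>, followed by the command added by addScriptExecution and the final echo of the return value.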
Use of org.gridlab.gat.resources.ResourceDescription in project compss by bsc-wdc.
Class GATJob, method prepareJob:
private JobDescription prepareJob() throws Exception {
    // Get the information related to the job
    logger.debug("Preparing GAT Job " + this.jobId);
    TaskDescription taskParams = this.taskParams;
    String targetPath = getResourceNode().getInstallDir();
    String targetHost = getResourceNode().getHost();
    String targetUser = getResourceNode().getUser();
    if (userNeeded && !targetUser.isEmpty()) {
        targetUser += "@";
    } else {
        targetUser = "";
    }
    SoftwareDescription sd = new SoftwareDescription();
    sd.setExecutable(targetPath + WORKER_SCRIPT_PATH + WORKER_SCRIPT_NAME);
    ArrayList<String> lArgs = new ArrayList<String>();
    // Common arguments: language working_dir lib_path num_obsolete [obs1... obsN] tracing [event_type task_id
    // slot_id]
    lArgs.add(LANG);
    lArgs.add(getResourceNode().getWorkingDir());
    lArgs.add(getResourceNode().getLibPath());
    LogicalData[] obsoleteFiles = getResource().pollObsoletes();
    if (obsoleteFiles != null) {
        lArgs.add("" + obsoleteFiles.length);
        for (LogicalData ld : obsoleteFiles) {
            String renaming = ld.getName();
            lArgs.add(renaming);
        }
    } else {
        lArgs.add("0");
    }
    // Check sandbox working dir
    boolean isSpecific = false;
    String sandboxDir = null;
    AbstractMethodImplementation absImpl = (AbstractMethodImplementation) this.impl;
    switch (absImpl.getMethodType()) {
        case BINARY:
            BinaryImplementation binaryImpl = (BinaryImplementation) absImpl;
            sandboxDir = binaryImpl.getWorkingDir();
            isSpecific = true;
            break;
        case MPI:
            MPIImplementation mpiImpl = (MPIImplementation) absImpl;
            sandboxDir = mpiImpl.getWorkingDir();
            isSpecific = true;
            break;
        case DECAF:
            DecafImplementation decafImpl = (DecafImplementation) absImpl;
            sandboxDir = decafImpl.getWorkingDir();
            isSpecific = true;
            break;
        case OMPSS:
            OmpSsImplementation ompssImpl = (OmpSsImplementation) absImpl;
            sandboxDir = ompssImpl.getWorkingDir();
            isSpecific = true;
            break;
        case OPENCL:
            OpenCLImplementation openclImpl = (OpenCLImplementation) absImpl;
            sandboxDir = openclImpl.getWorkingDir();
            isSpecific = true;
            break;
        case METHOD:
            sandboxDir = null;
            break;
    }
    if (sandboxDir == null || sandboxDir.isEmpty() || sandboxDir.equals(Constants.UNASSIGNED)) {
        sandboxDir = getResourceNode().getWorkingDir() + File.separator + "sandBox" + File.separator + "job_" + this.jobId;
        isSpecific = false;
    }
    // Processing parameters to get symlinks pairs to create (symlinks) and how to pass parameters in the GAT
    // Job(paramArgs)
    ArrayList<String> symlinks = new ArrayList<>();
    ArrayList<String> paramArgs = new ArrayList<>();
    processParameters(sandboxDir, symlinks, paramArgs);
    // Adding info to create symlinks between renamed files and original names
    lArgs.add(Boolean.toString(isSpecific));
    lArgs.add(sandboxDir);
    if (symlinks.size() > 0) {
        lArgs.add(String.valueOf(symlinks.size()));
        lArgs.addAll(symlinks);
    } else {
        lArgs.add("0");
    }
    lArgs.add(Boolean.toString(Tracer.isActivated()));
    lArgs.add(getHostName());
    if (debug) {
        logger.debug("hostName " + getHostName());
    }
    if (Tracer.isActivated()) {
        // event type
        lArgs.add(String.valueOf(Tracer.getTaskEventsType()));
        // task id
        lArgs.add(String.valueOf(this.taskParams.getId() + 1));
        int slot = Tracer.getNextSlot(targetHost);
        // slot id
        lArgs.add(String.valueOf(slot));
        sd.addAttribute("slot", slot);
    }
    // Language-dependent arguments: taskSandbox_dir app_dir classpath pythonpath debug storage_conf
    // method_impl_type method_impl_params
    // numSlaves [slave1,..,slaveN] numCus
    // has_target num_params par_type_1 par_1 ... par_type_n par_n
    lArgs.add(sandboxDir);
    lArgs.add(getResourceNode().getAppDir());
    lArgs.add(getClasspath());
    lArgs.add(getPythonpath());
    lArgs.add(String.valueOf(debug));
    lArgs.add(STORAGE_CONF);
    lArgs.add(String.valueOf(absImpl.getMethodType()));
    switch (absImpl.getMethodType()) {
        case METHOD:
            MethodImplementation methodImpl = (MethodImplementation) absImpl;
            lArgs.add(methodImpl.getDeclaringClass());
            String methodName = methodImpl.getAlternativeMethodName();
            if (methodName == null || methodName.isEmpty()) {
                methodName = taskParams.getName();
            }
            lArgs.add(methodName);
            break;
        case MPI:
            MPIImplementation mpiImpl = (MPIImplementation) absImpl;
            lArgs.add(mpiImpl.getMpiRunner());
            lArgs.add(mpiImpl.getBinary());
            break;
        case DECAF:
            DecafImplementation decafImpl = (DecafImplementation) absImpl;
            lArgs.add(targetPath + DecafImplementation.SCRIPT_PATH);
            String dfScript = decafImpl.getDfScript();
            if (!dfScript.startsWith(File.separator)) {
                String appPath = getResourceNode().getAppDir();
                dfScript = appPath + File.separator + dfScript;
            }
            lArgs.add(dfScript);
            String dfExecutor = decafImpl.getDfExecutor();
            if (dfExecutor == null || dfExecutor.isEmpty() || dfExecutor.equals(Constants.UNASSIGNED)) {
                dfExecutor = "executor.sh";
            }
            if (!dfExecutor.startsWith(File.separator) && !dfExecutor.startsWith("./")) {
                dfExecutor = "./" + dfExecutor;
            }
            lArgs.add(dfExecutor);
            String dfLib = decafImpl.getDfLib();
            if (dfLib == null || dfLib.isEmpty()) {
                dfLib = Constants.UNASSIGNED;
            }
            lArgs.add(dfLib);
            lArgs.add(decafImpl.getMpiRunner());
            break;
        case OMPSS:
            OmpSsImplementation ompssImpl = (OmpSsImplementation) absImpl;
            lArgs.add(ompssImpl.getBinary());
            break;
        case OPENCL:
            OpenCLImplementation openclImpl = (OpenCLImplementation) absImpl;
            lArgs.add(openclImpl.getKernel());
            break;
        case BINARY:
            BinaryImplementation binaryImpl = (BinaryImplementation) absImpl;
            lArgs.add(binaryImpl.getBinary());
            break;
    }
    // Slave nodes and cus description
    lArgs.add(String.valueOf(slaveWorkersNodeNames.size()));
    lArgs.addAll(slaveWorkersNodeNames);
    lArgs.add(String.valueOf(((MethodResourceDescription) this.impl.getRequirements()).getTotalCPUComputingUnits()));
    // Add parameter arguments already processed
    lArgs.addAll(paramArgs);
    // Conversion vector -> array
    String[] arguments = new String[lArgs.size()];
    arguments = lArgs.toArray(arguments);
    try {
        sd.setArguments(arguments);
    } catch (NullPointerException e) {
        StringBuilder sb = new StringBuilder("Null argument parameter of job " + this.jobId + " " + absImpl.getMethodDefinition() + "\n");
        int i = 0;
        for (Parameter param : taskParams.getParameters()) {
            sb.append("Parameter ").append(i).append("\n");
            DataType type = param.getType();
            sb.append("\t Type: ").append(param.getType()).append("\n");
            if (type == DataType.FILE_T || type == DataType.OBJECT_T) {
                DependencyParameter dPar = (DependencyParameter) param;
                DataAccessId dAccId = dPar.getDataAccessId();
                sb.append("\t Target: ").append(dPar.getDataTarget()).append("\n");
                if (type == DataType.OBJECT_T) {
                    if (dAccId instanceof RAccessId) {
                        sb.append("\t Direction: " + "R").append("\n");
                    } else {
                        // for the worker to know it must write the object to disk
                        sb.append("\t Direction: " + "W").append("\n");
                    }
                }
            } else if (type == DataType.STRING_T) {
                BasicTypeParameter btParS = (BasicTypeParameter) param;
                // Check spaces
                String value = btParS.getValue().toString();
                int numSubStrings = value.split(" ").length;
                sb.append("\t Num Substrings: " + Integer.toString(numSubStrings)).append("\n");
                sb.append("\t Value:" + value).append("\n");
            } else {
                // Basic types
                BasicTypeParameter btParB = (BasicTypeParameter) param;
                sb.append("\t Value: " + btParB.getValue().toString()).append("\n");
            }
            i++;
        }
        logger.error(sb.toString());
        listener.jobFailed(this, JobEndStatus.SUBMISSION_FAILED);
    }
    sd.addAttribute("jobId", jobId);
    // JEA Changed to allow execution in MN
    sd.addAttribute(SoftwareDescription.WALLTIME_MAX, absImpl.getRequirements().getWallClockLimit());
    if (absImpl.getRequirements().getHostQueues().size() > 0) {
        sd.addAttribute(SoftwareDescription.JOB_QUEUE, absImpl.getRequirements().getHostQueues().get(0));
    }
    sd.addAttribute("coreCount", absImpl.getRequirements().getTotalCPUComputingUnits());
    sd.addAttribute("gpuCount", absImpl.getRequirements().getTotalGPUComputingUnits());
    sd.addAttribute("fpgaCount", absImpl.getRequirements().getTotalFPGAComputingUnits());
    sd.addAttribute(SoftwareDescription.MEMORY_MAX, absImpl.getRequirements().getMemorySize());
    // sd.addAttribute(SoftwareDescription.SANDBOX_ROOT, "/tmp/");
    sd.addAttribute(SoftwareDescription.SANDBOX_ROOT, getResourceNode().getWorkingDir());
    sd.addAttribute(SoftwareDescription.SANDBOX_USEROOT, "true");
    sd.addAttribute(SoftwareDescription.SANDBOX_DELETE, "false");
    /*
     * sd.addAttribute(SoftwareDescription.SANDBOX_PRESTAGE_STDIN, "false");
     * sd.addAttribute(SoftwareDescription.SANDBOX_POSTSTAGE_STDOUT, "false");
     * sd.addAttribute(SoftwareDescription.SANDBOX_POSTSTAGE_STDERR, "false");
     */
    if (debug) {
        // Set standard output file for job
        File outFile = GAT.createFile(context, Protocol.ANY_URI.getSchema() + File.separator + JOBS_DIR + "job" + jobId + "_" + this.getHistory() + ".out");
        sd.setStdout(outFile);
    }
    if (debug || usingGlobus) {
        // Set standard error file for job
        File errFile = GAT.createFile(context, Protocol.ANY_URI.getSchema() + File.separator + JOBS_DIR + "job" + jobId + "_" + this.getHistory() + ".err");
        sd.setStderr(errFile);
    }
    Map<String, Object> attributes = new HashMap<String, Object>();
    attributes.put(RES_ATTR, Protocol.ANY_URI.getSchema() + targetUser + targetHost);
    attributes.put("Jobname", "compss_remote_job_" + jobId);
    ResourceDescription rd = new HardwareResourceDescription(attributes);
    if (debug) {
        logger.debug("Ready to submit job " + jobId + ":");
        logger.debug(" * Host: " + targetHost);
        logger.debug(" * Executable: " + sd.getExecutable());
        StringBuilder sb = new StringBuilder(" - Arguments:");
        for (String arg : sd.getArguments()) {
            sb.append(" ").append(arg);
        }
        logger.debug(sb.toString());
    }
    JobDescription jd = new JobDescription(sd, rd);
    // jd.setProcessCount(method.getRequirements().getProcessorCoreCount());
    return jd;
}
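
prepareJob only assembles the JobDescription; submitting it is assumed to happen elsewhere in GATJob. Below is a minimal sketch of that submission step, restricted to the GAT calls that also appear in the AdvertJob example further down (GAT.createResourceBroker and ResourceBroker.submitJob); the class name SubmitSketch and the example broker URI are illustrative assumptions.

import org.gridlab.gat.GAT;
import org.gridlab.gat.GATContext;
import org.gridlab.gat.URI;
import org.gridlab.gat.resources.Job;
import org.gridlab.gat.resources.JobDescription;
import org.gridlab.gat.resources.ResourceBroker;

public class SubmitSketch {

    // Submits a prepared JobDescription to the host encoded in brokerUri,
    // e.g. "sshlsf://user@cluster.example.com" (hypothetical URI).
    public static Job submit(GATContext context, JobDescription jd, String brokerUri) throws Exception {
        ResourceBroker broker = GAT.createResourceBroker(context, new URI(brokerUri));
        return broker.submitJob(jd);
    }
}

In the AdvertJob example below, the same two calls are used directly with an sshsge URI.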
Use of org.gridlab.gat.resources.ResourceDescription in project compss by bsc-wdc.
Class AdvertJob, method main:
public static void main(String[] args) throws Exception {
    try {
        GATContext c = new GATContext();
        Preferences prefs = new Preferences();
        prefs.put("File.adaptor.name", "local,commandlinessh");
        prefs.put("job.stop.on.exit", "false");
        c.addPreferences(prefs);
        SoftwareDescription sd = new SoftwareDescription();
        sd.setExecutable("/bin/sleep");
        sd.setArguments("100");
        // stdout & stderr
        File stdout = GAT.createFile(c, "std.out");
        File stderr = GAT.createFile(c, "std.err");
        sd.setStderr(stderr);
        sd.setStdout(stdout);
        ResourceDescription rd = new HardwareResourceDescription();
        JobDescription jd = new JobDescription(sd, rd);
        ResourceBroker broker = GAT.createResourceBroker(c, new URI("sshsge://fs0.das3.cs.vu.nl"));
        Job job = broker.submitJob(jd);
        AdvertService a = GAT.createAdvertService(c);
        MetaData m = new MetaData();
        m.put("name", "testJob");
        a.add(job, m, "/rob/testJob");
        a.exportDataBase(new URI("file:///mydb"));
        GAT.end();
        System.exit(0);
    } catch (Throwable e) {
        e.printStackTrace();
    }
}
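
To complement the example above, the sketch below re-attaches to the advertised job from a second process. It assumes, without the snippet confirming it, that AdvertService also offers importDataBase(URI) and getAdvertisable(String) as counterparts to the exportDataBase and add calls shown, that the stored advertisable can be cast back to a Job, and that Job exposes its status via getState().

import org.gridlab.gat.GAT;
import org.gridlab.gat.GATContext;
import org.gridlab.gat.URI;
import org.gridlab.gat.advert.AdvertService;
import org.gridlab.gat.resources.Job;

public class AdvertLookup {

    public static void main(String[] args) throws Exception {
        GATContext c = new GATContext();
        AdvertService a = GAT.createAdvertService(c);
        // Load the database exported by AdvertJob and look the job up by its advert path.
        a.importDataBase(new URI("file:///mydb"));
        Job job = (Job) a.getAdvertisable("/rob/testJob");
        System.out.println("Recovered job state: " + job.getState());
        GAT.end();
    }
}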