Use of org.apache.sysml.runtime.DMLScriptException in project incubator-systemml by apache.
The class DMLScript, method executeScript.
/**
 * Single entry point for all public invocation alternatives (e.g.,
 * main, executeScript, JaqlUdf etc.)
 *
 * @param conf Hadoop configuration
 * @param args arguments
 * @return true if success, false otherwise
 * @throws DMLException if DMLException occurs
 * @throws ParseException if ParseException occurs
 */
public static boolean executeScript(Configuration conf, String[] args) throws DMLException {
	//parse arguments and set execution properties
	//keep old rtplatform
	RUNTIME_PLATFORM oldrtplatform = rtplatform;
	//keep old explain
	ExplainType oldexplain = EXPLAIN;
	Options options = createCLIOptions();
	try {
		DMLOptions dmlOptions = parseCLArguments(args, options);
		// String[] scriptArgs = null; //optional script arguments
		// boolean namedScriptArgs = false;
		STATISTICS = dmlOptions.stats;
		STATISTICS_COUNT = dmlOptions.statsCount;
		USE_ACCELERATOR = dmlOptions.gpu;
		FORCE_ACCELERATOR = dmlOptions.forceGPU;
		EXPLAIN = dmlOptions.explainType;
		ENABLE_DEBUG_MODE = dmlOptions.debug;
		SCRIPT_TYPE = dmlOptions.scriptType;
		rtplatform = dmlOptions.execMode;
		String fnameOptConfig = dmlOptions.configFile;
		boolean isFile = dmlOptions.filePath != null;
		String fileOrScript = isFile ? dmlOptions.filePath : dmlOptions.script;
		boolean help = dmlOptions.help;
		if (help) {
			HelpFormatter formatter = new HelpFormatter();
			formatter.printHelp("systemml", options);
			return true;
		}
		if (dmlOptions.clean) {
			cleanSystemMLWorkspace();
			return true;
		}
		//set log level
		if (!ENABLE_DEBUG_MODE)
			setLoggingProperties(conf);
		//Step 2: prepare script invocation
		if (isFile && StringUtils.endsWithIgnoreCase(fileOrScript, ".pydml")) {
			SCRIPT_TYPE = ScriptType.PYDML;
		}
		String dmlScriptStr = readDMLScript(isFile, fileOrScript);
		Map<String, String> argVals = dmlOptions.argVals;
		DML_FILE_PATH_ANTLR_PARSER = dmlOptions.filePath;
		//Step 3: invoke dml script
		printInvocationInfo(fileOrScript, fnameOptConfig, argVals);
		if (ENABLE_DEBUG_MODE) {
			//the inner try block is just to isolate the debug exception, which allows
			//separating debugger bugs from runtime bugs
			launchDebugger(dmlScriptStr, fnameOptConfig, argVals, SCRIPT_TYPE);
		} else {
			execute(dmlScriptStr, fnameOptConfig, argVals, args, SCRIPT_TYPE);
		}
	} catch (AlreadySelectedException e) {
		System.err.println("Mutually exclusive options were selected. " + e.getMessage());
		HelpFormatter formatter = new HelpFormatter();
		formatter.printHelp("systemml", options);
		return false;
	} catch (org.apache.commons.cli.ParseException e) {
		System.err.println(e.getMessage());
		HelpFormatter formatter = new HelpFormatter();
		formatter.printHelp("systemml", options);
	} catch (ParseException pe) {
		//rethrow ParseException to propagate the parse error
		throw pe;
	} catch (DMLScriptException e) {
		//rethrow DMLScriptException to propagate stop call
		throw e;
	} catch (Exception ex) {
		LOG.error("Failed to execute DML script.", ex);
		throw new DMLException(ex);
	} finally {
		//reset runtime platform and explain type
		rtplatform = oldrtplatform;
		EXPLAIN = oldexplain;
	}
	return true;
}
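For orientation, here is a minimal caller sketch (not taken from the project sources) that invokes executeScript with a plain Hadoop Configuration; the script path and the -f/-stats flags are illustrative placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.sysml.api.DMLException;
import org.apache.sysml.api.DMLScript;

public class ExecuteScriptSketch {
	public static void main(String[] args) throws DMLException {
		//plain Hadoop configuration; a cluster setup would pass the cached job conf instead
		Configuration conf = new Configuration();
		//hypothetical script path and options, for illustration only
		String[] dmlArgs = new String[] { "-f", "/tmp/example.dml", "-stats" };
		boolean success = DMLScript.executeScript(conf, dmlArgs);
		System.out.println("Execution succeeded: " + success);
	}
}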
Use of org.apache.sysml.runtime.DMLScriptException in project incubator-systemml by apache.
The class WhileProgramBlock, method execute.
public void execute(ExecutionContext ec) throws DMLRuntimeException {
	//execute while loop
	try {
		// prepare update in-place variables
		UpdateType[] flags = prepareUpdateInPlaceVariables(ec, _tid);
		//run loop body until predicate becomes false
		while (executePredicate(ec).getBooleanValue()) {
			//execute all child blocks
			for (int i = 0; i < _childBlocks.size(); i++) {
				ec.updateDebugState(i);
				_childBlocks.get(i).execute(ec);
			}
		}
		// reset update-in-place variables
		resetUpdateInPlaceVariableFlags(ec, flags);
	} catch (DMLScriptException e) {
		//propagate stop call
		throw e;
	} catch (Exception e) {
		throw new DMLRuntimeException(printBlockErrorLocation() + "Error evaluating while program block", e);
	}
	//execute exit instructions
	try {
		executeInstructions(_exitInstructions, ec);
	} catch (Exception e) {
		throw new DMLRuntimeException(printBlockErrorLocation() + "Error executing while exit instructions.", e);
	}
}
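The two catch blocks above show the pattern that recurs in all of these examples: a DMLScriptException raised by a script-level stop() is rethrown unchanged so the user's message survives, while every other failure is wrapped into a DMLRuntimeException with the block's error location. A standalone sketch of that pattern (the Body interface and the location string are hypothetical, not project code):

import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.DMLScriptException;

public class StopPropagationSketch {

	//hypothetical abstraction of a loop body, for illustration only
	interface Body {
		void run() throws DMLRuntimeException;
	}

	static void runGuarded(Body body, String location) throws DMLRuntimeException {
		try {
			body.run();
		} catch (DMLScriptException e) {
			//user-requested stop(): rethrow unchanged so the original message survives
			throw e;
		} catch (Exception e) {
			//internal error: wrap with the block location for diagnostics
			throw new DMLRuntimeException(location + "Error evaluating program block", e);
		}
	}
}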
Use of org.apache.sysml.runtime.DMLScriptException in project incubator-systemml by apache.
The class ForProgramBlock, method execute.
@Override
public void execute(ExecutionContext ec) throws DMLRuntimeException {
	// add the iterable predicate variable to the variable set
	String iterVarName = _iterablePredicateVars[0];
	// evaluate from, to, incr only once (assumption: known at for entry)
	IntObject from = executePredicateInstructions(1, _fromInstructions, ec);
	IntObject to = executePredicateInstructions(2, _toInstructions, ec);
	IntObject incr = (_incrementInstructions == null || _incrementInstructions.isEmpty()) && _iterablePredicateVars[3] == null ?
		new IntObject((from.getLongValue() <= to.getLongValue()) ? 1 : -1) :
		executePredicateInstructions(3, _incrementInstructions, ec);
	if (incr.getLongValue() == 0) //would produce infinite loop
		throw new DMLRuntimeException(this.printBlockErrorLocation() + "Expression for increment of variable '" + iterVarName + "' must evaluate to a non-zero value.");
	// execute for loop
	try {
		// prepare update in-place variables
		UpdateType[] flags = prepareUpdateInPlaceVariables(ec, _tid);
		// run for loop body for each instance of predicate sequence
		SequenceIterator seqIter = new SequenceIterator(iterVarName, from, to, incr);
		for (IntObject iterVar : seqIter) {
			//set iteration variable
			ec.setVariable(iterVarName, iterVar);
			//execute all child blocks
			for (int i = 0; i < this._childBlocks.size(); i++) {
				ec.updateDebugState(i);
				_childBlocks.get(i).execute(ec);
			}
		}
		// reset update-in-place variables
		resetUpdateInPlaceVariableFlags(ec, flags);
	} catch (DMLScriptException e) {
		//propagate stop call
		throw e;
	} catch (Exception e) {
		throw new DMLRuntimeException(printBlockErrorLocation() + "Error evaluating for program block", e);
	}
	//execute exit instructions
	try {
		executeInstructions(_exitInstructions, ec);
	} catch (Exception e) {
		throw new DMLRuntimeException(printBlockErrorLocation() + "Error evaluating for exit instructions", e);
	}
}
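Note the default increment in the block above: when no increment expression is given, the step is +1 for ascending ranges (from <= to) and -1 otherwise, and a value of 0 is rejected because it would loop forever. A tiny standalone illustration of that rule (hypothetical helper, not project code):

//ascending ranges step by +1, descending ranges by -1
static long defaultIncrement(long from, long to) {
	return (from <= to) ? 1 : -1;
}
//e.g., defaultIncrement(1, 10) == 1 and defaultIncrement(10, 1) == -1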
Use of org.apache.sysml.runtime.DMLScriptException in project incubator-systemml by apache.
The class Program, method execute.
public void execute(ExecutionContext ec) throws DMLRuntimeException {
	ec.initDebugProgramCounters();
	try {
		for (int i = 0; i < _programBlocks.size(); i++) {
			ec.updateDebugState(i);
			_programBlocks.get(i).execute(ec);
		}
	} catch (DMLScriptException e) {
		//propagate stop call from nested program blocks
		throw e;
	} catch (Exception e) {
		throw new DMLRuntimeException(e);
	}
	ec.clearDebugProgramCounters();
}
Use of org.apache.sysml.runtime.DMLScriptException in project incubator-systemml by apache.
The class DMLYarnClient, method launchDMLYarnAppmaster.
/**
 * Method to launch the dml yarn app master and execute the given dml script
 * with the given configuration and jar file.
 *
 * NOTE: on launching the yarn app master, we do not explicitly probe if we
 * are running on a yarn or MR1 cluster. In case of MR1, the class
 * YarnConfiguration is not found and a ClassNotFoundException is raised. In case
 * of any exception we fall back to running CP directly in the client process.
 *
 * @return true if dml program successfully executed as yarn app master
 * @throws IOException if IOException occurs
 * @throws DMLScriptException if DMLScriptException occurs
 */
protected boolean launchDMLYarnAppmaster() throws IOException, DMLScriptException {
	boolean ret = false;
	String hdfsWD = null;
	try {
		Timing time = new Timing(true);
		// load yarn configuration
		YarnConfiguration yconf = new YarnConfiguration();
		// create yarn client
		YarnClient yarnClient = YarnClient.createYarnClient();
		yarnClient.init(yconf);
		yarnClient.start();
		// create application and get the ApplicationID
		YarnClientApplication app = yarnClient.createApplication();
		ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext();
		ApplicationId appId = appContext.getApplicationId();
		LOG.debug("Created application (applicationID: " + appId + ")");
		// prepare hdfs working directory via ApplicationID
		// copy script, config, jar file to hdfs
		hdfsWD = DMLAppMasterUtils.constructHDFSWorkingDir(_dmlConfig, appId);
		copyResourcesToHdfsWorkingDir(yconf, hdfsWD);
		//construct command line argument
		String command = constructAMCommand(_args, _dmlConfig);
		LOG.debug("Constructed application master command: \n" + command);
		// set up the container launch context for the application master
		ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);
		amContainer.setCommands(Collections.singletonList(command));
		amContainer.setLocalResources(constructLocalResourceMap(yconf));
		amContainer.setEnvironment(constructEnvionmentMap(yconf));
		// set up resource type requirements for ApplicationMaster
		int memHeap = _dmlConfig.getIntValue(DMLConfig.YARN_APPMASTERMEM);
		int memAlloc = (int) computeMemoryAllocation(memHeap);
		Resource capability = Records.newRecord(Resource.class);
		capability.setMemory(memAlloc);
		capability.setVirtualCores(NUM_CORES);
		LOG.debug("Requested application resources: memory=" + memAlloc + ", vcores=" + NUM_CORES);
		// finally, set up the ApplicationSubmissionContext for the application
		String qname = _dmlConfig.getTextValue(DMLConfig.YARN_APPQUEUE);
		// application name
		appContext.setApplicationName(APPMASTER_NAME);
		appContext.setAMContainerSpec(amContainer);
		appContext.setResource(capability);
		// queue
		appContext.setQueue(qname);
		LOG.debug("Configured application meta data: name=" + APPMASTER_NAME + ", queue=" + qname);
		// submit application (non-blocking)
		yarnClient.submitApplication(appContext);
		// check application status periodically (and output web ui address)
		ApplicationReport appReport = yarnClient.getApplicationReport(appId);
		LOG.info("Application tracking-URL: " + appReport.getTrackingUrl());
		YarnApplicationState appState = appReport.getYarnApplicationState();
		YarnApplicationState oldState = appState;
		LOG.info("Application state: " + appState);
		while (appState != YarnApplicationState.FINISHED && appState != YarnApplicationState.KILLED && appState != YarnApplicationState.FAILED) {
			//wait for APP_STATE_INTERVAL (200ms)
			Thread.sleep(APP_STATE_INTERVAL);
			appReport = yarnClient.getApplicationReport(appId);
			appState = appReport.getYarnApplicationState();
			if (appState != oldState) {
				oldState = appState;
				LOG.info("Application state: " + appState);
			}
		}
		//check final status (failed or succeeded)
		FinalApplicationStatus finalState = appReport.getFinalApplicationStatus();
		LOG.info("Application final status: " + finalState);
		//show application and total runtime
		double appRuntime = (double) (appReport.getFinishTime() - appReport.getStartTime()) / 1000;
		LOG.info("Application runtime: " + appRuntime + " sec.");
		LOG.info("Total runtime: " + String.format("%.3f", time.stop() / 1000) + " sec.");
		//raise script-level error in case of failed final status
		if (finalState != FinalApplicationStatus.SUCCEEDED) {
			//propagate script-level stop call message
			String stop_msg = readMessageToHDFSWorkingDir(_dmlConfig, yconf, appId);
			if (stop_msg != null)
				throw new DMLScriptException(stop_msg);
			//generic failure message
			throw new DMLRuntimeException("DML yarn app master finished with final status: " + finalState + ".");
		}
		ret = true;
	} catch (DMLScriptException ex) {
		//rethrow DMLScriptException to propagate stop call
		throw ex;
	} catch (Exception ex) {
		LOG.error("Failed to run DML yarn app master.", ex);
		ret = false;
	} finally {
		//cleanup working directory
		if (hdfsWD != null)
			MapReduceTool.deleteFileIfExistOnHDFS(hdfsWD);
	}
	return ret;
}
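Per the note in the javadoc above, a generic failure (return value false) is meant to trigger a fallback to CP execution in the client process, while a DMLScriptException carrying a remote stop() message keeps propagating. A hedged caller sketch of that contract (the fallback hook is hypothetical, not project code):

protected void runWithYarnFallback() throws IOException, DMLScriptException {
	//a DMLScriptException from a remote stop() propagates through this call unchanged
	boolean launched = launchDMLYarnAppmaster();
	if (!launched) {
		//hypothetical client-side CP fallback, for illustration only
		executeInClientProcess();
	}
}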