Use of org.apache.sysml.api.mlcontext.MLContext in the project incubator-systemml by Apache.
The class NNTest, method setUpClass.
@BeforeClass
public static void setUpClass() {
// One-time suite setup: start a local Spark session and wrap it in an MLContext.
// Fix: the app name previously read "MLContextTest" — an apparent copy-paste from
// MLContextTest.setUpClass; use the owning class name so the suite is identifiable
// in the Spark UI and logs (consistent with MLContextFrameTest below).
spark = createSystemMLSparkSession("NNTest", "local");
ml = new MLContext(spark);
}
Use of org.apache.sysml.api.mlcontext.MLContext in the project incubator-systemml by Apache.
The class MLContextTest, method setUpClass.
@BeforeClass
// One-time suite setup: start a local Spark session, wrap it in an MLContext,
// and expose the underlying JavaSparkContext for tests needing RDD-level access.
// Statement order matters: ml requires spark; sc is derived from ml.
public static void setUpClass() {
spark = createSystemMLSparkSession("MLContextTest", "local");
ml = new MLContext(spark);
sc = MLContextUtil.getJavaSparkContext(ml);
}
Use of org.apache.sysml.api.mlcontext.MLContext in the project incubator-systemml by Apache.
The class MLContextFrameTest, method setUpClass.
@BeforeClass
// One-time suite setup: local Spark session + MLContext + JavaSparkContext handle.
// Also raises the explain level to RECOMPILE_HOPS — presumably so explain output
// reflects the program after HOP recompilation; confirm against ExplainLevel docs.
public static void setUpClass() {
spark = createSystemMLSparkSession("MLContextFrameTest", "local");
ml = new MLContext(spark);
sc = MLContextUtil.getJavaSparkContext(ml);
ml.setExplainLevel(ExplainLevel.RECOMPILE_HOPS);
}
Use of org.apache.sysml.api.mlcontext.MLContext in the project incubator-systemml by Apache.
The class SparkExecutionContext, method initSparkContext.
// Lazily initializes the shared JavaSparkContext (_spctx), reusing an active
// MLContext's context when one exists, otherwise creating a new one from either
// a local test configuration or the cluster's spark-submit/defaults configuration.
// Synchronized so concurrent callers cannot create two contexts.
private static synchronized void initSparkContext() {
// check for redundant spark context init
if (_spctx != null)
return;
// Timestamp for statistics; 0 when statistics are disabled (never read in that case).
long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
// create a default spark context (master, appname, etc refer to system properties
// as given in the spark configuration or during spark-submit)
MLContext mlCtxObj = MLContextProxy.getActiveMLContext();
if (mlCtxObj != null) {
// This is when DML is called through spark shell
// Will clean the passing of static variables later as this involves minimal change to DMLScript
_spctx = MLContextUtil.getJavaSparkContext(mlCtxObj);
} else {
if (DMLScript.USE_LOCAL_SPARK_CONFIG) {
// For now set 4 cores for integration testing :)
SparkConf conf = createSystemMLSparkConf().setMaster("local[*]").setAppName("My local integration test app");
// This is discouraged in spark but have added only for those testcase that cannot stop the context properly
// conf.set("spark.driver.allowMultipleContexts", "true");
conf.set("spark.ui.enabled", "false");
_spctx = new JavaSparkContext(conf);
} else // default cluster setup
{
// setup systemml-preferred spark configuration (w/o user choice)
SparkConf conf = createSystemMLSparkConf();
_spctx = new JavaSparkContext(conf);
}
// NOTE(review): _parRDDs is cleared only when we create the context ourselves,
// not when reusing an MLContext-provided one — confirm this asymmetry is intended.
_parRDDs.clear();
}
// Set warning if spark.driver.maxResultSize is not set. It needs to be set before starting Spark Context for CP collect
// NOTE(review): the guard below fires whenever the (possibly default "1g") value is
// below the local memory budget, not only when the property is unset as the comment says.
String strDriverMaxResSize = _spctx.getConf().get("spark.driver.maxResultSize", "1g");
long driverMaxResSize = UtilFunctions.parseMemorySize(strDriverMaxResSize);
if (driverMaxResSize != 0 && driverMaxResSize < OptimizerUtils.getLocalMemBudget() && !DMLScript.USE_LOCAL_SPARK_CONFIG)
LOG.warn("Configuration parameter spark.driver.maxResultSize set to " + UtilFunctions.formatMemorySize(driverMaxResSize) + "." + " You can set it through Spark default configuration setting either to 0 (unlimited) or to available memory budget of size " + UtilFunctions.formatMemorySize((long) OptimizerUtils.getLocalMemBudget()) + ".");
// TODO if spark context passed in from outside (mlcontext), we need to clean this up at the end
if (MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION)
MRJobConfiguration.addBinaryBlockSerializationFramework(_spctx.hadoopConfiguration());
// statistics maintenance
if (DMLScript.STATISTICS) {
Statistics.setSparkCtxCreateTime(System.nanoTime() - t0);
}
}
Use of org.apache.sysml.api.mlcontext.MLContext in the project incubator-systemml by Apache.
The class GenerateClassesForMLContext, method addConvenienceMethodsToMLContext.
/**
 * Add methods to MLContext to allow tab-completion to folders/packages
 * (such as {@code ml.scripts()} and {@code ml.nn()}).
 *
 * @param source
 *            path to source directory (typically, the scripts directory)
 * @param fullDirClassName
 *            the full name of the class representing the source (scripts)
 *            directory
 */
public static void addConvenienceMethodsToMLContext(String source, String fullDirClassName) {
try {
ClassPool pool = ClassPool.getDefault();
CtClass ctMLContext = pool.get(MLContext.class.getName());
CtClass dirClass = pool.get(fullDirClassName);
String methodName = convertFullClassNameToConvenienceMethodName(fullDirClassName);
System.out.println("Adding " + methodName + "() to " + ctMLContext.getName());
// Generated no-arg convenience method simply instantiates and returns the directory class.
String methodBody = "{ " + fullDirClassName + " z = new " + fullDirClassName + "(); return z; }";
CtMethod ctMethod = CtNewMethod.make(Modifier.PUBLIC, dirClass, methodName, null, null, methodBody, ctMLContext);
ctMLContext.addMethod(ctMethod);
addPackageConvenienceMethodsToMLContext(source, ctMLContext);
ctMLContext.writeFile(destination);
} catch (IOException | RuntimeException | NotFoundException | CannotCompileException e) {
// Multi-catch collapses five identical catch blocks; FileNotFoundException is an
// IOException subtype, so it remains covered. Best-effort code-generation tooling:
// report the failure and let the caller proceed with remaining classes.
e.printStackTrace();
}
}
Aggregations