Use of org.apache.sysml.api.mlcontext.MLContext in the Apache SystemML project.
From the class GPUTests, method assertEqualMatrices.
/**
 * Asserts that the values in two matrices agree within the relative
 * threshold returned by {@link #getTHRESHOLD()}.
 *
 * Fast path: a single DML script counts mismatching cells on the CPU.
 * Only when a mismatch is detected does the slow cell-by-cell scan run,
 * so the first offending coordinate can be reported.
 *
 * @param expected expected matrix
 * @param actual actual matrix
 */
private void assertEqualMatrices(Matrix expected, Matrix actual) {
    try {
        // Faster way to compare two matrices: count cells whose relative error exceeds the threshold.
        MLContext cpuMLC = new MLContext(spark);
        String scriptStr = "num_mismatch = sum((abs(X - Y) / X) > " + getTHRESHOLD() + ");";
        Script script = ScriptFactory.dmlFromString(scriptStr).in("X", expected).in("Y", actual).out("num_mismatch");
        long num_mismatch = cpuMLC.execute(script).getLong("num_mismatch");
        cpuMLC.close();
        if (num_mismatch == 0)
            return;
        // If error, scan for and report the actual incorrect values.
        MatrixBlock expectedMB = expected.toMatrixObject().acquireRead();
        MatrixBlock actualMB = actual.toMatrixObject().acquireRead();
        try {
            long rows = expectedMB.getNumRows();
            long cols = expectedMB.getNumColumns();
            Assert.assertEquals(rows, actualMB.getNumRows());
            Assert.assertEquals(cols, actualMB.getNumColumns());
            if (PRINT_MAT_ERROR)
                printMatrixIfNotEqual(expectedMB, actualMB);
            for (int i = 0; i < rows; i++) {
                for (int j = 0; j < cols; j++) {
                    double expectedDouble = expectedMB.quickGetValue(i, j);
                    double actualDouble = actualMB.quickGetValue(i, j);
                    if (expectedDouble != 0.0 && !Double.isNaN(expectedDouble) && Double.isFinite(expectedDouble)) {
                        double relativeError = Math.abs((expectedDouble - actualDouble) / expectedDouble);
                        double absoluteError = Math.abs(expectedDouble - actualDouble);
                        // For single precision, accept either a small relative or a small absolute error.
                        boolean withinTolerance = FLOATING_POINT_PRECISION.equals("double")
                                ? relativeError < getTHRESHOLD()
                                : relativeError < getTHRESHOLD() || absoluteError < getTHRESHOLD();
                        if (!withinTolerance) {
                            // Build the message lazily and only on failure. The original allocated
                            // and formatted a Formatter for every cell, and leaked it whenever the
                            // assertion threw before format.close() was reached.
                            Assert.fail(String.format(
                                    "Relative error(%f) is more than threshold (%f). Expected = %f, Actual = %f, differed at [%d, %d]",
                                    relativeError, getTHRESHOLD(), expectedDouble, actualDouble, i, j));
                        }
                    } else {
                        // Zero / NaN / infinite expected values: compare with an absolute tolerance.
                        Assert.assertEquals(expectedDouble, actualDouble, getTHRESHOLD());
                    }
                }
            }
        } finally {
            // Always release the read locks, even when an assertion above throws
            // (the original leaked both locks on any assertion failure).
            expected.toMatrixObject().release();
            actual.toMatrixObject().release();
        }
    } catch (DMLRuntimeException e) {
        throw new RuntimeException(e);
    }
}
Use of org.apache.sysml.api.mlcontext.MLContext in the Apache SystemML project.
From the class GPUTests, method generateIntegerSequenceMatrix.
/**
 * Generates an input matrix which is a sequence of integers (1 .. m*n),
 * laid out into an m x n matrix via a DML script.
 *
 * @param spark valid instance of {@link SparkSession}
 * @param m number of rows
 * @param n number of columns
 * @return a matrix with a sequence of integers
 */
protected Matrix generateIntegerSequenceMatrix(SparkSession spark, int m, int n) {
    MLContext genMLC = new MLContext(spark);
    // Bug fix: the two DML statements were previously concatenated without any
    // separator, producing ")in1 = matrix(..." which is not parseable DML.
    // Also widen m*n to long so large dimensions do not overflow int.
    String scriptStr = "temp = seq(1, " + ((long) m * n) + ");\n"
            + "in1 = matrix(temp, rows=" + m + ", cols=" + n + ")";
    Script generateScript = ScriptFactory.dmlFromString(scriptStr).out("in1");
    Matrix in1 = genMLC.execute(generateScript).getMatrix("in1");
    genMLC.close();
    return in1;
}
Use of org.apache.sysml.api.mlcontext.MLContext in the Apache SystemML project.
From the class MLContextMultipleScriptsTest, method runMLContextTestMultipleScript.
/**
 * Executes a chain of three DML scripts through one MLContext: script 1
 * produces X, script 2 derives Y from X, and script 3 consumes both and
 * prints the scalar result z. Restores the runtime platform and tears
 * down the Spark session afterwards.
 *
 * @param platform runtime platform to activate for the duration of the test
 * @param wRead when true, use the "b" script variants that re-read inputs
 */
private static void runMLContextTestMultipleScript(RUNTIME_PLATFORM platform, boolean wRead) {
    // Remember the active platform so it can be restored in the finally block.
    RUNTIME_PLATFORM previousPlatform = DMLScript.rtplatform;
    DMLScript.rtplatform = platform;
    // Create the Spark session and MLContext used by all three scripts.
    SparkSession spark = createSystemMLSparkSession("MLContextMultipleScriptsTest", "local");
    MLContext ml = new MLContext(spark);
    ml.setExplain(true);
    String script1Path = baseDirectory + File.separator + "MultiScript1.dml";
    String script2Path = baseDirectory + File.separator + (wRead ? "MultiScript2b.dml" : "MultiScript2.dml");
    String script3Path = baseDirectory + File.separator + (wRead ? "MultiScript3b.dml" : "MultiScript3.dml");
    try {
        // Run the three scripts in sequence, feeding each result into the next.
        Matrix X = ml.execute(dmlFromFile(script1Path).in("$rows", rows).in("$cols", cols).out("X")).getMatrix("X");
        Matrix Y = ml.execute(dmlFromFile(script2Path).in("X", X).out("Y")).getMatrix("Y");
        String z = ml.execute(dmlFromFile(script3Path).in("X", X).in("Y", Y).out("z")).getString("z");
        System.out.println(z);
    } finally {
        DMLScript.rtplatform = previousPlatform;
        // stop underlying spark context to allow single jvm tests (otherwise the
        // next test that tries to create a SparkContext would fail)
        spark.stop();
        // clear status mlcontext and spark exec context
        ml.close();
    }
}
Use of org.apache.sysml.api.mlcontext.MLContext in the Apache SystemML project.
From the class SparkExecutionContext, method initSparkContext.
/**
 * Lazily initializes the singleton JavaSparkContext (_spctx) used by the
 * SystemML Spark backend. Declared synchronized so concurrent callers race
 * safely; a second caller returns immediately once the context exists.
 * Reuses the context of an active MLContext when one is present, otherwise
 * creates a fresh context from the SystemML-preferred SparkConf.
 */
private static synchronized void initSparkContext() {
    // check for redundant spark context init
    if (_spctx != null)
        return;
    // only sample the clock when statistics collection is enabled
    long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
    // create a default spark context (master, appname, etc refer to system properties
    // as given in the spark configuration or during spark-submit)
    MLContext mlCtxObj = MLContextProxy.getActiveMLContext();
    if (mlCtxObj != null) {
        // This is when DML is called through spark shell
        // Will clean the passing of static variables later as this involves minimal change to DMLScript
        _spctx = MLContextUtil.getJavaSparkContext(mlCtxObj);
    } else {
        if (DMLScript.USE_LOCAL_SPARK_CONFIG) {
            // For now set 4 cores for integration testing :)
            SparkConf conf = createSystemMLSparkConf().setMaster("local[*]").setAppName("My local integration test app");
            // This is discouraged in spark but have added only for those testcase that cannot stop the context properly
            // conf.set("spark.driver.allowMultipleContexts", "true");
            conf.set("spark.ui.enabled", "false");
            _spctx = new JavaSparkContext(conf);
        } else // default cluster setup
        {
            // setup systemml-preferred spark configuration (w/o user choice)
            SparkConf conf = createSystemMLSparkConf();
            _spctx = new JavaSparkContext(conf);
        }
        // NOTE(review): the cached-RDD bookkeeping is only cleared when we created
        // the context ourselves, not on the MLContext-reuse path above — confirm
        // that is intentional.
        _parRDDs.clear();
    }
    // Set warning if spark.driver.maxResultSize is not set. It needs to be set before starting Spark Context for CP collect
    String strDriverMaxResSize = _spctx.getConf().get("spark.driver.maxResultSize", "1g");
    long driverMaxResSize = UtilFunctions.parseMemorySize(strDriverMaxResSize);
    // warn only in cluster mode (not local) and only when the limit is non-zero but below the local memory budget
    if (driverMaxResSize != 0 && driverMaxResSize < OptimizerUtils.getLocalMemBudget() && !DMLScript.USE_LOCAL_SPARK_CONFIG)
        LOG.warn("Configuration parameter spark.driver.maxResultSize set to " + UtilFunctions.formatMemorySize(driverMaxResSize) + "." + " You can set it through Spark default configuration setting either to 0 (unlimited) or to available memory budget of size " + UtilFunctions.formatMemorySize((long) OptimizerUtils.getLocalMemBudget()) + ".");
    // TODO if spark context passed in from outside (mlcontext), we need to clean this up at the end
    if (MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION)
        MRJobConfiguration.addBinaryBlockSerializationFramework(_spctx.hadoopConfiguration());
    // statistics maintenance
    if (DMLScript.STATISTICS) {
        Statistics.setSparkCtxCreateTime(System.nanoTime() - t0);
    }
}
Use of org.apache.sysml.api.mlcontext.MLContext in the Apache SystemML project.
From the class APICodegenTest, method runMLContextParforDatasetTest.
/**
 * Runs a small row-aggregate DML script with code generation enabled through
 * either the JMLC or the MLContext API, then checks the statistics heavy
 * hitters for the fused 'spoofRA' operator.
 *
 * @param jmlc true to execute via JMLC, false to execute via MLContext
 */
private void runMLContextParforDatasetTest(boolean jmlc) {
    try {
        // Deterministic random input (fixed seed 76543) converted to a MatrixBlock.
        double[][] data = getRandomMatrix(rows, cols, -10, 10, sparsity, 76543);
        MatrixBlock inBlock = DataConverter.convertToMatrixBlock(data);
        String dmlScript = "X = read(\"/tmp\");R = colSums(X/rowSums(X));write(R, \"tmp2\")";
        // execute scripts
        if (jmlc) {
            // JMLC path: prepared script on a connection with codegen enabled.
            DMLScript.STATISTICS = true;
            Connection conn = new Connection(ConfigType.CODEGEN_ENABLED, ConfigType.ALLOW_DYN_RECOMPILATION);
            PreparedScript prepared = conn.prepareScript(dmlScript, new String[] { "X" }, new String[] { "R" }, false);
            prepared.setMatrix("X", inBlock, false);
            prepared.executeScript();
            conn.close();
            System.out.println(Statistics.display());
        } else {
            // MLContext path: local spark context, codegen enabled via config property.
            SparkConf conf = SparkExecutionContext.createSystemMLSparkConf().setAppName("MLContextTest").setMaster("local");
            JavaSparkContext sc = new JavaSparkContext(conf);
            MLContext ml = new MLContext(sc);
            ml.setConfigProperty(DMLConfig.CODEGEN, "true");
            ml.setStatistics(true);
            ml.execute(dml(dmlScript).in("X", inBlock).out("R"));
            ml.resetConfig();
            sc.stop();
            ml.close();
        }
        // check for generated operator
        Assert.assertTrue(heavyHittersContainsSubString("spoofRA"));
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }
}
Aggregations