Example usage of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in the Apache incubator-systemml project: class RoundTest, method runTest.
// -----------------------------------------------------------------------------
/**
 * Executes the round-test DML script and its R reference on the given runtime
 * platform, then compares the resulting HDFS output against the R output.
 *
 * @param rt       runtime platform to force for this run
 * @param test     test type providing the script name
 * @param rows     number of rows of the generated input matrix
 * @param cols     number of columns of the generated input matrix
 * @param sparsity sparsity of the generated input matrix
 */
private void runTest(RUNTIME_PLATFORM rt, TEST_TYPE test, int rows, int cols, double sparsity) {
	// remember previous global state so it can be restored afterwards
	RUNTIME_PLATFORM prevPlatform = rtplatform;
	boolean prevSparkConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
	rtplatform = rt;
	if (rt == RUNTIME_PLATFORM.SPARK) {
		DMLScript.USE_LOCAL_SPARK_CONFIG = true;
	}
	try {
		TestConfiguration conf = getTestConfiguration(test.scriptName);
		conf.addVariable("rows", rows);
		conf.addVariable("cols", cols);
		loadTestConfiguration(conf);
		// construct the DML and R invocations directly
		String baseDir = SCRIPT_DIR + TEST_DIR;
		fullDMLScriptName = baseDir + test.scriptName + ".dml";
		fullRScriptName = baseDir + test.scriptName + ".R";
		programArgs = new String[] { "-args", input("math"), output("R") };
		rCmd = String.join(" ", "Rscript", fullRScriptName, inputDir(), expectedDir());
		// generate and persist a random input matrix (time-based seed)
		double[][] data = getRandomMatrix(rows, cols, 10, 20, sparsity, System.nanoTime());
		writeInputMatrixWithMTD("math", data, true);
		// run DML and R, then compare outputs cell-wise
		runTest(true, false, null, -1);
		runRScript(true);
		TestUtils.compareDMLHDFSFileWithRFile(expected("R"), output("R"), 1e-9);
	} finally {
		DMLScript.USE_LOCAL_SPARK_CONFIG = prevSparkConfig;
		// reset runtime platform
		rtplatform = prevPlatform;
	}
}
Example usage of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in the Apache incubator-systemml project: class SVDFactorizeTest, method runTestSVDFactorize.
/**
 * Executes the SVD factorize DML script on the given runtime platform and
 * verifies that the residual output matrix D equals the 1x1 zero matrix.
 *
 * @param rows number of rows of the generated input matrix A
 * @param cols number of columns of the generated input matrix A
 * @param rt   runtime platform to force for this run
 */
private void runTestSVDFactorize(int rows, int cols, RUNTIME_PLATFORM rt) {
	// remember previous global state so it can be restored afterwards
	RUNTIME_PLATFORM prevPlatform = rtplatform;
	boolean prevSparkConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
	rtplatform = rt;
	if (rt == RUNTIME_PLATFORM.SPARK) {
		DMLScript.USE_LOCAL_SPARK_CONFIG = true;
	}
	try {
		getAndLoadTestConfiguration(TEST_NAME1);
		String baseDir = SCRIPT_DIR + TEST_DIR;
		fullDMLScriptName = baseDir + TEST_NAME1 + ".dml";
		programArgs = new String[] { "-args", input("A"), output("D") };
		// random input matrix with fixed seed 10 for reproducibility
		double[][] A = getRandomMatrix(rows, cols, 0, 1, sparsity, 10);
		writeInputMatrixWithMTD("A", A, false, new MatrixCharacteristics(rows, cols, -1, -1, -1));
		// expected result: 1x1 zero matrix (Java zero-initializes the array)
		writeExpectedMatrix("D", new double[1][1]);
		runTest(true, false, null, -1);
		compareResults(1e-8);
	} finally {
		DMLScript.USE_LOCAL_SPARK_CONFIG = prevSparkConfig;
		rtplatform = prevPlatform;
	}
}
Example usage of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in the Apache incubator-systemml project: class FullDistributionTest, method runDFTest.
/**
 * Internal test method - all these tests are expected to run in CP independent of the passed
 * instType. However, we test all backends to ensure correct compilation in the presence of
 * forced execution types.
 *
 * @param type     distribution function under test
 * @param inverse  whether the inverse distribution function variant is tested
 * @param param1   first distribution parameter (unused for NORMAL_NOPARAMS)
 * @param param2   second distribution parameter (only for two-parameter distributions)
 * @param instType forced execution backend (MR, SPARK, or CP/hybrid)
 */
private void runDFTest(TEST_TYPE type, boolean inverse, Double param1, Double param2, ExecType instType) {
	// setup multi backend configuration: map execution type onto runtime platform
	RUNTIME_PLATFORM prevPlatform = rtplatform;
	if (instType == ExecType.MR)
		rtplatform = RUNTIME_PLATFORM.HADOOP;
	else if (instType == ExecType.SPARK)
		rtplatform = RUNTIME_PLATFORM.SPARK;
	else
		rtplatform = RUNTIME_PLATFORM.HYBRID;
	boolean prevSparkConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
	if (rtplatform == RUNTIME_PLATFORM.SPARK)
		DMLScript.USE_LOCAL_SPARK_CONFIG = true;
	try {
		// set test and parameters; random probe value in [0,1)
		getAndLoadTestConfiguration(TEST_NAME);
		double in = new Random(System.nanoTime()).nextDouble();
		String baseDir = SCRIPT_DIR + TEST_DIR;
		String scriptBase = TEST_NAME + "_" + type.toString();
		fullDMLScriptName = baseDir + scriptBase + ".dml";
		fullRScriptName = baseDir + scriptBase + ".R";
		// assemble program arguments according to the distribution's parameter count
		switch(type) {
			case NORMAL_NOPARAMS:
				programArgs = new String[] { "-args", Double.toString(in), output("dfout") };
				rCmd = String.join(" ", "Rscript", fullRScriptName, Double.toString(in), expected("dfout"));
				break;
			case NORMAL_MEAN:
			case NORMAL_SD:
			case T:
			case CHISQ:
			case EXP:
				programArgs = new String[] { "-args", Double.toString(in), Double.toString(param1), output("dfout") };
				rCmd = String.join(" ", "Rscript", fullRScriptName, Double.toString(in), Double.toString(param1), expected("dfout"));
				break;
			case NORMAL:
			case F:
				programArgs = new String[] { "-args", Double.toString(in), Double.toString(param1), Double.toString(param2), output("dfout") };
				rCmd = String.join(" ", "Rscript", fullRScriptName, Double.toString(in), Double.toString(param1), Double.toString(param2), expected("dfout"));
				break;
			default:
				throw new RuntimeException("Invalid distribution function: " + type);
		}
		// run DML and R, then compare results cell-wise
		runTest(true, false, null, -1);
		runRScript(true);
		HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("dfout");
		HashMap<CellIndex, Double> rfile = readRMatrixFromFS("dfout");
		TestUtils.compareMatrices(dmlfile, rfile, 1e-8, "DMLout", "Rout");
	} finally {
		// restore previous global state
		rtplatform = prevPlatform;
		DMLScript.USE_LOCAL_SPARK_CONFIG = prevSparkConfig;
	}
}
Example usage of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in the Apache incubator-systemml project: class MLContextMultipleScriptsTest, method runMLContextTestMultipleScript.
/**
 * Runs three chained DML scripts through a single MLContext, passing the matrix
 * outputs of earlier scripts as inputs to later ones, and prints the final
 * string result.
 *
 * Fix: the SparkSession/MLContext creation is now inside the try block so that
 * a setup failure still restores {@code DMLScript.rtplatform}; the cleanup in
 * the finally block is null-guarded accordingly. Previously, an exception from
 * createSystemMLSparkSession or the MLContext constructor left the global
 * runtime platform clobbered for subsequent tests.
 *
 * @param platform runtime platform to force for this run
 * @param wRead    if true, use the "b" script variants that re-read persisted data
 */
private static void runMLContextTestMultipleScript(RUNTIME_PLATFORM platform, boolean wRead) {
	RUNTIME_PLATFORM oldplatform = DMLScript.rtplatform;
	DMLScript.rtplatform = platform;
	SparkSession spark = null;
	MLContext ml = null;
	try {
		// create mlcontext (inside try so global state is restored even if setup fails)
		spark = createSystemMLSparkSession("MLContextMultipleScriptsTest", "local");
		ml = new MLContext(spark);
		ml.setExplain(true);
		String dml1 = baseDirectory + File.separator + "MultiScript1.dml";
		String dml2 = baseDirectory + File.separator + (wRead ? "MultiScript2b.dml" : "MultiScript2.dml");
		String dml3 = baseDirectory + File.separator + (wRead ? "MultiScript3b.dml" : "MultiScript3.dml");
		// run script 1, feeding its output matrix into scripts 2 and 3
		Script script1 = dmlFromFile(dml1).in("$rows", rows).in("$cols", cols).out("X");
		Matrix X = ml.execute(script1).getMatrix("X");
		Script script2 = dmlFromFile(dml2).in("X", X).out("Y");
		Matrix Y = ml.execute(script2).getMatrix("Y");
		Script script3 = dmlFromFile(dml3).in("X", X).in("Y", Y).out("z");
		String z = ml.execute(script3).getString("z");
		System.out.println(z);
	} finally {
		DMLScript.rtplatform = oldplatform;
		// stop underlying spark context to allow single jvm tests (otherwise the
		// next test that tries to create a SparkContext would fail)
		if (spark != null)
			spark.stop();
		// clear status mlcontext and spark exec context
		if (ml != null)
			ml.close();
	}
}
Example usage of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in the Apache incubator-systemml project: class FullMatrixVectorRowCellwiseOperationTest, method runMatrixVectorCellwiseOperationTest.
/**
 * Runs a cell-wise matrix / row-vector operation test for the given operation
 * type, input sparsities, and execution backend, comparing the DML result
 * against R (or against directly computed expected values for division).
 *
 * Fix: the operation-type switch now has a default case that fails fast with a
 * descriptive exception (consistent with runDFTest); previously an unhandled
 * OpType left TEST_NAME null and surfaced later as an opaque NPE.
 *
 * @param type     operation to test (addition, subtraction, multiplication, division)
 * @param sparseM1 sparsity category of matrix input A (dense, sparse, or empty)
 * @param sparseM2 sparsity category of row-vector input B (dense, sparse, or empty)
 * @param instType execution backend (MR, SPARK, or CP/hybrid)
 */
private void runMatrixVectorCellwiseOperationTest(OpType type, SparsityType sparseM1, SparsityType sparseM2, ExecType instType) {
	// rtplatform for MR
	RUNTIME_PLATFORM platformOld = rtplatform;
	switch(instType) {
		case MR:
			rtplatform = RUNTIME_PLATFORM.HADOOP;
			break;
		case SPARK:
			rtplatform = RUNTIME_PLATFORM.SPARK;
			break;
		default:
			rtplatform = RUNTIME_PLATFORM.HYBRID;
			break;
	}
	boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
	if (rtplatform == RUNTIME_PLATFORM.SPARK)
		DMLScript.USE_LOCAL_SPARK_CONFIG = true;
	try {
		// select test script by operation type; fail fast on unknown types
		String TEST_NAME = null;
		switch(type) {
			case ADDITION:
				TEST_NAME = TEST_NAME1;
				break;
			case SUBTRACTION:
				TEST_NAME = TEST_NAME2;
				break;
			case MULTIPLICATION:
				TEST_NAME = TEST_NAME3;
				break;
			case DIVISION:
				TEST_NAME = TEST_NAME4;
				break;
			default:
				throw new RuntimeException("Invalid operation type: " + type);
		}
		TestConfiguration config = getTestConfiguration(TEST_NAME);
		// get sparsity for both inputs
		double lsparsity1 = 1.0, lsparsity2 = 1.0;
		switch(sparseM1) {
			case DENSE:
				lsparsity1 = sparsity1;
				break;
			case SPARSE:
				lsparsity1 = sparsity2;
				break;
			case EMPTY:
				lsparsity1 = 0.0;
				break;
		}
		switch(sparseM2) {
			case DENSE:
				lsparsity2 = sparsity1;
				break;
			case SPARSE:
				lsparsity2 = sparsity2;
				break;
			case EMPTY:
				lsparsity2 = 0.0;
				break;
		}
		// division results are not cached because its expected values are computed directly
		String TEST_CACHE_DIR = "";
		if (TEST_CACHE_ENABLED && (type != OpType.DIVISION)) {
			TEST_CACHE_DIR = type.ordinal() + "_" + lsparsity1 + "_" + lsparsity2 + "/";
		}
		loadTestConfiguration(config, TEST_CACHE_DIR);
		/* This is for running the junit test the new way, i.e., construct the arguments directly */
		String HOME = SCRIPT_DIR + TEST_DIR;
		fullDMLScriptName = HOME + TEST_NAME + ".dml";
		programArgs = new String[] { "-explain", "recompile_runtime", "-args", input("A"), input("B"), output("C") };
		fullRScriptName = HOME + TEST_NAME + ".R";
		rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
		// generate actual dataset (value range collapses to 0 for empty inputs)
		double[][] A = getRandomMatrix(rows, cols, 0, (lsparsity1 == 0) ? 0 : 1, lsparsity1, 7);
		writeInputMatrixWithMTD("A", A, true);
		double[][] B = getRandomMatrix(1, cols, 0, (lsparsity2 == 0) ? 0 : 1, lsparsity2, 3);
		writeInputMatrixWithMTD("B", B, true);
		boolean exceptionExpected = false;
		runTest(true, exceptionExpected, null, -1);
		if (!(type == OpType.DIVISION)) {
			runRScript(true);
			// compare matrices
			HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("C");
			HashMap<CellIndex, Double> rfile = readRMatrixFromFS("C");
			TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
		} else {
			// For division, IEEE 754 defines x/0.0 as INFINITY and 0.0/0.0 as NaN.
			// Java handles this correctly while R always returns 1.0E308 in those cases.
			// Hence, we directly write the expected results.
			double[][] C = new double[rows][cols];
			for (int i = 0; i < rows; i++) for (int j = 0; j < cols; j++) C[i][j] = A[i][j] / B[0][j];
			writeExpectedMatrix("C", C);
			compareResults();
		}
	} finally {
		// restore previous global state
		rtplatform = platformOld;
		DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
	}
}
Aggregations