Use of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in project incubator-systemml by apache: class CovarianceWeightsTest, method runCovarianceTest.
/**
 * @param sparse whether to generate a sparse input vector
 * @param et execution type (MR, SPARK, or the default hybrid platform)
 */
private void runCovarianceTest(boolean sparse, ExecType et) {
    // rtplatform for MR
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (et) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        loadTestConfiguration(config);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        programArgs = new String[] { "-args", input("A"), input("B"), input("C"), output("R") };
        fullRScriptName = HOME + TEST_NAME + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
        // generate actual dataset (always dense because values <=0 invalid)
        double sparsitya = sparse ? sparsity2 : sparsity1;
        double[][] A = getRandomMatrix(rows, 1, 1, maxVal, sparsitya, 7);
        writeInputMatrixWithMTD("A", A, true);
        double[][] B = getRandomMatrix(rows, 1, 1, 1, 1.0, 34);
        writeInputMatrixWithMTD("B", B, true);
        double[][] C = getRandomMatrix(rows, 1, 1, 1, 1.0, 8623);
        writeInputMatrixWithMTD("C", C, true);
        runTest(true, false, null, -1);
        runRScript(true);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("R");
        TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
    }
    finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
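Each snippet on this page repeats the same bookkeeping: save rtplatform and DMLScript.USE_LOCAL_SPARK_CONFIG, switch them for the requested execution type, and restore them in a finally block. As a minimal sketch, assuming it is placed inside a SystemML test class where rtplatform is accessible exactly as above, that bookkeeping could be wrapped in a small AutoCloseable guard; the helper below is hypothetical and not part of the SystemML test harness.

static class PlatformGuard implements AutoCloseable {
    // hypothetical helper: saves the global runtime-platform flags on construction
    private final RUNTIME_PLATFORM platformOld = rtplatform;
    private final boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;

    PlatformGuard(RUNTIME_PLATFORM rt) {
        rtplatform = rt;
        if (rt == RUNTIME_PLATFORM.SPARK || rt == RUNTIME_PLATFORM.HYBRID_SPARK)
            DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    }

    @Override
    public void close() {
        // restore the saved flags, mirroring the finally blocks in the snippets
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}

// usage inside a test method:
// try (PlatformGuard guard = new PlatformGuard(RUNTIME_PLATFORM.SPARK)) {
//     ... load configuration, write inputs, runTest(...), compare results ...
// }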
Use of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in project incubator-systemml by apache: class FullIfElseTest, method runIfElseTest.
private void runIfElseTest(boolean matrix1, boolean matrix2, boolean matrix3, boolean sparse, ExecType et) {
    // rtplatform for MR
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (et) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID_SPARK;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    boolean rewritesOld = OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION;
    if (rtplatform == RUNTIME_PLATFORM.SPARK || rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    // test runtime ops
    OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = false;
    try {
        TestConfiguration config = getTestConfiguration(TEST_NAME1);
        loadTestConfiguration(config);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME1 + ".dml";
        programArgs = new String[] { "-explain", "-args", input("A"), input("B"), input("C"), output("R") };
        fullRScriptName = HOME + TEST_NAME1 + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
        // generate actual datasets (matrices and scalars)
        double sparsity = sparse ? sparsity2 : sparsity1;
        double[][] A = matrix1 ? getRandomMatrix(rows, cols, 0, 1, sparsity, 1) : getScalar(1);
        writeInputMatrixWithMTD("A", A, true);
        double[][] B = matrix2 ? getRandomMatrix(rows, cols, 0, 1, sparsity, 2) : getScalar(2);
        writeInputMatrixWithMTD("B", B, true);
        double[][] C = matrix3 ? getRandomMatrix(rows, cols, 0, 1, sparsity, 3) : getScalar(3);
        writeInputMatrixWithMTD("C", C, true);
        // run test cases
        runTest(true, false, null, -1);
        runRScript(true);
        // compare output matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("R");
        TestUtils.compareMatrices(dmlfile, rfile, 0, "Stat-DML", "Stat-R");
    }
    finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = rewritesOld;
    }
}
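The snippet above writes scalars through the same path as matrices via a getScalar(...) helper that is not shown. A plausible minimal implementation, stated here as an assumption rather than the actual code of FullIfElseTest, simply wraps the value into a 1x1 matrix:

// assumption: a 1x1 wrapper so scalars can be written with writeInputMatrixWithMTD(...)
private static double[][] getScalar(double val) {
    return new double[][] { { val } };
}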
Use of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in project incubator-systemml by apache: class QuantileWeightsTest, method runQuantileTest.
private void runQuantileTest(String TEST_NAME, double p, boolean sparse, ExecType et) {
    // rtplatform for MR
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (et) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        loadTestConfiguration(config);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        programArgs = new String[] { "-args", input("A"), input("W"), Double.toString(p), output("R") };
        fullRScriptName = HOME + TEST_NAME + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + p + " " + expectedDir();
        // generate actual dataset (always dense because values <=0 invalid)
        double sparsitya = sparse ? sparsity2 : sparsity1;
        double[][] A = getRandomMatrix(rows, 1, 1, maxVal, sparsitya, 1236);
        writeInputMatrixWithMTD("A", A, true);
        double[][] W = getRandomMatrix(rows, 1, 1, 1, 1.0, 1);
        writeInputMatrixWithMTD("W", W, true);
        runTest(true, false, null, -1);
        runRScript(true);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("R");
        TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
    }
    finally {
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        rtplatform = platformOld;
    }
}
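For context, a helper like runQuantileTest is normally driven from short JUnit wrappers, one per combination of quantile, sparsity, and execution type. The sketch below is hypothetical: the method names and the TEST_NAME_MEDIAN constant are placeholders, not taken from QuantileWeightsTest.

@Test
public void testMedianDenseCP() {
    // p = 0.5 selects the median; ExecType.CP falls into the default (HYBRID) branch above
    runQuantileTest(TEST_NAME_MEDIAN, 0.5, false, ExecType.CP);
}

@Test
public void testMedianSparseSpark() {
    runQuantileTest(TEST_NAME_MEDIAN, 0.5, true, ExecType.SPARK);
}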
Use of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in project incubator-systemml by apache: class TernaryAggregateTest, method runTernaryAggregateTest.
private void runTernaryAggregateTest(String testname, boolean sparse, boolean vectors, boolean rewrites, ExecType et) {
    // rtplatform for MR
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (et) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    boolean rewritesOld = OptimizerUtils.ALLOW_SUM_PRODUCT_REWRITES;
    try {
        TestConfiguration config = getTestConfiguration(testname);
        loadTestConfiguration(config);
        OptimizerUtils.ALLOW_SUM_PRODUCT_REWRITES = rewrites;
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + testname + ".dml";
        programArgs = new String[] { "-stats", "-args", input("A"), output("R") };
        fullRScriptName = HOME + testname + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
        // generate actual dataset
        double sparsity = sparse ? sparsity2 : sparsity1;
        double[][] A = getRandomMatrix(vectors ? rows * cols : rows, vectors ? 1 : cols, 0, 1, sparsity, 17);
        writeInputMatrixWithMTD("A", A, true);
        // run test cases
        runTest(true, false, null, -1);
        runRScript(true);
        // compare output matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("R");
        TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
        // check for rewritten patterns in statistics output
        if (rewrites && et != ExecType.MR) {
            String opcode = ((et == ExecType.SPARK) ? Instruction.SP_INST_PREFIX : "")
                + ((testname.equals(TEST_NAME1) || vectors) ? "tak+*" : "tack+*");
            Assert.assertEquals(Boolean.TRUE,
                Boolean.valueOf(Statistics.getCPHeavyHitterOpCodes().contains(opcode)));
        }
    }
    finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        OptimizerUtils.ALLOW_SUM_PRODUCT_REWRITES = rewritesOld;
    }
}
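The statistics check at the end follows a simple rule: the base opcode is "tak+*" for the first test script or vector inputs and "tack+*" otherwise, with the Spark instruction prefix prepended when running on Spark. A hedged restatement of that rule as a helper, assuming it sits in the same test class (so that Instruction and ExecType are imported as referenced above), could look like this:

// illustrative only: mirrors the opcode construction used in the rewrite check above
private static String expectedOpcode(boolean rowAggOrVector, ExecType et) {
    String base = rowAggOrVector ? "tak+*" : "tack+*";
    return (et == ExecType.SPARK) ? Instruction.SP_INST_PREFIX + base : base;
}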
Use of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in project incubator-systemml by apache: class FrameCSVReadWriteTest, method runCSVQuotesReadWriteTest.
/**
 * @param rt runtime platform to run the test on
 * @param ofmt output format; only "csv" is supported
 */
private void runCSVQuotesReadWriteTest(RUNTIME_PLATFORM rt, String ofmt) {
    // set runtime platform
    RUNTIME_PLATFORM rtold = rtplatform;
    rtplatform = rt;
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK || rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    if (!ofmt.equals("csv"))
        throw new RuntimeException("Unsupported test output format");
    try {
        getAndLoadTestConfiguration(TEST_NAME1);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME1 + ".dml";
        programArgs = new String[] { "-explain", "-args", HOME + "input/" + DATASET, output("R") };
        runTest(true, false, null, -1);
        // read input/output and compare
        FrameReader reader1 = FrameReaderFactory.createFrameReader(InputInfo.CSVInputInfo,
            new CSVFileFormatProperties(false, ",", false));
        FrameBlock fb1 = reader1.readFrameFromHDFS(HOME + "input/" + DATASET, -1L, -1L);
        FrameReader reader2 = FrameReaderFactory.createFrameReader(InputInfo.CSVInputInfo);
        FrameBlock fb2 = reader2.readFrameFromHDFS(output("R"), -1L, -1L);
        String[][] R1 = DataConverter.convertToStringFrame(fb1);
        String[][] R2 = DataConverter.convertToStringFrame(fb2);
        TestUtils.compareFrames(R1, R2, R1.length, R1[0].length);
    }
    catch (Exception ex) {
        throw new RuntimeException(ex);
    }
    finally {
        rtplatform = rtold;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
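A typical caller of this helper is a set of per-platform JUnit methods; the ones below are hypothetical placeholders, and only "csv" is accepted, as enforced by the format check above.

@Test
public void testCSVQuotesReadWriteHybrid() {
    runCSVQuotesReadWriteTest(RUNTIME_PLATFORM.HYBRID, "csv");
}

@Test
public void testCSVQuotesReadWriteSpark() {
    runCSVQuotesReadWriteTest(RUNTIME_PLATFORM.HYBRID_SPARK, "csv");
}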