Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class FullSignTest, method runSignTest.
/**
 * @param testname
 * @param sparse
 * @param instType
 */
private void runSignTest(String testname, boolean sparse, ExecType instType) {
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch(instType) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        String TEST_NAME = testname;
        double sparsity = (sparse) ? spSparse : spDense;
        String TEST_CACHE_DIR = "";
        if (TEST_CACHE_ENABLED) {
            TEST_CACHE_DIR = sparsity + "/";
        }
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        loadTestConfiguration(config, TEST_CACHE_DIR);
        // This is for running the junit test the new way, i.e., construct the arguments directly
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        // stats parameter required for opcode check
        programArgs = new String[] { "-stats", "-args", input("A"), output("B") };
        fullRScriptName = HOME + TEST_NAME + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
        // generate actual dataset
        double[][] A = getRandomMatrix(rows, cols, -1, 1, sparsity, 7);
        writeInputMatrixWithMTD("A", A, true);
        runTest(true, false, null, -1);
        runRScript(true);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("B");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("B");
        TestUtils.compareMatrices(dmlfile, rfile, 0, "Stat-DML", "Stat-R");
        // check generated opcode
        if (instType == ExecType.CP)
            Assert.assertTrue("Missing opcode: sign", Statistics.getCPHeavyHitterOpCodes().contains("sign"));
        else if (instType == ExecType.SPARK)
            Assert.assertTrue("Missing opcode: " + Instruction.SP_INST_PREFIX + "sign", Statistics.getCPHeavyHitterOpCodes().contains(Instruction.SP_INST_PREFIX + "sign"));
    } finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
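A helper like this is typically driven by a set of small JUnit methods that cover the dense/sparse and backend combinations. The following sketch is hypothetical: the actual test-method names and the TEST_NAME1 constant in FullSignTest are assumptions, not the project's code.

// Hypothetical driver methods (names and TEST_NAME1 constant are assumptions).
@Test
public void testSignDenseCP() { runSignTest(TEST_NAME1, false, ExecType.CP); }

@Test
public void testSignSparseCP() { runSignTest(TEST_NAME1, true, ExecType.CP); }

@Test
public void testSignDenseSpark() { runSignTest(TEST_NAME1, false, ExecType.SPARK); }

@Test
public void testSignSparseMR() { runSignTest(TEST_NAME1, true, ExecType.MR); }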
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class MLUnaryBuiltinTest, method runMLUnaryBuiltinTest.
/**
 * @param testname
 * @param type
 * @param sparse
 * @param instType
 */
private void runMLUnaryBuiltinTest(String testname, InputType type, boolean sparse, ExecType instType) {
    // rtplatform for MR
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch(instType) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        int rows = rowsMatrix;
        int cols = (type == InputType.COL_VECTOR) ? 1 : colsMatrix;
        double sparsity = (sparse) ? spSparse : spDense;
        String TEST_NAME = testname;
        String TEST_CACHE_DIR = "";
        if (TEST_CACHE_ENABLED) {
            TEST_CACHE_DIR = testname + type.ordinal() + "_" + sparsity + "/";
        }
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        loadTestConfiguration(config, TEST_CACHE_DIR);
        // This is for running the junit test the new way, i.e., construct the arguments directly
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        programArgs = new String[] { "-stats", "-args", input("A"), output("B") };
        fullRScriptName = HOME + TEST_NAME + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
        // generate actual dataset
        double[][] A = getRandomMatrix(rows, cols, -0.05, 1, sparsity, 7);
        writeInputMatrixWithMTD("A", A, true);
        runTest(true, false, null, -1);
        // in CP no MR jobs should be executed
        if (instType == ExecType.CP)
            Assert.assertEquals("Unexpected number of executed MR jobs.", 0, Statistics.getNoOfExecutedMRJobs());
        runRScript(true);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("B");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("B");
        TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
    } finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
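Since this page indexes uses of MatrixValue.CellIndex, a minimal sketch of looking up a single cell in the parsed result map may help. It assumes the (row, column) constructor of CellIndex and the 1-based cell indexes produced by readDMLMatrixFromHDFS, and is only an illustration, not code from MLUnaryBuiltinTest.

// Hypothetical single-cell lookup (assumes CellIndex(row, col) constructor and 1-based indexes).
HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("B");
Double cell = dmlfile.get(new CellIndex(1, 1));
// absent keys correspond to zero cells in the sparse text representation
double b11 = (cell != null) ? cell.doubleValue() : 0d;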
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class MinusTest, method runTestMinus.
private void runTestMinus(boolean sparse, ExecType et) {
    // handle rows and cols
    RUNTIME_PLATFORM platformOld = rtplatform;
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (et == ExecType.SPARK) {
        rtplatform = RUNTIME_PLATFORM.SPARK;
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    } else {
        rtplatform = (et == ExecType.MR) ? RUNTIME_PLATFORM.HADOOP : RUNTIME_PLATFORM.SINGLE_NODE;
    }
    try {
        // register test configuration
        TestConfiguration config = getTestConfiguration(TEST_NAME1);
        config.addVariable("rows", rows);
        config.addVariable("cols", cols);
        String TEST_CACHE_DIR = "";
        if (TEST_CACHE_ENABLED) {
            TEST_CACHE_DIR = sparse + "/";
        }
        loadTestConfiguration(config, TEST_CACHE_DIR);
        /* This is for running the junit test the new way, i.e., construct the arguments directly */
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME1 + ".dml";
        programArgs = new String[] { "-args", input("X"), String.valueOf(rows), String.valueOf(cols), output("Y") };
        fullRScriptName = HOME + TEST_NAME1 + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
        double sparsity = sparse ? sparsitySparse : sparsityDense;
        double[][] X = getRandomMatrix(rows, cols, 0, 1, sparsity, 7);
        writeInputMatrix("X", X, true);
        runTest(true, false, null, -1);
        runRScript(true);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("Y");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("Y");
        TestUtils.compareMatrices(dmlfile, rfile, 1e-12, "Stat-DML", "Stat-R");
    } finally {
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        // reset platform for additional tests
        rtplatform = platformOld;
    }
}
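All of these examples compare the two CellIndex-keyed maps with TestUtils.compareMatrices. As a conceptual illustration only (not the project's actual implementation), a cell-wise comparison over such maps could look like the sketch below, treating missing keys as zeros; it additionally assumes java.util.HashSet is imported.

// Hypothetical sketch of a cell-wise comparison over two sparse cell maps.
static void assertCellsEqual(HashMap<CellIndex, Double> dml, HashMap<CellIndex, Double> r, double eps) {
    HashSet<CellIndex> keys = new HashSet<CellIndex>(dml.keySet());
    keys.addAll(r.keySet());
    for (CellIndex ix : keys) {
        double v1 = dml.containsKey(ix) ? dml.get(ix) : 0d;
        double v2 = r.containsKey(ix) ? r.get(ix) : 0d;
        Assert.assertEquals("Mismatch at " + ix, v1, v2, eps);
    }
}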
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class ReplaceTest, method runTestReplace.
/**
 * @param test
 * @param pattern
 * @param sparse
 * @param etype
 */
private void runTestReplace(String test, double pattern, boolean sparse, ExecType etype) {
    RUNTIME_PLATFORM platformOld = rtplatform;
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    try {
        if (etype == ExecType.SPARK) {
            rtplatform = RUNTIME_PLATFORM.SPARK;
        } else {
            rtplatform = (etype == ExecType.MR) ? RUNTIME_PLATFORM.HADOOP : RUNTIME_PLATFORM.HYBRID;
        }
        if (rtplatform == RUNTIME_PLATFORM.SPARK)
            DMLScript.USE_LOCAL_SPARK_CONFIG = true;
        double sparsity = (sparse) ? sparsity2 : sparsity1;
        // register test configuration
        TestConfiguration config = getTestConfiguration(test);
        config.addVariable("rows", rows);
        config.addVariable("cols", cols);
        loadTestConfiguration(config);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + test + ".dml";
        // the pattern argument is only respected for TEST_NAME1
        programArgs = new String[] { "-args", input("A"), String.valueOf(rows), String.valueOf(cols), output("C"), String.valueOf(pattern) };
        fullRScriptName = HOME + test + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + pattern + " " + expectedDir();
        double[][] A = getRandomMatrix(rows, cols, 0, 1, sparsity, 7);
        replaceRandom(A, rows, cols, pattern, 10);
        writeInputMatrix("A", A, true);
        writeExpectedMatrix("A", A);
        runTest(true, false, null, -1);
        runRScript(true);
        // expected MR jobs: reblock in test1, reblock+GMR in test2-4
        int numMRExpect = (etype == ExecType.MR) ? (test.equals(TEST_NAME1) ? 1 : test.equals(TEST_NAME5) ? 3 : 2) : 0;
        Assert.assertEquals("Unexpected number of executed MR jobs.", numMRExpect, Statistics.getNoOfExecutedMRJobs());
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("C");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("C");
        TestUtils.compareMatrices(dmlfile, rfile, 1e-14, "Stat-DML", "Stat-R");
    } finally {
        // reset platform for additional tests
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
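The replaceRandom helper used above is not shown on this page. A plausible sketch of such a helper, which plants the pattern value at a number of random positions so the replace operation has cells to match, is given below; this is an assumption, not ReplaceTest's actual implementation.

// Hypothetical helper: overwrite 'len' randomly chosen cells with 'value'.
private static void replaceRandom(double[][] A, int rows, int cols, double value, int len) {
    Random rand = new Random(7);
    for (int i = 0; i < len; i++)
        A[rand.nextInt(rows)][rand.nextInt(cols)] = value;
}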
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class FullDistributionTest, method runDFTest.
/**
 * Internal test method - all these tests are expected to run in CP independent of the passed
 * instType. However, we test all backends to ensure correct compilation in the presence of
 * forced execution types.
 *
 * @param type
 * @param inverse
 * @param param1
 * @param param2
 * @param instType
 */
private void runDFTest(TEST_TYPE type, boolean inverse, Double param1, Double param2, ExecType instType) {
    // setup multi backend configuration
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch(instType) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        // set test and parameters
        getAndLoadTestConfiguration(TEST_NAME);
        double in = (new Random(System.nanoTime())).nextDouble();
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME + "_" + type.toString() + ".dml";
        fullRScriptName = HOME + TEST_NAME + "_" + type.toString() + ".R";
        switch(type) {
            case NORMAL_NOPARAMS:
                programArgs = new String[] { "-args", Double.toString(in), output("dfout") };
                rCmd = "Rscript" + " " + fullRScriptName + " " + Double.toString(in) + " " + expected("dfout");
                break;
            case NORMAL_MEAN:
            case NORMAL_SD:
            case T:
            case CHISQ:
            case EXP:
                programArgs = new String[] { "-args", Double.toString(in), Double.toString(param1), output("dfout") };
                rCmd = "Rscript" + " " + fullRScriptName + " " + Double.toString(in) + " " + Double.toString(param1) + " " + expected("dfout");
                break;
            case NORMAL:
            case F:
                programArgs = new String[] { "-args", Double.toString(in), Double.toString(param1), Double.toString(param2), output("dfout") };
                rCmd = "Rscript" + " " + fullRScriptName + " " + Double.toString(in) + " " + Double.toString(param1) + " " + Double.toString(param2) + " " + expected("dfout");
                break;
            default:
                throw new RuntimeException("Invalid distribution function: " + type);
        }
        // run test
        runTest(true, false, null, -1);
        runRScript(true);
        // compare results
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("dfout");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("dfout");
        TestUtils.compareMatrices(dmlfile, rfile, 1e-8, "DMLout", "Rout");
    } finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
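A typical invocation of runDFTest from a JUnit method would look like the sketch below. The method names and parameter values are illustrative assumptions: for TEST_TYPE.NORMAL, param1 and param2 presumably act as mean and standard deviation, and for one-parameter distributions such as EXP, param2 is unused by the branch shown above.

// Hypothetical driver methods; names and parameter values are assumptions.
@Test
public void testNormalCP() {
    runDFTest(TEST_TYPE.NORMAL, false, 0.0, 1.0, ExecType.CP);
}

@Test
public void testExpSpark() {
    runDFTest(TEST_TYPE.EXP, false, 2.0, null, ExecType.SPARK);
}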