Use of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in project incubator-systemml by Apache.
The class OuterTableExpandTest, method runOuterTableTest.
/**
 * @param testname name of the DML/R test script to run
 * @param sparse whether to generate sparse input data
 * @param left expansion direction flag passed to the DML and R scripts
 * @param instType execution type (MR, SPARK, or CP)
 */
private void runOuterTableTest(String testname, boolean sparse, boolean left, ExecType instType) {
    // rtplatform for MR
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (instType) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        String TEST_NAME = testname;
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        loadTestConfiguration(config);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        programArgs = new String[] { "-explain", "-args", input("A"), String.valueOf(cols2), String.valueOf(left).toUpperCase(), output("C") };
        fullRScriptName = HOME + TEST_NAME + ".R";
        rCmd = getRCmd(inputDir(), Integer.toString(cols2), String.valueOf(left).toUpperCase(), expectedDir());
        // generate actual datasets
        double sparsity = sparse ? sparsity2 : sparsity1;
        double[][] A = TestUtils.round(getRandomMatrix(rows, 1, 1, cols2, sparsity, 235));
        writeInputMatrixWithMTD("A", A, true);
        // run the testcase (expect exceptions for table w/ 0s)
        boolean exceptionExpected = testname.equals(TEST_NAME2) && sparsity < 1.0;
        runTest(true, exceptionExpected, DMLException.class, -1);
        runRScript(true);
        if (!exceptionExpected) {
            // compare matrices
            HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("C");
            HashMap<CellIndex, Double> rfile = readRMatrixFromFS("C");
            TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
            // check meta data
            checkDMLMetaDataFile("C", new MatrixCharacteristics(left ? rows : cols2, left ? cols2 : rows, 1, 1));
            // check compiled/executed jobs
            if (rtplatform == RUNTIME_PLATFORM.HADOOP) {
                // reblock+gmr if rexpand; otherwise 3/5
                int expectedNumCompiled = 2;
                int expectedNumExecuted = expectedNumCompiled;
                checkNumCompiledMRJobs(expectedNumCompiled);
                checkNumExecutedMRJobs(expectedNumExecuted);
            }
        }
    } finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
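The save/switch/restore pattern around the runtime platform and the local Spark config recurs in every usage below. The following is a minimal sketch of a reusable helper, assuming the public static fields DMLScript.rtplatform and DMLScript.USE_LOCAL_SPARK_CONFIG; note that the tests in this listing assign an rtplatform field provided by their test base class, which mirrors the same setting. The class name PlatformScope and the try-with-resources design are illustrative, not part of the project.

import org.apache.sysml.api.DMLScript;
import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.lops.LopProperties.ExecType;

// Hypothetical helper: scopes a runtime-platform change to a try-with-resources block.
public class PlatformScope implements AutoCloseable {
    private final RUNTIME_PLATFORM _oldPlatform;
    private final boolean _oldSparkConfig;

    public PlatformScope(ExecType et) {
        // remember the previous configuration
        _oldPlatform = DMLScript.rtplatform;
        _oldSparkConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
        // map the execution type to a runtime platform, as in the tests above
        switch (et) {
            case MR:    DMLScript.rtplatform = RUNTIME_PLATFORM.HADOOP; break;
            case SPARK: DMLScript.rtplatform = RUNTIME_PLATFORM.SPARK;  break;
            default:    DMLScript.rtplatform = RUNTIME_PLATFORM.HYBRID; break;
        }
        if (DMLScript.rtplatform == RUNTIME_PLATFORM.SPARK)
            DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    }

    @Override
    public void close() {
        // restore the previous configuration, mirroring the finally blocks above
        DMLScript.rtplatform = _oldPlatform;
        DMLScript.USE_LOCAL_SPARK_CONFIG = _oldSparkConfig;
    }
}

A runner could then wrap its body in try (PlatformScope scope = new PlatformScope(instType)) { ... } and drop the explicit switch and finally restore.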
Use of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in project incubator-systemml by Apache.
The class RewriteCTableToRExpandTest, method testRewriteCTableRExpand.
private void testRewriteCTableRExpand(String testname, CropType type) {
    TestConfiguration config = getTestConfiguration(testname);
    loadTestConfiguration(config);
    int outDim = maxVal + ((type == CropType.CROP) ? -7 : 7);
    boolean unknownTests = (testname.equals(TEST_NAME5) || testname.equals(TEST_NAME6));
    RUNTIME_PLATFORM platformOld = rtplatform;
    if (unknownTests)
        rtplatform = RUNTIME_PLATFORM.SINGLE_NODE;
    try {
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + testname + ".dml";
        programArgs = new String[] { "-explain", "-stats", "-args", input("A"), String.valueOf(outDim), output("R") };
        fullRScriptName = HOME + testname + ".R";
        rCmd = getRCmd(inputDir(), String.valueOf(outDim), expectedDir());
        double[][] A = getRandomMatrix(rows, 1, 1, 10, 1.0, 7);
        writeInputMatrixWithMTD("A", A, false);
        // run test
        runTest(true, false, null, -1);
        // compare output meta data
        boolean left = (testname.equals(TEST_NAME1) || testname.equals(TEST_NAME3)
            || testname.equals(TEST_NAME5) || testname.equals(TEST_NAME6));
        boolean pos = (testname.equals(TEST_NAME1) || testname.equals(TEST_NAME2));
        int rrows = (left && pos) ? rows : outDim;
        int rcols = (!left && pos) ? rows : outDim;
        if (!unknownTests)
            checkDMLMetaDataFile("R", new MatrixCharacteristics(rrows, rcols, 1, 1));
        // check for applied rewrite
        Assert.assertEquals(Boolean.valueOf(testname.equals(TEST_NAME1) || testname.equals(TEST_NAME2) || unknownTests),
            Boolean.valueOf(heavyHittersContainsSubString("rexpand")));
    } finally {
        rtplatform = platformOld;
    }
}
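The final assertion packs the rewrite expectation and the rewrite detection into one expression: running with -stats lets heavyHittersContainsSubString("rexpand") detect whether a rexpand instruction was actually executed. An equivalent, more explicit form of that check (an editorial restatement, not the project's code):

// rexpand appears in the heavy hitters exactly when the ctable-to-rexpand rewrite applies
boolean rewriteExpected = testname.equals(TEST_NAME1) || testname.equals(TEST_NAME2) || unknownTests;
boolean rewriteApplied = heavyHittersContainsSubString("rexpand");
Assert.assertEquals("ctable-to-rexpand rewrite applied", rewriteExpected, rewriteApplied);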
Use of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in project incubator-systemml by Apache.
The class FullReblockTest, method runReblockTest.
/**
 * @param oi output format used to write the input matrices
 * @param sparse whether to generate sparse input data
 * @param type test variant (e.g., Type.Vector or Type.Multiple)
 * @param et execution type (MR, SPARK, or CP)
 */
private void runReblockTest(OutputInfo oi, boolean sparse, Type type, ExecType et) {
    String TEST_NAME = (type == Type.Multiple) ? TEST_NAME2 : TEST_NAME1;
    double sparsity = (sparse) ? sparsity2 : sparsity1;
    int rows = (type == Type.Vector) ? rowsV : rowsM;
    int cols = (type == Type.Vector) ? colsV : colsM;
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (et) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    TestConfiguration config = getTestConfiguration(TEST_NAME);
    config.addVariable("rows", rows);
    config.addVariable("cols", cols);
    loadTestConfiguration(config);
    String HOME = SCRIPT_DIR + TEST_DIR;
    fullDMLScriptName = HOME + TEST_NAME + ".dml";
    if (type == Type.Multiple) {
        programArgs = new String[] { "-args", input("A1"), input("A2"), output("C1"), output("C2") };
    } else {
        programArgs = new String[] { "-args", input("A"), output("C") };
    }
    boolean success = false;
    long seed1 = System.nanoTime();
    long seed2 = System.nanoTime() + 7;
    try {
        // run test cases with single or multiple inputs
        if (type == Type.Multiple) {
            double[][] A1 = getRandomMatrix(rows, cols, 0, 1, sparsity, seed1);
            double[][] A2 = getRandomMatrix(rows, cols, 0, 1, sparsity, seed2);
            // force binary reblock for 999 to match 1000
            writeMatrix(A1, input("A1"), oi, rows, cols, blocksize - 1, blocksize - 1);
            writeMatrix(A2, input("A2"), oi, rows, cols, blocksize - 1, blocksize - 1);
            runTest(true, false, null, -1);
            double[][] C1 = readMatrix(output("C1"), InputInfo.BinaryBlockInputInfo, rows, cols, blocksize, blocksize);
            double[][] C2 = readMatrix(output("C2"), InputInfo.BinaryBlockInputInfo, rows, cols, blocksize, blocksize);
            TestUtils.compareMatrices(A1, C1, rows, cols, eps);
            TestUtils.compareMatrices(A2, C2, rows, cols, eps);
        } else {
            double[][] A = getRandomMatrix(rows, cols, 0, 1, sparsity, seed1);
            // force binary reblock for 999 to match 1000
            writeMatrix(A, input("A"), oi, rows, cols, blocksize - 1, blocksize - 1);
            runTest(true, false, null, -1);
            double[][] C = readMatrix(output("C"), InputInfo.BinaryBlockInputInfo, rows, cols, blocksize, blocksize);
            TestUtils.compareMatrices(A, C, rows, cols, eps);
        }
        success = true;
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail();
    } finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        if (!success)
            LOG.error("FullReblockTest failed with seed=" + seed1 + ", seed2=" + seed2);
    }
}
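Concrete JUnit tests typically drive this runner once per combination of input format, sparsity, shape, and backend. A hypothetical pair of entry points is sketched below; the @Test method names are illustrative, while OutputInfo.TextCellOutputInfo and OutputInfo.CSVOutputInfo are standard constants of org.apache.sysml.runtime.matrix.data.OutputInfo, and Type.Vector/Type.Multiple appear in the runner above.

@Test
public void testTextCellVectorDenseSpark() {
    // text-cell input, dense vector, Spark backend
    runReblockTest(OutputInfo.TextCellOutputInfo, false, Type.Vector, ExecType.SPARK);
}

@Test
public void testCSVMultipleSparseMR() {
    // CSV input, sparse data, two inputs, MapReduce backend
    runReblockTest(OutputInfo.CSVOutputInfo, true, Type.Multiple, ExecType.MR);
}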
Use of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in project incubator-systemml by Apache.
The class RandTest5, method runRandTest.
/**
 * @param sparse whether to generate sparse output
 * @param type value range of the generated data (positive only, negative only, or mixed)
 * @param et execution type (MR, SPARK, or CP)
 */
private void runRandTest(boolean sparse, RandMinMaxType type, ExecType et) {
    // keep old runtime
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (et) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    // set basic parameters
    String TEST_NAME = TEST_NAME1;
    double sparsity = (sparse) ? sparsity2 : sparsity1;
    double min = -1, max = -1;
    switch (type) {
        case POSITIVE_ONLY:
            min = minP;
            max = maxP;
            break;
        case NEGATIVE_ONLY:
            min = minN;
            max = maxN;
            break;
        case NEGATIVE_POSITIVE:
            min = minN;
            max = maxP;
            break;
    }
    TestConfiguration config = getTestConfiguration(TEST_NAME);
    config.addVariable("rows", rows);
    config.addVariable("cols", cols);
    loadTestConfiguration(config);
    String HOME = SCRIPT_DIR + TEST_DIR;
    fullDMLScriptName = HOME + TEST_NAME + ".dml";
    programArgs = new String[] { "-explain", "-args", Integer.toString(rows), Integer.toString(cols),
        Double.toString(min), Double.toString(max), Double.toString(sparsity), output("C") };
    try {
        // run tests
        runTest(true, false, null, -1);
        // check validity of results (rows, cols, min, max)
        checkResults(rows, cols, min, max);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail();
    } finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
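checkResults(rows, cols, min, max) validates the generated output against the requested dimensions and value range. The following is a minimal sketch of such a range check, assuming the readDMLMatrixFromHDFS helper used elsewhere in this listing; the actual checkResults implementation may differ, and only explicitly stored cells are inspected here.

// Hypothetical range check over the DML output matrix.
private void checkOutputRange(String varName, double min, double max) {
    HashMap<CellIndex, Double> C = readDMLMatrixFromHDFS(varName);
    for (Double v : C.values()) {
        // stored (non-zero) cells of rand() must fall within [min, max]
        Assert.assertTrue("value out of range: " + v, v >= min && v <= max);
    }
}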
Use of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in project incubator-systemml by Apache.
The class RandVarSeedTest, method runRandVarMinMaxTest.
/**
 * @param TEST_NAME name of the DML test script
 * @param instType execution type (MR, SPARK, or CP)
 */
private void runRandVarMinMaxTest(String TEST_NAME, ExecType instType) {
    // rtplatform for MR
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (instType) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        getAndLoadTestConfiguration(TEST_NAME);
        long seed = new Random(7).nextLong();
        String HOME = SCRIPT_DIR + TEST_DIR;
        String fnameSeed = input("s");
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        programArgs = new String[] { "-args", Integer.toString(rows), Integer.toString(cols), fnameSeed, output("R") };
        // write seed as input scalar (to force treatment as variable)
        MapReduceTool.writeIntToHDFS(seed, fnameSeed);
        MapReduceTool.writeScalarMetaDataFile(fnameSeed + ".mtd", ValueType.INT);
        // run test
        runTest(true, false, null, -1);
        // generate expected matrix
        MatrixBlock expectedMB = MatrixBlock.randOperations(rows, cols, 1.0, 0, 1, "uniform", seed);
        double[][] expectedMatrix = DataConverter.convertToDoubleMatrix(expectedMB);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
        double[][] resultMatrix = TestUtils.convertHashMapToDoubleArray(dmlfile);
        TestUtils.compareMatrices(expectedMatrix, resultMatrix, rows, cols, 0);
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
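The zero-tolerance comparison above relies on rand being deterministic for a fixed seed: regenerating with the same seed through MatrixBlock.randOperations reproduces the matrix exactly. The small sketch below illustrates that property using only calls already shown in this listing; the dimensions are arbitrary.

// Regenerate twice with the same seed and compare with tolerance 0.
long seed = new Random(7).nextLong();
MatrixBlock mb1 = MatrixBlock.randOperations(1000, 100, 1.0, 0, 1, "uniform", seed);
MatrixBlock mb2 = MatrixBlock.randOperations(1000, 100, 1.0, 0, 1, "uniform", seed);
double[][] m1 = DataConverter.convertToDoubleMatrix(mb1);
double[][] m2 = DataConverter.convertToDoubleMatrix(mb2);
// same seed and parameters => identical matrices, hence tolerance 0 in the test above
TestUtils.compareMatrices(m1, m2, 1000, 100, 0);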