Example usage of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in the Apache incubator-systemml project, taken from class RandSizeExpressionEvalTest, method runRandTest.
/**
 * Runs the given rand size-expression test script with the requested optimizer
 * configuration and validates the result value as well as the number of
 * executed MR jobs. All touched optimizer flags are restored afterwards.
 *
 * @param testName  name of the DML test script
 * @param evalExpr  enable size expression evaluation
 * @param constFold enable constant folding
 */
private void runRandTest(String testName, boolean evalExpr, boolean constFold) {
    // snapshot optimizer flags for restore in finally
    boolean backupEval = OptimizerUtils.ALLOW_SIZE_EXPRESSION_EVALUATION;
    boolean backupFold = OptimizerUtils.ALLOW_CONSTANT_FOLDING;
    boolean backupRandRecompile = OptimizerUtils.ALLOW_RAND_JOB_RECOMPILE;
    boolean backupBranchRemoval = OptimizerUtils.ALLOW_BRANCH_REMOVAL;
    boolean backupWorstcase = OptimizerUtils.ALLOW_WORSTCASE_SIZE_EXPRESSION_EVALUATION;
    try {
        TestConfiguration config = getTestConfiguration(testName);
        config.addVariable("rows", rows);
        config.addVariable("cols", cols);
        loadTestConfiguration(config);

        // construct the junit test arguments directly (new-style invocation)
        fullDMLScriptName = SCRIPT_DIR + TEST_DIR + testName + ".dml";
        programArgs = new String[] { "-args", Integer.toString(rows), Integer.toString(cols), output("R") };

        OptimizerUtils.ALLOW_SIZE_EXPRESSION_EVALUATION = evalExpr;
        OptimizerUtils.ALLOW_CONSTANT_FOLDING = constFold;
        // disable rand specific recompile
        OptimizerUtils.ALLOW_RAND_JOB_RECOMPILE = false;
        OptimizerUtils.ALLOW_BRANCH_REMOVAL = false;
        OptimizerUtils.ALLOW_WORSTCASE_SIZE_EXPRESSION_EVALUATION = false;

        runTest(true, false, null, -1);

        // check correct propagated size via final results
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
        Assert.assertEquals("Unexpected results.", Double.valueOf(rows * cols * 3.0), dmlfile.get(new CellIndex(1, 1)));

        // check expected number of executed MR jobs:
        // none when sizes could be evaluated/folded, otherwise Rand, GMR (sum)
        int expectedExecuted = (evalExpr || constFold) ? 0 : 2;
        Assert.assertEquals("Unexpected number of executed MR jobs.", expectedExecuted, Statistics.getNoOfExecutedMRJobs());
    } finally {
        OptimizerUtils.ALLOW_SIZE_EXPRESSION_EVALUATION = backupEval;
        OptimizerUtils.ALLOW_CONSTANT_FOLDING = backupFold;
        OptimizerUtils.ALLOW_RAND_JOB_RECOMPILE = backupRandRecompile;
        OptimizerUtils.ALLOW_BRANCH_REMOVAL = backupBranchRemoval;
        OptimizerUtils.ALLOW_WORSTCASE_SIZE_EXPRESSION_EVALUATION = backupWorstcase;
    }
}
Example usage of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in the Apache incubator-systemml project, taken from class SparsityRecompileTest, method runRecompileTest.
/**
 * Runs the given sparsity-recompile test script and validates the number of
 * compiled and executed MR jobs as well as the final result value. The
 * dynamic-recompilation flag is restored afterwards.
 *
 * @param testname  name of the DML test script (also selects expected job counts)
 * @param recompile whether dynamic recompilation is enabled
 */
private void runRecompileTest(String testname, boolean recompile) {
    boolean oldFlagRecompile = CompilerConfig.FLAG_DYN_RECOMPILE;
    try {
        getAndLoadTestConfiguration(testname);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + testname + ".dml";
        programArgs = new String[] { "-explain", "-args", input("V"), Double.toString(val), output("R") };
        CompilerConfig.FLAG_DYN_RECOMPILE = recompile;

        // generate random input matrix and write it incl. metadata file
        MatrixBlock mb = MatrixBlock.randOperations((int) rows, (int) cols, sparsity, 0, 1, "uniform", System.currentTimeMillis());
        MatrixCharacteristics mc = new MatrixCharacteristics(rows, cols, OptimizerUtils.DEFAULT_BLOCKSIZE, OptimizerUtils.DEFAULT_BLOCKSIZE, (long) (rows * cols * sparsity));
        DataConverter.writeMatrixToHDFS(mb, input("V"), OutputInfo.TextCellOutputInfo, mc);
        MapReduceTool.writeMetaDataFile(input("V.mtd"), ValueType.DOUBLE, mc, OutputInfo.TextCellOutputInfo);

        runTest(true, false, null, -1);

        // CHECK compiled MR jobs:
        // reblock,GMR,GMR,GMR (one GMR less for if), plus 2 resultmerge jobs for TEST_NAME4
        int expectNumCompiled = (testname.equals(TEST_NAME2) ? 3 : 4)
            + (testname.equals(TEST_NAME4) ? 2 : 0);
        Assert.assertEquals("Unexpected number of compiled MR jobs.", expectNumCompiled, Statistics.getNoOfCompiledMRJobs());

        // CHECK executed MR jobs:
        // with recompilation all jobs fold away except the 2 resultmerge jobs of TEST_NAME4;
        // without recompilation the compiled job count is executed as-is
        int expectNumExecuted = recompile
            ? (testname.equals(TEST_NAME4) ? 2 : 0)
            : (testname.equals(TEST_NAME2) ? 3 : 4) + (testname.equals(TEST_NAME4) ? 2 : 0);
        Assert.assertEquals("Unexpected number of executed MR jobs.", expectNumExecuted, Statistics.getNoOfExecutedMRJobs());

        // compare final result value
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
        Assert.assertEquals((Double) val, dmlfile.get(new CellIndex(1, 1)));
    } catch (Exception ex) {
        // wrap checked exceptions, preserving the cause for diagnostics
        throw new RuntimeException(ex);
    } finally {
        CompilerConfig.FLAG_DYN_RECOMPILE = oldFlagRecompile;
    }
}
Example usage of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in the Apache incubator-systemml project, taken from class MultipleOrderByColsTest, method runOrderTest.
/**
 * Runs the given multi-column order test script on the requested execution
 * backend, compares the DML result against the R reference, and (for
 * TEST_NAME2 without index return) checks that the rsort rewrite was applied.
 * Runtime platform and spark config are restored afterwards.
 *
 * @param testname name of the DML/R test script
 * @param sparse   use sparse input data
 * @param desc     sort in descending order
 * @param ixret    return sort indexes instead of values
 * @param et       execution backend (MR, SPARK, or hybrid-spark default)
 */
private void runOrderTest(String testname, boolean sparse, boolean desc, boolean ixret, ExecType et) {
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch(et) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID_SPARK;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK || rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        String TEST_NAME = testname;
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        loadTestConfiguration(config);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        programArgs = new String[] { "-stats", "-args", input("A"), String.valueOf(desc).toUpperCase(), String.valueOf(ixret).toUpperCase(), output("B") };
        fullRScriptName = HOME + TEST_NAME + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + String.valueOf(desc).toUpperCase() + " " + String.valueOf(ixret).toUpperCase() + " " + expectedDir();

        // generate input, with rounding for duplicates
        double sparsity = (sparse) ? sparsity2 : sparsity1;
        double[][] A = TestUtils.round(getRandomMatrix(rows, cols, -10, 10, sparsity, 7));
        writeInputMatrixWithMTD("A", A, true);

        runTest(true, false, null, -1);
        runRScript(true);

        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("B");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("B");
        TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");

        // check for applied rewrite: assertEquals reports the actual count on failure,
        // unlike the former assertTrue(count == 1)
        if (testname.equals(TEST_NAME2) && !ixret)
            Assert.assertEquals(1, Statistics.getCPHeavyHitterCount("rsort"));
    } finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
Example usage of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in the Apache incubator-systemml project, taken from class Conv2DBackwardDataTest, method runConv2DTest.
/**
 * Runs the conv2d backward-data DML script on the requested backend and
 * compares its result matrix against the reference R implementation.
 * Runtime platform and spark config are restored afterwards.
 *
 * @param et          execution backend (SPARK, MR, or single-node default)
 * @param imgSize     input image height/width
 * @param numImg      number of images
 * @param numChannels number of input channels
 * @param numFilters  number of filters
 * @param filterSize  filter height/width
 * @param stride      convolution stride
 * @param pad         convolution padding
 * @param sparse1     use sparse first input
 * @param sparse2     use sparse second input
 */
public void runConv2DTest(ExecType et, int imgSize, int numImg, int numChannels, int numFilters, int filterSize, int stride, int pad, boolean sparse1, boolean sparse2) {
    RUNTIME_PLATFORM oldRTP = rtplatform;
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    try {
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        // select runtime platform for the requested exec type
        switch(et) {
            case SPARK:
                rtplatform = RUNTIME_PLATFORM.SPARK;
                break;
            case MR:
                rtplatform = RUNTIME_PLATFORM.HADOOP;
                break;
            default:
                rtplatform = RUNTIME_PLATFORM.SINGLE_NODE;
                break;
        }
        if (rtplatform == RUNTIME_PLATFORM.SPARK)
            DMLScript.USE_LOCAL_SPARK_CONFIG = true;
        loadTestConfiguration(config);

        // construct the junit test arguments directly
        String scriptHome = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = scriptHome + TEST_NAME + ".dml";
        String sparseVal1 = String.valueOf(sparse1).toUpperCase();
        String sparseVal2 = String.valueOf(sparse2).toUpperCase();
        long P = ConvolutionUtils.getP(imgSize, filterSize, stride, pad);
        programArgs = new String[] { "-explain", "-args", "" + imgSize, "" + numImg, "" + numChannels, "" + numFilters, "" + filterSize, "" + stride, "" + pad, "" + P, "" + P, output("B"), sparseVal1, sparseVal2 };

        runTest(true, false, null, -1);

        // run comparison R script and compare matrices
        fullRScriptName = scriptHome + TEST_NAME + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + imgSize + " " + numImg + " " + numChannels + " " + numFilters + " " + filterSize + " " + stride + " " + pad + " " + P + " " + P + " " + expectedDir() + " " + sparseVal1 + " " + sparseVal2;
        runRScript(true);
        HashMap<CellIndex, Double> bHM = readRMatrixFromFS("B");
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("B");
        TestUtils.compareMatrices(dmlfile, bHM, epsilon, "B-DML", "NumPy");
    } finally {
        rtplatform = oldRTP;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
Example usage of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in the Apache incubator-systemml project, taken from class PoolTest, method runPoolTest.
/**
 * Runs the pooling DML script on the requested backend and compares its
 * result matrix against the reference R implementation. Runtime platform
 * and spark config are restored afterwards.
 *
 * @param et          execution backend (MR, SPARK, or hybrid default)
 * @param imgSize     input image height/width
 * @param numImg      number of images
 * @param numChannels number of input channels
 * @param stride      pooling stride
 * @param pad         pooling padding
 * @param poolSize1   pool window height
 * @param poolSize2   pool window width
 * @param poolMode    pooling mode string passed to the script
 * @param sparse      use sparse input data
 */
public void runPoolTest(ExecType et, int imgSize, int numImg, int numChannels, int stride, int pad, int poolSize1, int poolSize2, String poolMode, boolean sparse) {
    RUNTIME_PLATFORM oldPlatform = rtplatform;
    // select runtime platform for the requested exec type
    if (et == ExecType.MR)
        rtplatform = RUNTIME_PLATFORM.HADOOP;
    else if (et == ExecType.SPARK)
        rtplatform = RUNTIME_PLATFORM.SPARK;
    else
        rtplatform = RUNTIME_PLATFORM.HYBRID;
    boolean oldLocalSparkConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        String sparseVal = String.valueOf(sparse).toUpperCase();
        loadTestConfiguration(getTestConfiguration(TEST_NAME));

        String scriptHome = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = scriptHome + TEST_NAME + ".dml";
        programArgs = new String[] { "-explain", "-args", String.valueOf(imgSize), String.valueOf(numImg), String.valueOf(numChannels), String.valueOf(poolSize1), String.valueOf(poolSize2), String.valueOf(stride), String.valueOf(pad), poolMode, output("B"), sparseVal };
        fullRScriptName = scriptHome + TEST_NAME + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + imgSize + " " + numImg + " " + numChannels + " " + poolSize1 + " " + poolSize2 + " " + stride + " " + pad + " " + expectedDir() + " " + sparseVal + " " + poolMode;

        // run DML and reference R scripts
        runTest(true, false, null, -1);
        runRScript(true);

        // compare result matrices
        HashMap<CellIndex, Double> bHM = readRMatrixFromFS("B");
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("B");
        TestUtils.compareMatrices(dmlfile, bHM, epsilon, "B-DML", "NumPy");
    } finally {
        rtplatform = oldPlatform;
        DMLScript.USE_LOCAL_SPARK_CONFIG = oldLocalSparkConfig;
    }
}
Aggregations