Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class FullMatrixMultiplicationTransposeSelf2Test, method runTransposeSelfMatrixMultiplicationTest:
/**
 * @param type      MMTSJ type (LEFT for t(X)%*%X, RIGHT for X%*%t(X))
 * @param instType  execution type (CP, MR, or SPARK)
 * @param sparse    whether to generate sparse input data
 */
private void runTransposeSelfMatrixMultiplicationTest(MMTSJType type, ExecType instType, boolean sparse) {
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (instType) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    // force tsmm2 to prevent mapmm
    if (instType == ExecType.SPARK)
        AggBinaryOp.FORCED_MMULT_METHOD = MMultMethod.TSMM2;
    // setup exec type, rows, cols, caching dir
    int rows = (type == MMTSJType.LEFT) ? rows1 : cols1;
    int cols = (type == MMTSJType.LEFT) ? cols1 : rows1;
    double sparsity = sparse ? sparsity2 : sparsity1;
    String TEST_NAME = (type == MMTSJType.LEFT) ? TEST_NAME1 : TEST_NAME2;
    String TEST_CACHE_DIR = "";
    if (TEST_CACHE_ENABLED)
        TEST_CACHE_DIR = rows + "_" + cols + "_" + sparsity + "/";
    try {
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        loadTestConfiguration(config, TEST_CACHE_DIR);
        /* This is for running the junit test the new way, i.e., construct the arguments directly */
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        programArgs = new String[] { "-stats", "-args", input("A"), Integer.toString(rows), Integer.toString(cols), output("B") };
        fullRScriptName = HOME + TEST_NAME + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
        // generate actual dataset
        double[][] A = getRandomMatrix(rows, cols, 0, 1, sparsity, 7);
        writeInputMatrix("A", A, true);
        // run dml and R scripts
        runTest(true, false, null, -1);
        runRScript(true);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("B");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("B");
        TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
        // check for compiled tsmm instructions
        if (instType == ExecType.SPARK || instType == ExecType.CP) {
            String opcode = (instType == ExecType.SPARK) ? Instruction.SP_INST_PREFIX + "tsmm2" : "tsmm";
            Assert.assertTrue("Missing opcode: " + opcode, Statistics.getCPHeavyHitterOpCodes().contains(opcode));
        }
    }
    finally {
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        AggBinaryOp.FORCED_MMULT_METHOD = null;
        rtplatform = platformOld;
    }
}
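All of the examples on this page rely on the same pattern: result matrices are read back into a HashMap<CellIndex, Double>, where CellIndex identifies a (row, column) position and serves as the map key. Below is a minimal sketch of that usage, assuming CellIndex's public (row, column) constructor and value-based equals/hashCode as declared in MatrixValue; the class name CellIndexSketch is invented for the example.

import java.util.HashMap;
import org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex;

public class CellIndexSketch {
    public static void main(String[] args) {
        // sparse cell representation: only non-zero cells are materialized
        HashMap<CellIndex, Double> cells = new HashMap<>();
        cells.put(new CellIndex(1, 1), 7.0);
        cells.put(new CellIndex(3, 2), -2.5);
        // equals/hashCode are defined on (row, column), so a freshly
        // constructed key finds the previously stored value
        Double v = cells.get(new CellIndex(3, 2));
        System.out.println("cell (3,2) = " + v); // -2.5
        // absent cells are implicitly zero in this representation
        double zero = cells.getOrDefault(new CellIndex(2, 2), 0.0);
        System.out.println("cell (2,2) = " + zero); // 0.0
    }
}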
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class FullMatrixMultiplicationTransposeSelfTest, method runTransposeSelfMatrixMultiplicationTest:
/**
 * @param type      MMTSJ type (LEFT for t(X)%*%X, RIGHT for X%*%t(X))
 * @param instType  execution type (CP or MR)
 * @param sparse    whether to generate sparse input data
 */
private void runTransposeSelfMatrixMultiplicationTest(MMTSJType type, ExecType instType, boolean sparse) {
    // setup exec type, rows, cols
    int rows = -1, cols = -1;
    String TEST_NAME = null;
    if (type == MMTSJType.LEFT) {
        if (instType == ExecType.CP) {
            rows = rows1;
            cols = cols1;
        }
        else { // if type MR
            rows = rows2;
            cols = cols2;
        }
        TEST_NAME = TEST_NAME1;
    }
    else {
        if (instType == ExecType.CP) {
            rows = cols1;
            cols = rows1;
        }
        else { // if type MR
            rows = cols2;
            cols = rows2;
        }
        TEST_NAME = TEST_NAME2;
    }
    double sparsity = sparse ? sparsity2 : sparsity1;
    String TEST_CACHE_DIR = "";
    if (TEST_CACHE_ENABLED) {
        TEST_CACHE_DIR = rows + "_" + cols + "_" + sparsity + "/";
    }
    // rtplatform for MR
    RUNTIME_PLATFORM platformOld = rtplatform;
    rtplatform = (instType == ExecType.MR) ? RUNTIME_PLATFORM.HADOOP : RUNTIME_PLATFORM.HYBRID;
    try {
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        loadTestConfiguration(config, TEST_CACHE_DIR);
        /* This is for running the junit test the new way, i.e., construct the arguments directly */
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        programArgs = new String[] { "-args", input("A"), Integer.toString(rows), Integer.toString(cols), output("B") };
        fullRScriptName = HOME + TEST_NAME + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
        // generate actual dataset
        double[][] A = getRandomMatrix(rows, cols, 0, 1, sparsity, 7);
        writeInputMatrix("A", A, true);
        boolean exceptionExpected = false;
        runTest(true, exceptionExpected, null, -1);
        runRScript(true);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("B");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("B");
        TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
    }
    finally {
        rtplatform = platformOld;
    }
}
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class FullPowerTest, method runPowerTest:
/**
 * @param dt1       data type of the left operand (MATRIX or SCALAR)
 * @param dt2       data type of the right operand (MATRIX or SCALAR)
 * @param sparse    whether to generate sparse matrix input data
 * @param instType  execution type (CP or MR)
 */
private void runPowerTest(DataType dt1, DataType dt2, boolean sparse, ExecType instType) {
    // rtplatform for MR
    RUNTIME_PLATFORM platformOld = rtplatform;
    rtplatform = (instType == ExecType.MR) ? RUNTIME_PLATFORM.HADOOP : RUNTIME_PLATFORM.HYBRID;
    double sparsity = sparse ? sparsity2 : sparsity1;
    String TEST_CACHE_DIR = "";
    if (TEST_CACHE_ENABLED) {
        double sparsityLeft = 1.0;
        if (dt1 == DataType.MATRIX) {
            sparsityLeft = sparsity;
        }
        double sparsityRight = 1.0;
        if (dt2 == DataType.MATRIX) {
            sparsityRight = sparsity;
        }
        TEST_CACHE_DIR = sparsityLeft + "_" + sparsityRight + "/";
    }
    try {
        String TEST_NAME = TEST_NAME1;
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        loadTestConfiguration(config, TEST_CACHE_DIR);
        /* This is for running the junit test the new way, i.e., construct the arguments directly */
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        programArgs = new String[] { "-args", input("A"), input("B"), output("C") };
        fullRScriptName = HOME + TEST_NAME + ".R";
        rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
        if (dt1 == DataType.SCALAR && dt2 == DataType.SCALAR) {
            // Clear OUT folder to prevent access denied errors running DML script
            // for tests testPowSSSparseCP, testPowSSSparseMR, testPowSSDenseCP, testPowSSDenseMR
            // due to setOutAndExpectedDeletionDisabled(true).
            TestUtils.clearDirectory(outputDir());
        }
        // generate dataset A
        if (dt1 == DataType.MATRIX) {
            double[][] A = getRandomMatrix(rows, cols, min, max, sparsity, 7);
            MatrixCharacteristics mcA = new MatrixCharacteristics(rows, cols, OptimizerUtils.DEFAULT_BLOCKSIZE, OptimizerUtils.DEFAULT_BLOCKSIZE, (long) (rows * cols * sparsity));
            writeInputMatrixWithMTD("A", A, true, mcA);
        }
        else {
            double[][] A = getRandomMatrix(1, 1, min, max, 1.0, 7);
            writeScalarInputMatrixWithMTD("A", A, true);
        }
        // generate dataset B
        if (dt2 == DataType.MATRIX) {
            MatrixCharacteristics mcB = new MatrixCharacteristics(rows, cols, OptimizerUtils.DEFAULT_BLOCKSIZE, OptimizerUtils.DEFAULT_BLOCKSIZE, (long) (rows * cols * sparsity));
            double[][] B = getRandomMatrix(rows, cols, min, max, sparsity, 3);
            writeInputMatrixWithMTD("B", B, true, mcB);
        }
        else {
            double[][] B = getRandomMatrix(1, 1, min, max, 1.0, 3);
            writeScalarInputMatrixWithMTD("B", B, true);
        }
        boolean exceptionExpected = false;
        runTest(true, exceptionExpected, null, -1);
        runRScript(true);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = null;
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("C");
        if (dt1 == DataType.SCALAR && dt2 == DataType.SCALAR)
            dmlfile = readScalarMatrixFromHDFS("C");
        else
            dmlfile = readDMLMatrixFromHDFS("C");
        TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R", true);
    }
    finally {
        rtplatform = platformOld;
    }
}
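The compareMatrices calls in these tests amount to an epsilon-tolerance check per cell across the two maps. The sketch below is only an illustration of that check, not the actual TestUtils.compareMatrices implementation; it assumes absent keys denote zero cells, and the class and method names are invented.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex;

public class CellwiseCompareSketch {
    // returns true if every cell of the two sparse cell maps agrees within eps
    public static boolean matricesMatch(HashMap<CellIndex, Double> a, HashMap<CellIndex, Double> b, double eps) {
        Set<CellIndex> keys = new HashSet<>(a.keySet());
        keys.addAll(b.keySet());
        for (CellIndex ix : keys) {
            double va = a.getOrDefault(ix, 0.0); // missing cells count as zero
            double vb = b.getOrDefault(ix, 0.0);
            if (Math.abs(va - vb) > eps)
                return false;
        }
        return true;
    }
}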
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class CompressedCellwiseTest, method testCompressedCellwise:
private void testCompressedCellwise(String testname, SparsityType stype, ValueType vtype, ExecType et) {
    boolean oldRewrites = OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION;
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (et) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID_SPARK;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK || rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = true;
        TestConfiguration config = getTestConfiguration(testname);
        loadTestConfiguration(config);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + testname + ".dml";
        programArgs = new String[] { "-explain", "-stats", "-args", input("X"), output("R") };
        fullRScriptName = HOME + testname + ".R";
        rCmd = getRCmd(inputDir(), expectedDir());
        // select input data sparsity
        double sparsity = -1;
        switch (stype) {
            case DENSE:
                sparsity = sparsity1;
                break;
            case SPARSE:
                sparsity = sparsity2;
                break;
            case EMPTY:
                sparsity = sparsity3;
                break;
        }
        // generate input data
        double min = (vtype == ValueType.CONST) ? 10 : -10;
        double[][] X = TestUtils.generateTestMatrix(rows, cols, min, 10, sparsity, 7);
        if (vtype == ValueType.RAND_ROUND_OLE || vtype == ValueType.RAND_ROUND_DDC) {
            CompressedMatrixBlock.ALLOW_DDC_ENCODING = (vtype == ValueType.RAND_ROUND_DDC);
            X = TestUtils.round(X);
        }
        writeInputMatrixWithMTD("X", X, true);
        // run tests
        runTest(true, false, null, -1);
        runRScript(true);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("R");
        TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
        Assert.assertTrue(heavyHittersContainsSubString("spoofCell") || heavyHittersContainsSubString("sp_spoofCell"));
        Assert.assertTrue(heavyHittersContainsSubString("compress") || heavyHittersContainsSubString("sp_compress"));
    }
    finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = oldRewrites;
        OptimizerUtils.ALLOW_AUTO_VECTORIZATION = true;
        OptimizerUtils.ALLOW_OPERATOR_FUSION = true;
        CompressedMatrixBlock.ALLOW_DDC_ENCODING = true;
    }
}
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class CompressedRowAggregateTest, method testCompressedRowAggregate:
private void testCompressedRowAggregate(String testname, SparsityType stype, ValueType vtype, ExecType et) {
    boolean oldRewrites = OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION;
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (et) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID_SPARK;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK || rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = true;
        TestConfiguration config = getTestConfiguration(testname);
        loadTestConfiguration(config);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + testname + ".dml";
        programArgs = new String[] { "-explain", "-stats", "-args", input("X"), output("R") };
        fullRScriptName = HOME + testname + ".R";
        rCmd = getRCmd(inputDir(), expectedDir());
        // select input data sparsity
        double sparsity = -1;
        switch (stype) {
            case DENSE:
                sparsity = sparsity1;
                break;
            case SPARSE:
                sparsity = sparsity2;
                break;
            case EMPTY:
                sparsity = sparsity3;
                break;
        }
        // generate input data
        double min = (vtype == ValueType.CONST) ? 10 : -10;
        double[][] X = TestUtils.generateTestMatrix(rows, cols, min, 10, sparsity, 7);
        if (vtype == ValueType.RAND_ROUND_OLE || vtype == ValueType.RAND_ROUND_DDC) {
            CompressedMatrixBlock.ALLOW_DDC_ENCODING = (vtype == ValueType.RAND_ROUND_DDC);
            X = TestUtils.round(X);
        }
        writeInputMatrixWithMTD("X", X, true);
        // run tests
        runTest(true, false, null, -1);
        runRScript(true);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("R");
        TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
        Assert.assertTrue(heavyHittersContainsSubString("spoofRA", 2) || heavyHittersContainsSubString("sp_spoofRA", 2));
        Assert.assertTrue(heavyHittersContainsSubString("compress") || heavyHittersContainsSubString("sp_compress"));
    }
    finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = oldRewrites;
        OptimizerUtils.ALLOW_AUTO_VECTORIZATION = true;
        OptimizerUtils.ALLOW_OPERATOR_FUSION = true;
        CompressedMatrixBlock.ALLOW_DDC_ENCODING = true;
    }
}
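The helpers readDMLMatrixFromHDFS, readScalarMatrixFromHDFS, and readRMatrixFromFS populate such cell maps from the written result files. The following is a purely hypothetical reader for a one-cell-per-line "row col value" text format, shown only to illustrate where CellIndex keys come from; the real test utilities also handle metadata files and other output formats, and the class and method names here are invented.

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex;

public class TextCellReaderSketch {
    // parses lines of the form "row col value" (1-based indices) into a cell map
    public static HashMap<CellIndex, Double> readTextCells(String fname) throws IOException {
        HashMap<CellIndex, Double> cells = new HashMap<>();
        try (BufferedReader br = new BufferedReader(new FileReader(fname))) {
            String line;
            while ((line = br.readLine()) != null) {
                line = line.trim();
                if (line.isEmpty() || line.startsWith("%")) // skip blanks and comment lines
                    continue;
                String[] parts = line.split("\\s+");
                CellIndex ix = new CellIndex(Integer.parseInt(parts[0]), Integer.parseInt(parts[1]));
                cells.put(ix, Double.parseDouble(parts[2]));
            }
        }
        return cells;
    }
}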