Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class RewriteBinaryMV2OuterTest, method testRewriteBinaryMV2Outer:
private void testRewriteBinaryMV2Outer(String opcode, boolean rewrites) {
    boolean oldFlag = OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION;
    try {
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        loadTestConfiguration(config);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        programArgs = new String[] { "-stats", "-args", input("A"), input("B"), opcode, output("R") };
        fullRScriptName = HOME + TEST_NAME + ".R";
        rCmd = getRCmd(inputDir(), opcode, expectedDir());
        OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = rewrites;
        // generate actual dataset
        double[][] A = getRandomMatrix(128, 1, -5, 5, 0.9, 123);
        double[][] B = getRandomMatrix(1, 256, -5, 5, 0.9, 456);
        writeInputMatrixWithMTD("A", A, true);
        writeInputMatrixWithMTD("B", B, true);
        // run test
        runTest(true, false, null, -1);
        runRScript(true);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
        HashMap<CellIndex, Double> rfile = readRMatrixFromFS("R");
        TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
        // check for applied rewrites
        if (rewrites)
            Assert.assertTrue(!heavyHittersContainsSubString("ba+*"));
    } finally {
        OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = oldFlag;
    }
}
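For reference, the HashMap<CellIndex, Double> returned by readDMLMatrixFromHDFS maps 1-based cell coordinates to values. A minimal sketch of inspecting such a result is shown below; it assumes CellIndex exposes public row and column fields and that only cells actually written to the output are present in the map (requires java.util.Map in scope):

    // Sketch only: iterate the cell map produced by readDMLMatrixFromHDFS("R").
    // Assumes CellIndex has public 1-based row/column fields.
    HashMap<CellIndex, Double> cells = readDMLMatrixFromHDFS("R");
    for (Map.Entry<CellIndex, Double> e : cells.entrySet()) {
        CellIndex ix = e.getKey();
        System.out.println("(" + ix.row + "," + ix.column + ") = " + e.getValue());
    }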
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class RewriteIndexingVectorizationTest, method testRewriteIndexingVectorization:
private void testRewriteIndexingVectorization(String testname, boolean vectorize) {
    boolean oldFlag = OptimizerUtils.ALLOW_AUTO_VECTORIZATION;
    try {
        TestConfiguration config = getTestConfiguration(testname);
        loadTestConfiguration(config);
        int rows = testname.equals(TEST_NAME1) ? dim2 : dim1;
        int cols = testname.equals(TEST_NAME1) ? dim1 : dim2;
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + testname + ".dml";
        programArgs = new String[] { "-stats", "-args", String.valueOf(rows), String.valueOf(cols), output("R") };
        OptimizerUtils.ALLOW_AUTO_VECTORIZATION = vectorize;
        runTest(true, false, null, -1);
        // compare output
        double ret = readDMLMatrixFromHDFS("R").get(new CellIndex(1, 1));
        Assert.assertTrue(ret == (711 * 5));
        // check for applied rewrite
        int expected = vectorize ? 1 : 5;
        Assert.assertTrue(Statistics.getCPHeavyHitterCount("rightIndex") == expected + 1);
        Assert.assertTrue(Statistics.getCPHeavyHitterCount("leftIndex") == expected);
    } finally {
        OptimizerUtils.ALLOW_AUTO_VECTORIZATION = oldFlag;
    }
}
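Note that get(new CellIndex(1, 1)) returns null if cell (1,1) was never written (for example, a zero cell omitted from a sparse text output), which would throw a NullPointerException when unboxed into a double. A slightly more defensive variant of the lookup and comparison, offered only as a sketch rather than the project's code:

    // Defensive single-cell lookup; treats an absent (unwritten) cell as 0.
    Double val = readDMLMatrixFromHDFS("R").get(new CellIndex(1, 1));
    double ret = (val != null) ? val : 0d;
    Assert.assertEquals(711 * 5, ret, 1e-10);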
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class RewriteLoopVectorization, method testRewriteLoopVectorizationSum:
/**
 * @param testname
 * @param rewrites
 */
private void testRewriteLoopVectorizationSum(String testname, boolean rewrites) {
    boolean oldFlag = OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION;
    try {
        TestConfiguration config = getTestConfiguration(testname);
        loadTestConfiguration(config);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + testname + ".dml";
        programArgs = new String[] { "-stats", "-args", output("Scalar") };
        fullRScriptName = HOME + testname + ".R";
        rCmd = getRCmd(inputDir(), expectedDir());
        OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = rewrites;
        runTest(true, false, null, -1);
        runRScript(true);
        // compare scalars
        HashMap<CellIndex, Double> dmlfile = readDMLScalarFromHDFS("Scalar");
        HashMap<CellIndex, Double> rfile = readRScalarFromFS("Scalar");
        TestUtils.compareScalars(dmlfile.toString(), rfile.toString());
    } finally {
        OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = oldFlag;
    }
}
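The scalar comparison above relies on identical string representations of the two maps. Where that is too strict, the single value could instead be extracted and compared numerically with a tolerance; a sketch, assuming each map holds exactly one entry and that eps is the test class's tolerance field:

    // Numeric comparison of the single scalar entry (sketch; assumes one entry per map).
    double dmlVal = dmlfile.values().iterator().next();
    double rVal = rfile.values().iterator().next();
    Assert.assertEquals("Stat-DML vs Stat-R", rVal, dmlVal, eps);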
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class RandVarSeedTest, method runRandVarMinMaxTest:
/**
 * @param TEST_NAME
 * @param instType
 */
private void runRandVarMinMaxTest(String TEST_NAME, ExecType instType) {
    // rtplatform for MR
    RUNTIME_PLATFORM platformOld = rtplatform;
    switch (instType) {
        case MR:
            rtplatform = RUNTIME_PLATFORM.HADOOP;
            break;
        case SPARK:
            rtplatform = RUNTIME_PLATFORM.SPARK;
            break;
        default:
            rtplatform = RUNTIME_PLATFORM.HYBRID;
            break;
    }
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        getAndLoadTestConfiguration(TEST_NAME);
        long seed = new Random(7).nextLong();
        String HOME = SCRIPT_DIR + TEST_DIR;
        String fnameSeed = input("s");
        fullDMLScriptName = HOME + TEST_NAME + ".dml";
        programArgs = new String[] { "-args", Integer.toString(rows), Integer.toString(cols), fnameSeed, output("R") };
        // write seed as input scalar (to force treatment as variable)
        MapReduceTool.writeIntToHDFS(seed, fnameSeed);
        MapReduceTool.writeScalarMetaDataFile(fnameSeed + ".mtd", ValueType.INT);
        // run test
        runTest(true, false, null, -1);
        // generate expected matrix
        MatrixBlock expectedMB = MatrixBlock.randOperations(rows, cols, 1.0, 0, 1, "uniform", seed);
        double[][] expectedMatrix = DataConverter.convertToDoubleMatrix(expectedMB);
        // compare matrices
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
        double[][] resultMatrix = TestUtils.convertHashMapToDoubleArray(dmlfile);
        TestUtils.compareMatrices(expectedMatrix, resultMatrix, rows, cols, 0);
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
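TestUtils.convertHashMapToDoubleArray densifies the sparse cell map into a double[][]. Conceptually the step looks like the following hypothetical helper (a sketch, not the project's implementation; it assumes 1-based public row/column fields on CellIndex and that unwritten cells are zero):

    // Hypothetical helper illustrating the densification step.
    static double[][] toDense(HashMap<CellIndex, Double> cells, int rows, int cols) {
        double[][] out = new double[rows][cols]; // unwritten cells stay 0
        for (Map.Entry<CellIndex, Double> e : cells.entrySet())
            out[e.getKey().row - 1][e.getKey().column - 1] = e.getValue();
        return out;
    }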
Use of org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex in project incubator-systemml by apache.
Class SequenceTest, method testSequence:
@Test
public void testSequence() {
    RUNTIME_PLATFORM platformOld = rtplatform;
    try {
        getAndLoadTestConfiguration(TEST_NAME);
        String HOME = SCRIPT_DIR + TEST_DIR;
        boolean exceptionExpected = false;
        if (test_type == TEST_TYPE.THREE_INPUTS || test_type == TEST_TYPE.ERROR) {
            if (isPyDml) {
                fullDMLScriptName = HOME + "Range" + ".pydml";
            } else {
                fullDMLScriptName = HOME + TEST_NAME + ".dml";
            }
            fullRScriptName = HOME + TEST_NAME + ".R";
            programArgs = new String[] { "-args", Double.toString(from), Double.toString(to), Double.toString(incr), output("A") };
            rCmd = "Rscript" + " " + fullRScriptName + " " + from + " " + to + " " + incr + " " + expectedDir();
            if (test_type == TEST_TYPE.ERROR)
                exceptionExpected = true;
        } else {
            if (isPyDml) {
                fullDMLScriptName = HOME + "Range" + "2inputs.pydml";
            } else {
                fullDMLScriptName = HOME + TEST_NAME + "2inputs.dml";
            }
            fullRScriptName = HOME + TEST_NAME + "2inputs.R";
            programArgs = new String[] { "-args", Double.toString(from), Double.toString(to), output("A") };
            rCmd = "Rscript" + " " + fullRScriptName + " " + from + " " + to + " " + expectedDir();
        }
        int outputIndex = programArgs.length - 1;
        rtplatform = RUNTIME_PLATFORM.SINGLE_NODE;
        programArgs[outputIndex] = output("A_CP");
        runTest(true, exceptionExpected, null, -1);
        rtplatform = RUNTIME_PLATFORM.HADOOP;
        programArgs[outputIndex] = output("A_HADOOP");
        runTest(true, exceptionExpected, null, -1);
        rtplatform = RUNTIME_PLATFORM.HYBRID;
        programArgs[outputIndex] = output("A_HYBRID");
        runTest(true, exceptionExpected, null, -1);
        rtplatform = RUNTIME_PLATFORM.SPARK;
        boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
        try {
            DMLScript.USE_LOCAL_SPARK_CONFIG = true;
            programArgs[outputIndex] = output("A_SPARK");
            runTest(true, exceptionExpected, null, -1);
        } finally {
            DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        }
        if (!exceptionExpected) {
            runRScript(true);
            HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("A_CP");
            HashMap<CellIndex, Double> rfile = readRMatrixFromFS("A");
            TestUtils.compareMatrices(dmlfile, rfile, eps, "A-CP", "A-R");
            dmlfile = readDMLMatrixFromHDFS("A_HYBRID");
            TestUtils.compareMatrices(dmlfile, rfile, eps, "A-HYBRID", "A-R");
            dmlfile = readDMLMatrixFromHDFS("A_HADOOP");
            TestUtils.compareMatrices(dmlfile, rfile, eps, "A-HADOOP", "A-R");
            dmlfile = readDMLMatrixFromHDFS("A_SPARK");
            TestUtils.compareMatrices(dmlfile, rfile, eps, "A-SPARK", "A-R");
        }
    } finally {
        rtplatform = platformOld;
    }
}
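Since every backend output is compared against the same R result, the four compare calls could be collapsed into a loop over the output labels. A sketch using only the helpers already shown above:

    // Sketch: compare each backend's output against the shared R result.
    HashMap<CellIndex, Double> rfile = readRMatrixFromFS("A");
    for (String tag : new String[] { "A_CP", "A_HYBRID", "A_HADOOP", "A_SPARK" }) {
        HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS(tag);
        TestUtils.compareMatrices(dmlfile, rfile, eps, tag.replace('_', '-'), "A-R");
    }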