Usage of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in the Apache project incubator-systemml: class SequenceTest, method testSequence.
@Test
public void testSequence() {
    RUNTIME_PLATFORM savedPlatform = rtplatform;
    try {
        getAndLoadTestConfiguration(TEST_NAME);
        String scriptHome = SCRIPT_DIR + TEST_DIR;
        boolean expectException = false;
        // Three-input (from/to/incr) and error variants share one script family;
        // everything else uses the two-input (from/to) "2inputs" scripts.
        if (test_type == TEST_TYPE.THREE_INPUTS || test_type == TEST_TYPE.ERROR) {
            fullDMLScriptName = isPyDml
                ? scriptHome + "Range" + ".pydml"
                : scriptHome + TEST_NAME + ".dml";
            fullRScriptName = scriptHome + TEST_NAME + ".R";
            programArgs = new String[] { "-args", Double.toString(from), Double.toString(to), Double.toString(incr), output("A") };
            rCmd = "Rscript" + " " + fullRScriptName + " " + from + " " + to + " " + incr + " " + expectedDir();
            expectException = (test_type == TEST_TYPE.ERROR);
        } else {
            fullDMLScriptName = isPyDml
                ? scriptHome + "Range" + "2inputs.pydml"
                : scriptHome + TEST_NAME + "2inputs.dml";
            fullRScriptName = scriptHome + TEST_NAME + "2inputs.R";
            programArgs = new String[] { "-args", Double.toString(from), Double.toString(to), output("A") };
            rCmd = "Rscript" + " " + fullRScriptName + " " + from + " " + to + " " + expectedDir();
        }

        // Run the identical script on each runtime platform, redirecting the
        // last program argument (the output file) per platform.
        int outIdx = programArgs.length - 1;

        rtplatform = RUNTIME_PLATFORM.SINGLE_NODE;
        programArgs[outIdx] = output("A_CP");
        runTest(true, expectException, null, -1);

        rtplatform = RUNTIME_PLATFORM.HADOOP;
        programArgs[outIdx] = output("A_HADOOP");
        runTest(true, expectException, null, -1);

        rtplatform = RUNTIME_PLATFORM.HYBRID;
        programArgs[outIdx] = output("A_HYBRID");
        runTest(true, expectException, null, -1);

        // Spark needs the local Spark config flag; restore it right after the run.
        rtplatform = RUNTIME_PLATFORM.SPARK;
        boolean savedSparkConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
        try {
            DMLScript.USE_LOCAL_SPARK_CONFIG = true;
            programArgs[outIdx] = output("A_SPARK");
            runTest(true, expectException, null, -1);
        } finally {
            DMLScript.USE_LOCAL_SPARK_CONFIG = savedSparkConfig;
        }

        // Compare each platform's result against the single R reference output
        // (skipped entirely when the run is expected to fail).
        if (!expectException) {
            runRScript(true);
            HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("A_CP");
            HashMap<CellIndex, Double> rfile = readRMatrixFromFS("A");
            TestUtils.compareMatrices(dmlfile, rfile, eps, "A-CP", "A-R");
            dmlfile = readDMLMatrixFromHDFS("A_HYBRID");
            TestUtils.compareMatrices(dmlfile, rfile, eps, "A-HYBRID", "A-R");
            dmlfile = readDMLMatrixFromHDFS("A_HADOOP");
            TestUtils.compareMatrices(dmlfile, rfile, eps, "A-HADOOP", "A-R");
            dmlfile = readDMLMatrixFromHDFS("A_SPARK");
            TestUtils.compareMatrices(dmlfile, rfile, eps, "A-SPARK", "A-R");
        }
    } finally {
        rtplatform = savedPlatform;
    }
}
Usage of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in the Apache project incubator-systemml: class FrameConverterTest, method runFrameConverterTest.
/**
 * Runs a single frame converter test on the Spark backend: generates random
 * input data, selects the read/write format pair for the given conversion
 * type, and delegates to the converter-and-verify helpers.
 *
 * @param schema value types of the frame columns; its length also fixes the
 *               column count of the generated input matrix
 * @param type   conversion direction to exercise (e.g. CSV2BIN, BIN2CSV)
 */
private void runFrameConverterTest(ValueType[] schema, ConvType type) {
    // Force the Spark runtime with a local Spark config; both globals are
    // restored in the finally block.
    RUNTIME_PLATFORM platformOld = rtplatform;
    DMLScript.rtplatform = RUNTIME_PLATFORM.SPARK;
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        TestConfiguration config = getTestConfiguration(TEST_NAME);
        loadTestConfiguration(config);
        // data generation
        double[][] A = getRandomMatrix(rows, schema.length, -10, 10, 0.9, 2373);
        // Select the format pair for this conversion. NOTE(review): oinfo
        // appears to be the format the test INPUT is written in and iinfo the
        // format the converter OUTPUT is read back in — confirm against the
        // runConverterAndVerify helper.
        OutputInfo oinfo = null;
        InputInfo iinfo = null;
        switch(type) {
            case CSV2BIN:
            case DFRM2BIN:
                oinfo = OutputInfo.CSVOutputInfo;
                iinfo = InputInfo.BinaryBlockInputInfo;
                break;
            case BIN2CSV:
                oinfo = OutputInfo.BinaryBlockOutputInfo;
                iinfo = InputInfo.CSVInputInfo;
                break;
            case TXTCELL2BIN:
                oinfo = OutputInfo.TextCellOutputInfo;
                iinfo = InputInfo.BinaryBlockInputInfo;
                break;
            case BIN2TXTCELL:
                oinfo = OutputInfo.BinaryBlockOutputInfo;
                iinfo = InputInfo.TextCellInputInfo;
                break;
            case MAT2BIN:
            case BIN2DFRM:
                oinfo = OutputInfo.BinaryBlockOutputInfo;
                iinfo = InputInfo.BinaryBlockInputInfo;
                break;
            case BIN2MAT:
                oinfo = OutputInfo.BinaryBlockOutputInfo;
                iinfo = InputInfo.BinaryBlockInputInfo;
                break;
            default:
                throw new RuntimeException("Unsupported converter type: " + type.toString());
        }
        // Matrix<->binary conversions take a dedicated verification path.
        if (type == ConvType.MAT2BIN || type == ConvType.BIN2MAT)
            runMatrixConverterAndVerify(schema, A, type, iinfo, oinfo);
        else
            runConverterAndVerify(schema, A, type, iinfo, oinfo);
    } catch (Exception ex) {
        // Rethrow with the original as cause; the test framework prints the
        // cause's stack trace, so printing it here too would duplicate it.
        throw new RuntimeException(ex);
    } finally {
        DMLScript.rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
Usage of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in the Apache project incubator-systemml: class FrameIndexingDistTest, method runTestLeftIndexing.
/**
 * Runs a frame left- or right-indexing test on the given execution backend and
 * verifies every SystemML frame output against the corresponding R result.
 *
 * @param et execution backend; SPARK runs with a local Spark config, any other
 *           value runs on the HYBRID platform
 * @param indexingMethod forced left-indexing method, or null to let the
 *                       optimizer choose (always reset to null on exit)
 * @param schema value types of the input frame columns
 * @param itype LIX selects the left-indexing test, otherwise right-indexing
 * @param bSparse right-indexing only: selects the sparser input density when true
 * @throws IOException if writing or reading the test frames fails
 */
private void runTestLeftIndexing(ExecType et, LeftIndexingOp.LeftIndexingMethod indexingMethod, ValueType[] schema, IXType itype, boolean bSparse) throws IOException {
// Save global runtime settings so they can be restored in the finally block.
boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
RUNTIME_PLATFORM oldRTP = rtplatform;
TestConfiguration config = null;
// Maps each output file name to the value-type schema expected for its frame.
HashMap<String, ValueType[]> outputSchema = new HashMap<String, ValueType[]>();
if (itype == IXType.LIX)
config = getTestConfiguration("FrameLeftIndexing");
else
config = getTestConfiguration("FrameRightIndexing");
try {
if (indexingMethod != null) {
LeftIndexingOp.FORCED_LEFT_INDEXING = indexingMethod;
}
if (et == ExecType.SPARK) {
rtplatform = RUNTIME_PLATFORM.SPARK;
} else {
// All non-Spark backends run on the hybrid platform.
rtplatform = RUNTIME_PLATFORM.HYBRID;
}
if (rtplatform == RUNTIME_PLATFORM.SPARK)
DMLScript.USE_LOCAL_SPARK_CONFIG = true;
config.addVariable("rows", rows);
config.addVariable("cols", cols);
// Fixed indexing range, shared between the DML script and the R reference.
long rowstart = 816, rowend = 1229, colstart = 109, /*967*/
colend = 1009;
config.addVariable("rowstart", rowstart);
config.addVariable("rowend", rowend);
config.addVariable("colstart", colstart);
config.addVariable("colend", colend);
loadTestConfiguration(config);
if (itype == IXType.LIX) {
/* This is for running the junit test the new way, i.e., construct the arguments directly */
String LI_HOME = SCRIPT_DIR + TEST_DIR;
fullDMLScriptName = LI_HOME + TEST_NAME + ".dml";
programArgs = new String[] { "-args", input("A"), Long.toString(rows), Long.toString(cols), Long.toString(rowstart), Long.toString(rowend), Long.toString(colstart), Long.toString(colend), output("AB"), output("AC"), output("AD"), input("B"), input("C"), input("D"), Long.toString(rowend - rowstart + 1), Long.toString(colend - colstart + 1), Long.toString(cols - colstart + 1) };
fullRScriptName = LI_HOME + TEST_NAME + ".R";
rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + rowstart + " " + rowend + " " + colstart + " " + colend + " " + expectedDir();
// Initialize the frame data: A is the full target frame.
double sparsity = sparsity1;
double[][] A = getRandomMatrix(rows, cols, min, max, sparsity, 1111);
writeInputFrameWithMTD("A", A, true, schema, OutputInfo.BinaryBlockOutputInfo);
// B covers exactly the indexed sub-range, with the matching schema slice.
sparsity = sparsity3;
double[][] B = getRandomMatrix((int) (rowend - rowstart + 1), (int) (colend - colstart + 1), min, max, sparsity, 2345);
ValueType[] lschemaB = Arrays.copyOfRange(schema, (int) colstart - 1, (int) colend);
writeInputFrameWithMTD("B", B, true, lschemaB, OutputInfo.BinaryBlockOutputInfo);
// C spans rows 1..rowend and columns colstart..cols.
sparsity = sparsity2;
double[][] C = getRandomMatrix((int) (rowend), (int) (cols - colstart + 1), min, max, sparsity, 3267);
ValueType[] lschemaC = Arrays.copyOfRange(schema, (int) colstart - 1, (int) cols);
writeInputFrameWithMTD("C", C, true, lschemaC, OutputInfo.BinaryBlockOutputInfo);
// D spans all rows but only the indexed column range, so it reuses B's schema slice.
sparsity = sparsity4;
double[][] D = getRandomMatrix(rows, (int) (colend - colstart + 1), min, max, sparsity, 4856);
writeInputFrameWithMTD("D", D, true, lschemaB, OutputInfo.BinaryBlockOutputInfo);
boolean exceptionExpected = false;
int expectedNumberOfJobs = -1;
runTest(true, exceptionExpected, null, expectedNumberOfJobs);
// Every left-indexing output keeps the full input schema.
for (String file : config.getOutputFiles()) outputSchema.put(file, schema);
} else {
/* This is for running the junit test the new way, i.e., construct the arguments directly */
String RI_HOME = SCRIPT_DIR + TEST_DIR;
fullDMLScriptName = RI_HOME + RTEST_NAME + ".dml";
programArgs = new String[] { "-stats", "-explain", "-args", input("A"), Long.toString(rows), Long.toString(cols), Long.toString(rowstart), Long.toString(rowend), Long.toString(colstart), Long.toString(colend), output("B"), output("C"), output("D") };
fullRScriptName = RI_HOME + RTEST_NAME + ".R";
rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + rowstart + " " + rowend + " " + colstart + " " + colend + " " + expectedDir();
// initialize the frame data.
double sparsity = bSparse ? sparsity4 : sparsity2;
double[][] A = getRandomMatrix(rows, cols, min, max, sparsity, 1111);
writeInputFrameWithMTD("A", A, true, schema, OutputInfo.BinaryBlockOutputInfo);
// Right-indexing outputs carry only the sliced column schemas:
// B and D cover colstart..colend, C covers colstart..cols.
ValueType[] schemaB = new ValueType[(int) (colend - colstart + 1)];
System.arraycopy(schema, (int) (colstart - 1), schemaB, 0, (int) (colend - colstart + 1));
outputSchema.put(config.getOutputFiles()[0], schemaB);
ValueType[] schemaC = new ValueType[(int) (cols - colstart + 1)];
System.arraycopy(schema, (int) (colstart - 1), schemaC, 0, (int) (cols - colstart + 1));
outputSchema.put(config.getOutputFiles()[1], schemaC);
outputSchema.put(config.getOutputFiles()[2], schemaB);
boolean exceptionExpected = false;
int expectedNumberOfJobs = -1;
runTest(true, exceptionExpected, null, expectedNumberOfJobs);
}
} catch (Exception ex) {
ex.printStackTrace();
throw new RuntimeException(ex);
} finally {
// Restore globals; FORCED_LEFT_INDEXING is always cleared, even when it was
// never set by this invocation.
rtplatform = oldRTP;
DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
LeftIndexingOp.FORCED_LEFT_INDEXING = null;
}
// Run the R reference and compare each output frame cell-by-cell against it.
runRScript(true);
for (String file : config.getOutputFiles()) {
FrameBlock frameBlock = readDMLFrameFromHDFS(file, InputInfo.BinaryBlockInputInfo);
MatrixCharacteristics md = new MatrixCharacteristics(frameBlock.getNumRows(), frameBlock.getNumColumns(), -1, -1);
FrameBlock frameRBlock = readRFrameFromHDFS(file + ".csv", InputInfo.CSVInputInfo, md);
ValueType[] schemaOut = outputSchema.get(file);
verifyFrameData(frameBlock, frameRBlock, schemaOut);
System.out.println("File processed is " + file);
}
}
Usage of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in the Apache project incubator-systemml: class FrameMatrixCastingTest, method runFrameCastingTest.
/**
 * Runs a frame/matrix casting test on the selected backend: generates random
 * input, writes it as frame or matrix depending on the test name, casts it via
 * the DML script, and verifies that the result matches the input exactly.
 *
 * @param testname    name of the DML test script to run
 * @param multColBlks whether to use the larger column count (multiple column blocks)
 * @param vt          value type of the frame input (used for the frame-to-matrix test)
 * @param et          execution backend (MR, SPARK, or default hybrid)
 */
private void runFrameCastingTest(String testname, boolean multColBlks, ValueType vt, ExecType et) {
    // Map the execution type to a runtime platform; restored in finally.
    RUNTIME_PLATFORM prevPlatform = rtplatform;
    if (et == ExecType.MR)
        rtplatform = RUNTIME_PLATFORM.HADOOP;
    else if (et == ExecType.SPARK)
        rtplatform = RUNTIME_PLATFORM.SPARK;
    else
        rtplatform = RUNTIME_PLATFORM.HYBRID;
    boolean prevSparkConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        int numCols = multColBlks ? cols2 : cols1;
        TestConfiguration config = getTestConfiguration(testname);
        loadTestConfiguration(config);
        String scriptHome = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = scriptHome + testname + ".dml";
        programArgs = new String[] { "-explain", "-args", input("A"), output("B") };
        // Generate random input; TEST_NAME1 casts frame->matrix, otherwise matrix->frame.
        double[][] A = getRandomMatrix(rows, numCols, -1, 1, 0.9, 7);
        boolean frameInput = testname.equals(TEST_NAME1);
        DataType dtin = frameInput ? DataType.FRAME : DataType.MATRIX;
        ValueType vtin = frameInput ? vt : ValueType.DOUBLE;
        writeMatrixOrFrameInput(input("A"), A, rows, numCols, dtin, vtin);
        // run testcase
        runTest(true, false, null, -1);
        // Read back the casted result and compare against the input with zero tolerance.
        DataType dtout = frameInput ? DataType.MATRIX : DataType.FRAME;
        double[][] B = readMatrixOrFrameInput(output("B"), rows, numCols, dtout);
        TestUtils.compareMatrices(A, B, rows, numCols, 0);
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    } finally {
        rtplatform = prevPlatform;
        DMLScript.USE_LOCAL_SPARK_CONFIG = prevSparkConfig;
    }
}
Usage of org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM in the Apache project incubator-systemml: class FrameMatrixWriteTest, method runFrameWriteTest.
/**
 * Runs a frame write test on the selected backend: the DML script writes a
 * frame of the given dimensions in the requested output format, and the test
 * compares the written data against the expected cell values.
 *
 * @param testname    name of the DML test script to run
 * @param multColBlks whether to use the larger column count (multiple column blocks)
 * @param ofmt        output format name passed to the DML script
 * @param et          execution backend (MR, SPARK, or default hybrid)
 */
private void runFrameWriteTest(String testname, boolean multColBlks, String ofmt, ExecType et) {
    // Map the execution type to a runtime platform; restored in finally.
    RUNTIME_PLATFORM prevPlatform = rtplatform;
    if (et == ExecType.MR)
        rtplatform = RUNTIME_PLATFORM.HADOOP;
    else if (et == ExecType.SPARK)
        rtplatform = RUNTIME_PLATFORM.SPARK;
    else
        rtplatform = RUNTIME_PLATFORM.HYBRID;
    boolean prevSparkConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        int numCols = multColBlks ? cols2 : cols1;
        TestConfiguration config = getTestConfiguration(testname);
        loadTestConfiguration(config);
        String scriptHome = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = scriptHome + testname + ".dml";
        programArgs = new String[] { "-explain", "-args", String.valueOf(rows), String.valueOf(numCols), output("B"), ofmt };
        // run testcase
        runTest(true, false, null, -1);
        // Build the expected result: cell (i,j) holds (i+1)+(j+1), mirroring
        // the data the test script is expected to write.
        double[][] expected = new double[rows][numCols];
        for (int i = 0; i < rows; i++) {
            for (int j = 0; j < numCols; j++) {
                expected[i][j] = (i + 1) + (j + 1);
            }
        }
        // Read back the written frame and compare with zero tolerance.
        double[][] actual = readFrameInput(output("B"), ofmt, rows, numCols);
        TestUtils.compareMatrices(expected, actual, rows, numCols, 0);
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    } finally {
        rtplatform = prevPlatform;
        DMLScript.USE_LOCAL_SPARK_CONFIG = prevSparkConfig;
    }
}
Aggregations