Example usage of org.apache.sysml.runtime.matrix.data.FrameBlock in the Apache incubator-systemml project:
from the class TransformFrameEncodeDecodeTokenTest, method runTransformTest.
/**
 * Runs the token encode/decode transform test script and compares the
 * transformed output frame against the original input frame.
 *
 * @param rt runtime platform to execute the test on (e.g. SPARK, HYBRID_SPARK)
 * @param ofmt output format; only "csv" is supported by this test
 */
private void runTransformTest(RUNTIME_PLATFORM rt, String ofmt) {
    // Validate arguments BEFORE mutating global state, so an unsupported
    // format cannot leave rtplatform / spark config modified (the finally
    // block below only restores state for failures inside the try).
    if (!ofmt.equals("csv"))
        throw new RuntimeException("Unsupported test output format");
    // save and set runtime platform / configuration flags
    RUNTIME_PLATFORM rtold = rtplatform;
    boolean csvReblockOld = OptimizerUtils.ALLOW_FRAME_CSV_REBLOCK;
    rtplatform = rt;
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK || rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        getAndLoadTestConfiguration(TEST_NAME1);
        String HOME = SCRIPT_DIR + TEST_DIR;
        fullDMLScriptName = HOME + TEST_NAME1 + ".dml";
        programArgs = new String[] { "-explain", "-nvargs", "DATA=" + HOME + "input/" + DATASET1, "TFSPEC=" + HOME + "input/" + SPEC1, "TFDATA=" + output("tfout"), "SEP= ", "OFMT=" + ofmt, "OSEP= " };
        OptimizerUtils.ALLOW_FRAME_CSV_REBLOCK = true;
        runTest(true, false, null, -1);
        // read input/output frames (space-delimited CSV, no header) and compare
        FrameReader reader1 = FrameReaderFactory.createFrameReader(InputInfo.CSVInputInfo, new CSVFileFormatProperties(false, " ", false));
        FrameBlock fb1 = reader1.readFrameFromHDFS(HOME + "input/" + DATASET1, -1L, -1L);
        FrameReader reader2 = FrameReaderFactory.createFrameReader(InputInfo.CSVInputInfo, new CSVFileFormatProperties(false, " ", false));
        FrameBlock fb2 = reader2.readFrameFromHDFS(output("tfout"), -1L, -1L);
        String[][] R1 = DataConverter.convertToStringFrame(fb1);
        String[][] R2 = DataConverter.convertToStringFrame(fb2);
        TestUtils.compareFrames(R1, R2, R1.length, R1[0].length);
        if (rt == RUNTIME_PLATFORM.HYBRID_SPARK) {
            // expect exactly 2 Spark instructions in hybrid mode;
            // primitive assertEquals avoids the deprecated new Long(...) boxing
            Assert.assertEquals("Wrong number of executed Spark instructions: " + Statistics.getNoOfExecutedSPInst(), 2, Statistics.getNoOfExecutedSPInst());
        }
    } catch (Exception ex) {
        // preserve the original cause for diagnosis
        throw new RuntimeException(ex);
    } finally {
        // always restore global runtime configuration
        rtplatform = rtold;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        OptimizerUtils.ALLOW_FRAME_CSV_REBLOCK = csvReblockOld;
    }
}
Example usage of org.apache.sysml.runtime.matrix.data.FrameBlock in the Apache incubator-systemml project:
from the class TransformReadMetaTest, method runTransformReadMetaTest.
/**
 * Runs the transform-metadata read test: generates a two-column input matrix,
 * executes the transform script, then compares the metadata frame read back
 * from file against the frame produced by the script.
 *
 * @param rt runtime platform to execute the test on
 * @param ofmt external output format string for the metadata frame (e.g. "csv")
 * @param delim column delimiter; also selects the test script (TEST_NAME1 for ",", TEST_NAME2 otherwise)
 * @throws IOException if reading/writing test data fails, or wrapping any test failure
 * @throws DMLRuntimeException if script execution fails at the DML runtime level
 */
private void runTransformReadMetaTest(RUNTIME_PLATFORM rt, String ofmt, String delim) throws IOException, DMLRuntimeException {
    // save and set runtime platform / spark configuration
    RUNTIME_PLATFORM platformOld = rtplatform;
    rtplatform = rt;
    boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
    if (rtplatform == RUNTIME_PLATFORM.SPARK || rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK)
        DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    try {
        // comma delimiter uses TEST_NAME1, any other delimiter TEST_NAME2
        String testname = delim.equals(",") ? TEST_NAME1 : TEST_NAME2;
        getAndLoadTestConfiguration(testname);
        // generate input data: two identical sequence columns, written as CSV
        double[][] X = DataConverter.convertToDoubleMatrix(MatrixBlock.seqOperations(0.5, rows / 2, 0.5).appendOperations(MatrixBlock.seqOperations(0.5, rows / 2, 0.5), new MatrixBlock()));
        MatrixBlock mbX = DataConverter.convertToMatrixBlock(X);
        CSVFileFormatProperties fprops = new CSVFileFormatProperties(false, delim, false);
        MatrixWriter writer = MatrixWriterFactory.createMatrixWriter(OutputInfo.CSVOutputInfo, 1, fprops);
        writer.writeMatrixToHDFS(mbX, input("X"), rows, 2, -1, -1, -1);
        // read transform spec and run the test script
        String specX = MapReduceTool.readStringFromHDFSFile(SCRIPT_DIR + TEST_DIR + SPEC_X);
        fullDMLScriptName = SCRIPT_DIR + TEST_DIR + testname + ".dml";
        programArgs = new String[] { "-args", input("X"), specX, output("M1"), output("M"), ofmt, delim };
        runTest(true, false, null, -1);
        // compare meta data frames cell by cell as strings
        InputInfo iinfo = InputInfo.stringExternalToInputInfo(ofmt);
        FrameReader reader = FrameReaderFactory.createFrameReader(iinfo);
        FrameBlock mExpected = TfMetaUtils.readTransformMetaDataFromFile(specX, output("M1"), delim);
        FrameBlock mRet = reader.readFrameFromHDFS(output("M"), rows, 2);
        for (int i = 0; i < rows; i++) for (int j = 0; j < 2; j++) {
            Assert.assertTrue("Wrong result: " + mRet.get(i, j) + ".", UtilFunctions.compareTo(ValueType.STRING, mExpected.get(i, j), mRet.get(i, j)) == 0);
        }
    } catch (Exception ex) {
        // wrap with cause preserved; declared as IOException for callers
        throw new IOException(ex);
    } finally {
        // always restore global runtime configuration
        rtplatform = platformOld;
        DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
    }
}
Aggregations