Example usage of org.apache.sysml.runtime.io.FrameWriter in the Apache project incubator-systemml,
taken from the class TestUtils, method writeTestFrame.
/**
 * <p>
 * Writes a frame to a file using the text format.
 * </p>
 *
 * @param file
 *            destination file name
 * @param data
 *            frame data as a dense double matrix
 * @param schema
 *            per-column value types of the frame
 * @param oi
 *            output info selecting the writer implementation
 * @param isR
 *            not used by this method
 * @throws IOException
 *             if the frame cannot be written
 */
public static void writeTestFrame(String file, double[][] data, ValueType[] schema, OutputInfo oi, boolean isR) throws IOException {
	// populate a frame block from the raw data
	FrameBlock fb = new FrameBlock(schema);
	initFrameData(fb, data, schema, data.length);

	// write the frame via the format-specific writer
	FrameWriter frameWriter = FrameWriterFactory.createFrameWriter(oi);
	frameWriter.writeFrameToHDFS(fb, file, data.length, schema.length);
}
Example usage of org.apache.sysml.runtime.io.FrameWriter in the Apache project incubator-systemml,
taken from the class FrameConverterTest, method runMatrixConverterAndVerify.
/**
 * Round-trips data through the frame/matrix converter under test and verifies
 * the result. For {@code ConvType.MAT2BIN} the input is written as a matrix and
 * the converter output is read back as a frame; for all other conversion types
 * the input is written as a frame and the output is read back as a matrix.
 *
 * @param schema per-column value types of the frame representation
 * @param A      dense input data with dimensions rows x schema.length
 * @param type   conversion type under test
 * @param iinfo  input format used to read the converter output back from HDFS
 * @param oinfo  output format used to write the converter input to HDFS
 * @throws IOException if writing or reading the HDFS files fails
 */
private void runMatrixConverterAndVerify(ValueType[] schema, double[][] A, ConvType type, InputInfo iinfo, OutputInfo oinfo) throws IOException {
	try {
		// 1000x1000 blocks on the matrix side; unknown (-1) block sizes on the frame side
		MatrixCharacteristics mcMatrix = new MatrixCharacteristics(rows, schema.length, 1000, 1000, 0);
		MatrixCharacteristics mcFrame = new MatrixCharacteristics(rows, schema.length, -1, -1, -1);
		// exactly one of these is populated, depending on the conversion direction
		MatrixBlock matrixBlock1 = null;
		FrameBlock frame1 = null;
		if (type == ConvType.MAT2BIN) {
			// initialize the matrix (dense) data.
			matrixBlock1 = new MatrixBlock(rows, schema.length, false);
			matrixBlock1.init(A, rows, schema.length);
			// write matrix data to hdfs
			MatrixWriter matWriter = MatrixWriterFactory.createMatrixWriter(oinfo);
			matWriter.writeMatrixToHDFS(matrixBlock1, input("A"), rows, schema.length, mcMatrix.getRowsPerBlock(), mcMatrix.getColsPerBlock(), mcMatrix.getNonZeros());
		} else {
			// initialize the frame data.
			frame1 = new FrameBlock(schema);
			initFrameData(frame1, A, schema);
			// write frame data to hdfs
			FrameWriter writer = FrameWriterFactory.createFrameWriter(oinfo);
			writer.writeFrameToHDFS(frame1, input("A"), rows, schema.length);
		}
		// run converter under test
		runConverter(type, mcFrame, mcMatrix, Arrays.asList(schema), input("A"), output("B"));
		if (type == ConvType.MAT2BIN) {
			// read frame data from hdfs
			FrameReader reader = FrameReaderFactory.createFrameReader(iinfo);
			FrameBlock frame2 = reader.readFrameFromHDFS(output("B"), rows, schema.length);
			// verify input and output frame/matrix
			verifyFrameMatrixData(frame2, matrixBlock1);
		} else {
			// read matrix data from hdfs
			MatrixReader matReader = MatrixReaderFactory.createMatrixReader(iinfo);
			MatrixBlock matrixBlock2 = matReader.readMatrixFromHDFS(output("B"), rows, schema.length, mcMatrix.getRowsPerBlock(), mcMatrix.getColsPerBlock(), mcMatrix.getNonZeros());
			// verify input and output frame/matrix
			verifyFrameMatrixData(frame1, matrixBlock2);
		}
	} catch (Exception ex) {
		ex.printStackTrace();
		throw new RuntimeException(ex);
	} finally {
		// always clean up the temporary HDFS files, even on failure
		MapReduceTool.deleteFileIfExistOnHDFS(input("A"));
		MapReduceTool.deleteFileIfExistOnHDFS(output("B"));
	}
}
Example usage of org.apache.sysml.runtime.io.FrameWriter in the Apache project incubator-systemml,
taken from the class FrameConverterTest, method runConverterAndVerify.
/**
 * Writes frame input data to HDFS, runs the frame converter under test, reads
 * the converted frame back, and verifies it against the original input.
 *
 * @param schema per-column value types of the frame
 * @param A      dense input data with dimensions rows x schema.length
 * @param type   conversion type under test
 * @param iinfo  input format used to read the converter output
 * @param oinfo  output format used to write the converter input
 * @throws IOException if writing or reading the HDFS files fails
 */
private void runConverterAndVerify(ValueType[] schema, double[][] A, ConvType type, InputInfo iinfo, OutputInfo oinfo) throws IOException {
	try {
		// populate the input frame from the generated data
		FrameBlock inputFrame = new FrameBlock(schema);
		initFrameData(inputFrame, A, schema);

		// persist the input frame to hdfs
		FrameWriter frameWriter = FrameWriterFactory.createFrameWriter(oinfo);
		frameWriter.writeFrameToHDFS(inputFrame, input("A"), rows, schema.length);

		// invoke the converter under test (block sizes unknown, hence -1)
		MatrixCharacteristics mcFrame = new MatrixCharacteristics(rows, schema.length, -1, -1, -1);
		runConverter(type, mcFrame, null, Arrays.asList(schema), input("A"), output("B"));

		// read the converted result back from hdfs
		FrameReader frameReader = FrameReaderFactory.createFrameReader(iinfo);
		FrameBlock outputFrame = frameReader.readFrameFromHDFS(output("B"), rows, schema.length);

		// compare the output frame against the original input
		verifyFrameData(inputFrame, outputFrame);
	} catch (Exception e) {
		e.printStackTrace();
		throw new RuntimeException(e);
	} finally {
		// always clean up the temporary HDFS files, even on failure
		MapReduceTool.deleteFileIfExistOnHDFS(input("A"));
		MapReduceTool.deleteFileIfExistOnHDFS(output("B"));
	}
}
Example usage of org.apache.sysml.runtime.io.FrameWriter in the Apache project incubator-systemml,
taken from the class FrameSchemaReadTest, method runFrameSchemaReadTest.
/**
 * Runs the frame-schema read test: generates random input data, writes it to
 * HDFS as a CSV frame, executes the DML script under test, and checks the
 * physical schema of the resulting binary-block frame against the expected
 * per-column value types.
 *
 * @param testname name of the test / DML script to execute
 * @param schema   per-column value types of the generated input frame
 * @param wildcard whether a wildcard schema string is passed to the script
 */
private void runFrameSchemaReadTest(String testname, ValueType[] schema, boolean wildcard) {
	try {
		TestConfiguration conf = getTestConfiguration(testname);
		loadTestConfiguration(conf);

		String scriptHome = SCRIPT_DIR + TEST_DIR;
		fullDMLScriptName = scriptHome + testname + ".dml";
		programArgs = new String[] { "-explain", "-args", input("A"), getSchemaString(schema, wildcard),
			Integer.toString(rows), Integer.toString(schema.length), output("B") };

		// generate random input data and wrap it in a frame
		double[][] data = getRandomMatrix(rows, schema.length, -10, 10, 0.9, 2373);
		FrameBlock inputFrame = new FrameBlock(schema);
		initFrameData(inputFrame, data, schema);

		// write the input frame to hdfs in csv format
		FrameWriter frameWriter = FrameWriterFactory.createFrameWriter(OutputInfo.CSVOutputInfo);
		frameWriter.writeFrameToHDFS(inputFrame, input("A"), rows, schema.length);

		// execute the dml script under test
		runTest(true, false, null, -1);

		// read the first block directly (not via a generic reader) to inspect the physical schema
		FrameReader frameReader = FrameReaderFactory.createFrameReader(InputInfo.BinaryBlockInputInfo);
		FrameBlock outputFrame = ((FrameReaderBinaryBlock) frameReader).readFirstBlock(output("B"));

		// the schema collapses to all-string for TEST_NAME2 or wildcard reads
		ValueType[] expectedSchema;
		if (testname.equals(TEST_NAME2) || wildcard)
			expectedSchema = Collections.nCopies(schema.length, ValueType.STRING).toArray(new ValueType[0]);
		else
			expectedSchema = schema;

		ValueType[] actualSchema = outputFrame.getSchema();
		for (int ix = 0; ix < expectedSchema.length; ix++) {
			Assert.assertEquals("Wrong result: " + actualSchema[ix] + ".", expectedSchema[ix], actualSchema[ix]);
		}
	} catch (Exception e) {
		e.printStackTrace();
		throw new RuntimeException(e);
	}
}
Example usage of org.apache.sysml.runtime.io.FrameWriter in the Apache project incubator-systemml,
taken from the class TransformEncodeDecodeTest, method runTransformEncodeDecodeTest.
/**
 * Runs the transform encode/decode round-trip test: generates random input,
 * writes it as a frame in the given format, executes the DML script, and
 * compares the output aggregates against per-value counts of the input
 * (values are recoded to identical codes, so counts must match).
 *
 * @param et     execution type (not used directly; CP is forced via HYBRID)
 * @param sparse whether to generate sparse input data
 * @param fmt    external format string for the input frame (e.g. "csv")
 */
private void runTransformEncodeDecodeTest(ExecType et, boolean sparse, String fmt) {
	RUNTIME_PLATFORM platformOld = rtplatform;
	// only CP supported
	rtplatform = RUNTIME_PLATFORM.HYBRID;
	try {
		getAndLoadTestConfiguration(TEST_NAME1);
		// get input/output info
		InputInfo iinfo = InputInfo.stringExternalToInputInfo(fmt);
		OutputInfo oinfo = InputInfo.getMatchingOutputInfo(iinfo);
		// generate and write input data
		double[][] A = TestUtils.round(getRandomMatrix(rows, cols, 1, 15, sparse ? sparsity2 : sparsity1, 7));
		FrameBlock FA = DataConverter.convertToFrameBlock(DataConverter.convertToMatrixBlock(A));
		FrameWriter writer = FrameWriterFactory.createFrameWriter(oinfo);
		writer.writeFrameToHDFS(FA, input("F"), rows, cols);
		fullDMLScriptName = SCRIPT_DIR + TEST_DIR + TEST_NAME1 + ".dml";
		programArgs = new String[] { "-explain", "-args", input("F"), fmt, String.valueOf(rows), String.valueOf(cols), SCRIPT_DIR + TEST_DIR + SPEC, output("FO") };
		// run test
		runTest(true, false, null, -1);
		// compare matrices (values recoded to identical codes)
		FrameReader reader = FrameReaderFactory.createFrameReader(iinfo);
		FrameBlock FO = reader.readFrameFromHDFS(output("FO"), 16, 2);
		HashMap<String, Long> cFA = getCounts(FA, 1);
		Iterator<String[]> iterFO = FO.getStringRowIterator();
		while (iterFO.hasNext()) {
			String[] row = iterFO.next();
			// fail with a clear message instead of an NPE on unboxing a missing count
			Long count = cFA.get(row[1]);
			Assert.assertNotNull("Unexpected token in output: " + row[1], count);
			Double expected = count.doubleValue();
			// 0.0 (not int 0) avoids mixed-type conditional promotion and rebox
			Double val = (row[0] != null) ? Double.valueOf(row[0]) : 0.0;
			Assert.assertEquals("Output aggregates don't match: " + expected + " vs " + val, expected, val);
		}
	} catch (Exception ex) {
		ex.printStackTrace();
		Assert.fail(ex.getMessage());
	} finally {
		// restore the global runtime platform regardless of test outcome
		rtplatform = platformOld;
	}
}
Aggregations