Use of org.apache.sysml.runtime.io.MatrixReader in project incubator-systemml by apache.
From the class Matrix, the method getMatrixAsDoubleArray:
/**
* Method to get matrix as double array. This should only be used if the
* user knows the matrix fits in memory. We are using the dense
* representation.
*
* @return matrix as two-dimensional double array
* @throws DMLRuntimeException if DMLRuntimeException occurs
* @throws IOException if IOException occurs
*/
public double[][] getMatrixAsDoubleArray() throws DMLRuntimeException, IOException {
    double[][] ret = null;
    if (_mo != null) {
        //CP ext function
        MatrixBlock mb = _mo.acquireRead();
        ret = DataConverter.convertToDoubleMatrix(mb);
        _mo.release();
    } else {
        //traditional ext function (matrix file produced by reblock)
        MatrixReader reader = MatrixReaderFactory.createMatrixReader(InputInfo.TextCellInputInfo);
        MatrixBlock mb = reader.readMatrixFromHDFS(this.getFilePath(), _rows, _cols, -1, -1, -1);
        ret = DataConverter.convertToDoubleMatrix(mb);
    }
    return ret;
}
Use of org.apache.sysml.runtime.io.MatrixReader in project incubator-systemml by apache.
From the class Connection, the method convertToMatrix:
/**
 * Converts an input stream of a string matrix in CSV, text-cell, or
 * matrix-market format into a matrix block.
 *
 * @param input InputStream to a string matrix in csv, textcell, or matrix market format
 * @param rows number of rows in the matrix
 * @param cols number of columns in the matrix
 * @param format input format of the given stream ("csv", "text", or "mm")
 * @return matrix as a matrix block
 * @throws IOException if IOException occurs
 */
public MatrixBlock convertToMatrix(InputStream input, int rows, int cols, String format) throws IOException {
    MatrixBlock ret = null;
    //sanity check input format
    if (!(DataExpression.FORMAT_TYPE_VALUE_TEXT.equals(format)
        || DataExpression.FORMAT_TYPE_VALUE_MATRIXMARKET.equals(format)
        || DataExpression.FORMAT_TYPE_VALUE_CSV.equals(format))) {
        throw new IOException("Invalid input format (expected: csv, text or mm): " + format);
    }
    try {
        //read input matrix
        InputInfo iinfo = DataExpression.FORMAT_TYPE_VALUE_CSV.equals(format) ? InputInfo.CSVInputInfo : InputInfo.TextCellInputInfo;
        MatrixReader reader = MatrixReaderFactory.createMatrixReader(iinfo);
        int blksz = ConfigurationManager.getBlocksize();
        ret = reader.readMatrixFromInputStream(input, rows, cols, blksz, blksz, (long) rows * cols);
    } catch (DMLRuntimeException rex) {
        throw new IOException(rex);
    }
    return ret;
}
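A minimal calling sketch, assuming a JMLC Connection instance conn and an in-memory CSV payload (both illustrative; requires java.io.ByteArrayInputStream and java.nio.charset.StandardCharsets):

// Hypothetical usage; `conn` and the CSV string are assumptions.
String csv = "1.0,2.0\n3.0,4.0\n";
InputStream in = new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8));
MatrixBlock mb = conn.convertToMatrix(in, 2, 2, DataExpression.FORMAT_TYPE_VALUE_CSV);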
Use of org.apache.sysml.runtime.io.MatrixReader in project incubator-systemml by apache.
From the class MapReduceTool, the method readMatrixFromHDFS:
public static double[][] readMatrixFromHDFS(String dir, InputInfo inputinfo, long rlen, long clen, int brlen, int bclen) throws IOException, DMLRuntimeException {
    MatrixReader reader = MatrixReaderFactory.createMatrixReader(inputinfo);
    MatrixBlock mb = reader.readMatrixFromHDFS(dir, rlen, clen, brlen, bclen, rlen * clen);
    return DataConverter.convertToDoubleMatrix(mb);
}
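A hedged call sketch; the HDFS path, dimensions, and block sizes below are placeholders, not values from the project:

// Hypothetical invocation; all argument values are illustrative assumptions.
double[][] data = MapReduceTool.readMatrixFromHDFS(
    "hdfs:/tmp/sysml/A", InputInfo.BinaryBlockInputInfo, 1000, 100, 1000, 1000);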
Use of org.apache.sysml.runtime.io.MatrixReader in project incubator-systemml by apache.
From the class FrameMatrixCastingTest, the method readMatrixOrFrameInput:
/**
 * Reads a matrix or frame from HDFS and converts it into a dense
 * two-dimensional double array.
 *
 * @param fname file name of the input on HDFS
 * @param rows number of rows
 * @param cols number of columns
 * @param dt data type of the input (FRAME or MATRIX)
 * @return input data as two-dimensional double array
 * @throws DMLRuntimeException if DMLRuntimeException occurs
 * @throws IOException if IOException occurs
 */
private double[][] readMatrixOrFrameInput(String fname, int rows, int cols, DataType dt) throws DMLRuntimeException, IOException {
    MatrixBlock ret = null;
    //read input data
    if (dt == DataType.FRAME) {
        FrameReader reader = FrameReaderFactory.createFrameReader(InputInfo.BinaryBlockInputInfo);
        FrameBlock fb = reader.readFrameFromHDFS(fname, rows, cols);
        ret = DataConverter.convertToMatrixBlock(fb);
    } else {
        int blksize = ConfigurationManager.getBlocksize();
        MatrixReader reader = MatrixReaderFactory.createMatrixReader(InputInfo.BinaryBlockInputInfo);
        ret = reader.readMatrixFromHDFS(fname, rows, cols, blksize, blksize, -1);
    }
    return DataConverter.convertToDoubleMatrix(ret);
}
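A hedged sketch of how the surrounding test might drive this helper; the file names, dimensions, and comparison call are assumptions in the style of the SystemML test base:

// Hypothetical test usage; arguments are placeholders.
double[][] A = readMatrixOrFrameInput(output("A"), rows, cols, DataType.MATRIX);
double[][] B = readMatrixOrFrameInput(output("B"), rows, cols, DataType.FRAME);
TestUtils.compareMatrices(A, B, rows, cols, 0); //expect identical data after casting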
Use of org.apache.sysml.runtime.io.MatrixReader in project incubator-systemml by apache.
From the class FrameConverterTest, the method runMatrixConverterAndVerify:
/**
 * Writes the given data as a matrix or frame, runs the converter under
 * test, and verifies the result against the original input.
 *
 * @param schema value types of the frame columns
 * @param A input data as two-dimensional double array
 * @param type conversion type under test (e.g., MAT2BIN)
 * @param iinfo input info used to read the converter output
 * @param oinfo output info used to write the converter input
 * @throws IOException if IOException occurs
 */
private void runMatrixConverterAndVerify(ValueType[] schema, double[][] A, ConvType type, InputInfo iinfo, OutputInfo oinfo) throws IOException {
    try {
        MatrixCharacteristics mcMatrix = new MatrixCharacteristics(rows, schema.length, 1000, 1000, 0);
        MatrixCharacteristics mcFrame = new MatrixCharacteristics(rows, schema.length, -1, -1, -1);
        MatrixBlock matrixBlock1 = null;
        FrameBlock frame1 = null;
        if (type == ConvType.MAT2BIN) {
            //initialize the matrix (dense) data
            matrixBlock1 = new MatrixBlock(rows, schema.length, false);
            matrixBlock1.init(A, rows, schema.length);
            //write matrix data to hdfs
            MatrixWriter matWriter = MatrixWriterFactory.createMatrixWriter(oinfo);
            matWriter.writeMatrixToHDFS(matrixBlock1, input("A"), rows, schema.length, mcMatrix.getRowsPerBlock(), mcMatrix.getColsPerBlock(), mcMatrix.getNonZeros());
        } else {
            //initialize the frame data
            frame1 = new FrameBlock(schema);
            initFrameData(frame1, A, schema);
            //write frame data to hdfs
            FrameWriter writer = FrameWriterFactory.createFrameWriter(oinfo);
            writer.writeFrameToHDFS(frame1, input("A"), rows, schema.length);
        }
        //run converter under test
        runConverter(type, mcFrame, mcMatrix, Arrays.asList(schema), input("A"), output("B"));
        if (type == ConvType.MAT2BIN) {
            //read frame data from hdfs
            FrameReader reader = FrameReaderFactory.createFrameReader(iinfo);
            FrameBlock frame2 = reader.readFrameFromHDFS(output("B"), rows, schema.length);
            //verify input and output frame/matrix
            verifyFrameMatrixData(frame2, matrixBlock1);
        } else {
            //read matrix data from hdfs
            MatrixReader matReader = MatrixReaderFactory.createMatrixReader(iinfo);
            MatrixBlock matrixBlock2 = matReader.readMatrixFromHDFS(output("B"), rows, schema.length, mcMatrix.getRowsPerBlock(), mcMatrix.getColsPerBlock(), mcMatrix.getNonZeros());
            //verify input and output frame/matrix
            verifyFrameMatrixData(frame1, matrixBlock2);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    } finally {
        MapReduceTool.deleteFileIfExistOnHDFS(input("A"));
        MapReduceTool.deleteFileIfExistOnHDFS(output("B"));
    }
}
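A hedged sketch of a test case invoking this verification; the schema, the random-data generator call, and the chosen formats are illustrative assumptions:

// Hypothetical test body; schema, data generation, and formats are placeholders.
ValueType[] schema = new ValueType[] {ValueType.DOUBLE, ValueType.DOUBLE, ValueType.DOUBLE};
double[][] A = getRandomMatrix(rows, schema.length, -10, 10, 0.9, 7);
runMatrixConverterAndVerify(schema, A, ConvType.MAT2BIN,
    InputInfo.BinaryBlockInputInfo, OutputInfo.BinaryBlockOutputInfo);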