Use of org.apache.sysml.runtime.matrix.data.MatrixCell in project systemml by apache.
The class WriterBinaryCell, method writeEmptyMatrixToHDFS.
@Override
@SuppressWarnings("deprecation")
public void writeEmptyMatrixToHDFS(String fname, long rlen, long clen, int brlen, int bclen)
    throws IOException, DMLRuntimeException
{
    JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
    Path path = new Path(fname);
    FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
    SequenceFile.Writer writer = null;
    try {
        writer = new SequenceFile.Writer(fs, job, path, MatrixIndexes.class, MatrixCell.class);
        // write a single zero cell at (1,1) so the file is valid and non-empty
        MatrixIndexes index = new MatrixIndexes(1, 1);
        MatrixCell cell = new MatrixCell(0);
        writer.append(index, cell);
    }
    finally {
        IOUtilFunctions.closeSilently(writer);
    }
    IOUtilFunctions.deleteCrcFilesFromLocalFileSystem(fs, path);
}
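A minimal invocation sketch, assuming WriterBinaryCell is directly instantiable (writers are normally obtained through a factory); the path and dimensions are purely illustrative:

// Hypothetical usage: create a valid binary cell file for an empty 10x10 matrix.
WriterBinaryCell writer = new WriterBinaryCell();
writer.writeEmptyMatrixToHDFS("hdfs:/tmp/emptyMatrix", 10, 10, 1000, 1000);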
Use of org.apache.sysml.runtime.matrix.data.MatrixCell in project systemml by apache.
The class WriterBinaryCell, method writeBinaryCellMatrixToHDFS.
@SuppressWarnings("deprecation")
protected void writeBinaryCellMatrixToHDFS(Path path, JobConf job, MatrixBlock src, long rlen, long clen, int brlen, int bclen)
    throws IOException
{
    boolean sparse = src.isInSparseFormat();
    boolean entriesWritten = false;
    FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
    SequenceFile.Writer writer = new SequenceFile.Writer(fs, job, path, MatrixIndexes.class, MatrixCell.class);
    MatrixIndexes indexes = new MatrixIndexes();
    MatrixCell cell = new MatrixCell();
    int rows = src.getNumRows();
    int cols = src.getNumColumns();
    try {
        // bound check per block
        if( rows > rlen || cols > clen ) {
            throw new IOException("Matrix block [1:" + rows + ",1:" + cols + "] "
                + "out of overall matrix range [1:" + rlen + ",1:" + clen + "].");
        }
        if( sparse ) { // SPARSE
            // iterate over non-zeros only; IJV coordinates are 0-based,
            // the binary cell format is 1-based
            Iterator<IJV> iter = src.getSparseBlockIterator();
            while( iter.hasNext() ) {
                IJV lcell = iter.next();
                indexes.setIndexes(lcell.getI() + 1, lcell.getJ() + 1);
                cell.setValue(lcell.getV());
                writer.append(indexes, cell);
                entriesWritten = true;
            }
        }
        else { // DENSE
            for( int i = 0; i < rows; i++ )
                for( int j = 0; j < cols; j++ ) {
                    double lvalue = src.getValueDenseUnsafe(i, j);
                    if( lvalue != 0 ) { // write non-zeros only
                        indexes.setIndexes(i + 1, j + 1);
                        cell.setValue(lvalue);
                        writer.append(indexes, cell);
                        entriesWritten = true;
                    }
                }
        }
        // handle empty result: write a single zero cell to create a valid file
        if( !entriesWritten ) {
            writer.append(new MatrixIndexes(1, 1), new MatrixCell(0));
        }
    }
    finally {
        IOUtilFunctions.closeSilently(writer);
    }
}
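The sparse branch above relies on MatrixBlock's non-zero iterator. A short sketch of that pattern in isolation, assuming the imports of the surrounding excerpts and MatrixBlock's quickSetValue setter; the values are illustrative:

// Sketch: enumerate non-zeros of a small sparse block and convert the
// 0-based IJV coordinates to the 1-based binary cell convention.
MatrixBlock mb = new MatrixBlock(3, 3, true); // 3x3, sparse
mb.quickSetValue(0, 2, 7.0);
mb.quickSetValue(2, 1, -1.5);
Iterator<IJV> it = mb.getSparseBlockIterator();
while( it.hasNext() ) {
    IJV c = it.next();
    System.out.println("(" + (c.getI() + 1) + "," + (c.getJ() + 1) + ") = " + c.getV());
}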
Use of org.apache.sysml.runtime.matrix.data.MatrixCell in project systemml by apache.
The class ReaderBinaryCell, method readBinaryCellMatrixFromHDFS.
@SuppressWarnings("deprecation")
private static void readBinaryCellMatrixFromHDFS(Path path, JobConf job, FileSystem fs, MatrixBlock dest, long rlen, long clen, int brlen, int bclen)
    throws IOException
{
    boolean sparse = dest.isInSparseFormat();
    MatrixIndexes key = new MatrixIndexes();
    MatrixCell value = new MatrixCell();
    int row = -1;
    int col = -1;
    try {
        // directly read from sequence files (1..N individual part files)
        for( Path lpath : IOUtilFunctions.getSequenceFilePaths(fs, path) ) {
            SequenceFile.Reader reader = new SequenceFile.Reader(fs, lpath, job);
            try {
                // sparse and dense targets are handled uniformly: appendValue
                // dispatches on the destination format, and sparse rows are
                // sorted once after all files have been read
                while( reader.next(key, value) ) {
                    row = (int) key.getRowIndex() - 1;
                    col = (int) key.getColumnIndex() - 1;
                    double lvalue = value.getValue();
                    dest.appendValue(row, col, lvalue);
                }
            }
            finally {
                IOUtilFunctions.closeSilently(reader);
            }
        }
        if( sparse )
            dest.sortSparseRows();
    }
    catch (Exception ex) {
        // post-mortem error handling and bounds checking
        if( row < 0 || row + 1 > rlen || col < 0 || col + 1 > clen ) {
            throw new IOException("Matrix cell [" + (row + 1) + "," + (col + 1) + "] "
                + "out of overall matrix range [1:" + rlen + ",1:" + clen + "].");
        }
        else {
            throw new IOException("Unable to read matrix in binary cell format.", ex);
        }
    }
}
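This private helper is reached through the public reader API. A hedged round-trip sketch; the factory and method names follow the usual SystemML reader API but should be treated as assumptions here, and the path and dimensions are illustrative:

// Hypothetical round trip: obtain a binary cell reader and materialize a block.
MatrixReader reader = MatrixReaderFactory.createMatrixReader(InputInfo.BinaryCellInputInfo);
MatrixBlock mb = reader.readMatrixFromHDFS("hdfs:/tmp/binCellMatrix",
    1000, 1000, 1000, 1000, -1); // rlen, clen, brlen, bclen, estimated nnz (-1 if unknown)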
Use of org.apache.sysml.runtime.matrix.data.MatrixCell in project incubator-systemml by apache.
The class SparkExecutionContext, method toMatrixBlock.
/**
 * Utility method for creating a single matrix block out of a binary cell RDD.
 * Note that this collect call might trigger execution of any pending transformations.
 *
 * @param rdd JavaPairRDD of matrix indexes and cells
 * @param rlen number of rows
 * @param clen number of columns
 * @param nnz number of non-zeros
 * @return matrix block
 */
public static MatrixBlock toMatrixBlock(JavaPairRDD<MatrixIndexes, MatrixCell> rdd, int rlen, int clen, long nnz) {
    long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
    // determine target sparse/dense representation
    long lnnz = (nnz >= 0) ? nnz : (long) rlen * clen;
    boolean sparse = MatrixBlock.evalSparseFormatInMemory(rlen, clen, lnnz);
    // create output matrix block (w/ lazy allocation)
    MatrixBlock out = new MatrixBlock(rlen, clen, sparse);
    List<Tuple2<MatrixIndexes, MatrixCell>> list = rdd.collect();
    // copy cells one-at-a-time into the output matrix block
    for( Tuple2<MatrixIndexes, MatrixCell> keyval : list ) {
        // unpack index-cell pair
        MatrixIndexes ix = keyval._1();
        MatrixCell cell = keyval._2();
        // append cell to dense/sparse target in order to avoid shifting for sparse;
        // note: this append requires a final sort of sparse rows
        out.appendValue((int) ix.getRowIndex() - 1, (int) ix.getColumnIndex() - 1, cell.getValue());
    }
    // post-processing of output matrix
    if( sparse )
        out.sortSparseRows();
    out.recomputeNonZeros();
    out.examSparsity();
    if( DMLScript.STATISTICS ) {
        Statistics.accSparkCollectTime(System.nanoTime() - t0);
        Statistics.incSparkCollectCount(1);
    }
    return out;
}
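A hedged driver sketch for this utility; the JavaSparkContext sc and the sample cells are illustrative scaffolding, not part of the method shown (java.util.Arrays is assumed imported):

// Illustrative only: build a tiny binary cell RDD and collect it into one block.
List<Tuple2<MatrixIndexes, MatrixCell>> cells = Arrays.asList(
    new Tuple2<>(new MatrixIndexes(1, 1), new MatrixCell(3.0)),
    new Tuple2<>(new MatrixIndexes(2, 2), new MatrixCell(4.0)));
JavaPairRDD<MatrixIndexes, MatrixCell> rdd = sc.parallelizePairs(cells);
MatrixBlock mb = SparkExecutionContext.toMatrixBlock(rdd, 2, 2, 2); // rlen=2, clen=2, nnz=2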
Use of org.apache.sysml.runtime.matrix.data.MatrixCell in project incubator-systemml by apache.
The class DataPartitionerLocal, method partitionBinaryCell.
@SuppressWarnings("deprecation")
private void partitionBinaryCell(String fname, String fnameStaging, String fnameNew, long rlen, long clen, int brlen, int bclen) {
    long row = -1;
    long col = -1;
    try {
        // STEP 1: read matrix from HDFS and write blocks to local staging area
        // check and add input path
        JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
        Path path = new Path(fname);
        FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
        // prepare sequence file reader, and write to local staging area
        LinkedList<Cell> buffer = new LinkedList<>();
        MatrixIndexes key = new MatrixIndexes();
        MatrixCell value = new MatrixCell();
        for( Path lpath : IOUtilFunctions.getSequenceFilePaths(fs, path) ) {
            SequenceFile.Reader reader = new SequenceFile.Reader(fs, lpath, job);
            try {
                while( reader.next(key, value) ) {
                    row = key.getRowIndex();
                    col = key.getColumnIndex();
                    Cell tmp = new Cell(row, col, value.getValue());
                    buffer.addLast(tmp);
                    if( buffer.size() > StagingFileUtils.CELL_BUFFER_SIZE ) { // periodic flush
                        appendCellBufferToStagingArea(fnameStaging, buffer, brlen, bclen);
                        buffer.clear();
                    }
                }
                // final flush
                if( !buffer.isEmpty() ) {
                    appendCellBufferToStagingArea(fnameStaging, buffer, brlen, bclen);
                    buffer.clear();
                }
            }
            finally {
                IOUtilFunctions.closeSilently(reader);
            }
        }
        // STEP 2: read matrix blocks from staging area and write matrix to HDFS
        String[] fnamesPartitions = new File(fnameStaging).list();
        if( PARALLEL ) {
            // split the partition directories into contiguous ranges, one worker thread each
            int len = Math.min(fnamesPartitions.length, _par);
            Thread[] threads = new Thread[len];
            for( int i = 0; i < len; i++ ) {
                int start = i * (int) Math.ceil(((double) fnamesPartitions.length) / len);
                int end = (i + 1) * (int) Math.ceil(((double) fnamesPartitions.length) / len) - 1;
                end = Math.min(end, fnamesPartitions.length - 1);
                threads[i] = new Thread(new DataPartitionerWorkerBinaryCell(job, fnameNew, fnameStaging, fnamesPartitions, start, end));
                threads[i].start();
            }
            for( Thread t : threads )
                t.join();
        }
        else {
            for( String pdir : fnamesPartitions )
                writeBinaryCellSequenceFileToHDFS(job, fnameNew, fnameStaging + "/" + pdir);
        }
    }
    catch (Exception e) {
        // post-mortem error handling and bounds checking
        if( row < 1 || row > rlen || col < 1 || col > clen ) {
            throw new DMLRuntimeException("Matrix cell [" + row + "," + col + "] "
                + "out of overall matrix range [1:" + rlen + ",1:" + clen + "].");
        }
        else {
            throw new DMLRuntimeException("Unable to partition binary cell matrix.", e);
        }
    }
}
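The staging step groups cells into brlen x bclen blocks via appendCellBufferToStagingArea, which is not shown here. A minimal sketch of the block-index arithmetic such a helper needs, assuming 1-based cell coordinates as produced by MatrixIndexes; the helper itself is hypothetical:

// Hypothetical helper: map a 1-based cell coordinate to its enclosing
// 1-based block index for a given block size (brlen or bclen).
static long blockIndex(long cellIndex, int blockSize) {
    return (cellIndex - 1) / blockSize + 1;
}
// e.g. with brlen = 1000: row 1 -> block 1, row 1000 -> block 1, row 1001 -> block 2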