Use of org.apache.sysml.runtime.matrix.MatrixCharacteristics in project incubator-systemml by apache.
The class ResultMergeLocalFile, method createBinaryBlockResultFile.
@SuppressWarnings("deprecation")
private void createBinaryBlockResultFile(String fnameStaging, String fnameStagingCompare, String fnameNew, MetaDataFormat metadata, boolean withCompare)
  throws IOException, DMLRuntimeException
{
  JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
  Path path = new Path(fnameNew);
  FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
  MatrixCharacteristics mc = metadata.getMatrixCharacteristics();
  long rlen = mc.getRows();
  long clen = mc.getCols();
  int brlen = mc.getRowsPerBlock();
  int bclen = mc.getColsPerBlock();
  // beware ca 50ms
  SequenceFile.Writer writer = new SequenceFile.Writer(fs, job, path, MatrixIndexes.class, MatrixBlock.class);
  try {
    MatrixIndexes indexes = new MatrixIndexes();
    for (long brow = 1; brow <= (long) Math.ceil(rlen / (double) brlen); brow++)
      for (long bcol = 1; bcol <= (long) Math.ceil(clen / (double) bclen); bcol++) {
        File dir = new File(fnameStaging + "/" + brow + "_" + bcol);
        File dir2 = new File(fnameStagingCompare + "/" + brow + "_" + bcol);
        MatrixBlock mb = null;
        if (dir.exists()) {
          if (withCompare && dir2.exists()) { // WITH COMPARE BLOCK
            // copy only values that are different from the original
            String[] lnames2 = dir2.list();
            if (lnames2.length != 1) // there should be exactly 1 compare block
              throw new DMLRuntimeException("Unable to merge results because multiple compare blocks found.");
            mb = LocalFileUtils.readMatrixBlockFromLocal(dir2 + "/" + lnames2[0]);
            boolean appendOnly = mb.isInSparseFormat();
            DenseBlock compare = DataConverter.convertToDenseBlock(mb, false);
            for (String lname : dir.list()) {
              MatrixBlock tmp = LocalFileUtils.readMatrixBlockFromLocal(dir + "/" + lname);
              mergeWithComp(mb, tmp, compare);
            }
            // sort sparse due to append-only
            if (appendOnly && !_isAccum)
              mb.sortSparseRows();
            // change sparsity if required after
            mb.examSparsity();
          }
          else { // WITHOUT COMPARE BLOCK
            // copy all non-zeros from all workers
            boolean appendOnly = false;
            for (String lname : dir.list()) {
              if (mb == null) {
                mb = LocalFileUtils.readMatrixBlockFromLocal(dir + "/" + lname);
                appendOnly = mb.isInSparseFormat();
              }
              else {
                MatrixBlock tmp = LocalFileUtils.readMatrixBlockFromLocal(dir + "/" + lname);
                mergeWithoutComp(mb, tmp, appendOnly);
              }
            }
            // sort sparse due to append-only
            if (appendOnly && !_isAccum)
              mb.sortSparseRows();
            // change sparsity if required after
            mb.examSparsity();
          }
        }
        else {
          // NOTE: whenever runtime does not need all blocks anymore, this can be removed
          int maxRow = (int) (((brow - 1) * brlen + brlen < rlen) ? brlen : rlen - (brow - 1) * brlen);
          int maxCol = (int) (((bcol - 1) * bclen + bclen < clen) ? bclen : clen - (bcol - 1) * bclen);
          mb = new MatrixBlock(maxRow, maxCol, true);
        }
        // mb.examSparsity(); //done on write anyway and mb not reused
        indexes.setIndexes(brow, bcol);
        writer.append(indexes, mb);
      }
  }
  finally {
    IOUtilFunctions.closeSilently(writer);
  }
}
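For intuition on the compare-based branch above, the comments describe the rule as copying only values that differ from the original (compare) block. A minimal standalone sketch of that rule over plain double arrays (hypothetical helper names, not the actual mergeWithComp implementation) could look like this:

// Hypothetical illustration of the compare-based merge rule: a worker value is
// taken over only where it differs from the original (compare) value.
public class CompareMergeSketch {
  // merge one worker result into the output, keeping only changed cells
  static void mergeWithCompare(double[][] out, double[][] worker, double[][] compare) {
    for (int i = 0; i < out.length; i++)
      for (int j = 0; j < out[i].length; j++)
        if (worker[i][j] != compare[i][j]) // value was modified by this worker
          out[i][j] = worker[i][j];
  }

  public static void main(String[] args) {
    double[][] compare = {{1, 2}, {3, 4}}; // original block before the parallel run
    double[][] out     = {{1, 2}, {3, 4}}; // merged result, starts as the original
    double[][] worker  = {{1, 9}, {3, 4}}; // one worker changed cell (0,1)
    mergeWithCompare(out, worker, compare);
    System.out.println(out[0][1]); // 9.0
  }
}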
Use of org.apache.sysml.runtime.matrix.MatrixCharacteristics in project incubator-systemml by apache.
The class ResultMergeLocalFile, method createBinaryCellResultFile.
@SuppressWarnings("deprecation")
private void createBinaryCellResultFile(String fnameStaging, String fnameStagingCompare, String fnameNew, MetaDataFormat metadata, boolean withCompare)
  throws IOException, DMLRuntimeException
{
  JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
  Path path = new Path(fnameNew);
  FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
  MatrixCharacteristics mc = metadata.getMatrixCharacteristics();
  long rlen = mc.getRows();
  long clen = mc.getCols();
  int brlen = mc.getRowsPerBlock();
  int bclen = mc.getColsPerBlock();
  MatrixIndexes indexes = new MatrixIndexes(1, 1);
  MatrixCell cell = new MatrixCell(0);
  // beware ca 50ms
  SequenceFile.Writer out = new SequenceFile.Writer(fs, job, path, MatrixIndexes.class, MatrixCell.class);
  try {
    boolean written = false;
    for (long brow = 1; brow <= (long) Math.ceil(rlen / (double) brlen); brow++)
      for (long bcol = 1; bcol <= (long) Math.ceil(clen / (double) bclen); bcol++) {
        File dir = new File(fnameStaging + "/" + brow + "_" + bcol);
        File dir2 = new File(fnameStagingCompare + "/" + brow + "_" + bcol);
        MatrixBlock mb = null;
        long row_offset = (brow - 1) * brlen + 1;
        long col_offset = (bcol - 1) * bclen + 1;
        if (dir.exists()) {
          if (withCompare && dir2.exists()) { // WITH COMPARE BLOCK
            // copy only values that are different from the original
            String[] lnames2 = dir2.list();
            if (lnames2.length != 1) // there should be exactly 1 compare block
              throw new DMLRuntimeException("Unable to merge results because multiple compare blocks found.");
            mb = StagingFileUtils.readCellList2BlockFromLocal(dir2 + "/" + lnames2[0], brlen, bclen);
            boolean appendOnly = mb.isInSparseFormat();
            DenseBlock compare = DataConverter.convertToDenseBlock(mb, false);
            for (String lname : dir.list()) {
              MatrixBlock tmp = StagingFileUtils.readCellList2BlockFromLocal(dir + "/" + lname, brlen, bclen);
              mergeWithComp(mb, tmp, compare);
            }
            // sort sparse due to append-only
            if (appendOnly && !_isAccum)
              mb.sortSparseRows();
            // change sparsity if required after
            mb.examSparsity();
          }
          else { // WITHOUT COMPARE BLOCK
            // copy all non-zeros from all workers
            boolean appendOnly = false;
            for (String lname : dir.list()) {
              if (mb == null) {
                mb = StagingFileUtils.readCellList2BlockFromLocal(dir + "/" + lname, brlen, bclen);
                appendOnly = mb.isInSparseFormat();
              }
              else {
                MatrixBlock tmp = StagingFileUtils.readCellList2BlockFromLocal(dir + "/" + lname, brlen, bclen);
                mergeWithoutComp(mb, tmp, appendOnly);
              }
            }
            // sort sparse due to append-only
            if (appendOnly && !_isAccum)
              mb.sortSparseRows();
            // change sparsity if required after
            mb.examSparsity();
          }
        }
        // write the block to binary cell
        if (mb != null) {
          if (mb.isInSparseFormat()) {
            Iterator<IJV> iter = mb.getSparseBlockIterator();
            while (iter.hasNext()) {
              IJV lcell = iter.next();
              indexes.setIndexes(row_offset + lcell.getI(), col_offset + lcell.getJ());
              cell.setValue(lcell.getV());
              out.append(indexes, cell);
              written = true;
            }
          }
          else {
            for (int i = 0; i < brlen; i++)
              for (int j = 0; j < bclen; j++) {
                double lvalue = mb.getValueDenseUnsafe(i, j);
                if (lvalue != 0) { // for nnz
                  indexes.setIndexes(row_offset + i, col_offset + j);
                  cell.setValue(lvalue);
                  out.append(indexes, cell);
                  written = true;
                }
              }
          }
        }
      }
    if (!written)
      out.append(indexes, cell);
  }
  finally {
    IOUtilFunctions.closeSilently(out);
  }
}
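The distinctive step in this cell writer is mapping block-local coordinates to global 1-based matrix coordinates through row_offset and col_offset. A small self-contained sketch of that index arithmetic (hypothetical names, independent of the SystemML classes):

// Hypothetical sketch: map a cell (i, j) inside block (brow, bcol) to its global
// 1-based matrix coordinates, mirroring the row_offset/col_offset computation above.
public class CellIndexSketch {
  static long globalRow(long brow, int brlen, int i) { return (brow - 1) * brlen + 1 + i; }
  static long globalCol(long bcol, int bclen, int j) { return (bcol - 1) * bclen + 1 + j; }

  public static void main(String[] args) {
    // block size 1000 x 1000, cell (2,5) inside block (3,1)
    System.out.println(globalRow(3, 1000, 2)); // 2003
    System.out.println(globalCol(1, 1000, 5)); // 6
  }
}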
Use of org.apache.sysml.runtime.matrix.MatrixCharacteristics in project incubator-systemml by apache.
The class ResultMergeRemoteSpark, method executeParallelMerge.
@Override
public MatrixObject executeParallelMerge(int par) {
  // always create new matrix object (required for nested parallelism)
  MatrixObject moNew = null;
  if (LOG.isTraceEnabled())
    LOG.trace("ResultMerge (remote, spark): Execute serial merge for output " + _output.hashCode() + " (fname=" + _output.getFileName() + ")");
  try {
    if (_inputs != null && _inputs.length > 0) {
      // prepare compare
      MetaDataFormat metadata = (MetaDataFormat) _output.getMetaData();
      MatrixCharacteristics mcOld = metadata.getMatrixCharacteristics();
      MatrixObject compare = (mcOld.getNonZeros() == 0) ? null : _output;
      // actual merge
      RDDObject ro = executeMerge(compare, _inputs, mcOld.getRows(), mcOld.getCols(), mcOld.getRowsPerBlock(), mcOld.getColsPerBlock());
      // create new output matrix (e.g., to prevent potential export<->read file access conflict)
      moNew = new MatrixObject(_output.getValueType(), _outputFName);
      OutputInfo oiOld = metadata.getOutputInfo();
      InputInfo iiOld = metadata.getInputInfo();
      MatrixCharacteristics mc = new MatrixCharacteristics(mcOld);
      mc.setNonZeros(_isAccum ? -1 : computeNonZeros(_output, Arrays.asList(_inputs)));
      MetaDataFormat meta = new MetaDataFormat(mc, oiOld, iiOld);
      moNew.setMetaData(meta);
      moNew.setRDDHandle(ro);
    }
    else {
      // return old matrix, to prevent copy
      moNew = _output;
    }
  }
  catch (Exception ex) {
    throw new DMLRuntimeException(ex);
  }
  return moNew;
}
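Two decisions drive this method: the original output is kept as a compare matrix only if it already contains non-zeros, and the merged nnz is left unknown (-1) when accumulating, because accumulation changes the non-zero count. A minimal sketch of that planning logic with stand-in types (an illustration only, not the SystemML API):

// Hypothetical sketch of the merge-planning decisions made above, with stand-in fields.
final class MergePlan {
  final boolean useCompare; // diff against the original only if it was non-empty
  final long outputNnz;     // -1 means "unknown, determined later"

  MergePlan(long originalNnz, boolean isAccum, long mergedNnzEstimate) {
    this.useCompare = (originalNnz != 0);
    this.outputNnz = isAccum ? -1 : mergedNnzEstimate;
  }
}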
Use of org.apache.sysml.runtime.matrix.MatrixCharacteristics in project incubator-systemml by apache.
The class VariableCPInstruction, method writeCSVFile.
/**
 * Helper function to write CSV files to HDFS.
 *
 * @param ec execution context
 * @param fname file name
 */
private void writeCSVFile(ExecutionContext ec, String fname) {
  MatrixObject mo = ec.getMatrixObject(getInput1().getName());
  String outFmt = "csv";
  if (mo.isDirty()) {
    // there exist data computed in CP that is not backed up on HDFS
    // i.e., it is either in-memory or in evicted space
    mo.exportData(fname, outFmt, _formatProperties);
  }
  else {
    try {
      OutputInfo oi = ((MetaDataFormat) mo.getMetaData()).getOutputInfo();
      MatrixCharacteristics mc = ((MetaDataFormat) mo.getMetaData()).getMatrixCharacteristics();
      if (oi == OutputInfo.CSVOutputInfo) {
        WriterTextCSV writer = new WriterTextCSV((CSVFileFormatProperties) _formatProperties);
        writer.addHeaderToCSV(mo.getFileName(), fname, mc.getRows(), mc.getCols());
      }
      else if (oi == OutputInfo.BinaryBlockOutputInfo || oi == OutputInfo.TextCellOutputInfo) {
        mo.exportData(fname, outFmt, _formatProperties);
      }
      else {
        throw new DMLRuntimeException("Unexpected data format (" + OutputInfo.outputInfoToString(oi) + "): can not export into CSV format.");
      }
      // Write Metadata file
      MapReduceTool.writeMetaDataFile(fname + ".mtd", mo.getValueType(), mc, OutputInfo.CSVOutputInfo, _formatProperties);
    }
    catch (IOException e) {
      throw new DMLRuntimeException(e);
    }
  }
}
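When the matrix is clean and already stored as CSV on HDFS, only a header row is added via WriterTextCSV.addHeaderToCSV instead of re-exporting the data. As a rough local-filesystem analogue (plain java.nio rather than the HDFS-aware SystemML writer, and with an assumed C1..Cn column naming), prepending a header might look like this:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

// Hypothetical local analogue of adding a header row to an existing CSV file.
// Column names C1..Cn are an assumption for illustration only.
public class CsvHeaderSketch {
  static void addHeader(Path src, Path dst, long clen) throws IOException {
    StringBuilder header = new StringBuilder();
    for (long j = 1; j <= clen; j++) {
      if (j > 1) header.append(',');
      header.append('C').append(j);
    }
    List<String> out = new ArrayList<>();
    out.add(header.toString());
    out.addAll(Files.readAllLines(src, StandardCharsets.UTF_8)); // data rows unchanged
    Files.write(dst, out, StandardCharsets.UTF_8);
  }

  public static void main(String[] args) throws IOException {
    Path src = Files.createTempFile("data", ".csv");
    Files.write(src, List.of("1.0,2.0", "3.0,4.0"), StandardCharsets.UTF_8);
    Path dst = Files.createTempFile("data-with-header", ".csv");
    addHeader(src, dst, 2);
    System.out.println(Files.readAllLines(dst)); // [C1,C2, 1.0,2.0, 3.0,4.0]
  }
}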
Use of org.apache.sysml.runtime.matrix.MatrixCharacteristics in project incubator-systemml by apache.
The class VariableCPInstruction, method writeMMFile.
/**
 * Helper function to write MM files to HDFS.
 *
 * @param ec execution context
 * @param fname file name
 */
private void writeMMFile(ExecutionContext ec, String fname) {
  MatrixObject mo = ec.getMatrixObject(getInput1().getName());
  String outFmt = "matrixmarket";
  if (mo.isDirty()) {
    // there exist data computed in CP that is not backed up on HDFS
    // i.e., it is either in-memory or in evicted space
    mo.exportData(fname, outFmt);
  }
  else {
    OutputInfo oi = ((MetaDataFormat) mo.getMetaData()).getOutputInfo();
    MatrixCharacteristics mc = mo.getMatrixCharacteristics();
    if (oi == OutputInfo.TextCellOutputInfo) {
      try {
        WriterMatrixMarket.mergeTextcellToMatrixMarket(mo.getFileName(), fname, mc.getRows(), mc.getCols(), mc.getNonZeros());
      }
      catch (IOException e) {
        throw new DMLRuntimeException(e);
      }
    }
    else if (oi == OutputInfo.BinaryBlockOutputInfo) {
      mo.exportData(fname, outFmt);
    }
    else {
      throw new DMLRuntimeException("Unexpected data format (" + OutputInfo.outputInfoToString(oi) + "): can not export into MatrixMarket format.");
    }
  }
}
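mergeTextcellToMatrixMarket rewrites existing text-cell data into MatrixMarket coordinate format, which uses the same 1-based "i j value" triples preceded by a banner and a size line. A minimal standalone sketch of that target file format (plain java.io, not the SystemML writer):

import java.io.IOException;
import java.io.PrintWriter;

// Hypothetical sketch of the MatrixMarket coordinate format produced on export:
// a banner, a "rows cols nnz" size line, then 1-based "i j value" triples.
public class MatrixMarketSketch {
  public static void main(String[] args) throws IOException {
    long rlen = 2, clen = 2, nnz = 3;
    try (PrintWriter pw = new PrintWriter("example.mtx", "UTF-8")) {
      pw.println("%%MatrixMarket matrix coordinate real general");
      pw.println(rlen + " " + clen + " " + nnz);
      pw.println("1 1 1.0"); // cell (1,1) = 1.0
      pw.println("1 2 2.0");
      pw.println("2 2 4.0");
    }
  }
}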