
Example 46 with DenseBlock

Use of org.apache.sysml.runtime.matrix.data.DenseBlock in project systemml by apache.

The class LinearAlgebraUtils, method copyNonZerosToUpperTriangle.

public static void copyNonZerosToUpperTriangle(MatrixBlock ret, MatrixBlock tmp, int ix) {
    double[] a = tmp.getDenseBlockValues();
    DenseBlock c = ret.getDenseBlock();
    // mirror each non-zero of the dense row tmp into the upper triangle of ret,
    // i.e., into position (min(ix, i), max(ix, i))
    for (int i = 0; i < tmp.getNumColumns(); i++)
        if (a[i] != 0)
            c.set((ix < i) ? ix : i, (ix < i) ? i : ix, a[i]);
}
Also used : DenseBlock(org.apache.sysml.runtime.matrix.data.DenseBlock)
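
As a rough usage sketch, the helper above can be driven row by row to fill the upper triangle of a symmetric result. Everything below is hypothetical: the caller, the assumption that each row block is a dense 1 x n MatrixBlock, and the package of LinearAlgebraUtils (adjust the import if it differs in your tree).

// Hypothetical caller: mirrors one dense row block at a time into the
// upper triangle of a symmetric n x n result. The LinearAlgebraUtils
// package below is an assumption; adjust it to your source tree.
import org.apache.sysml.runtime.compress.utils.LinearAlgebraUtils;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;

public class UpperTriangleSketch {
    public static MatrixBlock symmetricResult(MatrixBlock[] rowResults) {
        int n = rowResults.length;
        MatrixBlock ret = new MatrixBlock(n, n, false);
        ret.allocateDenseBlock();
        for (int ix = 0; ix < n; ix++)
            // rowResults[ix] is assumed to be a dense 1 x n block holding row ix
            LinearAlgebraUtils.copyNonZerosToUpperTriangle(ret, rowResults[ix], ix);
        ret.recomputeNonZeros();
        return ret;
    }
}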

Example 47 with DenseBlock

Use of org.apache.sysml.runtime.matrix.data.DenseBlock in project systemml by apache.

The class ResultMergeLocalFile, method createBinaryBlockResultFile.

@SuppressWarnings("deprecation")
private void createBinaryBlockResultFile(String fnameStaging, String fnameStagingCompare, String fnameNew, MetaDataFormat metadata, boolean withCompare) throws IOException, DMLRuntimeException {
    JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
    Path path = new Path(fnameNew);
    FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
    MatrixCharacteristics mc = metadata.getMatrixCharacteristics();
    long rlen = mc.getRows();
    long clen = mc.getCols();
    int brlen = mc.getRowsPerBlock();
    int bclen = mc.getColsPerBlock();
    // beware: SequenceFile writer creation takes ca. 50 ms
    SequenceFile.Writer writer = new SequenceFile.Writer(fs, job, path, MatrixIndexes.class, MatrixBlock.class);
    try {
        MatrixIndexes indexes = new MatrixIndexes();
        for (long brow = 1; brow <= (long) Math.ceil(rlen / (double) brlen); brow++)
        for (long bcol = 1; bcol <= (long) Math.ceil(clen / (double) bclen); bcol++) {
            File dir = new File(fnameStaging + "/" + brow + "_" + bcol);
            File dir2 = new File(fnameStagingCompare + "/" + brow + "_" + bcol);
            MatrixBlock mb = null;
            if (dir.exists()) {
                if (withCompare && dir2.exists()) { // WITH COMPARE BLOCK
                    // copy only values that are different from the original
                    String[] lnames2 = dir2.list();
                    // there should be exactly 1 compare block
                    if (lnames2.length != 1)
                        throw new DMLRuntimeException("Unable to merge results because multiple compare blocks found.");
                    mb = LocalFileUtils.readMatrixBlockFromLocal(dir2 + "/" + lnames2[0]);
                    boolean appendOnly = mb.isInSparseFormat();
                    DenseBlock compare = DataConverter.convertToDenseBlock(mb, false);
                    for (String lname : dir.list()) {
                        MatrixBlock tmp = LocalFileUtils.readMatrixBlockFromLocal(dir + "/" + lname);
                        mergeWithComp(mb, tmp, compare);
                    }
                    // sort sparse due to append-only
                    if (appendOnly && !_isAccum)
                        mb.sortSparseRows();
                    // change sparsity if required after
                    mb.examSparsity();
                } else { // WITHOUT COMPARE BLOCK
                    // copy all non-zeros from all workers
                    boolean appendOnly = false;
                    for (String lname : dir.list()) {
                        if (mb == null) {
                            mb = LocalFileUtils.readMatrixBlockFromLocal(dir + "/" + lname);
                            appendOnly = mb.isInSparseFormat();
                        } else {
                            MatrixBlock tmp = LocalFileUtils.readMatrixBlockFromLocal(dir + "/" + lname);
                            mergeWithoutComp(mb, tmp, appendOnly);
                        }
                    }
                    // sort sparse due to append-only
                    if (appendOnly && !_isAccum)
                        mb.sortSparseRows();
                    // change sparsity if required after
                    mb.examSparsity();
                }
            } else {
                // NOTE: whenever runtime does not need all blocks anymore, this can be removed
                int maxRow = (int) (((brow - 1) * brlen + brlen < rlen) ? brlen : rlen - (brow - 1) * brlen);
                int maxCol = (int) (((bcol - 1) * bclen + bclen < clen) ? bclen : clen - (bcol - 1) * bclen);
                mb = new MatrixBlock(maxRow, maxCol, true);
            }
            // mb.examSparsity(); //done on write anyway and mb not reused
            indexes.setIndexes(brow, bcol);
            writer.append(indexes, mb);
        }
    } finally {
        IOUtilFunctions.closeSilently(writer);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) MatrixBlock(org.apache.sysml.runtime.matrix.data.MatrixBlock) MatrixIndexes(org.apache.sysml.runtime.matrix.data.MatrixIndexes) MatrixCharacteristics(org.apache.sysml.runtime.matrix.MatrixCharacteristics) DMLRuntimeException(org.apache.sysml.runtime.DMLRuntimeException) DenseBlock(org.apache.sysml.runtime.matrix.data.DenseBlock) SequenceFile(org.apache.hadoop.io.SequenceFile) FileSystem(org.apache.hadoop.fs.FileSystem) JobConf(org.apache.hadoop.mapred.JobConf) File(java.io.File) OutputStreamWriter(java.io.OutputStreamWriter) BufferedWriter(java.io.BufferedWriter)
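
The compare branch above delegates the per-cell logic to mergeWithComp, which is not shown on this page. As a simplified, hypothetical illustration of the idea (not the actual SystemML implementation), a worker value is written to the output only where it differs from the original compare value:

// Simplified sketch of a compare-based merge: copy a cell from the worker
// block only if it differs from the original (compare) value, so that
// unmodified cells keep their pre-parfor contents. Class and method names
// are illustrative, not SystemML's.
import org.apache.sysml.runtime.matrix.data.DenseBlock;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;

public class CompareMergeSketch {
    public static void mergeWithCompSketch(MatrixBlock out, MatrixBlock in, DenseBlock compare) {
        for (int i = 0; i < in.getNumRows(); i++)
            for (int j = 0; j < in.getNumColumns(); j++) {
                double v = in.quickGetValue(i, j);
                // a worker modified this cell iff its value differs from the compare block
                if (v != compare.get(i, j))
                    out.quickSetValue(i, j, v);
            }
    }
}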

Example 48 with DenseBlock

Use of org.apache.sysml.runtime.matrix.data.DenseBlock in project systemml by apache.

The class ReaderTextCell, method readTextCellMatrixFromHDFS.

private static void readTextCellMatrixFromHDFS(Path path, JobConf job, MatrixBlock dest, long rlen, long clen, int brlen, int bclen) throws IOException {
    boolean sparse = dest.isInSparseFormat();
    FileInputFormat.addInputPath(job, path);
    TextInputFormat informat = new TextInputFormat();
    informat.configure(job);
    InputSplit[] splits = informat.getSplits(job, 1);
    LongWritable key = new LongWritable();
    Text value = new Text();
    int row = -1;
    int col = -1;
    try {
        FastStringTokenizer st = new FastStringTokenizer(' ');
        for (InputSplit split : splits) {
            RecordReader<LongWritable, Text> reader = informat.getRecordReader(split, job, Reporter.NULL);
            try {
                if (sparse) { // SPARSE<-value
                    while (reader.next(key, value)) {
                        // reinit tokenizer
                        st.reset(value.toString());
                        row = st.nextInt() - 1;
                        col = st.nextInt() - 1;
                        if (row == -1 || col == -1)
                            continue;
                        double lvalue = st.nextDouble();
                        dest.appendValue(row, col, lvalue);
                    }
                    dest.sortSparseRows();
                } else { // DENSE<-value
                    DenseBlock a = dest.getDenseBlock();
                    while (reader.next(key, value)) {
                        // reinit tokenizer
                        st.reset(value.toString());
                        row = st.nextInt() - 1;
                        col = st.nextInt() - 1;
                        if (row == -1 || col == -1)
                            continue;
                        double lvalue = st.nextDouble();
                        a.set(row, col, lvalue);
                    }
                }
            } finally {
                IOUtilFunctions.closeSilently(reader);
            }
        }
    } catch (Exception ex) {
        // post-mortem error handling and bounds checking
        if (row < 0 || row + 1 > rlen || col < 0 || col + 1 > clen)
            throw new IOException("Matrix cell [" + (row + 1) + "," + (col + 1) + "] " + "out of overall matrix range [1:" + rlen + ",1:" + clen + "].");
        else
            throw new IOException("Unable to read matrix in text cell format.", ex);
    }
}
Also used : Text(org.apache.hadoop.io.Text) IOException(java.io.IOException) DMLRuntimeException(org.apache.sysml.runtime.DMLRuntimeException) DenseBlock(org.apache.sysml.runtime.matrix.data.DenseBlock) FastStringTokenizer(org.apache.sysml.runtime.util.FastStringTokenizer) TextInputFormat(org.apache.hadoop.mapred.TextInputFormat) LongWritable(org.apache.hadoop.io.LongWritable) InputSplit(org.apache.hadoop.mapred.InputSplit)
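
For reference, the text cell format read above consists of 1-based "row col value" triples, one per line. The following standalone sketch parses such lines into a dense MatrixBlock using plain String splitting; FastStringTokenizer, the HDFS plumbing, and the error handling are omitted, and the class name is illustrative.

// Minimal sketch of the dense text-cell parse path: each input line is a
// 1-based "row col value" triple that is written into the DenseBlock.
// Class and method names are illustrative only.
import org.apache.sysml.runtime.matrix.data.DenseBlock;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;

public class TextCellParseSketch {
    public static MatrixBlock parseDense(String[] lines, int rlen, int clen) {
        MatrixBlock dest = new MatrixBlock(rlen, clen, false);
        dest.allocateDenseBlock();
        DenseBlock a = dest.getDenseBlock();
        for (String line : lines) {
            String[] parts = line.trim().split("\\s+");
            int row = Integer.parseInt(parts[0]) - 1; // convert to 0-based
            int col = Integer.parseInt(parts[1]) - 1;
            a.set(row, col, Double.parseDouble(parts[2]));
        }
        dest.recomputeNonZeros();
        return dest;
    }
}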

Aggregations

DenseBlock (org.apache.sysml.runtime.matrix.data.DenseBlock): 48
DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException): 22
MatrixBlock (org.apache.sysml.runtime.matrix.data.MatrixBlock): 20
CompressedMatrixBlock (org.apache.sysml.runtime.compress.CompressedMatrixBlock): 10
KahanPlus (org.apache.sysml.runtime.functionobjects.KahanPlus): 10
KahanObject (org.apache.sysml.runtime.instructions.cp.KahanObject): 10
BufferedReader (java.io.BufferedReader): 6
BufferedWriter (java.io.BufferedWriter): 6
File (java.io.File): 6
InputStreamReader (java.io.InputStreamReader): 6
OutputStreamWriter (java.io.OutputStreamWriter): 6
ArrayList (java.util.ArrayList): 6
ExecutorService (java.util.concurrent.ExecutorService): 6
Future (java.util.concurrent.Future): 6
FileSystem (org.apache.hadoop.fs.FileSystem): 6
Path (org.apache.hadoop.fs.Path): 6
SequenceFile (org.apache.hadoop.io.SequenceFile): 6
JobConf (org.apache.hadoop.mapred.JobConf): 6
MatrixCharacteristics (org.apache.sysml.runtime.matrix.MatrixCharacteristics): 6
IJV (org.apache.sysml.runtime.matrix.data.IJV): 6