Use of org.apache.sysml.udf.Matrix in project incubator-systemml by apache.
The class GatherWrapper, method execute().
@Override
public void execute() {
    try {
        // get input and meta information
        Matrix inM = (Matrix) getFunctionInput(0);
        MatrixObject mo = inM.getMatrixObject();
        MatrixBlock mb = mo.acquireRead();
        int len1 = mb.getNumRows();
        int len2 = (int) mb.getNonZeros();
        // create condensed position vector of 1-based row indexes of non-zeros
        double[][] outM = new double[len2][1];
        int pos = 0;
        for (int i = 0; i < len1; i++) {
            double val = mb.quickGetValue(i, 0);
            if (val != 0)
                outM[pos++][0] = i + 1;
        }
        mo.release();
        // create and copy output matrix (use len2 rather than touching mb after release)
        String dir = createOutputFilePathAndName(OUTPUT_FILE);
        ret = new Matrix(dir, len2, 1, ValueType.Double);
        ret.setMatrixDoubleArray(outM);
    } catch (Exception e) {
        throw new RuntimeException("Error executing external gather function", e);
    }
}
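For context, a minimal sketch of the class this method lives in, assuming the standard org.apache.sysml.udf.PackageFunction contract; the names ret and OUTPUT_FILE come from the snippet above, while the OUTPUT_FILE value and serialVersionUID are illustrative.

import org.apache.sysml.udf.FunctionParameter;
import org.apache.sysml.udf.Matrix;
import org.apache.sysml.udf.PackageFunction;

public class GatherWrapper extends PackageFunction {
    private static final long serialVersionUID = 1L;
    // assumed output file name; the real constant may differ
    private static final String OUTPUT_FILE = "TMP";
    // condensed position vector produced by execute()
    private Matrix ret;

    @Override
    public int getNumFunctionOutputs() {
        return 1; // single matrix output
    }

    @Override
    public FunctionParameter getFunctionOutput(int pos) {
        if (pos == 0)
            return ret;
        throw new RuntimeException("Invalid function output being requested");
    }

    // execute() as shown above
}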
Use of org.apache.sysml.udf.Matrix in project systemml by apache.
The class DynamicWriteMatrixCP, method execute().
@Override
public void execute() {
    boolean success = false;
    try {
        Matrix mat = (Matrix) this.getFunctionInput(0);
        String fname = ((Scalar) this.getFunctionInput(1)).getValue();
        String format = ((Scalar) this.getFunctionInput(2)).getValue();
        MatrixObject mo = mat.getMatrixObject();
        MatrixCharacteristics mc = mo.getMatrixCharacteristics();
        OutputInfo oi = OutputInfo.stringToOutputInfo(format);
        MatrixBlock mb = mo.acquireRead();
        DataConverter.writeMatrixToHDFS(mb, fname, oi, mc);
        mo.release();
        success = true;
    } catch (Exception e) {
        throw new RuntimeException("Error executing dynamic write of matrix", e);
    }
    _success = new Scalar(ScalarValueType.Boolean, String.valueOf(success));
}
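Typical values for the format argument accepted by OutputInfo.stringToOutputInfo include "textcell", "csv", and "binaryblock" (verify against the SystemML version in use). The _success flag assigned at the end is exposed through the same PackageFunction output accessors; a minimal sketch under the same assumptions as the GatherWrapper skeleton above:

import org.apache.sysml.udf.FunctionParameter;
import org.apache.sysml.udf.PackageFunction;
import org.apache.sysml.udf.Scalar;

public class DynamicWriteMatrixCP extends PackageFunction {
    private static final long serialVersionUID = 1L;
    // boolean success flag, serialized as a string scalar
    private Scalar _success;

    @Override
    public int getNumFunctionOutputs() {
        return 1; // the success flag is the only output
    }

    @Override
    public FunctionParameter getFunctionOutput(int pos) {
        return _success; // only output 0 exists
    }

    // execute() as shown above
}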
Use of org.apache.sysml.udf.Matrix in project systemml by apache.
The class OrderWrapper, method execute().
@Override
public void execute() {
    try {
        Matrix inM = (Matrix) getFunctionInput(0);
        double[][] inData = inM.getMatrixAsDoubleArray();
        int col = Integer.parseInt(((Scalar) getFunctionInput(1)).getValue());
        boolean desc = Boolean.parseBoolean(((Scalar) getFunctionInput(2)).getValue());
        // sort input matrix (in-place)
        if (!desc) // asc
            Arrays.sort(inData, new AscRowComparator(col - 1));
        else // desc
            Arrays.sort(inData, new DescRowComparator(col - 1));
        // create and copy output matrix
        String dir = createOutputFilePathAndName(OUTPUT_FILE);
        ret = new Matrix(dir, inM.getNumRows(), inM.getNumCols(), ValueType.Double);
        ret.setMatrixDoubleArray(inData);
    } catch (Exception e) {
        throw new RuntimeException("Error executing external order function", e);
    }
}
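The row comparators referenced above are not shown in this excerpt; a minimal sketch of what they plausibly look like, comparing rows on the (0-based) sort column, is given below. The actual inner classes in OrderWrapper may differ in detail.

import java.util.Comparator;

// Illustrative: sorts rows ascending by the value in column _col.
class AscRowComparator implements Comparator<double[]> {
    private final int _col;
    public AscRowComparator(int col) { _col = col; }
    @Override
    public int compare(double[] a, double[] b) {
        return Double.compare(a[_col], b[_col]);
    }
}

// Illustrative: sorts rows descending by the value in column _col.
class DescRowComparator implements Comparator<double[]> {
    private final int _col;
    public DescRowComparator(int col) { _col = col; }
    @Override
    public int compare(double[] a, double[] b) {
        return Double.compare(b[_col], a[_col]);
    }
}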
Use of org.apache.sysml.udf.Matrix in project systemml by apache.
The class RemoveEmptyRows, method execute().
@Override
public void execute() {
    Matrix mat = (Matrix) this.getFunctionInput(0);
    String fnameOld = mat.getFilePath();
    // map of old rowID -> new rowID
    HashMap<Long, Long> keyMap = new HashMap<>();
    try {
        // prepare input
        JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
        Path path = new Path(fnameOld);
        FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
        if (!fs.exists(path))
            throw new IOException("File " + fnameOld + " does not exist on HDFS.");
        FileInputFormat.addInputPath(job, path);
        TextInputFormat informat = new TextInputFormat();
        informat.configure(job);
        // prepare output
        String fnameNew = createOutputFilePathAndName(OUTPUT_FILE);
        DataOutputStream ostream = MapReduceTool.getHDFSDataOutputStream(fnameNew, true);
        // read and write if necessary
        InputSplit[] splits = informat.getSplits(job, 1);
        LongWritable key = new LongWritable();
        Text value = new Text();
        long ID = 1;
        try {
            // for obj reuse and preventing repeated buffer re-allocations
            StringBuilder sb = new StringBuilder();
            for (InputSplit split : splits) {
                RecordReader<LongWritable, Text> reader = informat.getRecordReader(split, job, Reporter.NULL);
                try {
                    while (reader.next(key, value)) {
                        String cellStr = value.toString().trim();
                        StringTokenizer st = new StringTokenizer(cellStr, " ");
                        long row = Long.parseLong(st.nextToken());
                        long col = Long.parseLong(st.nextToken());
                        double lvalue = Double.parseDouble(st.nextToken());
                        if (!keyMap.containsKey(row))
                            keyMap.put(row, ID++);
                        long rowNew = keyMap.get(row);
                        sb.append(rowNew);
                        sb.append(' ');
                        sb.append(col);
                        sb.append(' ');
                        sb.append(lvalue);
                        sb.append('\n');
                        ostream.writeBytes(sb.toString());
                        sb.setLength(0);
                    }
                } finally {
                    if (reader != null)
                        reader.close();
                }
            }
            _ret = new Matrix(fnameNew, keyMap.size(), mat.getNumCols(), ValueType.Double);
        } finally {
            if (ostream != null)
                ostream.close();
        }
    } catch (Exception ex) {
        throw new RuntimeException("Unable to execute external function.", ex);
    }
}
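To make the remapping concrete, consider an illustrative 1-based "i j v" text-cell input whose only non-empty rows are 2 and 5. keyMap assigns new row IDs in encounter order, so the rewritten cells become:

    2 1 7.0   ->   1 1 7.0
    2 4 3.0   ->   1 4 3.0
    5 3 1.5   ->   2 3 1.5

The output matrix _ret then reports keyMap.size() = 2 rows with the column count unchanged.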