Example use of org.apache.sysml.udf.Scalar in the Apache incubator-systemml project: the execute method of the DynamicReadMatrixRcCP class.
@Override
public void execute() {
    // Reads a matrix from HDFS whose name, dimensions, and format are supplied at
    // runtime as scalar function inputs, and reports the outcome through the _rc
    // return-code scalar ("0" = success, "1" = failure) instead of throwing.
    try {
        // Inputs: 0 = file name, 1 = number of rows, 2 = number of columns, 3 = format string.
        String fname = ((Scalar) this.getFunctionInput(0)).getValue();
        int m = Integer.parseInt(((Scalar) this.getFunctionInput(1)).getValue());
        int n = Integer.parseInt(((Scalar) this.getFunctionInput(2)).getValue());
        String format = ((Scalar) this.getFunctionInput(3)).getValue();
        InputInfo ii = InputInfo.stringToInputInfo(format);
        // External-function contract: results are handed back in binary-block format.
        OutputInfo oi = OutputInfo.BinaryBlockOutputInfo;
        String fnameTmp = createOutputFilePathAndName("TMP");
        _ret = new Matrix(fnameTmp, m, n, ValueType.Double);
        MatrixBlock mbTmp = DataConverter.readMatrixFromHDFS(fname, ii, m, n,
            ConfigurationManager.getBlocksize(), ConfigurationManager.getBlocksize());
        _ret.setMatrixDoubleArray(mbTmp, oi, ii);
        _rc = new Scalar(ScalarValueType.Integer, "0");
        // NOTE: The packagesupport wrapper creates a new MatrixObjectNew with the given
        // matrix block. This leads to a dirty state of the new object. Hence, the resulting
        // intermediate plan variable will be exported in front of MR jobs and during this export
        // the format will be changed to binary block (the contract of external functions),
        // no matter in which format the original matrix was.
    } catch (Exception e) {
        // Deliberate best-effort contract: failures are signalled to the calling DML
        // script via _rc == "1" rather than by propagating the exception.
        _rc = new Scalar(ScalarValueType.Integer, "1");
        // throw new PackageRuntimeException("Error executing dynamic read of matrix",e);
    }
}
Example use of org.apache.sysml.udf.Scalar in the Apache incubator-systemml project: the execute method of the DynamicWriteMatrixCP class.
@Override
public void execute() {
    // Writes the input matrix to HDFS under a runtime-provided file name and format,
    // and exposes the outcome through the _success boolean scalar. Any failure is
    // wrapped in a RuntimeException (so _success is only populated on the success path).
    boolean success = false;
    try {
        // Inputs: 0 = matrix to write, 1 = target file name, 2 = output format string.
        Matrix mat = (Matrix) this.getFunctionInput(0);
        String fname = ((Scalar) this.getFunctionInput(1)).getValue();
        String format = ((Scalar) this.getFunctionInput(2)).getValue();
        MatrixObject mo = mat.getMatrixObject();
        MatrixCharacteristics mc = mo.getMatrixCharacteristics();
        OutputInfo oi = OutputInfo.stringToOutputInfo(format);
        MatrixBlock mb = mo.acquireRead();
        try {
            DataConverter.writeMatrixToHDFS(mb, fname, oi, mc);
        } finally {
            // Always release the read lock, even if the HDFS write fails;
            // otherwise the matrix object would stay pinned after an error.
            mo.release();
        }
        success = true;
    } catch (Exception e) {
        throw new RuntimeException("Error executing dynamic write of matrix", e);
    }
    _success = new Scalar(ScalarValueType.Boolean, String.valueOf(success));
}
Example use of org.apache.sysml.udf.Scalar in the Apache incubator-systemml project: the execute method of the MultiInputCbind class.
@Override
public void execute() {
// Column-binds a variable number of input matrices (function inputs 2..numInputs+1).
// When 'spagetize' is set, each input is first reshaped into a single column
// (row-major flattening), so the result has one column per input matrix.
int numInputs = Integer.parseInt(((Scalar) getFunctionInput(0)).getValue());
spagetize = Boolean.parseBoolean(((Scalar) getFunctionInput(1)).getValue());
// Compute output dimensions
numRetCols = 0;
if (spagetize) {
// Assumption the inputs are of same shape
// One flattened column per input: rows = nrow*ncol of the first input, cols = numInputs.
MatrixBlock in = ((Matrix) getFunctionInput(2)).getMatrixObject().acquireRead();
numRetRows = in.getNumRows() * in.getNumColumns();
numRetCols = numInputs;
((Matrix) getFunctionInput(2)).getMatrixObject().release();
} else {
// Traditional cbind: row count taken from the inputs, column counts accumulate.
for (int inputID = 2; inputID < numInputs + 2; inputID++) {
MatrixBlock in = ((Matrix) getFunctionInput(inputID)).getMatrixObject().acquireRead();
numRetRows = in.getNumRows();
numRetCols += in.getNumColumns();
((Matrix) getFunctionInput(inputID)).getMatrixObject().release();
}
}
// Allocates 'retMB' (dense) with numRetRows x numRetCols — see allocateOutput().
allocateOutput();
// Performs cbind (cbind (cbind ( X1, X2 ), X3 ), X4)
// Copy each input into the dense output buffer; 'startColumn' tracks the
// column offset of the current input in the traditional-cbind layout.
double[] retData = retMB.getDenseBlockValues();
int startColumn = 0;
for (int inputID = 2; inputID < numInputs + 2; inputID++) {
MatrixBlock in = ((Matrix) getFunctionInput(inputID)).getMatrixObject().acquireRead();
if (spagetize && in.getNumRows() * in.getNumColumns() != numRetRows) {
throw new RuntimeException("Expected the inputs to be of same size when spagetization is turned on.");
}
int inputNumCols = in.getNumColumns();
if (in.isInSparseFormat()) {
// Sparse input: iterate only over non-zero cells.
Iterator<IJV> iter = in.getSparseBlockIterator();
while (iter.hasNext()) {
IJV ijv = iter.next();
if (spagetize) {
// Perform matrix(X1, rows=length(X1), cols=1) operation before cbind
// Output Column ID = inputID-2 for all elements of inputs
int outputRowIndex = ijv.getI() * inputNumCols + ijv.getJ();
int outputColIndex = inputID - 2;
retData[(int) (outputRowIndex * retMB.getNumColumns() + outputColIndex)] = ijv.getV();
} else {
// Traditional cbind
// Row ID remains the same as that of input
int outputRowIndex = ijv.getI();
int outputColIndex = ijv.getJ() + startColumn;
retData[(int) (outputRowIndex * retMB.getNumColumns() + outputColIndex)] = ijv.getV();
}
}
} else {
// Dense input: copy the underlying value array directly.
// A null dense block means the input is entirely zero — nothing to copy.
double[] denseBlock = in.getDenseBlockValues();
if (denseBlock != null) {
if (spagetize) {
// Perform matrix(X1, rows=length(X1), cols=1) operation before cbind
// Output Column ID = inputID-2 for all elements of inputs
// denseBlock[i] is the i-th element of the row-major flattened input.
int j = inputID - 2;
for (int i = 0; i < numRetRows; i++) {
retData[(int) (i * numRetCols + j)] = denseBlock[i];
}
} else {
// Row ID remains the same as that of input
for (int i = 0; i < retMB.getNumRows(); i++) {
for (int j = 0; j < inputNumCols; j++) {
int outputColIndex = j + startColumn;
retData[(int) (i * numRetCols + outputColIndex)] = denseBlock[i * inputNumCols + j];
}
}
}
}
}
((Matrix) getFunctionInput(inputID)).getMatrixObject().release();
startColumn += inputNumCols;
}
// Maintain block metadata and switch to sparse layout if beneficial, then
// hand the block to the UDF framework in binary-block format.
retMB.recomputeNonZeros();
try {
retMB.examSparsity();
ret.setMatrixDoubleArray(retMB, OutputInfo.BinaryBlockOutputInfo, InputInfo.BinaryBlockInputInfo);
} catch (DMLRuntimeException e) {
throw new RuntimeException("Error while executing MultiInputCbind", e);
} catch (IOException e) {
throw new RuntimeException("Error while executing MultiInputCbind", e);
}
}
Example use of org.apache.sysml.udf.Scalar in the Apache systemml project: the execute method of the BinningWrapper class.
@Override
public void execute() {
    // Equi-height binning of a sorted single-column matrix: computes up to 'numbins'
    // bin boundaries of ~'binsize' values each (extending bins over duplicates so a
    // value never straddles two bins), then returns bin midpoints in _bins and the
    // number of bins actually defined in _defBins.
    try {
        // get input parameters (input matrix assumed to be sorted)
        // Inputs: 0 = sorted column matrix, 1 = target bin size, 2 = max number of bins.
        Matrix inM = (Matrix) getFunctionInput(0);
        double[][] col = inM.getMatrixAsDoubleArray();
        int binsize = Integer.parseInt(((Scalar) getFunctionInput(1)).getValue());
        int numbins = Integer.parseInt(((Scalar) getFunctionInput(2)).getValue());
        int nrowX = (int) inM.getNumRows();
        // execute binning (extend bins for duplicates)
        // col_bins holds numbins+1 boundaries; col_bins[k]..col_bins[k+1] delimits bin k.
        double[] col_bins = new double[numbins + 1];
        int pos_col = 0;
        int bin_id = 0;
        col_bins[0] = col[0][0];
        while (pos_col < nrowX - 1 && bin_id < numbins) {
            // for all bins: advance by binsize (clamped to the last row)
            pos_col = (pos_col + binsize >= nrowX) ? nrowX - 1 : pos_col + binsize;
            double end_val = col[pos_col][0];
            col_bins[bin_id + 1] = end_val;
            // pull all duplicates in current bin so equal values share one bin
            boolean cont = true;
            while (cont && pos_col < nrowX - 1) {
                if (end_val == col[pos_col + 1][0])
                    pos_col++;
                else
                    cont = false;
            }
            bin_id++;
        }
        // prepare results: replace each boundary with the midpoint of its bin
        int num_bins_defined = bin_id;
        for (int i = 0; i < num_bins_defined; i++)
            col_bins[i] = (col_bins[i] + col_bins[i + 1]) / 2;
        // create and copy output matrix
        String dir = createOutputFilePathAndName(OUTPUT_FILE);
        _bins = new Matrix(dir, col_bins.length, 1, ValueType.Double);
        _bins.setMatrixDoubleArray(col_bins);
        _defBins = new Scalar(ScalarValueType.Integer, String.valueOf(num_bins_defined));
    } catch (Exception e) {
        // Fixed misleading message: this is the binning wrapper, not the order function.
        throw new RuntimeException("Error executing external binning function", e);
    }
}
Example use of org.apache.sysml.udf.Scalar in the Apache systemml project: the execute method of the DynamicReadMatrixCP class.
@Override
public void execute() {
    // Reads a matrix from HDFS using runtime-provided name, dimensions, and format,
    // and publishes it as the function result in binary-block format.
    try {
        // Resolve the four scalar inputs: source path, row/column counts, format string.
        String srcFile = ((Scalar) this.getFunctionInput(0)).getValue();
        Integer numRows = Integer.parseInt(((Scalar) this.getFunctionInput(1)).getValue());
        Integer numCols = Integer.parseInt(((Scalar) this.getFunctionInput(2)).getValue());
        String fmt = ((Scalar) this.getFunctionInput(3)).getValue();
        InputInfo inputInfo = InputInfo.stringToInputInfo(fmt);
        // External-function contract: results are always handed back as binary blocks.
        OutputInfo outputInfo = OutputInfo.BinaryBlockOutputInfo;
        // Materialize the matrix with the configured block sizes.
        MatrixBlock block = DataConverter.readMatrixFromHDFS(srcFile, inputInfo, numRows, numCols,
            ConfigurationManager.getBlocksize(), ConfigurationManager.getBlocksize());
        String tmpPath = createOutputFilePathAndName("TMP");
        _ret = new Matrix(tmpPath, numRows, numCols, ValueType.Double);
        _ret.setMatrixDoubleArray(block, outputInfo, inputInfo);
        // NOTE: The packagesupport wrapper creates a new MatrixObjectNew with the given
        // matrix block. This leads to a dirty state of the new object. Hence, the resulting
        // intermediate plan variable will be exported in front of MR jobs and during this export
        // the format will be changed to binary block (the contract of external functions),
        // no matter in which format the original matrix was.
    } catch (Exception e) {
        throw new RuntimeException("Error executing dynamic read of matrix", e);
    }
}
Aggregations