Example use of org.apache.sysml.runtime.matrix.MatrixCharacteristics in the Apache incubator-systemml project: the inferOutputCharacteristics method of the QuaternaryOp class.
@Override
protected long[] inferOutputCharacteristics(MemoTable memo) {
    switch (_op) {
        case WSLOSS:
            // weighted squared loss always produces a scalar -> no matrix characteristics
            return null;
        case WSIGMOID:
        case WUMM: {
            // output mirrors the dimensions and sparsity of the first input (weights W)
            MatrixCharacteristics w = memo.getAllInputStats(getInput().get(0));
            return new long[] { w.getRows(), w.getCols(), w.getNonZeros() };
        }
        case WDIVMM: {
            if (_baseType == 0) {
                // basic: output shaped like W, sparsity carried over
                MatrixCharacteristics w = memo.getAllInputStats(getInput().get(0));
                return new long[] { w.getRows(), w.getCols(), w.getNonZeros() };
            }
            if (_baseType == 1 || _baseType == 3) {
                // left (w/ transpose or w/ epsilon): output shaped like V, nnz unknown
                MatrixCharacteristics v = memo.getAllInputStats(getInput().get(2));
                return new long[] { v.getRows(), v.getCols(), -1 };
            }
            // right: output shaped like U, nnz unknown
            MatrixCharacteristics u = memo.getAllInputStats(getInput().get(1));
            return new long[] { u.getRows(), u.getCols(), -1 };
        }
        default:
            throw new RuntimeException("Memory for operation (" + _op + ") can not be estimated.");
    }
}
Example use of org.apache.sysml.runtime.matrix.MatrixCharacteristics in the Apache incubator-systemml project: the inferOutputCharacteristics method of the TernaryOp class.
@Override
protected long[] inferOutputCharacteristics(MemoTable memo) {
    long[] ret = null;
    MatrixCharacteristics[] mc = memo.getAllInputStats(getInput());
    switch(_op) {
        case CTABLE:
            boolean dimsSpec = (getInput().size() > 3);
            // Step 1: general dimension info inputs
            long worstCaseDim = -1;
            // since the dimensions of both inputs must be the same, checking for one input is sufficient
            if (mc[0].dimsKnown() || mc[1].dimsKnown()) {
                // Output dimensions are completely data dependent. In the worst case,
                // #categories in each attribute = #rows (e.g., an ID column, say EmployeeID).
                // both inputs are one-dimensional matrices with exact same dimensions, m = size of longer dimension
                worstCaseDim = (mc[0].dimsKnown()) ? (mc[0].getRows() > 1 ? mc[0].getRows() : mc[0].getCols()) : (mc[1].getRows() > 1 ? mc[1].getRows() : mc[1].getCols());
            }
            // Step 2: special handling of specified output dims (literal 4th/5th inputs)
            if (dimsSpec && getInput().get(3) instanceof LiteralOp && getInput().get(4) instanceof LiteralOp) {
                long outputDim1 = HopRewriteUtils.getIntValueSafe((LiteralOp) getInput().get(3));
                long outputDim2 = HopRewriteUtils.getIntValueSafe((LiteralOp) getInput().get(4));
                // worst-case nnz is bounded by the number of input cells (dim1 for a vector input)
                long outputNNZ = (outputDim1 * outputDim2 > outputDim1 ? outputDim1 : outputDim1 * outputDim2);
                _dim1 = outputDim1;
                _dim2 = outputDim2;
                return new long[] { outputDim1, outputDim2, outputNNZ };
            }
            // note: for ctable histogram dim2 known but automatically replaces m
            return new long[] { worstCaseDim, worstCaseDim, worstCaseDim };
        case QUANTILE:
            // quantile output is a column vector sized by the third input
            if (mc[2].dimsKnown())
                return new long[] { mc[2].getRows(), 1, mc[2].getRows() };
            break;
        case IFELSE:
            // output shaped like the first input with known matrix dimensions
            for (MatrixCharacteristics lmc : mc)
                if (lmc.dimsKnown() && lmc.getRows() >= 0)
                    return new long[] { lmc.getRows(), lmc.getCols(), -1 };
            break;
        case PLUS_MULT:
        case MINUS_MULT:
            // compute worst-case output nnz from input sparsities
            // FIX: pass the column count as the second getSparsity argument
            // (previously getRows() was passed twice, wrong for non-square inputs)
            double sp1 = OptimizerUtils.getSparsity(mc[0].getRows(), mc[0].getCols(), mc[0].getNonZeros());
            double sp2 = OptimizerUtils.getSparsity(mc[2].getRows(), mc[2].getCols(), mc[2].getNonZeros());
            // FIX: scale the capped sparsity by the output cell count; the previous
            // (long) Math.min(sp1 + sp2, 1) truncated the fraction to 0 or 1
            long nnz = (long) (Math.min(sp1 + sp2, 1) * mc[0].getRows() * mc[0].getCols());
            return new long[] { mc[0].getRows(), mc[0].getCols(), nnz };
        default:
            throw new RuntimeException("Memory for operation (" + _op + ") can not be estimated.");
    }
    return ret;
}
Example use of org.apache.sysml.runtime.matrix.MatrixCharacteristics in the Apache incubator-systemml project: the inferOutputCharacteristics method of the SpoofFusedOp class.
@Override
protected long[] inferOutputCharacteristics(MemoTable memo) {
    // size inference is driven by the statistics of the main (first) input
    MatrixCharacteristics mc = memo.getAllInputStats(getInput().get(0));
    if (!mc.dimsKnown())
        return null;
    switch (_dimsType) {
        case ROW_DIMS:
            return new long[] { mc.getRows(), 1, -1 };
        case COLUMN_DIMS_ROWS:
            return new long[] { mc.getCols(), 1, -1 };
        case COLUMN_DIMS_COLS:
            return new long[] { 1, mc.getCols(), -1 };
        case RANK_DIMS_COLS: {
            // columns taken from the second input, if its dims are known
            MatrixCharacteristics mcIn2 = memo.getAllInputStats(getInput().get(1));
            return mcIn2.dimsKnown() ? new long[] { 1, mcIn2.getCols(), -1 } : null;
        }
        case INPUT_DIMS:
            return new long[] { mc.getRows(), mc.getCols(), -1 };
        case INPUT_DIMS_CONST2:
            return new long[] { mc.getRows(), _constDim2, -1 };
        case VECT_CONST2:
            return new long[] { 1, _constDim2, -1 };
        case SCALAR:
            return new long[] { 0, 0, -1 };
        case MULTI_SCALAR:
            // dim2 statically set from outside
            return new long[] { 1, _dim2, -1 };
        case ROW_RANK_DIMS: {
            MatrixCharacteristics mcIn2 = memo.getAllInputStats(getInput().get(1));
            return mcIn2.dimsKnown() ? new long[] { mc.getRows(), mcIn2.getCols(), -1 } : null;
        }
        case COLUMN_RANK_DIMS: {
            MatrixCharacteristics mcIn2 = memo.getAllInputStats(getInput().get(1));
            return mcIn2.dimsKnown() ? new long[] { mc.getCols(), mcIn2.getCols(), -1 } : null;
        }
        case COLUMN_RANK_DIMS_T: {
            MatrixCharacteristics mcIn2 = memo.getAllInputStats(getInput().get(1));
            return mcIn2.dimsKnown() ? new long[] { mcIn2.getCols(), mc.getCols(), -1 } : null;
        }
        default:
            throw new RuntimeException("Failed to infer worst-case size information " + "for type: " + _dimsType.toString());
    }
}
Example use of org.apache.sysml.runtime.matrix.MatrixCharacteristics in the Apache incubator-systemml project: the inferOutputCharacteristics method of the AggUnaryOp class.
@Override
protected long[] inferOutputCharacteristics(MemoTable memo) {
    // column aggregation yields a 1 x ncol row vector; row aggregation an nrow x 1 column vector
    MatrixCharacteristics stats = memo.getAllInputStats(getInput().get(0));
    if (_direction == Direction.Col && stats.colsKnown())
        return new long[] { 1, stats.getCols(), -1 };
    if (_direction == Direction.Row && stats.rowsKnown())
        return new long[] { stats.getRows(), 1, -1 };
    // unknown dims (or full aggregation): no worst-case estimate
    return null;
}
Example use of org.apache.sysml.runtime.matrix.MatrixCharacteristics in the Apache incubator-systemml project: the processInstruction method of the CumulativeAggregateSPInstruction class.
@Override
public void processInstruction(ExecutionContext ec) {
    SparkExecutionContext sec = (SparkExecutionContext) ec;
    // obtain size and block metadata of the input matrix
    MatrixCharacteristics mcIn = sec.getMatrixCharacteristics(input1.getName());
    long numRows = mcIn.getRows();
    int blockRows = mcIn.getRowsPerBlock();
    int blockCols = mcIn.getColsPerBlock();
    // get input RDD handle
    JavaPairRDD<MatrixIndexes, MatrixBlock> in = sec.getBinaryBlockRDDHandleForVariable(input1.getName());
    // execute unary aggregate (w/ implicit drop correction) and merge partial blocks by key
    AggregateUnaryOperator auop = (AggregateUnaryOperator) _optr;
    JavaPairRDD<MatrixIndexes, MatrixBlock> out = RDDAggregateUtils.mergeByKey(
        in.mapToPair(new RDDCumAggFunction(auop, numRows, blockRows, blockCols)), false);
    // register output handle in the symbol table and maintain lineage for cleanup
    sec.setRDDHandleForVariable(output.getName(), out);
    sec.addLineageRDD(output.getName(), input1.getName());
}
Aggregations