Use of org.apache.sysml.lops.Aggregate in project incubator-systemml by apache.
From the class AggUnaryOp, method constructLops().
@Override
public Lop constructLops() {
// return already created lops
if (getLops() != null)
return getLops();
try {
ExecType et = optFindExecType();
Hop input = getInput().get(0);
if (et == ExecType.CP || et == ExecType.GPU) {
Lop agg1 = null;
long numChannels = isChannelSumRewriteApplicable() ? Hop.computeSizeInformation(getInput().get(0).getInput().get(1)) : -1;
if (numChannels > 0 && numChannels < 1000000) {
// Apply channel sums only if the rewrite is applicable, the number of channels C is known at compile time,
// and the channel-sums output (numChannels doubles) stays below 8 MB, i.e. numChannels < 1e6.
ReorgOp in = ((ReorgOp) getInput().get(0));
agg1 = new ConvolutionTransform(in.getInput().get(0).getInput().get(0).constructLops(), in.getInput().get(1).constructLops(), in.getInput().get(2).constructLops(), ConvolutionTransform.OperationTypes.CHANNEL_SUMS, getDataType(), getValueType(), et, -1);
agg1.getOutputParameters().setDimensions(numChannels, 1, getRowsInBlock(), getColsInBlock(), -1);
setLineNumbers(agg1);
setLops(agg1);
} else {
if (isTernaryAggregateRewriteApplicable()) {
agg1 = constructLopsTernaryAggregateRewrite(et);
} else if (isUnaryAggregateOuterCPRewriteApplicable()) {
OperationTypes op = HopsAgg2Lops.get(_op);
DirectionTypes dir = HopsDirection2Lops.get(_direction);
BinaryOp binput = (BinaryOp) getInput().get(0);
agg1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.CP);
PartialAggregate.setDimensionsBasedOnDirection(agg1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
if (getDataType() == DataType.SCALAR) {
UnaryCP unary1 = new UnaryCP(agg1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
setLineNumbers(unary1);
setLops(unary1);
}
} else {
// general case
int k = OptimizerUtils.getConstrainedNumThreads(_maxNumThreads);
agg1 = new PartialAggregate(input.constructLops(), HopsAgg2Lops.get(_op), HopsDirection2Lops.get(_direction), getDataType(), getValueType(), et, k);
}
setOutputDimensions(agg1);
setLineNumbers(agg1);
setLops(agg1);
if (getDataType() == DataType.SCALAR) {
agg1.getOutputParameters().setDimensions(1, 1, getRowsInBlock(), getColsInBlock(), getNnz());
}
}
} else if (et == ExecType.MR) {
OperationTypes op = HopsAgg2Lops.get(_op);
DirectionTypes dir = HopsDirection2Lops.get(_direction);
// unary aggregate operation
Lop transform1 = null;
if (isUnaryAggregateOuterRewriteApplicable()) {
BinaryOp binput = (BinaryOp) getInput().get(0);
transform1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.MR);
PartialAggregate.setDimensionsBasedOnDirection(transform1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
} else // default
{
transform1 = new PartialAggregate(input.constructLops(), op, dir, DataType.MATRIX, getValueType());
((PartialAggregate) transform1).setDimensionsBasedOnDirection(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock());
}
setLineNumbers(transform1);
// aggregation if required
Lop aggregate = null;
Group group1 = null;
Aggregate agg1 = null;
if (requiresAggregation(input, _direction) || transform1 instanceof UAggOuterChain) {
group1 = new Group(transform1, Group.OperationTypes.Sort, DataType.MATRIX, getValueType());
group1.getOutputParameters().setDimensions(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
setLineNumbers(group1);
agg1 = new Aggregate(group1, HopsAgg2Lops.get(_op), DataType.MATRIX, getValueType(), et);
agg1.getOutputParameters().setDimensions(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
agg1.setupCorrectionLocation(PartialAggregate.getCorrectionLocation(op, dir));
setLineNumbers(agg1);
aggregate = agg1;
} else {
((PartialAggregate) transform1).setDropCorrection();
aggregate = transform1;
}
setLops(aggregate);
// cast if required
if (getDataType() == DataType.SCALAR) {
// Set the dimensions of PartialAggregate LOP based on the
// direction in which aggregation is performed
PartialAggregate.setDimensionsBasedOnDirection(transform1, input.getDim1(), input.getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
if (group1 != null && agg1 != null) {
// if aggregation required
group1.getOutputParameters().setDimensions(input.getDim1(), input.getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
agg1.getOutputParameters().setDimensions(1, 1, input.getRowsInBlock(), input.getColsInBlock(), getNnz());
}
UnaryCP unary1 = new UnaryCP(aggregate, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
setLineNumbers(unary1);
setLops(unary1);
}
} else if (et == ExecType.SPARK) {
OperationTypes op = HopsAgg2Lops.get(_op);
DirectionTypes dir = HopsDirection2Lops.get(_direction);
// unary aggregate
if (isTernaryAggregateRewriteApplicable()) {
Lop aggregate = constructLopsTernaryAggregateRewrite(et);
// 0x0 (scalar)
setOutputDimensions(aggregate);
setLineNumbers(aggregate);
setLops(aggregate);
} else if (isUnaryAggregateOuterSPRewriteApplicable()) {
BinaryOp binput = (BinaryOp) getInput().get(0);
Lop transform1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.SPARK);
PartialAggregate.setDimensionsBasedOnDirection(transform1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
setLineNumbers(transform1);
setLops(transform1);
if (getDataType() == DataType.SCALAR) {
UnaryCP unary1 = new UnaryCP(transform1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
setLineNumbers(unary1);
setLops(unary1);
}
} else // default
{
boolean needAgg = requiresAggregation(input, _direction);
SparkAggType aggtype = getSparkUnaryAggregationType(needAgg);
PartialAggregate aggregate = new PartialAggregate(input.constructLops(), HopsAgg2Lops.get(_op), HopsDirection2Lops.get(_direction), DataType.MATRIX, getValueType(), aggtype, et);
aggregate.setDimensionsBasedOnDirection(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock());
setLineNumbers(aggregate);
setLops(aggregate);
if (getDataType() == DataType.SCALAR) {
UnaryCP unary1 = new UnaryCP(aggregate, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
setLineNumbers(unary1);
setLops(unary1);
}
}
}
} catch (Exception e) {
throw new HopsException(this.printErrorLocation() + "In AggUnary Hop, error constructing Lops ", e);
}
// add reblock/checkpoint lops if necessary
constructAndSetLopsDataFlowProperties();
// return created lops
return getLops();
}
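A minimal standalone sketch of the guard used by the CP/GPU branch above for the channel-sums rewrite; the class and method names here are hypothetical illustrations, not SystemML API. It only restates the arithmetic behind the 1e6 threshold: one double per channel, so 1e6 channels is roughly 8 MB.

// Hypothetical restatement of the channel-sums guard: the rewrite only fires when the
// number of channels is known at compile time (> 0) and the output, one double per
// channel, stays under ~8 MB (1_000_000 * 8 bytes).
public class ChannelSumsGuard {
    private static final long MAX_CHANNELS = 1_000_000L; // 1e6 doubles * 8 bytes = 8 MB

    static boolean applyChannelSumsRewrite(long numChannels) {
        // -1 encodes "unknown at compile time", mirroring Hop.computeSizeInformation
        return numChannels > 0 && numChannels < MAX_CHANNELS;
    }

    public static void main(String[] args) {
        System.out.println(applyChannelSumsRewrite(512));       // true
        System.out.println(applyChannelSumsRewrite(-1));        // false: size unknown
        System.out.println(applyChannelSumsRewrite(2_000_000)); // false: output too large
    }
}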
Use of org.apache.sysml.lops.Aggregate in project incubator-systemml by apache.
From the class IndexingOp, method constructLops().
@Override
public Lop constructLops() {
// return already created lops
if (getLops() != null)
return getLops();
Hop input = getInput().get(0);
// rewrite remove unnecessary right indexing
if (HopRewriteUtils.isUnnecessaryRightIndexing(this)) {
setLops(input.constructLops());
} else // actual lop construction, incl operator selection
{
try {
ExecType et = optFindExecType();
if (et == ExecType.MR) {
IndexingMethod method = optFindIndexingMethod(_rowLowerEqualsUpper, _colLowerEqualsUpper, input._dim1, input._dim2, _dim1, _dim2);
Lop dummy = Data.createLiteralLop(ValueType.INT, Integer.toString(-1));
RightIndex reindex = new RightIndex(input.constructLops(), getInput().get(1).constructLops(), getInput().get(2).constructLops(), getInput().get(3).constructLops(), getInput().get(4).constructLops(), dummy, dummy, getDataType(), getValueType(), et);
setOutputDimensions(reindex);
setLineNumbers(reindex);
if (method == IndexingMethod.MR_RIX) {
Group group1 = new Group(reindex, Group.OperationTypes.Sort, DataType.MATRIX, getValueType());
setOutputDimensions(group1);
setLineNumbers(group1);
Aggregate agg1 = new Aggregate(group1, Aggregate.OperationTypes.Sum, DataType.MATRIX, getValueType(), et);
setOutputDimensions(agg1);
setLineNumbers(agg1);
setLops(agg1);
} else // method == IndexingMethod.MR_VRIX
{
setLops(reindex);
}
} else if (et == ExecType.SPARK) {
IndexingMethod method = optFindIndexingMethod(_rowLowerEqualsUpper, _colLowerEqualsUpper, input._dim1, input._dim2, _dim1, _dim2);
SparkAggType aggtype = (method == IndexingMethod.MR_VRIX || isBlockAligned()) ? SparkAggType.NONE : SparkAggType.MULTI_BLOCK;
Lop dummy = Data.createLiteralLop(ValueType.INT, Integer.toString(-1));
RightIndex reindex = new RightIndex(input.constructLops(), getInput().get(1).constructLops(), getInput().get(2).constructLops(), getInput().get(3).constructLops(), getInput().get(4).constructLops(), dummy, dummy, getDataType(), getValueType(), aggtype, et);
setOutputDimensions(reindex);
setLineNumbers(reindex);
setLops(reindex);
} else // CP or GPU
{
Lop dummy = Data.createLiteralLop(ValueType.INT, Integer.toString(-1));
RightIndex reindex = new RightIndex(input.constructLops(), getInput().get(1).constructLops(), getInput().get(2).constructLops(), getInput().get(3).constructLops(), getInput().get(4).constructLops(), dummy, dummy, getDataType(), getValueType(), et);
setOutputDimensions(reindex);
setLineNumbers(reindex);
setLops(reindex);
}
} catch (Exception e) {
throw new HopsException(this.printErrorLocation() + "In IndexingOp Hop, error constructing Lops ", e);
}
}
// add reblock/checkpoint lops if necessary
constructAndSetLopsDataFlowProperties();
return getLops();
}
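In the Spark branch above, the aggregation type passed to RightIndex depends on whether the requested range can be served without combining partial blocks. Below is an illustrative, self-contained restatement of that decision; the class name and boolean parameters are hypothetical, not SystemML API.

// Illustrative sketch of the Spark aggregation choice: if the chosen indexing method is
// the vector variant (MR_VRIX) or the requested range is aligned to block boundaries, no
// shuffle-side aggregation is requested (NONE); otherwise partial blocks may have to be
// combined per output block (MULTI_BLOCK).
final class RightIndexAggChoice {
    enum SparkAggType { NONE, MULTI_BLOCK }

    static SparkAggType choose(boolean vectorIndexing, boolean blockAligned) {
        return (vectorIndexing || blockAligned) ? SparkAggType.NONE : SparkAggType.MULTI_BLOCK;
    }
}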
Use of org.apache.sysml.lops.Aggregate in project systemml by apache.
From the class BinaryOp, method constructMRAppendLop().
/**
* General case binary append.
*
* @param left high-level operator left
* @param right high-level operator right
* @param dt data type
* @param vt value type
* @param cbind true if cbind
* @param current current high-level operator
* @return low-level operator
*/
public static Lop constructMRAppendLop(Hop left, Hop right, DataType dt, ValueType vt, boolean cbind, Hop current) {
Lop ret = null;
long m1_dim1 = left.getDim1();
long m1_dim2 = left.getDim2();
long m2_dim1 = right.getDim1();
long m2_dim2 = right.getDim2();
// output rows
long m3_dim1 = cbind ? m1_dim1 : ((m1_dim1 >= 0 && m2_dim1 >= 0) ? (m1_dim1 + m2_dim1) : -1);
// output cols
long m3_dim2 = cbind ? ((m1_dim2 >= 0 && m2_dim2 >= 0) ? (m1_dim2 + m2_dim2) : -1) : m1_dim2;
// output nnz
long m3_nnz = (left.getNnz() > 0 && right.getNnz() > 0) ? (left.getNnz() + right.getNnz()) : -1;
long brlen = left.getRowsInBlock();
long bclen = left.getColsInBlock();
// offset 1st input
Lop offset = createOffsetLop(left, cbind);
AppendMethod am = optFindAppendMethod(m1_dim1, m1_dim2, m2_dim1, m2_dim2, brlen, bclen, cbind);
switch(am) {
case MR_MAPPEND: // special case map-only append
{
boolean needPart = requiresPartitioning(right);
// pre partitioning
Lop dcInput = right.constructLops();
if (needPart) {
// right side in distributed cache
// operator selection: partition in CP if the right-hand side fits in the local memory budget, otherwise in MR
ExecType etPart = (OptimizerUtils.estimateSizeExactSparsity(right.getDim1(), right.getDim2(), OptimizerUtils.getSparsity(right.getDim1(), right.getDim2(), right.getNnz())) < OptimizerUtils.getLocalMemBudget()) ? ExecType.CP : ExecType.MR;
dcInput = new DataPartition(dcInput, DataType.MATRIX, ValueType.DOUBLE, etPart, PDataPartitionFormat.ROW_BLOCK_WISE_N);
dcInput.getOutputParameters().setDimensions(right.getDim1(), right.getDim2(), right.getRowsInBlock(), right.getColsInBlock(), right.getNnz());
dcInput.setAllPositions(right.getFilename(), right.getBeginLine(), right.getBeginColumn(), right.getEndLine(), right.getEndColumn());
}
AppendM appM = new AppendM(left.constructLops(), dcInput, offset, dt, vt, cbind, needPart, ExecType.MR);
appM.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
appM.getOutputParameters().setDimensions(m3_dim1, m3_dim2, brlen, bclen, m3_nnz);
ret = appM;
break;
}
case MR_RAPPEND: // special case reduce append w/ one column block
{
// group
Group group1 = new Group(left.constructLops(), Group.OperationTypes.Sort, DataType.MATRIX, vt);
group1.getOutputParameters().setDimensions(m1_dim1, m1_dim2, brlen, bclen, left.getNnz());
group1.setAllPositions(left.getFilename(), left.getBeginLine(), left.getBeginColumn(), left.getEndLine(), left.getEndColumn());
Group group2 = new Group(right.constructLops(), Group.OperationTypes.Sort, DataType.MATRIX, vt);
group2.getOutputParameters().setDimensions(m2_dim1, m2_dim2, brlen, bclen, right.getNnz());
group2.setAllPositions(right.getFilename(), right.getBeginLine(), right.getBeginColumn(), right.getEndLine(), right.getEndColumn());
AppendR appR = new AppendR(group1, group2, dt, vt, cbind, ExecType.MR);
appR.getOutputParameters().setDimensions(m3_dim1, m3_dim2, brlen, bclen, m3_nnz);
appR.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
ret = appR;
break;
}
case MR_GAPPEND:
{
// general case: map expand append, reduce aggregate
// offset second input
Lop offset2 = createOffsetLop(right, cbind);
AppendG appG = new AppendG(left.constructLops(), right.constructLops(), offset, offset2, dt, vt, cbind, ExecType.MR);
appG.getOutputParameters().setDimensions(m3_dim1, m3_dim2, brlen, bclen, m3_nnz);
appG.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
// group
Group group1 = new Group(appG, Group.OperationTypes.Sort, DataType.MATRIX, vt);
group1.getOutputParameters().setDimensions(m3_dim1, m3_dim2, brlen, bclen, m3_nnz);
group1.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
// aggregate
Aggregate agg1 = new Aggregate(group1, Aggregate.OperationTypes.Sum, DataType.MATRIX, vt, ExecType.MR);
agg1.getOutputParameters().setDimensions(m3_dim1, m3_dim2, brlen, bclen, m3_nnz);
agg1.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
ret = agg1;
break;
}
default:
throw new HopsException("Invalid MR append method: " + am);
}
return ret;
}
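The shape and nnz inference at the top of constructMRAppendLop can be read in isolation. The following is a small sketch under the same conventions (-1 means unknown); the class and field names are hypothetical, introduced only for illustration.

// Hypothetical helper restating the output-shape inference of constructMRAppendLop:
// cbind keeps the row count of the left input and sums columns, rbind keeps the column
// count and sums rows; a summed dimension is only computed when both inputs are known
// (>= 0), and nnz is only propagated when both inputs report a positive value.
final class AppendShape {
    final long rows, cols, nnz;

    AppendShape(long r1, long c1, long nnz1, long r2, long c2, long nnz2, boolean cbind) {
        rows = cbind ? r1 : ((r1 >= 0 && r2 >= 0) ? r1 + r2 : -1);
        cols = cbind ? ((c1 >= 0 && c2 >= 0) ? c1 + c2 : -1) : c1;
        nnz  = (nnz1 > 0 && nnz2 > 0) ? (nnz1 + nnz2) : -1;
    }
}
// e.g. cbind of a 10x3 and a 10x5 matrix yields 10x8; rbind of a 10x3 and a 4x3 matrix yields 14x3.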
Use of org.apache.sysml.lops.Aggregate in project systemml by apache.
From the class BinaryOp, method constructLopsIQM().
private void constructLopsIQM(ExecType et) {
if (et == ExecType.MR) {
CombineBinary combine = CombineBinary.constructCombineLop(OperationTypes.PreSort, (Lop) getInput().get(0).constructLops(), (Lop) getInput().get(1).constructLops(), DataType.MATRIX, getValueType());
combine.getOutputParameters().setDimensions(getInput().get(0).getDim1(), getInput().get(0).getDim2(), getInput().get(0).getRowsInBlock(), getInput().get(0).getColsInBlock(), getInput().get(0).getNnz());
SortKeys sort = SortKeys.constructSortByValueLop(combine, SortKeys.OperationTypes.WithWeights, DataType.MATRIX, ValueType.DOUBLE, ExecType.MR);
// Sort dimensions are same as the first input
sort.getOutputParameters().setDimensions(getInput().get(0).getDim1(), getInput().get(0).getDim2(), getInput().get(0).getRowsInBlock(), getInput().get(0).getColsInBlock(), getInput().get(0).getNnz());
Data lit = Data.createLiteralLop(ValueType.DOUBLE, Double.toString(0.25));
setLineNumbers(lit);
PickByCount pick = new PickByCount(sort, lit, DataType.MATRIX, getValueType(), PickByCount.OperationTypes.RANGEPICK);
pick.getOutputParameters().setDimensions(-1, -1, getRowsInBlock(), getColsInBlock(), -1);
setLineNumbers(pick);
PartialAggregate pagg = new PartialAggregate(pick, HopsAgg2Lops.get(Hop.AggOp.SUM), HopsDirection2Lops.get(Hop.Direction.RowCol), DataType.MATRIX, getValueType());
setLineNumbers(pagg);
// Set the dimensions of PartialAggregate LOP based on the
// direction in which aggregation is performed
pagg.setDimensionsBasedOnDirection(getDim1(), getDim2(), getRowsInBlock(), getColsInBlock());
Group group1 = new Group(pagg, Group.OperationTypes.Sort, DataType.MATRIX, getValueType());
setOutputDimensions(group1);
setLineNumbers(group1);
Aggregate agg1 = new Aggregate(group1, HopsAgg2Lops.get(Hop.AggOp.SUM), DataType.MATRIX, getValueType(), ExecType.MR);
setOutputDimensions(agg1);
agg1.setupCorrectionLocation(pagg.getCorrectionLocation());
setLineNumbers(agg1);
UnaryCP unary1 = new UnaryCP(agg1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), DataType.SCALAR, getValueType());
unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
setLineNumbers(unary1);
Unary iqm = new Unary(sort, unary1, Unary.OperationTypes.MR_IQM, DataType.SCALAR, ValueType.DOUBLE, ExecType.CP);
iqm.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
setLineNumbers(iqm);
setLops(iqm);
} else {
SortKeys sort = SortKeys.constructSortByValueLop(getInput().get(0).constructLops(), getInput().get(1).constructLops(), SortKeys.OperationTypes.WithWeights, getInput().get(0).getDataType(), getInput().get(0).getValueType(), et);
sort.getOutputParameters().setDimensions(getInput().get(0).getDim1(), getInput().get(0).getDim2(), getInput().get(0).getRowsInBlock(), getInput().get(0).getColsInBlock(), getInput().get(0).getNnz());
PickByCount pick = new PickByCount(sort, null, getDataType(), getValueType(), PickByCount.OperationTypes.IQM, et, true);
setOutputDimensions(pick);
setLineNumbers(pick);
setLops(pick);
}
}
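For reference, the quantity this lop chain computes is the interquartile mean, i.e. the mean of the values between the first and third quartile; the 0.25 literal above is the fraction handed to the RANGEPICK operation on the sorted data. The plain-Java sketch below illustrates the unweighted case with a crude quartile cut and ignores the weight column that SystemML's weighted, distributed version handles.

import java.util.Arrays;

// Simplified, unweighted illustration of the interquartile mean that the
// SortKeys -> RANGEPICK(0.25) -> SUM chain above assembles for MR:
// sort the data and average the middle half.
final class IqmExample {
    static double iqm(double[] values) {
        double[] v = values.clone();
        Arrays.sort(v);
        int n = v.length;
        int lo = n / 4;        // crude quartile boundaries, no interpolation
        int hi = n - n / 4;
        double sum = 0;
        for (int i = lo; i < hi; i++)
            sum += v[i];
        return sum / (hi - lo);
    }

    public static void main(String[] args) {
        // middle half of 1..8 is {3, 4, 5, 6}; prints 4.5
        System.out.println(iqm(new double[]{1, 2, 3, 4, 5, 6, 7, 8}));
    }
}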