Example 11 with Group

Use of org.apache.sysml.lops.Group in project incubator-systemml by apache.

The class AggBinaryOp, method constructMRLopsMapMMWithLeftTransposeRewrite.

private Lop constructMRLopsMapMMWithLeftTransposeRewrite() {
    // guaranteed to exist
    Hop X = getInput().get(0).getInput().get(0);
    Hop Y = getInput().get(1);
    // right vector transpose CP
    Lop tY = new Transform(Y.constructLops(), OperationTypes.Transpose, getDataType(), getValueType(), ExecType.CP);
    tY.getOutputParameters().setDimensions(Y.getDim2(), Y.getDim1(), getRowsInBlock(), getColsInBlock(), Y.getNnz());
    setLineNumbers(tY);
    // matrix mult
    // If the common dimension (the number of rows of X) fits within a single block, explicit aggregation
    // is not required, i.e., the entire matrix multiplication can be performed in the mappers.
    boolean needAgg = (X.getDim1() <= 0 || X.getDim1() > X.getRowsInBlock());
    // partitioning requirement for MAPMM_R, disregarding the transpose rewrite
    boolean needPart = requiresPartitioning(MMultMethod.MAPMM_R, true);
    // pre partitioning
    Lop dcinput = null;
    if (needPart) {
        ExecType etPart = (OptimizerUtils.estimateSizeExactSparsity(Y.getDim2(), Y.getDim1(), OptimizerUtils.getSparsity(Y.getDim2(), Y.getDim1(), Y.getNnz())) < OptimizerUtils.getLocalMemBudget()) ? ExecType.CP : ExecType.MR; // operator selection
        dcinput = new DataPartition(tY, DataType.MATRIX, ValueType.DOUBLE, etPart, PDataPartitionFormat.COLUMN_BLOCK_WISE_N);
        dcinput.getOutputParameters().setDimensions(Y.getDim2(), Y.getDim1(), getRowsInBlock(), getColsInBlock(), Y.getNnz());
        setLineNumbers(dcinput);
    } else
        dcinput = tY;
    MapMult mapmult = new MapMult(dcinput, X.constructLops(), getDataType(), getValueType(), false, needPart, false);
    mapmult.getOutputParameters().setDimensions(Y.getDim2(), X.getDim2(), getRowsInBlock(), getColsInBlock(), getNnz());
    setLineNumbers(mapmult);
    // post aggregation
    Lop mult = null;
    if (needAgg) {
        Group grp = new Group(mapmult, Group.OperationTypes.Sort, getDataType(), getValueType());
        grp.getOutputParameters().setDimensions(Y.getDim2(), X.getDim2(), getRowsInBlock(), getColsInBlock(), getNnz());
        setLineNumbers(grp);
        Aggregate agg1 = new Aggregate(grp, HopsAgg2Lops.get(outerOp), getDataType(), getValueType(), ExecType.MR);
        agg1.getOutputParameters().setDimensions(Y.getDim2(), X.getDim2(), getRowsInBlock(), getColsInBlock(), getNnz());
        setLineNumbers(agg1);
        agg1.setupCorrectionLocation(CorrectionLocationType.NONE);
        mult = agg1;
    } else
        mult = mapmult;
    // result transpose CP
    Lop out = new Transform(mult, OperationTypes.Transpose, getDataType(), getValueType(), ExecType.CP);
    out.getOutputParameters().setDimensions(X.getDim2(), Y.getDim2(), getRowsInBlock(), getColsInBlock(), getNnz());
    return out;
}
Also used : Group(org.apache.sysml.lops.Group) MultiThreadedHop(org.apache.sysml.hops.Hop.MultiThreadedHop) PMapMult(org.apache.sysml.lops.PMapMult) MapMult(org.apache.sysml.lops.MapMult) ExecType(org.apache.sysml.lops.LopProperties.ExecType) Lop(org.apache.sysml.lops.Lop) Transform(org.apache.sysml.lops.Transform) Aggregate(org.apache.sysml.lops.Aggregate) DataPartition(org.apache.sysml.lops.DataPartition)
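
An illustrative aside (not part of the SystemML sources): the method above compiles t(X) %*% Y as t(t(Y) %*% X), so that only the small transposed vector t(Y) has to be partitioned for the map-side multiply. A minimal plain-Java sketch of that algebraic identity follows; the class name and example data are hypothetical.

public class LeftTransposeRewriteSketch {
    // naive dense matrix multiply, enough to demonstrate the identity
    static double[][] multiply(double[][] a, double[][] b) {
        double[][] c = new double[a.length][b[0].length];
        for (int i = 0; i < a.length; i++)
            for (int k = 0; k < b.length; k++)
                for (int j = 0; j < b[0].length; j++)
                    c[i][j] += a[i][k] * b[k][j];
        return c;
    }

    static double[][] transpose(double[][] a) {
        double[][] t = new double[a[0].length][a.length];
        for (int i = 0; i < a.length; i++)
            for (int j = 0; j < a[0].length; j++)
                t[j][i] = a[i][j];
        return t;
    }

    public static void main(String[] args) {
        double[][] X = { { 1, 2 }, { 3, 4 }, { 5, 6 } }; // 3 x 2
        double[][] Y = { { 1 }, { 2 }, { 3 } };          // 3 x 1 vector
        double[][] direct  = multiply(transpose(X), Y);            // t(X) %*% Y
        double[][] rewrite = transpose(multiply(transpose(Y), X)); // t(t(Y) %*% X)
        System.out.println(java.util.Arrays.deepToString(direct));  // [[22.0], [28.0]]
        System.out.println(java.util.Arrays.deepToString(rewrite)); // same result
    }
}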

Example 12 with Group

Use of org.apache.sysml.lops.Group in project incubator-systemml by apache.

The class AggUnaryOp, method constructLops.

@Override
public Lop constructLops() {
    // return already created lops
    if (getLops() != null)
        return getLops();
    try {
        ExecType et = optFindExecType();
        Hop input = getInput().get(0);
        if (et == ExecType.CP || et == ExecType.GPU) {
            Lop agg1 = null;
            long numChannels = isChannelSumRewriteApplicable() ? Hop.computeSizeInformation(getInput().get(0).getInput().get(1)) : -1;
            if (numChannels > 0 && numChannels < 1000000) {
                // Apply channel sums only if the rewrite is applicable, the number of channels C is known
                // at compile time, and numChannels is small (< 1,000,000, i.e., under 8 MB of doubles).
                ReorgOp in = ((ReorgOp) getInput().get(0));
                agg1 = new ConvolutionTransform(in.getInput().get(0).getInput().get(0).constructLops(), in.getInput().get(1).constructLops(), in.getInput().get(2).constructLops(), ConvolutionTransform.OperationTypes.CHANNEL_SUMS, getDataType(), getValueType(), et, -1);
                agg1.getOutputParameters().setDimensions(numChannels, 1, getRowsInBlock(), getColsInBlock(), -1);
                setLineNumbers(agg1);
                setLops(agg1);
            } else {
                if (isTernaryAggregateRewriteApplicable()) {
                    agg1 = constructLopsTernaryAggregateRewrite(et);
                } else if (isUnaryAggregateOuterCPRewriteApplicable()) {
                    OperationTypes op = HopsAgg2Lops.get(_op);
                    DirectionTypes dir = HopsDirection2Lops.get(_direction);
                    BinaryOp binput = (BinaryOp) getInput().get(0);
                    agg1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.CP);
                    PartialAggregate.setDimensionsBasedOnDirection(agg1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
                    if (getDataType() == DataType.SCALAR) {
                        UnaryCP unary1 = new UnaryCP(agg1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                        unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                        setLineNumbers(unary1);
                        setLops(unary1);
                    }
                } else {
                    // general case
                    int k = OptimizerUtils.getConstrainedNumThreads(_maxNumThreads);
                    agg1 = new PartialAggregate(input.constructLops(), HopsAgg2Lops.get(_op), HopsDirection2Lops.get(_direction), getDataType(), getValueType(), et, k);
                }
                setOutputDimensions(agg1);
                setLineNumbers(agg1);
                setLops(agg1);
                if (getDataType() == DataType.SCALAR) {
                    agg1.getOutputParameters().setDimensions(1, 1, getRowsInBlock(), getColsInBlock(), getNnz());
                }
            }
        } else if (et == ExecType.MR) {
            OperationTypes op = HopsAgg2Lops.get(_op);
            DirectionTypes dir = HopsDirection2Lops.get(_direction);
            // unary aggregate operation
            Lop transform1 = null;
            if (isUnaryAggregateOuterRewriteApplicable()) {
                BinaryOp binput = (BinaryOp) getInput().get(0);
                transform1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.MR);
                PartialAggregate.setDimensionsBasedOnDirection(transform1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
            } else { // default
                transform1 = new PartialAggregate(input.constructLops(), op, dir, DataType.MATRIX, getValueType());
                ((PartialAggregate) transform1).setDimensionsBasedOnDirection(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock());
            }
            setLineNumbers(transform1);
            // aggregation if required
            Lop aggregate = null;
            Group group1 = null;
            Aggregate agg1 = null;
            if (requiresAggregation(input, _direction) || transform1 instanceof UAggOuterChain) {
                group1 = new Group(transform1, Group.OperationTypes.Sort, DataType.MATRIX, getValueType());
                group1.getOutputParameters().setDimensions(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                setLineNumbers(group1);
                agg1 = new Aggregate(group1, HopsAgg2Lops.get(_op), DataType.MATRIX, getValueType(), et);
                agg1.getOutputParameters().setDimensions(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                agg1.setupCorrectionLocation(PartialAggregate.getCorrectionLocation(op, dir));
                setLineNumbers(agg1);
                aggregate = agg1;
            } else {
                ((PartialAggregate) transform1).setDropCorrection();
                aggregate = transform1;
            }
            setLops(aggregate);
            // cast if required
            if (getDataType() == DataType.SCALAR) {
                // Set the dimensions of PartialAggregate LOP based on the
                // direction in which aggregation is performed
                PartialAggregate.setDimensionsBasedOnDirection(transform1, input.getDim1(), input.getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
                if (group1 != null && agg1 != null) {
                    // if aggregation required
                    group1.getOutputParameters().setDimensions(input.getDim1(), input.getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                    agg1.getOutputParameters().setDimensions(1, 1, input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                }
                UnaryCP unary1 = new UnaryCP(aggregate, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                setLineNumbers(unary1);
                setLops(unary1);
            }
        } else if (et == ExecType.SPARK) {
            OperationTypes op = HopsAgg2Lops.get(_op);
            DirectionTypes dir = HopsDirection2Lops.get(_direction);
            // unary aggregate
            if (isTernaryAggregateRewriteApplicable()) {
                Lop aggregate = constructLopsTernaryAggregateRewrite(et);
                // 0x0 (scalar)
                setOutputDimensions(aggregate);
                setLineNumbers(aggregate);
                setLops(aggregate);
            } else if (isUnaryAggregateOuterSPRewriteApplicable()) {
                BinaryOp binput = (BinaryOp) getInput().get(0);
                Lop transform1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.SPARK);
                PartialAggregate.setDimensionsBasedOnDirection(transform1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
                setLineNumbers(transform1);
                setLops(transform1);
                if (getDataType() == DataType.SCALAR) {
                    UnaryCP unary1 = new UnaryCP(transform1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                    unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                    setLineNumbers(unary1);
                    setLops(unary1);
                }
            } else { // default
                boolean needAgg = requiresAggregation(input, _direction);
                SparkAggType aggtype = getSparkUnaryAggregationType(needAgg);
                PartialAggregate aggregate = new PartialAggregate(input.constructLops(), HopsAgg2Lops.get(_op), HopsDirection2Lops.get(_direction), DataType.MATRIX, getValueType(), aggtype, et);
                aggregate.setDimensionsBasedOnDirection(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock());
                setLineNumbers(aggregate);
                setLops(aggregate);
                if (getDataType() == DataType.SCALAR) {
                    UnaryCP unary1 = new UnaryCP(aggregate, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                    unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                    setLineNumbers(unary1);
                    setLops(unary1);
                }
            }
        }
    } catch (Exception e) {
        throw new HopsException(this.printErrorLocation() + "In AggUnary Hop, error constructing Lops ", e);
    }
    // add reblock/checkpoint lops if necessary
    constructAndSetLopsDataFlowProperties();
    // return created lops
    return getLops();
}
Also used : PartialAggregate(org.apache.sysml.lops.PartialAggregate) Group(org.apache.sysml.lops.Group) SparkAggType(org.apache.sysml.hops.AggBinaryOp.SparkAggType) MultiThreadedHop(org.apache.sysml.hops.Hop.MultiThreadedHop) Lop(org.apache.sysml.lops.Lop) UAggOuterChain(org.apache.sysml.lops.UAggOuterChain) UnaryCP(org.apache.sysml.lops.UnaryCP) OperationTypes(org.apache.sysml.lops.Aggregate.OperationTypes) DirectionTypes(org.apache.sysml.lops.PartialAggregate.DirectionTypes) ExecType(org.apache.sysml.lops.LopProperties.ExecType) ConvolutionTransform(org.apache.sysml.lops.ConvolutionTransform) PartialAggregate(org.apache.sysml.lops.PartialAggregate) TernaryAggregate(org.apache.sysml.lops.TernaryAggregate) Aggregate(org.apache.sysml.lops.Aggregate)
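
An illustrative aside (not part of the SystemML sources): in the MR branch above, a PartialAggregate lop emits per-block partial results, a Group lop sorts them by output block index, and an Aggregate lop reduces each group. A minimal plain-Java sketch of that group-then-aggregate pattern follows; the class name, keys, and values are hypothetical.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class GroupAggregateSketch {
    public static void main(String[] args) {
        // (outputBlockIndex, partialValue) pairs as mappers might emit them
        List<long[]> partials = List.of(
            new long[] { 0, 4 }, new long[] { 1, 7 }, new long[] { 0, 2 }, new long[] { 1, 1 });
        // "Group": sort/shuffle the partial results by output block index
        Map<Long, List<Long>> grouped = new TreeMap<>();
        for (long[] p : partials)
            grouped.computeIfAbsent(p[0], k -> new ArrayList<>()).add(p[1]);
        // "Aggregate": reduce each group with the aggregation operator (sum here)
        grouped.forEach((block, values) -> System.out.println(
            "block " + block + " -> " + values.stream().mapToLong(Long::longValue).sum()));
    }
}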

Example 13 with Group

Use of org.apache.sysml.lops.Group in project incubator-systemml by apache.

The class BinaryOp, method constructAppendLopChain.

/**
 * Special case ternary append. Here, we also compile an MR_RAPPEND or MR_GAPPEND.
 *
 * @param left high-level operator, first input
 * @param right1 high-level operator, second input
 * @param right2 high-level operator, third input
 * @param dt data type
 * @param vt value type
 * @param cbind true if cbind (column append)
 * @param current current high-level operator
 * @return low-level operator
 */
public static Lop constructAppendLopChain(Hop left, Hop right1, Hop right2, DataType dt, ValueType vt, boolean cbind, Hop current) {
    long m1_dim1 = left.getDim1();
    long m1_dim2 = left.getDim2();
    long m2_dim1 = right1.getDim1();
    long m2_dim2 = right1.getDim2();
    long m3_dim1 = right2.getDim1();
    long m3_dim2 = right2.getDim2();
    // output cols
    long m41_dim2 = (m1_dim2 >= 0 && m2_dim2 >= 0) ? (m1_dim2 + m2_dim2) : -1;
    // output nnz
    long m41_nnz = (left.getNnz() > 0 && right1.getNnz() > 0) ? (left.getNnz() + right1.getNnz()) : -1;
    // output cols
    long m42_dim2 = (m1_dim2 >= 0 && m2_dim2 >= 0 && m3_dim2 >= 0) ? (m1_dim2 + m2_dim2 + m3_dim2) : -1;
    // output nnz
    long m42_nnz = (left.getNnz() > 0 && right1.getNnz() > 0 && right2.getNnz() > 0) ? (left.getNnz() + right1.getNnz() + right2.getNnz()) : -1;
    long brlen = left.getRowsInBlock();
    long bclen = left.getColsInBlock();
    // fail if the assumption of blocksize >= 3 does not hold
    if (bclen < 3)
        throw new HopsException("MR_RAPPEND requires a blocksize of >= 3.");
    // case MR_RAPPEND:
    // special case reduce append w/ one column block
    Group group1 = new Group(left.constructLops(), Group.OperationTypes.Sort, DataType.MATRIX, vt);
    group1.getOutputParameters().setDimensions(m1_dim1, m1_dim2, brlen, bclen, left.getNnz());
    group1.setAllPositions(left.getFilename(), left.getBeginLine(), left.getBeginColumn(), left.getEndLine(), left.getEndColumn());
    Group group2 = new Group(right1.constructLops(), Group.OperationTypes.Sort, DataType.MATRIX, vt);
    group2.getOutputParameters().setDimensions(m2_dim1, m2_dim2, brlen, bclen, right1.getNnz());
    group2.setAllPositions(right1.getFilename(), right1.getBeginLine(), right1.getBeginColumn(), right1.getEndLine(), right1.getEndColumn());
    Group group3 = new Group(right2.constructLops(), Group.OperationTypes.Sort, DataType.MATRIX, vt);
    group3.getOutputParameters().setDimensions(m3_dim1, m3_dim2, brlen, bclen, right2.getNnz());
    group3.setAllPositions(right2.getFilename(), right2.getBeginLine(), right2.getBeginColumn(), right2.getEndLine(), right2.getEndColumn());
    AppendR appR1 = new AppendR(group1, group2, dt, vt, cbind, ExecType.MR);
    appR1.getOutputParameters().setDimensions(m1_dim1, m41_dim2, brlen, bclen, m41_nnz);
    appR1.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
    AppendR appR2 = new AppendR(appR1, group3, dt, vt, cbind, ExecType.MR);
    appR2.getOutputParameters().setDimensions(m1_dim1, m42_dim2, brlen, bclen, m42_nnz);
    appR2.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
    return appR2;
}
Also used : Group(org.apache.sysml.lops.Group) AppendR(org.apache.sysml.lops.AppendR)
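
An illustrative aside (not part of the SystemML sources): the two chained AppendR lops above implement a ternary cbind as two pairwise column appends, left ++ right1 followed by ++ right2. A minimal plain-Java sketch for dense inputs follows; the class name and example data are hypothetical.

public class TernaryAppendSketch {
    // dense column-wise append (cbind) of two matrices with equal row counts
    static double[][] cbind(double[][] a, double[][] b) {
        double[][] out = new double[a.length][a[0].length + b[0].length];
        for (int i = 0; i < a.length; i++) {
            System.arraycopy(a[i], 0, out[i], 0, a[i].length);
            System.arraycopy(b[i], 0, out[i], a[i].length, b[i].length);
        }
        return out;
    }

    public static void main(String[] args) {
        double[][] left   = { { 1 }, { 2 } };
        double[][] right1 = { { 3 }, { 4 } };
        double[][] right2 = { { 5 }, { 6 } };
        double[][] result = cbind(cbind(left, right1), right2); // two pairwise appends
        System.out.println(java.util.Arrays.deepToString(result)); // [[1.0, 3.0, 5.0], [2.0, 4.0, 6.0]]
    }
}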

Example 14 with Group

Use of org.apache.sysml.lops.Group in project incubator-systemml by apache.

The class IndexingOp, method constructLops.

@Override
public Lop constructLops() {
    // return already created lops
    if (getLops() != null)
        return getLops();
    Hop input = getInput().get(0);
    // rewrite remove unnecessary right indexing
    if (HopRewriteUtils.isUnnecessaryRightIndexing(this)) {
        setLops(input.constructLops());
    } else { // actual lop construction, incl. operator selection
        try {
            ExecType et = optFindExecType();
            if (et == ExecType.MR) {
                IndexingMethod method = optFindIndexingMethod(_rowLowerEqualsUpper, _colLowerEqualsUpper, input._dim1, input._dim2, _dim1, _dim2);
                Lop dummy = Data.createLiteralLop(ValueType.INT, Integer.toString(-1));
                RightIndex reindex = new RightIndex(input.constructLops(), getInput().get(1).constructLops(), getInput().get(2).constructLops(), getInput().get(3).constructLops(), getInput().get(4).constructLops(), dummy, dummy, getDataType(), getValueType(), et);
                setOutputDimensions(reindex);
                setLineNumbers(reindex);
                if (method == IndexingMethod.MR_RIX) {
                    Group group1 = new Group(reindex, Group.OperationTypes.Sort, DataType.MATRIX, getValueType());
                    setOutputDimensions(group1);
                    setLineNumbers(group1);
                    Aggregate agg1 = new Aggregate(group1, Aggregate.OperationTypes.Sum, DataType.MATRIX, getValueType(), et);
                    setOutputDimensions(agg1);
                    setLineNumbers(agg1);
                    setLops(agg1);
                } else { // method == IndexingMethod.MR_VRIX
                    setLops(reindex);
                }
            } else if (et == ExecType.SPARK) {
                IndexingMethod method = optFindIndexingMethod(_rowLowerEqualsUpper, _colLowerEqualsUpper, input._dim1, input._dim2, _dim1, _dim2);
                SparkAggType aggtype = (method == IndexingMethod.MR_VRIX || isBlockAligned()) ? SparkAggType.NONE : SparkAggType.MULTI_BLOCK;
                Lop dummy = Data.createLiteralLop(ValueType.INT, Integer.toString(-1));
                RightIndex reindex = new RightIndex(input.constructLops(), getInput().get(1).constructLops(), getInput().get(2).constructLops(), getInput().get(3).constructLops(), getInput().get(4).constructLops(), dummy, dummy, getDataType(), getValueType(), aggtype, et);
                setOutputDimensions(reindex);
                setLineNumbers(reindex);
                setLops(reindex);
            } else { // CP or GPU
                Lop dummy = Data.createLiteralLop(ValueType.INT, Integer.toString(-1));
                RightIndex reindex = new RightIndex(input.constructLops(), getInput().get(1).constructLops(), getInput().get(2).constructLops(), getInput().get(3).constructLops(), getInput().get(4).constructLops(), dummy, dummy, getDataType(), getValueType(), et);
                setOutputDimensions(reindex);
                setLineNumbers(reindex);
                setLops(reindex);
            }
        } catch (Exception e) {
            throw new HopsException(this.printErrorLocation() + "In IndexingOp Hop, error constructing Lops ", e);
        }
    }
    // add reblock/checkpoint lops if necessary
    constructAndSetLopsDataFlowProperties();
    return getLops();
}
Also used : Group(org.apache.sysml.lops.Group) SparkAggType(org.apache.sysml.hops.AggBinaryOp.SparkAggType) RightIndex(org.apache.sysml.lops.RightIndex) ExecType(org.apache.sysml.lops.LopProperties.ExecType) Lop(org.apache.sysml.lops.Lop) Aggregate(org.apache.sysml.lops.Aggregate)
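
An illustrative aside (not part of the SystemML sources): the RightIndex lop above extracts the submatrix X[rl:ru, cl:cu]. A minimal plain-Java sketch of those semantics follows, assuming 1-based, inclusive bounds as in DML; the class name and example data are hypothetical.

public class RightIndexSketch {
    // extract X[rl:ru, cl:cu] with 1-based, inclusive bounds
    static double[][] rightIndex(double[][] x, int rl, int ru, int cl, int cu) {
        double[][] out = new double[ru - rl + 1][cu - cl + 1];
        for (int i = rl; i <= ru; i++)
            for (int j = cl; j <= cu; j++)
                out[i - rl][j - cl] = x[i - 1][j - 1]; // shift to 0-based array storage
        return out;
    }

    public static void main(String[] args) {
        double[][] X = { { 1, 2, 3 }, { 4, 5, 6 }, { 7, 8, 9 } };
        // X[2:3, 1:2] -> [[4.0, 5.0], [7.0, 8.0]]
        System.out.println(java.util.Arrays.deepToString(rightIndex(X, 2, 3, 1, 2)));
    }
}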

Example 15 with Group

Use of org.apache.sysml.lops.Group in project systemml by apache.

The class BinaryOp, method constructMRAppendLop.

/**
 * General case binary append.
 *
 * @param left high-level operator left
 * @param right high-level operator right
 * @param dt data type
 * @param vt value type
 * @param cbind true if cbind
 * @param current current high-level operator
 * @return low-level operator
 */
public static Lop constructMRAppendLop(Hop left, Hop right, DataType dt, ValueType vt, boolean cbind, Hop current) {
    Lop ret = null;
    long m1_dim1 = left.getDim1();
    long m1_dim2 = left.getDim2();
    long m2_dim1 = right.getDim1();
    long m2_dim2 = right.getDim2();
    // output rows
    long m3_dim1 = cbind ? m1_dim1 : ((m1_dim1 >= 0 && m2_dim1 >= 0) ? (m1_dim1 + m2_dim1) : -1);
    // output cols
    long m3_dim2 = cbind ? ((m1_dim2 >= 0 && m2_dim2 >= 0) ? (m1_dim2 + m2_dim2) : -1) : m1_dim2;
    // output nnz
    long m3_nnz = (left.getNnz() > 0 && right.getNnz() > 0) ? (left.getNnz() + right.getNnz()) : -1;
    long brlen = left.getRowsInBlock();
    long bclen = left.getColsInBlock();
    // offset 1st input
    Lop offset = createOffsetLop(left, cbind);
    AppendMethod am = optFindAppendMethod(m1_dim1, m1_dim2, m2_dim1, m2_dim2, brlen, bclen, cbind);
    switch(am) {
        case MR_MAPPEND: // special case map-only append
            {
                boolean needPart = requiresPartitioning(right);
                // pre partitioning
                Lop dcInput = right.constructLops();
                if (needPart) {
                    // right side in distributed cache
                    ExecType etPart = (OptimizerUtils.estimateSizeExactSparsity(right.getDim1(), right.getDim2(), OptimizerUtils.getSparsity(right.getDim1(), right.getDim2(), right.getNnz())) < OptimizerUtils.getLocalMemBudget()) ? ExecType.CP : ExecType.MR; // operator selection
                    dcInput = new DataPartition(dcInput, DataType.MATRIX, ValueType.DOUBLE, etPart, PDataPartitionFormat.ROW_BLOCK_WISE_N);
                    dcInput.getOutputParameters().setDimensions(right.getDim1(), right.getDim2(), right.getRowsInBlock(), right.getColsInBlock(), right.getNnz());
                    dcInput.setAllPositions(right.getFilename(), right.getBeginLine(), right.getBeginColumn(), right.getEndLine(), right.getEndColumn());
                }
                AppendM appM = new AppendM(left.constructLops(), dcInput, offset, dt, vt, cbind, needPart, ExecType.MR);
                appM.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
                appM.getOutputParameters().setDimensions(m3_dim1, m3_dim2, brlen, bclen, m3_nnz);
                ret = appM;
                break;
            }
        case MR_RAPPEND: // special case reduce append w/ one column block
            {
                // group
                Group group1 = new Group(left.constructLops(), Group.OperationTypes.Sort, DataType.MATRIX, vt);
                group1.getOutputParameters().setDimensions(m1_dim1, m1_dim2, brlen, bclen, left.getNnz());
                group1.setAllPositions(left.getFilename(), left.getBeginLine(), left.getBeginColumn(), left.getEndLine(), left.getEndColumn());
                Group group2 = new Group(right.constructLops(), Group.OperationTypes.Sort, DataType.MATRIX, vt);
                group2.getOutputParameters().setDimensions(m2_dim1, m2_dim2, brlen, bclen, right.getNnz());
                group2.setAllPositions(right.getFilename(), right.getBeginLine(), right.getBeginColumn(), right.getEndLine(), right.getEndColumn());
                AppendR appR = new AppendR(group1, group2, dt, vt, cbind, ExecType.MR);
                appR.getOutputParameters().setDimensions(m3_dim1, m3_dim2, brlen, bclen, m3_nnz);
                appR.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
                ret = appR;
                break;
            }
        case MR_GAPPEND:
            {
                // general case: map expand append, reduce aggregate
                // offset second input
                Lop offset2 = createOffsetLop(right, cbind);
                AppendG appG = new AppendG(left.constructLops(), right.constructLops(), offset, offset2, dt, vt, cbind, ExecType.MR);
                appG.getOutputParameters().setDimensions(m3_dim1, m3_dim2, brlen, bclen, m3_nnz);
                appG.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
                // group
                Group group1 = new Group(appG, Group.OperationTypes.Sort, DataType.MATRIX, vt);
                group1.getOutputParameters().setDimensions(m3_dim1, m3_dim2, brlen, bclen, m3_nnz);
                group1.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
                // aggregate
                Aggregate agg1 = new Aggregate(group1, Aggregate.OperationTypes.Sum, DataType.MATRIX, vt, ExecType.MR);
                agg1.getOutputParameters().setDimensions(m3_dim1, m3_dim2, brlen, bclen, m3_nnz);
                agg1.setAllPositions(current.getFilename(), current.getBeginLine(), current.getBeginColumn(), current.getEndLine(), current.getEndColumn());
                ret = agg1;
                break;
            }
        default:
            throw new HopsException("Invalid MR append method: " + am);
    }
    return ret;
}
Also used : AppendG(org.apache.sysml.lops.AppendG) Group(org.apache.sysml.lops.Group) AppendR(org.apache.sysml.lops.AppendR) ExecType(org.apache.sysml.lops.LopProperties.ExecType) Lop(org.apache.sysml.lops.Lop) PartialAggregate(org.apache.sysml.lops.PartialAggregate) Aggregate(org.apache.sysml.lops.Aggregate) DataPartition(org.apache.sysml.lops.DataPartition) AppendM(org.apache.sysml.lops.AppendM)
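
An illustrative aside (not part of the SystemML sources): the shape bookkeeping at the top of constructMRAppendLop encodes unknown dimensions and nnz as -1 and keeps them unknown whenever either input is unknown. A minimal plain-Java sketch of that logic follows; the class name and example values are hypothetical.

public class AppendShapeSketch {
    // output rows: unchanged for cbind, summed for rbind if both inputs are known
    static long outRows(boolean cbind, long m1r, long m2r) {
        return cbind ? m1r : ((m1r >= 0 && m2r >= 0) ? m1r + m2r : -1);
    }

    // output cols: summed for cbind if both inputs are known, unchanged for rbind
    static long outCols(boolean cbind, long m1c, long m2c) {
        return cbind ? ((m1c >= 0 && m2c >= 0) ? m1c + m2c : -1) : m1c;
    }

    // output nnz: known only if both input nnz counts are known (> 0)
    static long outNnz(long nnz1, long nnz2) {
        return (nnz1 > 0 && nnz2 > 0) ? nnz1 + nnz2 : -1;
    }

    public static void main(String[] args) {
        // cbind of a 1000 x 10 matrix with a 1000 x ? matrix: columns stay unknown
        System.out.println(outRows(true, 1000, 1000)); // 1000
        System.out.println(outCols(true, 10, -1));     // -1 (unknown)
        System.out.println(outNnz(5000, -1));          // -1 (unknown)
    }
}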

Aggregations

Group (org.apache.sysml.lops.Group): 55
Lop (org.apache.sysml.lops.Lop): 45
Aggregate (org.apache.sysml.lops.Aggregate): 38
MultiThreadedHop (org.apache.sysml.hops.Hop.MultiThreadedHop): 32
DataPartition (org.apache.sysml.lops.DataPartition): 28
ExecType (org.apache.sysml.lops.LopProperties.ExecType): 25
UnaryCP (org.apache.sysml.lops.UnaryCP): 14
RepMat (org.apache.sysml.lops.RepMat): 11
PartialAggregate (org.apache.sysml.lops.PartialAggregate): 10
Unary (org.apache.sysml.lops.Unary): 10
CombineUnary (org.apache.sysml.lops.CombineUnary): 8
Transform (org.apache.sysml.lops.Transform): 8
AppendR (org.apache.sysml.lops.AppendR): 6
Data (org.apache.sysml.lops.Data): 6
GroupedAggregate (org.apache.sysml.lops.GroupedAggregate): 6
SortKeys (org.apache.sysml.lops.SortKeys): 6
ArrayList (java.util.ArrayList): 4
SparkAggType (org.apache.sysml.hops.AggBinaryOp.SparkAggType): 4
OperationTypes (org.apache.sysml.lops.Aggregate.OperationTypes): 4
Binary (org.apache.sysml.lops.Binary): 4