
Example 26 with Lop

use of org.apache.sysml.lops.Lop in project incubator-systemml by apache.

the class AggBinaryOp method constructSparkLopsPMM.

private void constructSparkLopsPMM() {
    // PMM has two potential modes (a) w/ full permutation matrix input, and
    // (b) w/ already condensed input vector of target row positions.
    Hop pmInput = getInput().get(0);
    Hop rightInput = getInput().get(1);
    Lop lpmInput = pmInput.constructLops();
    Hop nrow = null;
    double mestPM = OptimizerUtils.estimateSize(pmInput.getDim1(), 1);
    ExecType etVect = (mestPM > OptimizerUtils.getLocalMemBudget()) ? ExecType.MR : ExecType.CP;
    // a) full permutation matrix input (potentially without empty block materialized)
    if (pmInput.getDim2() != 1) { // not a vector
        // compute condensed permutation matrix vector input
        // v = rowMaxIndex(t(pm)) * rowMax(t(pm))
        ReorgOp transpose = HopRewriteUtils.createTranspose(pmInput);
        transpose.setForcedExecType(ExecType.SPARK);
        AggUnaryOp agg1 = HopRewriteUtils.createAggUnaryOp(transpose, AggOp.MAXINDEX, Direction.Row);
        agg1.setForcedExecType(ExecType.SPARK);
        AggUnaryOp agg2 = HopRewriteUtils.createAggUnaryOp(transpose, AggOp.MAX, Direction.Row);
        agg2.setForcedExecType(ExecType.SPARK);
        BinaryOp mult = HopRewriteUtils.createBinary(agg1, agg2, OpOp2.MULT);
        mult.setForcedExecType(ExecType.SPARK);
        // compute NROW target via nrow(m)
        nrow = HopRewriteUtils.createValueHop(pmInput, true);
        nrow.setOutputBlocksizes(0, 0);
        nrow.setForcedExecType(ExecType.CP);
        HopRewriteUtils.copyLineNumbers(this, nrow);
        lpmInput = mult.constructLops();
        HopRewriteUtils.removeChildReference(pmInput, transpose);
    } else { // input vector
        // compute NROW target via max(v)
        nrow = HopRewriteUtils.createAggUnaryOp(pmInput, AggOp.MAX, Direction.RowCol);
        nrow.setOutputBlocksizes(0, 0);
        nrow.setForcedExecType(etVect);
        HopRewriteUtils.copyLineNumbers(this, nrow);
    }
    // b) condensed permutation matrix vector input (target rows)
    _outputEmptyBlocks = !OptimizerUtils.allowsToFilterEmptyBlockOutputs(this);
    PMMJ pmm = new PMMJ(lpmInput, rightInput.constructLops(), nrow.constructLops(), getDataType(), getValueType(), false, _outputEmptyBlocks, ExecType.SPARK);
    setOutputDimensions(pmm);
    setLineNumbers(pmm);
    setLops(pmm);
    HopRewriteUtils.removeChildReference(pmInput, nrow);
}
Also used : MultiThreadedHop(org.apache.sysml.hops.Hop.MultiThreadedHop) ExecType(org.apache.sysml.lops.LopProperties.ExecType) Lop(org.apache.sysml.lops.Lop) PMMJ(org.apache.sysml.lops.PMMJ)
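For intuition, here is a minimal stand-alone Java sketch (hypothetical helper names, independent of the SystemML Hop/Lop APIs) of the two PMM input modes handled above: condensing a full permutation matrix into a vector of target row positions, mirroring the rewrite v = rowIndexMax(t(pm)) * rowMax(t(pm)), and then applying that vector to scatter the rows of the right-hand-side input.

// Hypothetical illustration of the PMM idea on dense double[][] arrays; not the SystemML implementation.
public class PmmSketch {

    // Mode (a): condense a full permutation matrix pm (targetRows x inputRows) into a vector v
    // of length inputRows: v[j] = 1-based target row of input row j, or 0 if row j is dropped.
    static int[] condense(double[][] pm) {
        int targetRows = pm.length, inputRows = pm[0].length;
        int[] v = new int[inputRows];
        for (int j = 0; j < inputRows; j++)
            for (int i = 0; i < targetRows; i++)
                if (pm[i][j] != 0) { v[j] = i + 1; break; }
        return v;
    }

    // Mode (b): apply the condensed vector directly, i.e., compute pm %*% X by scattering
    // the rows of X into their target positions.
    static double[][] pmm(int[] v, double[][] X, int targetRows) {
        double[][] out = new double[targetRows][X[0].length];
        for (int j = 0; j < v.length; j++)
            if (v[j] > 0)
                out[v[j] - 1] = X[j].clone();
        return out;
    }

    public static void main(String[] args) {
        double[][] pm = { {0, 1, 0}, {0, 0, 1} };  // keep rows 2 and 3 of X
        double[][] X  = { {1, 1}, {2, 2}, {3, 3} };
        int[] v = condense(pm);                    // v = [0, 1, 2]
        double[][] out = pmm(v, X, pm.length);     // out = [[2, 2], [3, 3]]
        System.out.println(java.util.Arrays.deepToString(out));
    }
}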

Example 27 with Lop

use of org.apache.sysml.lops.Lop in project incubator-systemml by apache.

the class AggBinaryOp method constructSparkLopsMapMMWithLeftTransposeRewrite.

private Lop constructSparkLopsMapMMWithLeftTransposeRewrite() {
    // guaranteed to exist
    Hop X = getInput().get(0).getInput().get(0);
    Hop Y = getInput().get(1);
    // right vector transpose
    Lop tY = new Transform(Y.constructLops(), OperationTypes.Transpose, getDataType(), getValueType(), ExecType.CP);
    tY.getOutputParameters().setDimensions(Y.getDim2(), Y.getDim1(), getRowsInBlock(), getColsInBlock(), Y.getNnz());
    setLineNumbers(tY);
    // matrix mult spark
    boolean needAgg = requiresAggregation(MMultMethod.MAPMM_R);
    SparkAggType aggtype = getSparkMMAggregationType(needAgg);
    _outputEmptyBlocks = !OptimizerUtils.allowsToFilterEmptyBlockOutputs(this);
    Lop mult = new MapMult(tY, X.constructLops(), getDataType(), getValueType(), false, false, _outputEmptyBlocks, aggtype);
    mult.getOutputParameters().setDimensions(Y.getDim2(), X.getDim2(), getRowsInBlock(), getColsInBlock(), getNnz());
    setLineNumbers(mult);
    // result transpose (dimensions set outside)
    Lop out = new Transform(mult, OperationTypes.Transpose, getDataType(), getValueType(), ExecType.CP);
    return out;
}
Also used : MultiThreadedHop(org.apache.sysml.hops.Hop.MultiThreadedHop) PMapMult(org.apache.sysml.lops.PMapMult) MapMult(org.apache.sysml.lops.MapMult) Lop(org.apache.sysml.lops.Lop) Transform(org.apache.sysml.lops.Transform)
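The rewrite relies on the identity t(X) %*% Y == t(t(Y) %*% X): transposing the small right-hand-side vector first allows a broadcast-based map multiply, and only the small result is transposed back. The following self-contained Java check (hypothetical helpers, not SystemML API) verifies the identity on small arrays.

// Tiny stand-alone check of the algebraic identity behind the left-transpose rewrite.
public class LeftTransposeRewriteCheck {

    static double[][] transpose(double[][] A) {
        double[][] T = new double[A[0].length][A.length];
        for (int i = 0; i < A.length; i++)
            for (int j = 0; j < A[0].length; j++)
                T[j][i] = A[i][j];
        return T;
    }

    static double[][] multiply(double[][] A, double[][] B) {
        double[][] C = new double[A.length][B[0].length];
        for (int i = 0; i < A.length; i++)
            for (int k = 0; k < A[0].length; k++)
                for (int j = 0; j < B[0].length; j++)
                    C[i][j] += A[i][k] * B[k][j];
        return C;
    }

    public static void main(String[] args) {
        double[][] X = { {1, 2}, {3, 4}, {5, 6} };   // 3 x 2
        double[][] y = { {1}, {2}, {3} };            // 3 x 1 vector
        double[][] direct  = multiply(transpose(X), y);             // t(X) %*% y
        double[][] rewrite = transpose(multiply(transpose(y), X));  // t(t(y) %*% X)
        System.out.println(java.util.Arrays.deepToString(direct));   // [[22.0], [28.0]]
        System.out.println(java.util.Arrays.deepToString(rewrite));  // same result
    }
}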

Example 28 with Lop

use of org.apache.sysml.lops.Lop in project incubator-systemml by apache.

the class AggBinaryOp method constructSparkLopsRMM.

private void constructSparkLopsRMM() {
    Lop rmm = new MMRJ(getInput().get(0).constructLops(), getInput().get(1).constructLops(), getDataType(), getValueType(), ExecType.SPARK);
    setOutputDimensions(rmm);
    setLineNumbers(rmm);
    setLops(rmm);
}
Also used : Lop(org.apache.sysml.lops.Lop) MMRJ(org.apache.sysml.lops.MMRJ)
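RMM (the MMRJ lop) realizes a replication-based matrix multiply: each block A(i,k) is joined with every block B(k,j) that shares the common index k, the block products are formed, and partial results are aggregated per output block (i,j). The sketch below (hypothetical, with 1x1 "blocks" keyed by string indices, not SystemML code) illustrates that join-and-aggregate dataflow.

import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch of the replication-based matrix multiply idea at block granularity.
public class RmmSketch {
    public static void main(String[] args) {
        // Keys are "i,k" for A blocks and "k,j" for B blocks; values are the 1x1 block contents.
        Map<String, Double> A = Map.of("0,0", 1.0, "0,1", 2.0, "1,0", 3.0, "1,1", 4.0);
        Map<String, Double> B = Map.of("0,0", 5.0, "0,1", 6.0, "1,0", 7.0, "1,1", 8.0);
        Map<String, Double> C = new HashMap<>();
        for (Map.Entry<String, Double> a : A.entrySet())
            for (Map.Entry<String, Double> b : B.entrySet()) {
                String[] ik = a.getKey().split(",");
                String[] kj = b.getKey().split(",");
                if (ik[1].equals(kj[0]))  // join on the common dimension k
                    C.merge(ik[0] + "," + kj[1], a.getValue() * b.getValue(), Double::sum);
            }
        // C(0,0)=19, C(0,1)=22, C(1,0)=43, C(1,1)=50, i.e., [[1,2],[3,4]] %*% [[5,6],[7,8]]
        System.out.println(C);
    }
}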

Example 29 with Lop

use of org.apache.sysml.lops.Lop in project incubator-systemml by apache.

the class AggBinaryOp method constructCPLopsPMM.

/**
 * NOTE: exists for consistency since removeEmpty might be scheduled to MR
 * but matrix mult on small output might be scheduled to CP. Hence, we
 * need to handle directly passed selection vectors in CP as well.
 */
private void constructCPLopsPMM() {
    Hop pmInput = getInput().get(0);
    Hop rightInput = getInput().get(1);
    // NROW
    Hop nrow = HopRewriteUtils.createValueHop(pmInput, true);
    nrow.setOutputBlocksizes(0, 0);
    nrow.setForcedExecType(ExecType.CP);
    HopRewriteUtils.copyLineNumbers(this, nrow);
    Lop lnrow = nrow.constructLops();
    PMMJ pmm = new PMMJ(pmInput.constructLops(), rightInput.constructLops(), lnrow, getDataType(), getValueType(), false, false, ExecType.CP);
    // set degree of parallelism
    int k = OptimizerUtils.getConstrainedNumThreads(_maxNumThreads);
    pmm.setNumThreads(k);
    pmm.getOutputParameters().setDimensions(getDim1(), getDim2(), getRowsInBlock(), getColsInBlock(), getNnz());
    setLineNumbers(pmm);
    setLops(pmm);
    HopRewriteUtils.removeChildReference(pmInput, nrow);
}
Also used : MultiThreadedHop(org.apache.sysml.hops.Hop.MultiThreadedHop) Lop(org.apache.sysml.lops.Lop) PMMJ(org.apache.sysml.lops.PMMJ)
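The CP variant additionally sets a constrained degree of parallelism k on the PMMJ lop. Because every input row is scattered to a distinct target row, the rows can be processed independently. The following hypothetical sketch (plain Java, using the common fork-join pool as a stand-in for the constrained thread count k) shows the same scatter as the sketch after Example 26, executed in parallel.

import java.util.stream.IntStream;

// Hypothetical illustration of a row-parallel PMM scatter; not the SystemML implementation.
public class CpPmmParallelSketch {
    public static void main(String[] args) {
        int[] v = {0, 1, 2};                        // condensed selection vector
        double[][] X = { {1, 1}, {2, 2}, {3, 3} };
        double[][] out = new double[2][2];
        // Each input row writes a distinct target row, so no synchronization is needed.
        IntStream.range(0, v.length).parallel().forEach(j -> {
            if (v[j] > 0) out[v[j] - 1] = X[j].clone();
        });
        System.out.println(java.util.Arrays.deepToString(out)); // [[2.0, 2.0], [3.0, 3.0]]
    }
}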

Example 30 with Lop

use of org.apache.sysml.lops.Lop in project incubator-systemml by apache.

the class AggUnaryOp method constructLops.

@Override
public Lop constructLops() {
    // return already created lops
    if (getLops() != null)
        return getLops();
    try {
        ExecType et = optFindExecType();
        Hop input = getInput().get(0);
        if (et == ExecType.CP || et == ExecType.GPU) {
            Lop agg1 = null;
            long numChannels = isChannelSumRewriteApplicable() ? Hop.computeSizeInformation(getInput().get(0).getInput().get(1)) : -1;
            if (numChannels > 0 && numChannels < 1000000) {
                // Apply the channel-sums rewrite only if it is applicable, the number of channels C is
                // known at compile time, and the C x 1 output stays small (fewer than 1e6 channels,
                // i.e., under 8 MB of doubles).
                ReorgOp in = ((ReorgOp) getInput().get(0));
                agg1 = new ConvolutionTransform(in.getInput().get(0).getInput().get(0).constructLops(), in.getInput().get(1).constructLops(), in.getInput().get(2).constructLops(), ConvolutionTransform.OperationTypes.CHANNEL_SUMS, getDataType(), getValueType(), et, -1);
                agg1.getOutputParameters().setDimensions(numChannels, 1, getRowsInBlock(), getColsInBlock(), -1);
                setLineNumbers(agg1);
                setLops(agg1);
            } else {
                if (isTernaryAggregateRewriteApplicable()) {
                    agg1 = constructLopsTernaryAggregateRewrite(et);
                } else if (isUnaryAggregateOuterCPRewriteApplicable()) {
                    OperationTypes op = HopsAgg2Lops.get(_op);
                    DirectionTypes dir = HopsDirection2Lops.get(_direction);
                    BinaryOp binput = (BinaryOp) getInput().get(0);
                    agg1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.CP);
                    PartialAggregate.setDimensionsBasedOnDirection(agg1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
                    if (getDataType() == DataType.SCALAR) {
                        UnaryCP unary1 = new UnaryCP(agg1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                        unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                        setLineNumbers(unary1);
                        setLops(unary1);
                    }
                } else {
                    // general case
                    int k = OptimizerUtils.getConstrainedNumThreads(_maxNumThreads);
                    agg1 = new PartialAggregate(input.constructLops(), HopsAgg2Lops.get(_op), HopsDirection2Lops.get(_direction), getDataType(), getValueType(), et, k);
                }
                setOutputDimensions(agg1);
                setLineNumbers(agg1);
                setLops(agg1);
                if (getDataType() == DataType.SCALAR) {
                    agg1.getOutputParameters().setDimensions(1, 1, getRowsInBlock(), getColsInBlock(), getNnz());
                }
            }
        } else if (et == ExecType.MR) {
            OperationTypes op = HopsAgg2Lops.get(_op);
            DirectionTypes dir = HopsDirection2Lops.get(_direction);
            // unary aggregate operation
            Lop transform1 = null;
            if (isUnaryAggregateOuterRewriteApplicable()) {
                BinaryOp binput = (BinaryOp) getInput().get(0);
                transform1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.MR);
                PartialAggregate.setDimensionsBasedOnDirection(transform1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
            } else { // default
                transform1 = new PartialAggregate(input.constructLops(), op, dir, DataType.MATRIX, getValueType());
                ((PartialAggregate) transform1).setDimensionsBasedOnDirection(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock());
            }
            setLineNumbers(transform1);
            // aggregation if required
            Lop aggregate = null;
            Group group1 = null;
            Aggregate agg1 = null;
            if (requiresAggregation(input, _direction) || transform1 instanceof UAggOuterChain) {
                group1 = new Group(transform1, Group.OperationTypes.Sort, DataType.MATRIX, getValueType());
                group1.getOutputParameters().setDimensions(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                setLineNumbers(group1);
                agg1 = new Aggregate(group1, HopsAgg2Lops.get(_op), DataType.MATRIX, getValueType(), et);
                agg1.getOutputParameters().setDimensions(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                agg1.setupCorrectionLocation(PartialAggregate.getCorrectionLocation(op, dir));
                setLineNumbers(agg1);
                aggregate = agg1;
            } else {
                ((PartialAggregate) transform1).setDropCorrection();
                aggregate = transform1;
            }
            setLops(aggregate);
            // cast if required
            if (getDataType() == DataType.SCALAR) {
                // Set the dimensions of PartialAggregate LOP based on the
                // direction in which aggregation is performed
                PartialAggregate.setDimensionsBasedOnDirection(transform1, input.getDim1(), input.getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
                if (group1 != null && agg1 != null) {
                    // if aggregation required
                    group1.getOutputParameters().setDimensions(input.getDim1(), input.getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                    agg1.getOutputParameters().setDimensions(1, 1, input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                }
                UnaryCP unary1 = new UnaryCP(aggregate, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                setLineNumbers(unary1);
                setLops(unary1);
            }
        } else if (et == ExecType.SPARK) {
            OperationTypes op = HopsAgg2Lops.get(_op);
            DirectionTypes dir = HopsDirection2Lops.get(_direction);
            // unary aggregate
            if (isTernaryAggregateRewriteApplicable()) {
                Lop aggregate = constructLopsTernaryAggregateRewrite(et);
                // 0x0 (scalar)
                setOutputDimensions(aggregate);
                setLineNumbers(aggregate);
                setLops(aggregate);
            } else if (isUnaryAggregateOuterSPRewriteApplicable()) {
                BinaryOp binput = (BinaryOp) getInput().get(0);
                Lop transform1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.SPARK);
                PartialAggregate.setDimensionsBasedOnDirection(transform1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
                setLineNumbers(transform1);
                setLops(transform1);
                if (getDataType() == DataType.SCALAR) {
                    UnaryCP unary1 = new UnaryCP(transform1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                    unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                    setLineNumbers(unary1);
                    setLops(unary1);
                }
            } else { // default
                boolean needAgg = requiresAggregation(input, _direction);
                SparkAggType aggtype = getSparkUnaryAggregationType(needAgg);
                PartialAggregate aggregate = new PartialAggregate(input.constructLops(), HopsAgg2Lops.get(_op), HopsDirection2Lops.get(_direction), DataType.MATRIX, getValueType(), aggtype, et);
                aggregate.setDimensionsBasedOnDirection(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock());
                setLineNumbers(aggregate);
                setLops(aggregate);
                if (getDataType() == DataType.SCALAR) {
                    UnaryCP unary1 = new UnaryCP(aggregate, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                    unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                    setLineNumbers(unary1);
                    setLops(unary1);
                }
            }
        }
    } catch (Exception e) {
        throw new HopsException(this.printErrorLocation() + "In AggUnary Hop, error constructing Lops ", e);
    }
    // add reblock/checkpoint lops if necessary
    constructAndSetLopsDataFlowProperties();
    // return created lops
    return getLops();
}
Also used : PartialAggregate(org.apache.sysml.lops.PartialAggregate) Group(org.apache.sysml.lops.Group) SparkAggType(org.apache.sysml.hops.AggBinaryOp.SparkAggType) MultiThreadedHop(org.apache.sysml.hops.Hop.MultiThreadedHop) Lop(org.apache.sysml.lops.Lop) UAggOuterChain(org.apache.sysml.lops.UAggOuterChain) UnaryCP(org.apache.sysml.lops.UnaryCP) OperationTypes(org.apache.sysml.lops.Aggregate.OperationTypes) DirectionTypes(org.apache.sysml.lops.PartialAggregate.DirectionTypes) ExecType(org.apache.sysml.lops.LopProperties.ExecType) ConvolutionTransform(org.apache.sysml.lops.ConvolutionTransform) PartialAggregate(org.apache.sysml.lops.PartialAggregate) TernaryAggregate(org.apache.sysml.lops.TernaryAggregate) Aggregate(org.apache.sysml.lops.Aggregate)
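The MR and Spark paths above build a two-phase aggregation: a PartialAggregate produces per-block partial results, and, when requiresAggregation holds, a Group/Aggregate (or a Spark aggregation type) combines them into the final value. The following self-contained Java sketch (hypothetical, not SystemML code) illustrates that idea for a column-wise sum, i.e., direction Col with AggOp.SUM.

// Hypothetical sketch of two-phase unary aggregation over row blocks.
public class UnaryAggSketch {

    // Phase 1: partial aggregate of one row block (columns summed within the block).
    static double[] partialColSum(double[][] block) {
        double[] partial = new double[block[0].length];
        for (double[] row : block)
            for (int j = 0; j < row.length; j++)
                partial[j] += row[j];
        return partial;
    }

    public static void main(String[] args) {
        // Two row blocks of a 4 x 2 matrix.
        double[][] block1 = { {1, 2}, {3, 4} };
        double[][] block2 = { {5, 6}, {7, 8} };
        double[] p1 = partialColSum(block1);   // [4, 6]
        double[] p2 = partialColSum(block2);   // [12, 14]
        // Phase 2: final aggregation across blocks (the Group + Aggregate step).
        double[] colSums = new double[p1.length];
        for (int j = 0; j < colSums.length; j++)
            colSums[j] = p1[j] + p2[j];
        System.out.println(java.util.Arrays.toString(colSums)); // [16.0, 20.0]
    }
}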

Aggregations

Lop (org.apache.sysml.lops.Lop): 171 usages
MultiThreadedHop (org.apache.sysml.hops.Hop.MultiThreadedHop): 66 usages
ExecType (org.apache.sysml.lops.LopProperties.ExecType): 52 usages
Group (org.apache.sysml.lops.Group): 45 usages
ArrayList (java.util.ArrayList): 35 usages
Aggregate (org.apache.sysml.lops.Aggregate): 32 usages
DataPartition (org.apache.sysml.lops.DataPartition): 30 usages
LopsException (org.apache.sysml.lops.LopsException): 30 usages
Data (org.apache.sysml.lops.Data): 24 usages
Instruction (org.apache.sysml.runtime.instructions.Instruction): 23 usages
MRJobInstruction (org.apache.sysml.runtime.instructions.MRJobInstruction): 18 usages
Unary (org.apache.sysml.lops.Unary): 16 usages
Transform (org.apache.sysml.lops.Transform): 15 usages
HashMap (java.util.HashMap): 14 usages
UnaryCP (org.apache.sysml.lops.UnaryCP): 14 usages
Dag (org.apache.sysml.lops.compile.Dag): 13 usages
Hop (org.apache.sysml.hops.Hop): 11 usages
RepMat (org.apache.sysml.lops.RepMat): 11 usages
Binary (org.apache.sysml.lops.Binary): 9 usages
CPInstruction (org.apache.sysml.runtime.instructions.cp.CPInstruction): 9 usages