
Example 1 with UnaryCP

Use of org.apache.sysml.lops.UnaryCP in project incubator-systemml by apache.

The class QuaternaryOp, method constructMRLopsWeightedCeMM.

private void constructMRLopsWeightedCeMM(WCeMMType wtype) throws HopsException, LopsException {
    //NOTE: the common case for wcemm is factors U/V with ranks in the 10s to 100s; the current runtime only
    //supports single block outer products (U/V rank <= blocksize, i.e., 1000 by default); we enforce this
    //by applying the hop rewrite for Weighted Cross Entropy only if this constraint holds. 
    Hop X = getInput().get(0);
    Hop U = getInput().get(1);
    Hop V = getInput().get(2);
    Hop eps = getInput().get(3);
    //MR operator selection, part1
    //size U
    double m1Size = OptimizerUtils.estimateSize(U.getDim1(), U.getDim2());
    //size V
    double m2Size = OptimizerUtils.estimateSize(V.getDim1(), V.getDim2());
    boolean isMapWcemm = (m1Size + m2Size < OptimizerUtils.getRemoteMemBudgetMap(true));
    //broadcast
    if (!FORCE_REPLICATION && isMapWcemm) {
        //partitioning of U
        boolean needPartU = !U.dimsKnown() || U.getDim1() * U.getDim2() > DistributedCacheInput.PARTITION_SIZE;
        Lop lU = U.constructLops();
        if (needPartU) {
            //requires partitioning
            lU = new DataPartition(lU, DataType.MATRIX, ValueType.DOUBLE, (m1Size > OptimizerUtils.getLocalMemBudget()) ? ExecType.MR : ExecType.CP, PDataPartitionFormat.ROW_BLOCK_WISE_N);
            lU.getOutputParameters().setDimensions(U.getDim1(), U.getDim2(), getRowsInBlock(), getColsInBlock(), U.getNnz());
            setLineNumbers(lU);
        }
        //partitioning of V
        boolean needPartV = !V.dimsKnown() || V.getDim1() * V.getDim2() > DistributedCacheInput.PARTITION_SIZE;
        Lop lV = V.constructLops();
        if (needPartV) {
            //requires partitioning
            lV = new DataPartition(lV, DataType.MATRIX, ValueType.DOUBLE, (m2Size > OptimizerUtils.getLocalMemBudget()) ? ExecType.MR : ExecType.CP, PDataPartitionFormat.ROW_BLOCK_WISE_N);
            lV.getOutputParameters().setDimensions(V.getDim1(), V.getDim2(), getRowsInBlock(), getColsInBlock(), V.getNnz());
            setLineNumbers(lV);
        }
        //map-side wcemm always with broadcast
        Lop wcemm = new WeightedCrossEntropy(X.constructLops(), lU, lV, eps.constructLops(), DataType.MATRIX, ValueType.DOUBLE, wtype, ExecType.MR);
        wcemm.getOutputParameters().setDimensions(1, 1, X.getRowsInBlock(), X.getColsInBlock(), -1);
        setLineNumbers(wcemm);
        Group grp = new Group(wcemm, Group.OperationTypes.Sort, DataType.MATRIX, ValueType.DOUBLE);
        grp.getOutputParameters().setDimensions(1, 1, X.getRowsInBlock(), X.getColsInBlock(), -1);
        setLineNumbers(grp);
        Aggregate agg1 = new Aggregate(grp, HopsAgg2Lops.get(AggOp.SUM), DataType.MATRIX, ValueType.DOUBLE, ExecType.MR);
        // aggregation uses kahanSum but the inputs carry no correction values
        agg1.setupCorrectionLocation(CorrectionLocationType.NONE);
        agg1.getOutputParameters().setDimensions(1, 1, X.getRowsInBlock(), X.getColsInBlock(), -1);
        setLineNumbers(agg1);
        UnaryCP unary1 = new UnaryCP(agg1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
        unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
        setLineNumbers(unary1);
        setLops(unary1);
    } else { //general case
        //MR operator selection part 2
        boolean cacheU = !FORCE_REPLICATION && (m1Size < OptimizerUtils.getRemoteMemBudgetReduce());
        boolean cacheV = !FORCE_REPLICATION && ((!cacheU && m2Size < OptimizerUtils.getRemoteMemBudgetReduce()) || (cacheU && m1Size + m2Size < OptimizerUtils.getRemoteMemBudgetReduce()));
        Group grpX = new Group(X.constructLops(), Group.OperationTypes.Sort, DataType.MATRIX, ValueType.DOUBLE);
        grpX.getOutputParameters().setDimensions(X.getDim1(), X.getDim2(), X.getRowsInBlock(), X.getColsInBlock(), -1);
        setLineNumbers(grpX);
        Lop lU = null;
        if (cacheU) {
            //partitioning of U for read through distributed cache
            boolean needPartU = !U.dimsKnown() || U.getDim1() * U.getDim2() > DistributedCacheInput.PARTITION_SIZE;
            lU = U.constructLops();
            if (needPartU) {
                //requires partitioning
                lU = new DataPartition(lU, DataType.MATRIX, ValueType.DOUBLE, (m1Size > OptimizerUtils.getLocalMemBudget()) ? ExecType.MR : ExecType.CP, PDataPartitionFormat.ROW_BLOCK_WISE_N);
                lU.getOutputParameters().setDimensions(U.getDim1(), U.getDim2(), getRowsInBlock(), getColsInBlock(), U.getNnz());
                setLineNumbers(lU);
            }
        } else {
            //replication of U for shuffle to target block
            //ncol of t(V) -> nrow of V determines num replicates
            Lop offset = createOffsetLop(V, false);
            lU = new RepMat(U.constructLops(), offset, true, V.getDataType(), V.getValueType());
            lU.getOutputParameters().setDimensions(U.getDim1(), U.getDim2(), U.getRowsInBlock(), U.getColsInBlock(), U.getNnz());
            setLineNumbers(lU);
            Group grpU = new Group(lU, Group.OperationTypes.Sort, DataType.MATRIX, ValueType.DOUBLE);
            grpU.getOutputParameters().setDimensions(U.getDim1(), U.getDim2(), U.getRowsInBlock(), U.getColsInBlock(), -1);
            setLineNumbers(grpU);
            lU = grpU;
        }
        Lop lV = null;
        if (cacheV) {
            //partitioning of V for read through distributed cache
            boolean needPartV = !V.dimsKnown() || V.getDim1() * V.getDim2() > DistributedCacheInput.PARTITION_SIZE;
            lV = V.constructLops();
            if (needPartV) {
                //requires partitioning
                lV = new DataPartition(lV, DataType.MATRIX, ValueType.DOUBLE, (m2Size > OptimizerUtils.getLocalMemBudget()) ? ExecType.MR : ExecType.CP, PDataPartitionFormat.ROW_BLOCK_WISE_N);
                lV.getOutputParameters().setDimensions(V.getDim1(), V.getDim2(), getRowsInBlock(), getColsInBlock(), V.getNnz());
                setLineNumbers(lV);
            }
        } else {
            //replication of t(V) for shuffle to target block
            Transform ltV = new Transform(V.constructLops(), HopsTransf2Lops.get(ReOrgOp.TRANSPOSE), getDataType(), getValueType(), ExecType.MR);
            ltV.getOutputParameters().setDimensions(V.getDim2(), V.getDim1(), V.getColsInBlock(), V.getRowsInBlock(), V.getNnz());
            setLineNumbers(ltV);
            //nrow of U determines num replicates
            Lop offset = createOffsetLop(U, false);
            lV = new RepMat(ltV, offset, false, V.getDataType(), V.getValueType());
            lV.getOutputParameters().setDimensions(V.getDim2(), V.getDim1(), V.getColsInBlock(), V.getRowsInBlock(), V.getNnz());
            setLineNumbers(lV);
            Group grpV = new Group(lV, Group.OperationTypes.Sort, DataType.MATRIX, ValueType.DOUBLE);
            grpV.getOutputParameters().setDimensions(V.getDim2(), V.getDim1(), V.getColsInBlock(), V.getRowsInBlock(), -1);
            setLineNumbers(grpV);
            lV = grpV;
        }
        //reduce-side wcemm w/ or without broadcast
        Lop wcemm = new WeightedCrossEntropyR(grpX, lU, lV, eps.constructLops(), DataType.MATRIX, ValueType.DOUBLE, wtype, cacheU, cacheV, ExecType.MR);
        wcemm.getOutputParameters().setDimensions(1, 1, X.getRowsInBlock(), X.getColsInBlock(), -1);
        setLineNumbers(wcemm);
        Group grp = new Group(wcemm, Group.OperationTypes.Sort, DataType.MATRIX, ValueType.DOUBLE);
        grp.getOutputParameters().setDimensions(1, 1, X.getRowsInBlock(), X.getColsInBlock(), -1);
        setLineNumbers(grp);
        Aggregate agg1 = new Aggregate(grp, HopsAgg2Lops.get(AggOp.SUM), DataType.MATRIX, ValueType.DOUBLE, ExecType.MR);
        // aggregation uses kahanSum but the inputs carry no correction values
        agg1.setupCorrectionLocation(CorrectionLocationType.NONE);
        agg1.getOutputParameters().setDimensions(1, 1, X.getRowsInBlock(), X.getColsInBlock(), -1);
        setLineNumbers(agg1);
        UnaryCP unary1 = new UnaryCP(agg1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
        unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
        setLineNumbers(unary1);
        setLops(unary1);
    }
}
Also used : Group(org.apache.sysml.lops.Group) RepMat(org.apache.sysml.lops.RepMat) WeightedCrossEntropyR(org.apache.sysml.lops.WeightedCrossEntropyR) MultiThreadedHop(org.apache.sysml.hops.Hop.MultiThreadedHop) WeightedCrossEntropy(org.apache.sysml.lops.WeightedCrossEntropy) Lop(org.apache.sysml.lops.Lop) Aggregate(org.apache.sysml.lops.Aggregate) Transform(org.apache.sysml.lops.Transform) DataPartition(org.apache.sysml.lops.DataPartition) UnaryCP(org.apache.sysml.lops.UnaryCP)
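
The partition-if-needed step above appears four times in constructMRLopsWeightedCeMM (for U and V, in both the map-side and reduce-side branches). Below is a minimal sketch of how it could be factored into a helper on the same Hop subclass; the method name partitionForCache is illustrative only and not part of the SystemML code base.

private Lop partitionForCache(Hop in, double inSize) throws HopsException, LopsException {
    Lop lop = in.constructLops();
    //partition only if dimensions are unknown or the matrix exceeds the distributed-cache partition size
    if (!in.dimsKnown() || in.getDim1() * in.getDim2() > DistributedCacheInput.PARTITION_SIZE) {
        ExecType et = (inSize > OptimizerUtils.getLocalMemBudget()) ? ExecType.MR : ExecType.CP;
        lop = new DataPartition(lop, DataType.MATRIX, ValueType.DOUBLE, et, PDataPartitionFormat.ROW_BLOCK_WISE_N);
        lop.getOutputParameters().setDimensions(in.getDim1(), in.getDim2(), getRowsInBlock(), getColsInBlock(), in.getNnz());
        setLineNumbers(lop);
    }
    return lop;
}

With such a helper, the four partitioning blocks would reduce to calls like partitionForCache(U, m1Size) and partitionForCache(V, m2Size).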

Example 2 with UnaryCP

Use of org.apache.sysml.lops.UnaryCP in project incubator-systemml by apache.

The class UnaryOp, method constructLops.

@Override
public Lop constructLops() throws HopsException, LopsException {
    //reuse existing lop
    if (getLops() != null)
        return getLops();
    try {
        Hop input = getInput().get(0);
        //value type casts or matrix to scalar
        if (getDataType() == DataType.SCALAR || (_op == OpOp1.CAST_AS_MATRIX && getInput().get(0).getDataType() == DataType.SCALAR) || (_op == OpOp1.CAST_AS_FRAME && getInput().get(0).getDataType() == DataType.SCALAR)) {
            //special handling IQM
            if (_op == Hop.OpOp1.IQM) {
                Lop iqmLop = constructLopsIQM();
                setLops(iqmLop);
            } else if (_op == Hop.OpOp1.MEDIAN) {
                Lop medianLop = constructLopsMedian();
                setLops(medianLop);
            } else { //general case SCALAR/CAST (always in CP)
                UnaryCP.OperationTypes optype = HopsOpOp1LopsUS.get(_op);
                if (optype == null)
                    throw new HopsException("Unknown UnaryCP lop type for UnaryOp operation type '" + _op + "'");
                UnaryCP unary1 = null;
                if ((_op == Hop.OpOp1.NROW || _op == Hop.OpOp1.NCOL || _op == Hop.OpOp1.LENGTH) && input instanceof UnaryOp && ((UnaryOp) input).getOp() == OpOp1.SELP) {
                    // Dimensions do not change during the sel+ operation.
                    // This case helps avoid an unnecessary sel+ operation for fused max pooling.
                    unary1 = new UnaryCP(input.getInput().get(0).constructLops(), optype, getDataType(), getValueType());
                } else
                    unary1 = new UnaryCP(input.constructLops(), optype, getDataType(), getValueType());
                setOutputDimensions(unary1);
                setLineNumbers(unary1);
                setLops(unary1);
            }
        } else { //general case MATRIX
            ExecType et = optFindExecType();
            //special handling cumsum/cumprod/cummin/cummax
            if (isCumulativeUnaryOperation() && et != ExecType.CP) {
                //TODO additional physical operation if offsets fit in memory
                Lop cumsumLop = null;
                if (et == ExecType.MR)
                    cumsumLop = constructLopsMRCumulativeUnary();
                else
                    cumsumLop = constructLopsSparkCumulativeUnary();
                setLops(cumsumLop);
            } else { //default unary
                int k = isCumulativeUnaryOperation() ? OptimizerUtils.getConstrainedNumThreads(_maxNumThreads) : 1;
                switch(_op) {
                    case SELP:
                    case EXP:
                    case SQRT:
                    case LOG:
                    case ABS:
                    case ROUND:
                    case FLOOR:
                    case CEIL:
                    case SIN:
                    case COS:
                    case TAN:
                    case ASIN:
                    case ACOS:
                    case ATAN:
                    case SIGN:
                        et = findGPUExecTypeByMemEstimate(et);
                        break;
                    default:
                }
                Unary unary1 = new Unary(input.constructLops(), HopsOpOp1LopsU.get(_op), getDataType(), getValueType(), et, k);
                setOutputDimensions(unary1);
                setLineNumbers(unary1);
                setLops(unary1);
            }
        }
    } catch (Exception e) {
        throw new HopsException(this.printErrorLocation() + "error constructing Lops for UnaryOp Hop -- \n ", e);
    }
    //add reblock/checkpoint lops if necessary
    constructAndSetLopsDataFlowProperties();
    return getLops();
}
Also used : OperationTypes(org.apache.sysml.lops.Aggregate.OperationTypes) MultiThreadedHop(org.apache.sysml.hops.Hop.MultiThreadedHop) ExecType(org.apache.sysml.lops.LopProperties.ExecType) Lop(org.apache.sysml.lops.Lop) CombineUnary(org.apache.sysml.lops.CombineUnary) Unary(org.apache.sysml.lops.Unary) LopsException(org.apache.sysml.lops.LopsException) UnaryCP(org.apache.sysml.lops.UnaryCP)
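
A recurring pattern across these examples is closing a matrix-valued MR or Spark pipeline with a CAST_AS_SCALAR step: wrap the final lop in a UnaryCP, zero out its dimensions, and register it via setLops. A minimal sketch of that shared tail as a hypothetical helper on the Hop subclass (the name castToScalarAndSet is illustrative only):

private void castToScalarAndSet(Lop in) throws HopsException, LopsException {
    //CAST_AS_SCALAR always runs in CP, regardless of where the input lop executes
    UnaryCP unary1 = new UnaryCP(in, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
    //scalars carry no dimensions, block sizes, or nnz
    unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
    setLineNumbers(unary1);
    setLops(unary1);
}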

Example 3 with UnaryCP

Use of org.apache.sysml.lops.UnaryCP in project incubator-systemml by apache.

The class BinaryOp, method constructLopsCentralMoment.

private void constructLopsCentralMoment(ExecType et) throws HopsException, LopsException {
    // The output data type is SCALAR if the central moment
    // is computed in CP/Spark, and MATRIX otherwise.
    DataType dt = (et == ExecType.MR ? DataType.MATRIX : DataType.SCALAR);
    CentralMoment cm = new CentralMoment(getInput().get(0).constructLops(), getInput().get(1).constructLops(), dt, getValueType(), et);
    setLineNumbers(cm);
    if (et == ExecType.MR) {
        cm.getOutputParameters().setDimensions(1, 1, 0, 0, -1);
        UnaryCP unary1 = new UnaryCP(cm, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
        unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
        setLineNumbers(unary1);
        setLops(unary1);
    } else {
        cm.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
        setLops(cm);
    }
}
Also used : CentralMoment(org.apache.sysml.lops.CentralMoment) DataType(org.apache.sysml.parser.Expression.DataType) UnaryCP(org.apache.sysml.lops.UnaryCP)

Example 4 with UnaryCP

Use of org.apache.sysml.lops.UnaryCP in project incubator-systemml by apache.

The class AggUnaryOp, method constructLops.

@Override
public Lop constructLops() throws HopsException, LopsException {
    //return already created lops
    if (getLops() != null)
        return getLops();
    try {
        ExecType et = optFindExecType();
        Hop input = getInput().get(0);
        if (et == ExecType.CP) {
            Lop agg1 = null;
            if (isTernaryAggregateRewriteApplicable(et)) {
                agg1 = constructLopsTernaryAggregateRewrite(et);
            } else if (isUnaryAggregateOuterCPRewriteApplicable()) {
                OperationTypes op = HopsAgg2Lops.get(_op);
                DirectionTypes dir = HopsDirection2Lops.get(_direction);
                BinaryOp binput = (BinaryOp) getInput().get(0);
                agg1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.CP);
                PartialAggregate.setDimensionsBasedOnDirection(agg1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
                if (getDataType() == DataType.SCALAR) {
                    UnaryCP unary1 = new UnaryCP(agg1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                    unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                    setLineNumbers(unary1);
                    setLops(unary1);
                }
            } else {
                //general case		
                int k = OptimizerUtils.getConstrainedNumThreads(_maxNumThreads);
                if (DMLScript.USE_ACCELERATOR && (DMLScript.FORCE_ACCELERATOR || getMemEstimate() < OptimizerUtils.GPU_MEMORY_BUDGET)) {
                    // Only implemented methods for GPU
                    if ((_op == AggOp.SUM && (_direction == Direction.RowCol || _direction == Direction.Row || _direction == Direction.Col))
                        || (_op == AggOp.SUM_SQ && (_direction == Direction.RowCol || _direction == Direction.Row || _direction == Direction.Col))
                        || (_op == AggOp.MAX && (_direction == Direction.RowCol || _direction == Direction.Row || _direction == Direction.Col))
                        || (_op == AggOp.MIN && (_direction == Direction.RowCol || _direction == Direction.Row || _direction == Direction.Col))
                        || (_op == AggOp.MEAN && (_direction == Direction.RowCol || _direction == Direction.Row || _direction == Direction.Col))
                        || (_op == AggOp.VAR && (_direction == Direction.RowCol || _direction == Direction.Row || _direction == Direction.Col))
                        || (_op == AggOp.PROD && (_direction == Direction.RowCol))) {
                        et = ExecType.GPU;
                        k = 1;
                    }
                }
                agg1 = new PartialAggregate(input.constructLops(), HopsAgg2Lops.get(_op), HopsDirection2Lops.get(_direction), getDataType(), getValueType(), et, k);
            }
            setOutputDimensions(agg1);
            setLineNumbers(agg1);
            setLops(agg1);
            if (getDataType() == DataType.SCALAR) {
                agg1.getOutputParameters().setDimensions(1, 1, getRowsInBlock(), getColsInBlock(), getNnz());
            }
        } else if (et == ExecType.MR) {
            OperationTypes op = HopsAgg2Lops.get(_op);
            DirectionTypes dir = HopsDirection2Lops.get(_direction);
            //unary aggregate operation
            Lop transform1 = null;
            if (isUnaryAggregateOuterRewriteApplicable()) {
                BinaryOp binput = (BinaryOp) getInput().get(0);
                transform1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.MR);
                PartialAggregate.setDimensionsBasedOnDirection(transform1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
            } else { //default
                transform1 = new PartialAggregate(input.constructLops(), op, dir, DataType.MATRIX, getValueType());
                ((PartialAggregate) transform1).setDimensionsBasedOnDirection(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock());
            }
            setLineNumbers(transform1);
            //aggregation if required
            Lop aggregate = null;
            Group group1 = null;
            Aggregate agg1 = null;
            if (requiresAggregation(input, _direction) || transform1 instanceof UAggOuterChain) {
                group1 = new Group(transform1, Group.OperationTypes.Sort, DataType.MATRIX, getValueType());
                group1.getOutputParameters().setDimensions(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                setLineNumbers(group1);
                agg1 = new Aggregate(group1, HopsAgg2Lops.get(_op), DataType.MATRIX, getValueType(), et);
                agg1.getOutputParameters().setDimensions(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                agg1.setupCorrectionLocation(PartialAggregate.getCorrectionLocation(op, dir));
                setLineNumbers(agg1);
                aggregate = agg1;
            } else {
                ((PartialAggregate) transform1).setDropCorrection();
                aggregate = transform1;
            }
            setLops(aggregate);
            //cast if required
            if (getDataType() == DataType.SCALAR) {
                // Set the dimensions of PartialAggregate LOP based on the
                // direction in which aggregation is performed
                PartialAggregate.setDimensionsBasedOnDirection(transform1, input.getDim1(), input.getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
                if (group1 != null && agg1 != null) {
                    //if aggregation required
                    group1.getOutputParameters().setDimensions(input.getDim1(), input.getDim2(), input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                    agg1.getOutputParameters().setDimensions(1, 1, input.getRowsInBlock(), input.getColsInBlock(), getNnz());
                }
                UnaryCP unary1 = new UnaryCP(aggregate, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                setLineNumbers(unary1);
                setLops(unary1);
            }
        } else if (et == ExecType.SPARK) {
            OperationTypes op = HopsAgg2Lops.get(_op);
            DirectionTypes dir = HopsDirection2Lops.get(_direction);
            //unary aggregate
            if (isTernaryAggregateRewriteApplicable(et)) {
                Lop aggregate = constructLopsTernaryAggregateRewrite(et);
                //0x0 (scalar)
                setOutputDimensions(aggregate);
                setLineNumbers(aggregate);
                setLops(aggregate);
            } else if (isUnaryAggregateOuterSPRewriteApplicable()) {
                BinaryOp binput = (BinaryOp) getInput().get(0);
                Lop transform1 = new UAggOuterChain(binput.getInput().get(0).constructLops(), binput.getInput().get(1).constructLops(), op, dir, HopsOpOp2LopsB.get(binput.getOp()), DataType.MATRIX, getValueType(), ExecType.SPARK);
                PartialAggregate.setDimensionsBasedOnDirection(transform1, getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock(), dir);
                setLineNumbers(transform1);
                setLops(transform1);
                if (getDataType() == DataType.SCALAR) {
                    UnaryCP unary1 = new UnaryCP(transform1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                    unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                    setLineNumbers(unary1);
                    setLops(unary1);
                }
            } else { //default
                boolean needAgg = requiresAggregation(input, _direction);
                SparkAggType aggtype = getSparkUnaryAggregationType(needAgg);
                PartialAggregate aggregate = new PartialAggregate(input.constructLops(), HopsAgg2Lops.get(_op), HopsDirection2Lops.get(_direction), DataType.MATRIX, getValueType(), aggtype, et);
                aggregate.setDimensionsBasedOnDirection(getDim1(), getDim2(), input.getRowsInBlock(), input.getColsInBlock());
                setLineNumbers(aggregate);
                setLops(aggregate);
                if (getDataType() == DataType.SCALAR) {
                    UnaryCP unary1 = new UnaryCP(aggregate, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
                    unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
                    setLineNumbers(unary1);
                    setLops(unary1);
                }
            }
        }
    } catch (Exception e) {
        throw new HopsException(this.printErrorLocation() + "In AggUnary Hop, error constructing Lops ", e);
    }
    //add reblock/checkpoint lops if necessary
    constructAndSetLopsDataFlowProperties();
    //return created lops
    return getLops();
}
Also used : PartialAggregate(org.apache.sysml.lops.PartialAggregate) Group(org.apache.sysml.lops.Group) SparkAggType(org.apache.sysml.hops.AggBinaryOp.SparkAggType) MultiThreadedHop(org.apache.sysml.hops.Hop.MultiThreadedHop) Lop(org.apache.sysml.lops.Lop) LopsException(org.apache.sysml.lops.LopsException) UAggOuterChain(org.apache.sysml.lops.UAggOuterChain) UnaryCP(org.apache.sysml.lops.UnaryCP) OperationTypes(org.apache.sysml.lops.Aggregate.OperationTypes) DirectionTypes(org.apache.sysml.lops.PartialAggregate.DirectionTypes) ExecType(org.apache.sysml.lops.LopProperties.ExecType) PartialAggregate(org.apache.sysml.lops.PartialAggregate) TernaryAggregate(org.apache.sysml.lops.TernaryAggregate) Aggregate(org.apache.sysml.lops.Aggregate)
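
The GPU eligibility check above tests the same three directions for six of the seven aggregation operators. A hypothetical predicate that expresses the same condition more compactly; gpuSupports is an illustrative name, not SystemML API.

private static boolean gpuSupports(AggOp op, Direction dir) {
    boolean anyDir = (dir == Direction.RowCol || dir == Direction.Row || dir == Direction.Col);
    switch (op) {
        //implemented for full, row-wise, and column-wise aggregation
        case SUM: case SUM_SQ: case MAX: case MIN: case MEAN: case VAR:
            return anyDir;
        //product is only implemented as a full aggregation
        case PROD:
            return dir == Direction.RowCol;
        default:
            return false;
    }
}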

Example 5 with UnaryCP

Use of org.apache.sysml.lops.UnaryCP in project incubator-systemml by apache.

The class TernaryOp, method constructLopsCentralMoment.

/**
	 * Method to construct LOPs when op = CENTRALMOMENT.
	 * 
	 * @throws HopsException if HopsException occurs
	 * @throws LopsException if LopsException occurs
	 */
private void constructLopsCentralMoment() throws HopsException, LopsException {
    if (_op != OpOp3.CENTRALMOMENT)
        throw new HopsException("Unexpected operation: " + _op + ", expecting " + OpOp3.CENTRALMOMENT);
    ExecType et = optFindExecType();
    if (et == ExecType.MR) {
        CombineBinary combine = CombineBinary.constructCombineLop(OperationTypes.PreCentralMoment, getInput().get(0).constructLops(), getInput().get(1).constructLops(), DataType.MATRIX, getValueType());
        combine.getOutputParameters().setDimensions(getInput().get(0).getDim1(), getInput().get(0).getDim2(), getInput().get(0).getRowsInBlock(), getInput().get(0).getColsInBlock(), getInput().get(0).getNnz());
        CentralMoment cm = new CentralMoment(combine, getInput().get(2).constructLops(), DataType.MATRIX, getValueType(), et);
        cm.getOutputParameters().setDimensions(1, 1, 0, 0, -1);
        setLineNumbers(cm);
        UnaryCP unary1 = new UnaryCP(cm, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), getDataType(), getValueType());
        unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
        setLineNumbers(unary1);
        setLops(unary1);
    } else { //CP / SPARK
        CentralMoment cm = new CentralMoment(getInput().get(0).constructLops(), getInput().get(1).constructLops(), getInput().get(2).constructLops(), getDataType(), getValueType(), et);
        cm.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
        setLineNumbers(cm);
        setLops(cm);
    }
}
Also used : CombineBinary(org.apache.sysml.lops.CombineBinary) CentralMoment(org.apache.sysml.lops.CentralMoment) ExecType(org.apache.sysml.lops.LopProperties.ExecType) UnaryCP(org.apache.sysml.lops.UnaryCP)
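
As in Example 3, only the MR path needs the CAST_AS_SCALAR step: MR jobs exchange matrix blocks, so the central moment is first materialized as a 1x1 matrix and then cast to a scalar in CP, whereas the CP/Spark path produces the scalar directly. The weighted variant here additionally pairs values and weights via CombineBinary (PreCentralMoment) before the moment computation. The resulting pipelines, sketched with illustrative notation:

MR:        CombineBinary(PreCentralMoment) -> CentralMoment(MR, 1x1 MATRIX) -> UnaryCP(CAST_AS_SCALAR, CP) -> SCALAR
CP/Spark:  CentralMoment(CP or SPARK) -> SCALAR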

Aggregations

UnaryCP (org.apache.sysml.lops.UnaryCP): 12
Group (org.apache.sysml.lops.Group): 6
Lop (org.apache.sysml.lops.Lop): 6
ExecType (org.apache.sysml.lops.LopProperties.ExecType): 6
MultiThreadedHop (org.apache.sysml.hops.Hop.MultiThreadedHop): 5
Aggregate (org.apache.sysml.lops.Aggregate): 5
CombineBinary (org.apache.sysml.lops.CombineBinary): 3
CombineUnary (org.apache.sysml.lops.CombineUnary): 3
LopsException (org.apache.sysml.lops.LopsException): 3
PartialAggregate (org.apache.sysml.lops.PartialAggregate): 3
Unary (org.apache.sysml.lops.Unary): 3
OperationTypes (org.apache.sysml.lops.Aggregate.OperationTypes): 2
CentralMoment (org.apache.sysml.lops.CentralMoment): 2
CoVariance (org.apache.sysml.lops.CoVariance): 2
Data (org.apache.sysml.lops.Data): 2
DataPartition (org.apache.sysml.lops.DataPartition): 2
PickByCount (org.apache.sysml.lops.PickByCount): 2
RepMat (org.apache.sysml.lops.RepMat): 2
SortKeys (org.apache.sysml.lops.SortKeys): 2
Transform (org.apache.sysml.lops.Transform): 2