Example 16 with ScalarObject

Use of org.apache.sysml.runtime.instructions.cp.ScalarObject in project incubator-systemml by apache.

The class Recompiler, method extractDAGOutputStatistics.

public static void extractDAGOutputStatistics(Hop hop, LocalVariableMap vars, boolean overwrite) {
    // for all writes to symbol table
    if (hop instanceof DataOp && ((DataOp) hop).getDataOpType() == DataOpTypes.TRANSIENTWRITE) {
        String varName = hop.getName();
        // not existing so far
        if (!vars.keySet().contains(varName) || overwrite) {
            // extract matrix sizes for size propagation
            if (hop.getDataType() == DataType.MATRIX) {
                MatrixObject mo = new MatrixObject(ValueType.DOUBLE, null);
                MatrixCharacteristics mc = new MatrixCharacteristics(hop.getDim1(), hop.getDim2(), ConfigurationManager.getBlocksize(), ConfigurationManager.getBlocksize(), hop.getNnz());
                MetaDataFormat meta = new MetaDataFormat(mc, null, null);
                mo.setMetaData(meta);
                vars.put(varName, mo);
            } else if (hop.getDataType() == DataType.SCALAR) {
                // extract scalar constants for second constant propagation
                // extract literal assignments
                if (hop.getInput().size() == 1 && hop.getInput().get(0) instanceof LiteralOp) {
                    ScalarObject constant = HopRewriteUtils.getScalarObject((LiteralOp) hop.getInput().get(0));
                    if (constant != null)
                        vars.put(varName, constant);
                } else if (hop.getInput().size() == 1 && hop.getInput().get(0) instanceof DataOp) {
                    // extract constant variable assignments
                    DataOp dop = (DataOp) hop.getInput().get(0);
                    String dopvarname = dop.getName();
                    if (dop.isRead() && vars.keySet().contains(dopvarname)) {
                        ScalarObject constant = (ScalarObject) vars.get(dopvarname);
                        // no clone because constant
                        vars.put(varName, constant);
                    }
                } else if (hop.getInput().size() == 1 && hop.getInput().get(0) instanceof UnaryOp
                    && (((UnaryOp) hop.getInput().get(0)).getOp() == OpOp1.NROW
                        || ((UnaryOp) hop.getInput().get(0)).getOp() == OpOp1.NCOL)) {
                    // extract nrow/ncol variable assignments
                    UnaryOp uop = (UnaryOp) hop.getInput().get(0);
                    if (uop.getOp() == OpOp1.NROW && uop.getInput().get(0).getDim1() > 0)
                        vars.put(varName, new IntObject(uop.getInput().get(0).getDim1()));
                    else if (uop.getOp() == OpOp1.NCOL && uop.getInput().get(0).getDim2() > 0)
                        vars.put(varName, new IntObject(uop.getInput().get(0).getDim2()));
                } else {
                    // we need to remove other updated scalars in order to ensure result
                    // correctness of recompilation w/o being too conservative
                    vars.remove(varName);
                }
            }
        } else {
            // already existing: take largest
            Data dat = vars.get(varName);
            if (dat instanceof MatrixObject) {
                MatrixObject mo = (MatrixObject) dat;
                MatrixCharacteristics mc = mo.getMatrixCharacteristics();
                double sparsity = (mc.getNonZeros() >= 0) ?
                    ((double) mc.getNonZeros()) / mc.getRows() / mc.getCols() : 1.0;
                if (OptimizerUtils.estimateSizeExactSparsity(mc.getRows(), mc.getCols(), sparsity)
                        < OptimizerUtils.estimateSize(hop.getDim1(), hop.getDim2())) {
                    // update statistics if necessary
                    mc.setDimension(hop.getDim1(), hop.getDim2());
                    mc.setNonZeros(hop.getNnz());
                }
            } else {
                // scalar (just overwrite)
                if (hop.getInput().size() == 1 && hop.getInput().get(0) instanceof LiteralOp) {
                    ScalarObject constant = HopRewriteUtils.getScalarObject((LiteralOp) hop.getInput().get(0));
                    if (constant != null)
                        vars.put(varName, constant);
                }
            }
        }
    }
}
Also used : MetaDataFormat(org.apache.sysml.runtime.matrix.MetaDataFormat) ScalarObject(org.apache.sysml.runtime.instructions.cp.ScalarObject) MatrixObject(org.apache.sysml.runtime.controlprogram.caching.MatrixObject) UnaryOp(org.apache.sysml.hops.UnaryOp) IntObject(org.apache.sysml.runtime.instructions.cp.IntObject) CacheableData(org.apache.sysml.runtime.controlprogram.caching.CacheableData) Data(org.apache.sysml.runtime.instructions.cp.Data) LiteralOp(org.apache.sysml.hops.LiteralOp) DataOp(org.apache.sysml.hops.DataOp) MatrixCharacteristics(org.apache.sysml.runtime.matrix.MatrixCharacteristics)
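
The least obvious branch above is the nrow/ncol folding: when a transient write assigns nrow(X) or ncol(X) and the dimensions of X are already known at recompile time, the value is placed into the symbol table as an IntObject. Below is a minimal standalone sketch of that idea in plain Java; the Dims record and the HashMap symbol table are hypothetical stand-ins, not the real Hop and LocalVariableMap APIs.

import java.util.HashMap;
import java.util.Map;

public class NrowFoldSketch {
    // hypothetical stand-in for known hop dimension statistics
    record Dims(long rows, long cols) {}

    public static void main(String[] args) {
        Map<String, Dims> hopDims = new HashMap<>();       // known statistics per variable
        Map<String, Object> symbolTable = new HashMap<>(); // stand-in for LocalVariableMap
        hopDims.put("X", new Dims(1000, 20));

        // transient write of n = nrow(X): fold to a constant if dim1 is known
        Dims x = hopDims.get("X");
        if (x != null && x.rows() > 0)
            symbolTable.put("n", Long.valueOf(x.rows())); // SystemML would store an IntObject

        System.out.println(symbolTable); // {n=1000}
    }
}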

Example 17 with ScalarObject

Use of org.apache.sysml.runtime.instructions.cp.ScalarObject in project incubator-systemml by apache.

The class ScalarMatrixArithmeticGPUInstruction, method processInstruction.

@Override
public void processInstruction(ExecutionContext ec) {
    GPUStatistics.incrementNoOfExecutedGPUInst();
    CPOperand mat = (_input1.getDataType() == DataType.MATRIX) ? _input1 : _input2;
    CPOperand scalar = (_input1.getDataType() == DataType.MATRIX) ? _input2 : _input1;
    MatrixObject in1 = getMatrixInputForGPUInstruction(ec, mat.getName());
    ScalarObject constant = (ScalarObject) ec.getScalarInput(scalar.getName(), scalar.getValueType(), scalar.isLiteral());
    boolean isTransposed = false;
    int rlen = isTransposed ? (int) in1.getNumColumns() : (int) in1.getNumRows();
    int clen = isTransposed ? (int) in1.getNumRows() : (int) in1.getNumColumns();
    ec.setMetaData(_output.getName(), rlen, clen);
    ScalarOperator sc_op = (ScalarOperator) _optr;
    sc_op = sc_op.setConstant(constant.getDoubleValue());
    LibMatrixCUDA.matrixScalarArithmetic(ec, ec.getGPUContext(0), getExtendedOpcode(), in1, _output.getName(), isTransposed, sc_op);
    ec.releaseMatrixInputForGPUInstruction(mat.getName());
    ec.releaseMatrixOutputForGPUInstruction(_output.getName());
}
Also used : ScalarOperator(org.apache.sysml.runtime.matrix.operators.ScalarOperator) ScalarObject(org.apache.sysml.runtime.instructions.cp.ScalarObject) MatrixObject(org.apache.sysml.runtime.controlprogram.caching.MatrixObject) CPOperand(org.apache.sysml.runtime.instructions.cp.CPOperand)
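
For context, here is a single-threaded CPU sketch of the computation that LibMatrixCUDA.matrixScalarArithmetic performs on the GPU: the operator, with its bound scalar constant, is applied to every cell of the input matrix. The DoubleBinaryOperator below is only a stand-in for the ScalarOperator, not the SystemML API.

import java.util.Arrays;
import java.util.function.DoubleBinaryOperator;

class ScalarMatrixSketch {
    static double[][] apply(double[][] in, double constant, DoubleBinaryOperator op) {
        double[][] out = new double[in.length][in[0].length];
        for (int i = 0; i < in.length; i++)
            for (int j = 0; j < in[0].length; j++)
                out[i][j] = op.applyAsDouble(in[i][j], constant); // op(cell, constant)
        return out;
    }

    public static void main(String[] args) {
        double[][] X = { { 1, 2 }, { 3, 4 } };
        double[][] Y = apply(X, 10, (x, c) -> x * c); // elementwise X * 10
        System.out.println(Arrays.deepToString(Y)); // [[10.0, 20.0], [30.0, 40.0]]
    }
}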

Example 18 with ScalarObject

Use of org.apache.sysml.runtime.instructions.cp.ScalarObject in project incubator-systemml by apache.

The class ScalarMatrixRelationalBinaryGPUInstruction, method processInstruction.

@Override
public void processInstruction(ExecutionContext ec) {
    GPUStatistics.incrementNoOfExecutedGPUInst();
    CPOperand mat = (_input1.getDataType() == Expression.DataType.MATRIX) ? _input1 : _input2;
    CPOperand scalar = (_input1.getDataType() == Expression.DataType.MATRIX) ? _input2 : _input1;
    MatrixObject in1 = getMatrixInputForGPUInstruction(ec, mat.getName());
    ScalarObject constant = (ScalarObject) ec.getScalarInput(scalar.getName(), scalar.getValueType(), scalar.isLiteral());
    int rlen = (int) in1.getNumRows();
    int clen = (int) in1.getNumColumns();
    ec.setMetaData(_output.getName(), rlen, clen);
    ScalarOperator sc_op = (ScalarOperator) _optr;
    sc_op = sc_op.setConstant(constant.getDoubleValue());
    LibMatrixCUDA.matrixScalarRelational(ec, ec.getGPUContext(0), getExtendedOpcode(), in1, _output.getName(), sc_op);
    ec.releaseMatrixInputForGPUInstruction(mat.getName());
    ec.releaseMatrixOutputForGPUInstruction(_output.getName());
}
Also used : ScalarOperator(org.apache.sysml.runtime.matrix.operators.ScalarOperator) ScalarObject(org.apache.sysml.runtime.instructions.cp.ScalarObject) MatrixObject(org.apache.sysml.runtime.controlprogram.caching.MatrixObject) CPOperand(org.apache.sysml.runtime.instructions.cp.CPOperand)

Example 19 with ScalarObject

Use of org.apache.sysml.runtime.instructions.cp.ScalarObject in project incubator-systemml by apache.

The class QuantilePickSPInstruction, method processInstruction.

@Override
public void processInstruction(ExecutionContext ec) {
    SparkExecutionContext sec = (SparkExecutionContext) ec;
    // get input rdds
    JavaPairRDD<MatrixIndexes, MatrixBlock> in = sec.getBinaryBlockRDDHandleForVariable(input1.getName());
    MatrixCharacteristics mc = sec.getMatrixCharacteristics(input1.getName());
    // (in contrast to cp instructions, w/o weights this does not materialize weights of 1)
    switch(_type) {
        case VALUEPICK:
            {
                ScalarObject quantile = ec.getScalarInput(input2);
                double[] wt = getWeightedQuantileSummary(in, mc, quantile.getDoubleValue());
                ec.setScalarOutput(output.getName(), new DoubleObject(wt[3]));
                break;
            }
        case MEDIAN:
            {
                double[] wt = getWeightedQuantileSummary(in, mc, 0.5);
                ec.setScalarOutput(output.getName(), new DoubleObject(wt[3]));
                break;
            }
        case IQM:
            {
                double[] wt = getWeightedQuantileSummary(in, mc, 0.25, 0.75);
                long key25 = (long) Math.ceil(wt[1]);
                long key75 = (long) Math.ceil(wt[2]);
                JavaPairRDD<MatrixIndexes, MatrixBlock> out = in.filter(new FilterFunction(key25 + 1, key75, mc.getRowsPerBlock())).mapToPair(new ExtractAndSumFunction(key25 + 1, key75, mc.getRowsPerBlock()));
                double sum = RDDAggregateUtils.sumStable(out).getValue(0, 0);
                double val = MatrixBlock.computeIQMCorrection(sum, wt[0], wt[3], wt[5], wt[4], wt[6]);
                ec.setScalarOutput(output.getName(), new DoubleObject(val));
                break;
            }
        default:
            throw new DMLRuntimeException("Unsupported qpick operation type: " + _type);
    }
}
Also used : ScalarObject(org.apache.sysml.runtime.instructions.cp.ScalarObject) MatrixBlock(org.apache.sysml.runtime.matrix.data.MatrixBlock) MatrixIndexes(org.apache.sysml.runtime.matrix.data.MatrixIndexes) DoubleObject(org.apache.sysml.runtime.instructions.cp.DoubleObject) JavaPairRDD(org.apache.spark.api.java.JavaPairRDD) SparkExecutionContext(org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext) MatrixCharacteristics(org.apache.sysml.runtime.matrix.MatrixCharacteristics) DMLRuntimeException(org.apache.sysml.runtime.DMLRuntimeException)
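
To make the IQM branch concrete, here is a simplified single-node sketch: the interquartile mean averages the values strictly between the 25th and 75th percentiles of the sorted column. For simplicity it assumes n is divisible by 4; the Spark code handles fractional quartile boundaries via MatrixBlock.computeIQMCorrection, which this sketch omits.

import java.util.Arrays;

class IqmSketch {
    static double iqm(double[] values) {
        double[] v = values.clone();
        Arrays.sort(v);
        int n = v.length;               // assumed divisible by 4 here
        int lo = n / 4, hi = 3 * n / 4; // [lo, hi) is the middle half
        double sum = 0;
        for (int i = lo; i < hi; i++)
            sum += v[i];
        return sum / (hi - lo);
    }

    public static void main(String[] args) {
        double[] col = { 5, 8, 4, 38, 8, 6, 9, 7, 7, 3, 1, 6 };
        System.out.println(iqm(col)); // 6.5
    }
}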

Example 20 with ScalarObject

Use of org.apache.sysml.runtime.instructions.cp.ScalarObject in project incubator-systemml by apache.

The class SpoofSPInstruction, method processInstruction.

@Override
public void processInstruction(ExecutionContext ec) {
    SparkExecutionContext sec = (SparkExecutionContext) ec;
    // decide upon broadcast side inputs
    boolean[] bcVect = determineBroadcastInputs(sec, _in);
    boolean[] bcVect2 = getMatrixBroadcastVector(sec, _in, bcVect);
    int main = getMainInputIndex(_in, bcVect);
    // create joined input rdd w/ replication if needed
    MatrixCharacteristics mcIn = sec.getMatrixCharacteristics(_in[main].getName());
    JavaPairRDD<MatrixIndexes, MatrixBlock[]> in = createJoinedInputRDD(sec, _in, bcVect, (_class.getSuperclass() == SpoofOuterProduct.class));
    JavaPairRDD<MatrixIndexes, MatrixBlock> out = null;
    // create lists of input broadcasts and scalars
    ArrayList<PartitionedBroadcast<MatrixBlock>> bcMatrices = new ArrayList<>();
    ArrayList<ScalarObject> scalars = new ArrayList<>();
    for (int i = 0; i < _in.length; i++) {
        if (_in[i].getDataType() == DataType.MATRIX && bcVect[i]) {
            bcMatrices.add(sec.getBroadcastForVariable(_in[i].getName()));
        } else if (_in[i].getDataType() == DataType.SCALAR) {
            // note: even if literal, it might be compiled as scalar placeholder
            scalars.add(sec.getScalarInput(_in[i].getName(), _in[i].getValueType(), _in[i].isLiteral()));
        }
    }
    // execute generated operator
    if (_class.getSuperclass() == SpoofCellwise.class) { // CELL
        SpoofCellwise op = (SpoofCellwise) CodegenUtils.createInstance(_class);
        AggregateOperator aggop = getAggregateOperator(op.getAggOp());
        if (_out.getDataType() == DataType.MATRIX) {
            // execute codegen block operation
            out = in.mapPartitionsToPair(new CellwiseFunction(_class.getName(), _classBytes, bcVect2, bcMatrices, scalars), true);
            if ((op.getCellType() == CellType.ROW_AGG && mcIn.getCols() > mcIn.getColsPerBlock()) || (op.getCellType() == CellType.COL_AGG && mcIn.getRows() > mcIn.getRowsPerBlock())) {
                long numBlocks = (op.getCellType() == CellType.ROW_AGG) ? mcIn.getNumRowBlocks() : mcIn.getNumColBlocks();
                out = RDDAggregateUtils.aggByKeyStable(out, aggop, (int) Math.min(out.getNumPartitions(), numBlocks), false);
            }
            sec.setRDDHandleForVariable(_out.getName(), out);
            // maintain lineage info and output characteristics
            maintainLineageInfo(sec, _in, bcVect, _out);
            updateOutputMatrixCharacteristics(sec, op);
        } else {
            // SCALAR
            out = in.mapPartitionsToPair(new CellwiseFunction(_class.getName(), _classBytes, bcVect2, bcMatrices, scalars), true);
            MatrixBlock tmpMB = RDDAggregateUtils.aggStable(out, aggop);
            sec.setVariable(_out.getName(), new DoubleObject(tmpMB.getValue(0, 0)));
        }
    } else if (_class.getSuperclass() == SpoofMultiAggregate.class) { // MAGG
        SpoofMultiAggregate op = (SpoofMultiAggregate) CodegenUtils.createInstance(_class);
        AggOp[] aggOps = op.getAggOps();
        MatrixBlock tmpMB = in.mapToPair(new MultiAggregateFunction(_class.getName(), _classBytes, bcVect2, bcMatrices, scalars)).values().fold(new MatrixBlock(), new MultiAggAggregateFunction(aggOps));
        sec.setMatrixOutput(_out.getName(), tmpMB, getExtendedOpcode());
    } else if (_class.getSuperclass() == SpoofOuterProduct.class) { // OUTER
        if (_out.getDataType() == DataType.MATRIX) {
            SpoofOperator op = (SpoofOperator) CodegenUtils.createInstance(_class);
            OutProdType type = ((SpoofOuterProduct) op).getOuterProdType();
            // update matrix characteristics
            updateOutputMatrixCharacteristics(sec, op);
            MatrixCharacteristics mcOut = sec.getMatrixCharacteristics(_out.getName());
            out = in.mapPartitionsToPair(new OuterProductFunction(_class.getName(), _classBytes, bcVect2, bcMatrices, scalars), true);
            if (type == OutProdType.LEFT_OUTER_PRODUCT || type == OutProdType.RIGHT_OUTER_PRODUCT) {
                long numBlocks = mcOut.getNumRowBlocks() * mcOut.getNumColBlocks();
                out = RDDAggregateUtils.sumByKeyStable(out, (int) Math.min(out.getNumPartitions(), numBlocks), false);
            }
            sec.setRDDHandleForVariable(_out.getName(), out);
            // maintain lineage info and output characteristics
            maintainLineageInfo(sec, _in, bcVect, _out);
        } else {
            out = in.mapPartitionsToPair(new OuterProductFunction(_class.getName(), _classBytes, bcVect2, bcMatrices, scalars), true);
            MatrixBlock tmp = RDDAggregateUtils.sumStable(out);
            sec.setVariable(_out.getName(), new DoubleObject(tmp.getValue(0, 0)));
        }
    } else if (_class.getSuperclass() == SpoofRowwise.class) {
        // ROW
        if (mcIn.getCols() > mcIn.getColsPerBlock()) {
            throw new DMLRuntimeException("Invalid spark rowwise operator w/ ncol=" + mcIn.getCols() + ", ncolpb=" + mcIn.getColsPerBlock() + ".");
        }
        SpoofRowwise op = (SpoofRowwise) CodegenUtils.createInstance(_class);
        long clen2 = op.getRowType().isConstDim2(op.getConstDim2()) ? op.getConstDim2() : op.getRowType().isRowTypeB1() ? sec.getMatrixCharacteristics(_in[1].getName()).getCols() : -1;
        RowwiseFunction fmmc = new RowwiseFunction(_class.getName(), _classBytes, bcVect2, bcMatrices, scalars, (int) mcIn.getCols(), (int) clen2);
        out = in.mapPartitionsToPair(fmmc, op.getRowType() == RowType.ROW_AGG || op.getRowType() == RowType.NO_AGG);
        if (op.getRowType().isColumnAgg() || op.getRowType() == RowType.FULL_AGG) {
            MatrixBlock tmpMB = RDDAggregateUtils.sumStable(out);
            if (op.getRowType().isColumnAgg())
                sec.setMatrixOutput(_out.getName(), tmpMB, getExtendedOpcode());
            else
                sec.setScalarOutput(_out.getName(), new DoubleObject(tmpMB.quickGetValue(0, 0)));
        } else {
            // row-agg or no-agg
            if (op.getRowType() == RowType.ROW_AGG && mcIn.getCols() > mcIn.getColsPerBlock()) {
                out = RDDAggregateUtils.sumByKeyStable(out, (int) Math.min(out.getNumPartitions(), mcIn.getNumRowBlocks()), false);
            }
            sec.setRDDHandleForVariable(_out.getName(), out);
            // maintain lineage info and output characteristics
            maintainLineageInfo(sec, _in, bcVect, _out);
            updateOutputMatrixCharacteristics(sec, op);
        }
    } else {
        throw new DMLRuntimeException("Operator " + _class.getSuperclass() + " is not supported on Spark");
    }
}
Also used : MatrixBlock(org.apache.sysml.runtime.matrix.data.MatrixBlock) SpoofRowwise(org.apache.sysml.runtime.codegen.SpoofRowwise) DoubleObject(org.apache.sysml.runtime.instructions.cp.DoubleObject) ArrayList(java.util.ArrayList) SpoofOperator(org.apache.sysml.runtime.codegen.SpoofOperator) ScalarObject(org.apache.sysml.runtime.instructions.cp.ScalarObject) PartitionedBroadcast(org.apache.sysml.runtime.instructions.spark.data.PartitionedBroadcast) AggregateOperator(org.apache.sysml.runtime.matrix.operators.AggregateOperator) SparkExecutionContext(org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext) SpoofMultiAggregate(org.apache.sysml.runtime.codegen.SpoofMultiAggregate) OutProdType(org.apache.sysml.runtime.codegen.SpoofOuterProduct.OutProdType) MatrixIndexes(org.apache.sysml.runtime.matrix.data.MatrixIndexes) SpoofOuterProduct(org.apache.sysml.runtime.codegen.SpoofOuterProduct) MatrixCharacteristics(org.apache.sysml.runtime.matrix.MatrixCharacteristics) DMLRuntimeException(org.apache.sysml.runtime.DMLRuntimeException) SpoofCellwise(org.apache.sysml.runtime.codegen.SpoofCellwise)
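
The top-level dispatch in this method keys on the superclass of the generated operator class to select the CELL, MAGG, OUTER, or ROW execution path. A minimal sketch of that pattern is shown below; the nested template classes are hypothetical placeholders, not the SystemML codegen types.

class SpoofDispatchSketch {
    static abstract class Cellwise {}                // placeholder template base classes
    static abstract class Rowwise {}
    static class GeneratedCellOp extends Cellwise {} // stands in for a generated operator

    static String pickPath(Class<?> generated) {
        if (generated.getSuperclass() == Cellwise.class) return "CELL";
        if (generated.getSuperclass() == Rowwise.class)  return "ROW";
        throw new IllegalArgumentException("Unsupported operator template: " + generated);
    }

    public static void main(String[] args) {
        System.out.println(pickPath(GeneratedCellOp.class)); // CELL
    }
}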

Aggregations

ScalarObject (org.apache.sysml.runtime.instructions.cp.ScalarObject) 23
MatrixObject (org.apache.sysml.runtime.controlprogram.caching.MatrixObject) 13
DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException) 9
CPOperand (org.apache.sysml.runtime.instructions.cp.CPOperand) 7
DoubleObject (org.apache.sysml.runtime.instructions.cp.DoubleObject) 7
MatrixCharacteristics (org.apache.sysml.runtime.matrix.MatrixCharacteristics) 7
LiteralOp (org.apache.sysml.hops.LiteralOp) 6
IntObject (org.apache.sysml.runtime.instructions.cp.IntObject) 6
DataOp (org.apache.sysml.hops.DataOp) 5
SparkExecutionContext (org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext) 5
ArrayList (java.util.ArrayList) 4
BooleanObject (org.apache.sysml.runtime.instructions.cp.BooleanObject) 4
Data (org.apache.sysml.runtime.instructions.cp.Data) 4
StringObject (org.apache.sysml.runtime.instructions.cp.StringObject) 4
MatrixBlock (org.apache.sysml.runtime.matrix.data.MatrixBlock) 4
MatrixIndexes (org.apache.sysml.runtime.matrix.data.MatrixIndexes) 4
Hop (org.apache.sysml.hops.Hop) 3
UnaryOp (org.apache.sysml.hops.UnaryOp) 3
MetaDataFormat (org.apache.sysml.runtime.matrix.MetaDataFormat) 3
ScalarOperator (org.apache.sysml.runtime.matrix.operators.ScalarOperator) 3