Use of org.apache.sysml.lops.Unary in project incubator-systemml by apache.
The class BinaryOp, method constructLopsBinaryDefault.
private void constructLopsBinaryDefault() throws HopsException, LopsException {
/* Default behavior for BinaryOp: the constructed lop depends on the input data types */
DataType dt1 = getInput().get(0).getDataType();
DataType dt2 = getInput().get(1).getDataType();
if (dt1 == dt2 && dt1 == DataType.SCALAR) {
// Both operands scalar
BinaryScalar binScalar1 = new BinaryScalar(getInput().get(0).constructLops(), getInput().get(1).constructLops(), HopsOpOp2LopsBS.get(op), getDataType(), getValueType());
binScalar1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
setLineNumbers(binScalar1);
setLops(binScalar1);
} else if ((dt1 == DataType.MATRIX && dt2 == DataType.SCALAR) || (dt1 == DataType.SCALAR && dt2 == DataType.MATRIX)) {
// One operand is Matrix and the other is scalar
ExecType et = optFindExecType();
//select specific operator implementations
Unary.OperationTypes ot = null;
Hop right = getInput().get(1);
if (op == OpOp2.POW && right instanceof LiteralOp && ((LiteralOp) right).getDoubleValue() == 2.0)
ot = Unary.OperationTypes.POW2;
else if (op == OpOp2.MULT && right instanceof LiteralOp && ((LiteralOp) right).getDoubleValue() == 2.0)
ot = Unary.OperationTypes.MULTIPLY2;
else
//general case
ot = HopsOpOp2LopsU.get(op);
if (DMLScript.USE_ACCELERATOR && (DMLScript.FORCE_ACCELERATOR || getMemEstimate() < OptimizerUtils.GPU_MEMORY_BUDGET) && (op == OpOp2.MULT || op == OpOp2.PLUS || op == OpOp2.MINUS || op == OpOp2.DIV || op == OpOp2.POW)) {
et = ExecType.GPU;
}
Unary unary1 = new Unary(getInput().get(0).constructLops(), getInput().get(1).constructLops(), ot, getDataType(), getValueType(), et);
setOutputDimensions(unary1);
setLineNumbers(unary1);
setLops(unary1);
} else {
// Both operands are matrices
ExecType et = optFindExecType();
if (et == ExecType.CP) {
if (DMLScript.USE_ACCELERATOR && (DMLScript.FORCE_ACCELERATOR || getMemEstimate() < OptimizerUtils.GPU_MEMORY_BUDGET) && (op == OpOp2.MULT || op == OpOp2.PLUS || op == OpOp2.MINUS || op == OpOp2.DIV || op == OpOp2.POW || op == OpOp2.SOLVE)) {
et = ExecType.GPU;
}
Lop binary = null;
boolean isLeftXGt = (getInput().get(0) instanceof BinaryOp) && ((BinaryOp) getInput().get(0)).getOp() == OpOp2.GREATER;
Hop potentialZero = isLeftXGt ? ((BinaryOp) getInput().get(0)).getInput().get(1) : null;
boolean isLeftXGt0 = isLeftXGt && potentialZero != null && potentialZero instanceof LiteralOp && ((LiteralOp) potentialZero).getDoubleValue() == 0;
if (op == OpOp2.MULT && isLeftXGt0 && !getInput().get(0).isVector() && !getInput().get(1).isVector() && getInput().get(0).dimsKnown() && getInput().get(1).dimsKnown()) {
binary = new ConvolutionTransform(getInput().get(0).getInput().get(0).constructLops(), getInput().get(1).constructLops(), ConvolutionTransform.OperationTypes.RELU_BACKWARD, getDataType(), getValueType(), et, -1);
} else
binary = new Binary(getInput().get(0).constructLops(), getInput().get(1).constructLops(), HopsOpOp2LopsB.get(op), getDataType(), getValueType(), et);
setOutputDimensions(binary);
setLineNumbers(binary);
setLops(binary);
} else if (et == ExecType.SPARK) {
Hop left = getInput().get(0);
Hop right = getInput().get(1);
MMBinaryMethod mbin = optFindMMBinaryMethodSpark(left, right);
Lop binary = null;
if (mbin == MMBinaryMethod.MR_BINARY_UAGG_CHAIN) {
AggUnaryOp uRight = (AggUnaryOp) right;
binary = new BinaryUAggChain(left.constructLops(), HopsOpOp2LopsB.get(op), HopsAgg2Lops.get(uRight.getOp()), HopsDirection2Lops.get(uRight.getDirection()), getDataType(), getValueType(), et);
} else if (mbin == MMBinaryMethod.MR_BINARY_M) {
boolean partitioned = false;
boolean isColVector = (right.getDim2() == 1 && left.getDim1() == right.getDim1());
binary = new BinaryM(left.constructLops(), right.constructLops(), HopsOpOp2LopsB.get(op), getDataType(), getValueType(), et, partitioned, isColVector);
} else {
binary = new Binary(left.constructLops(), right.constructLops(), HopsOpOp2LopsB.get(op), getDataType(), getValueType(), et);
}
setOutputDimensions(binary);
setLineNumbers(binary);
setLops(binary);
} else //MR
{
Hop left = getInput().get(0);
Hop right = getInput().get(1);
MMBinaryMethod mbin = optFindMMBinaryMethod(left, right);
if (mbin == MMBinaryMethod.MR_BINARY_M) {
boolean needPart = requiresPartitioning(right);
Lop dcInput = right.constructLops();
if (needPart) {
//right side in distributed cache
//operator selection
ExecType etPart = (OptimizerUtils.estimateSizeExactSparsity(right.getDim1(), right.getDim2(), OptimizerUtils.getSparsity(right.getDim1(), right.getDim2(), right.getNnz())) < OptimizerUtils.getLocalMemBudget()) ? ExecType.CP : ExecType.MR;
dcInput = new DataPartition(dcInput, DataType.MATRIX, ValueType.DOUBLE, etPart, (right.getDim2() == 1) ? PDataPartitionFormat.ROW_BLOCK_WISE_N : PDataPartitionFormat.COLUMN_BLOCK_WISE_N);
dcInput.getOutputParameters().setDimensions(right.getDim1(), right.getDim2(), right.getRowsInBlock(), right.getColsInBlock(), right.getNnz());
dcInput.setAllPositions(right.getBeginLine(), right.getBeginColumn(), right.getEndLine(), right.getEndColumn());
}
BinaryM binary = new BinaryM(left.constructLops(), dcInput, HopsOpOp2LopsB.get(op), getDataType(), getValueType(), ExecType.MR, needPart, (right.getDim2() == 1 && left.getDim1() == right.getDim1()));
setOutputDimensions(binary);
setLineNumbers(binary);
setLops(binary);
} else if (mbin == MMBinaryMethod.MR_BINARY_UAGG_CHAIN) {
AggUnaryOp uRight = (AggUnaryOp) right;
BinaryUAggChain bin = new BinaryUAggChain(left.constructLops(), HopsOpOp2LopsB.get(op), HopsAgg2Lops.get(uRight.getOp()), HopsDirection2Lops.get(uRight.getDirection()), getDataType(), getValueType(), et);
setOutputDimensions(bin);
setLineNumbers(bin);
setLops(bin);
} else if (mbin == MMBinaryMethod.MR_BINARY_OUTER_R) {
boolean requiresRepLeft = (!right.dimsKnown() || right.getDim2() > right.getColsInBlock());
boolean requiresRepRight = (!left.dimsKnown() || left.getDim1() > right.getRowsInBlock());
Lop leftLop = left.constructLops();
Lop rightLop = right.constructLops();
if (requiresRepLeft) {
//ncol of right determines rep of left
Lop offset = createOffsetLop(right, true);
leftLop = new RepMat(leftLop, offset, true, left.getDataType(), left.getValueType());
setOutputDimensions(leftLop);
setLineNumbers(leftLop);
}
if (requiresRepRight) {
//nrow of left determines rep of right
Lop offset = createOffsetLop(left, false);
rightLop = new RepMat(rightLop, offset, false, right.getDataType(), right.getValueType());
setOutputDimensions(rightLop);
setLineNumbers(rightLop);
}
Group group1 = new Group(leftLop, Group.OperationTypes.Sort, getDataType(), getValueType());
setLineNumbers(group1);
setOutputDimensions(group1);
Group group2 = new Group(rightLop, Group.OperationTypes.Sort, getDataType(), getValueType());
setLineNumbers(group2);
setOutputDimensions(group2);
Binary binary = new Binary(group1, group2, HopsOpOp2LopsB.get(op), getDataType(), getValueType(), et);
setOutputDimensions(binary);
setLineNumbers(binary);
setLops(binary);
} else //MMBinaryMethod.MR_BINARY_R
{
boolean requiresRep = requiresReplication(left, right);
Lop rightLop = right.constructLops();
if (requiresRep) {
//ncol of left input (determines num replicates)
Lop offset = createOffsetLop(left, (right.getDim2() <= 1));
rightLop = new RepMat(rightLop, offset, (right.getDim2() <= 1), right.getDataType(), right.getValueType());
setOutputDimensions(rightLop);
setLineNumbers(rightLop);
}
Group group1 = new Group(getInput().get(0).constructLops(), Group.OperationTypes.Sort, getDataType(), getValueType());
setLineNumbers(group1);
setOutputDimensions(group1);
Group group2 = new Group(rightLop, Group.OperationTypes.Sort, getDataType(), getValueType());
setLineNumbers(group2);
setOutputDimensions(group2);
Binary binary = new Binary(group1, group2, HopsOpOp2LopsB.get(op), getDataType(), getValueType(), et);
setLineNumbers(binary);
setOutputDimensions(binary);
setLops(binary);
}
}
}
}
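The dispatch above is driven by the operand data types: a scalar-scalar pair becomes a BinaryScalar lop, a matrix-scalar pair becomes a Unary lop (with POW2 and MULTIPLY2 specializations when the scalar operand is the literal 2.0), and a matrix-matrix pair becomes a Binary, BinaryM, BinaryUAggChain, or ConvolutionTransform lop depending on the selected backend and pattern. A minimal self-contained sketch of that decision shape, using hypothetical names rather than SystemML's lop classes:

/** Simplified sketch (hypothetical names, not SystemML's API) of the type-driven dispatch. */
public class BinaryDispatchSketch {
    enum DataType { SCALAR, MATRIX }

    static String selectLop(DataType dt1, DataType dt2, String op, Double scalarLiteral) {
        if (dt1 == DataType.SCALAR && dt2 == DataType.SCALAR)
            return "BinaryScalar(" + op + ")";            // both operands scalar
        if ((dt1 == DataType.MATRIX) ^ (dt2 == DataType.MATRIX)) {
            // matrix-scalar: rewrite pow(X,2) and 2*X into cheaper specialized unary operators
            if ("POW".equals(op) && Double.valueOf(2.0).equals(scalarLiteral))
                return "Unary(POW2)";
            if ("MULT".equals(op) && Double.valueOf(2.0).equals(scalarLiteral))
                return "Unary(MULTIPLY2)";
            return "Unary(" + op + ")";
        }
        // matrix-matrix: backend-specific selection (Binary, BinaryM, BinaryUAggChain, ...) follows
        return "Binary(" + op + ")";
    }

    public static void main(String[] args) {
        System.out.println(selectLop(DataType.MATRIX, DataType.SCALAR, "POW", 2.0));   // Unary(POW2)
        System.out.println(selectLop(DataType.MATRIX, DataType.MATRIX, "PLUS", null)); // Binary(PLUS)
    }
}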
Use of org.apache.sysml.lops.Unary in project incubator-systemml by apache.
The class BinaryOp, method constructLopsIQM.
private void constructLopsIQM(ExecType et) throws HopsException, LopsException {
if (et == ExecType.MR) {
CombineBinary combine = CombineBinary.constructCombineLop(OperationTypes.PreSort, (Lop) getInput().get(0).constructLops(), (Lop) getInput().get(1).constructLops(), DataType.MATRIX, getValueType());
combine.getOutputParameters().setDimensions(getInput().get(0).getDim1(), getInput().get(0).getDim2(), getInput().get(0).getRowsInBlock(), getInput().get(0).getColsInBlock(), getInput().get(0).getNnz());
SortKeys sort = SortKeys.constructSortByValueLop(combine, SortKeys.OperationTypes.WithWeights, DataType.MATRIX, ValueType.DOUBLE, ExecType.MR);
// Sort dimensions are the same as those of the first input
sort.getOutputParameters().setDimensions(getInput().get(0).getDim1(), getInput().get(0).getDim2(), getInput().get(0).getRowsInBlock(), getInput().get(0).getColsInBlock(), getInput().get(0).getNnz());
Data lit = Data.createLiteralLop(ValueType.DOUBLE, Double.toString(0.25));
setLineNumbers(lit);
PickByCount pick = new PickByCount(sort, lit, DataType.MATRIX, getValueType(), PickByCount.OperationTypes.RANGEPICK);
pick.getOutputParameters().setDimensions(-1, -1, getRowsInBlock(), getColsInBlock(), -1);
setLineNumbers(pick);
PartialAggregate pagg = new PartialAggregate(pick, HopsAgg2Lops.get(Hop.AggOp.SUM), HopsDirection2Lops.get(Hop.Direction.RowCol), DataType.MATRIX, getValueType());
setLineNumbers(pagg);
// Set the dimensions of PartialAggregate LOP based on the
// direction in which aggregation is performed
pagg.setDimensionsBasedOnDirection(getDim1(), getDim2(), getRowsInBlock(), getColsInBlock());
Group group1 = new Group(pagg, Group.OperationTypes.Sort, DataType.MATRIX, getValueType());
setOutputDimensions(group1);
setLineNumbers(group1);
Aggregate agg1 = new Aggregate(group1, HopsAgg2Lops.get(Hop.AggOp.SUM), DataType.MATRIX, getValueType(), ExecType.MR);
setOutputDimensions(agg1);
agg1.setupCorrectionLocation(pagg.getCorrectionLocation());
setLineNumbers(agg1);
UnaryCP unary1 = new UnaryCP(agg1, HopsOpOp1LopsUS.get(OpOp1.CAST_AS_SCALAR), DataType.SCALAR, getValueType());
unary1.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
setLineNumbers(unary1);
Unary iqm = new Unary(sort, unary1, Unary.OperationTypes.MR_IQM, DataType.SCALAR, ValueType.DOUBLE, ExecType.CP);
iqm.getOutputParameters().setDimensions(0, 0, 0, 0, -1);
setLineNumbers(iqm);
setLops(iqm);
} else {
SortKeys sort = SortKeys.constructSortByValueLop(getInput().get(0).constructLops(), getInput().get(1).constructLops(), SortKeys.OperationTypes.WithWeights, getInput().get(0).getDataType(), getInput().get(0).getValueType(), et);
sort.getOutputParameters().setDimensions(getInput().get(0).getDim1(), getInput().get(0).getDim2(), getInput().get(0).getRowsInBlock(), getInput().get(0).getColsInBlock(), getInput().get(0).getNnz());
PickByCount pick = new PickByCount(sort, null, getDataType(), getValueType(), PickByCount.OperationTypes.IQM, et, true);
setOutputDimensions(pick);
setLineNumbers(pick);
setLops(pick);
}
}
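The IQM construction above sorts the values together with their weights, range-picks the middle 50% (between the 0.25 and 0.75 quantiles), and aggregates the picked values into a scalar. A simplified standalone sketch of the same computation on an unweighted in-memory array (it ignores weights and fractional quantile boundaries, which the actual lops handle):

import java.util.Arrays;

/** Simplified sketch (not SystemML code): inter-quartile mean of unweighted values. */
public class IqmSketch {
    static double iqm(double[] values) {
        double[] v = values.clone();
        Arrays.sort(v);                   // analogous to the SortKeys lop
        int n = v.length;
        int lo = n / 4;                   // drop the lowest quarter
        int hi = n - n / 4;               // drop the highest quarter
        double sum = 0;
        for (int i = lo; i < hi; i++)     // analogous to RANGEPICK followed by the SUM aggregate
            sum += v[i];
        return sum / (hi - lo);           // mean of the retained middle range
    }

    public static void main(String[] args) {
        System.out.println(iqm(new double[] { 1, 2, 3, 4, 5, 6, 7, 8 })); // 4.5
    }
}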
Use of org.apache.sysml.lops.Unary in project incubator-systemml by apache.
The class Dag, method getMapperInstructions.
/**
* Method to get mapper instructions for a MR job.
*
* @param node low-level operator
* @param execNodes list of exec nodes
* @param inputStrings list of input strings
* @param instructionsInMapper list of instructions in mapper
* @param nodeIndexMapping mapping from lops to their assigned output indices
* @param start_index starting index
* @param inputLabels input labels
* @param inputLops input lops
* @param MRJobLineNumbers MR job line numbers
* @return the output index assigned to the node, or -1 if no mapper instruction was generated
* @throws LopsException if LopsException occurs
*/
private int getMapperInstructions(Lop node, ArrayList<Lop> execNodes, ArrayList<String> inputStrings, ArrayList<String> instructionsInMapper, HashMap<Lop, Integer> nodeIndexMapping, int[] start_index, ArrayList<String> inputLabels, ArrayList<Lop> inputLops, ArrayList<Integer> MRJobLineNumbers) throws LopsException {
// if input source, return index
if (nodeIndexMapping.containsKey(node))
return nodeIndexMapping.get(node);
// not an input source and not among the exec nodes, so return -1.
if (!execNodes.contains(node))
return -1;
ArrayList<Integer> inputIndices = new ArrayList<Integer>();
int max_input_index = -1;
// get mapper instructions
for (Lop childNode : node.getInputs()) {
int ret_val = getMapperInstructions(childNode, execNodes, inputStrings, instructionsInMapper, nodeIndexMapping, start_index, inputLabels, inputLops, MRJobLineNumbers);
inputIndices.add(ret_val);
if (ret_val > max_input_index) {
max_input_index = ret_val;
}
}
// map-side node (no MapAndReduce/Reduce children among the exec nodes): add its instruction to the mapper instructions.
if ((node.getExecLocation() == ExecLocation.Map || node.getExecLocation() == ExecLocation.MapOrReduce) && !hasChildNode(node, execNodes, ExecLocation.MapAndReduce) && !hasChildNode(node, execNodes, ExecLocation.Reduce)) {
int output_index = max_input_index;
// cannot reuse index if this is true
// need to add better indexing schemes
// if (child_for_max_input_index.getOutputs().size() > 1) {
output_index = start_index[0];
start_index[0]++;
// }
nodeIndexMapping.put(node, output_index);
if (node instanceof Unary && node.getInputs().size() > 1) {
// Following code must be executed only for those Unary
// operators that have more than one input
// It should not be executed for "true" unary operators like
// cos(A).
int index = 0;
for (int i1 = 0; i1 < node.getInputs().size(); i1++) {
if (node.getInputs().get(i1).getDataType() == DataType.SCALAR) {
index = i1;
break;
}
}
// if data lop not a literal -- add label
if (node.getInputs().get(index).getExecLocation() == ExecLocation.Data && !((Data) (node.getInputs().get(index))).isLiteral()) {
inputLabels.add(node.getInputs().get(index).getOutputParameters().getLabel());
inputLops.add(node.getInputs().get(index));
}
// if not data lop, then this is an intermediate variable.
if (node.getInputs().get(index).getExecLocation() != ExecLocation.Data) {
inputLabels.add(node.getInputs().get(index).getOutputParameters().getLabel());
inputLops.add(node.getInputs().get(index));
}
}
// get mapper instruction.
if (node.getInputs().size() == 1)
instructionsInMapper.add(node.getInstructions(inputIndices.get(0), output_index));
else if (node.getInputs().size() == 2) {
instructionsInMapper.add(node.getInstructions(inputIndices.get(0), inputIndices.get(1), output_index));
} else if (node.getInputs().size() == 3)
instructionsInMapper.add(node.getInstructions(inputIndices.get(0), inputIndices.get(1), inputIndices.get(2), output_index));
else if (node.getInputs().size() == 4) {
// Example: Reshape
instructionsInMapper.add(node.getInstructions(inputIndices.get(0), inputIndices.get(1), inputIndices.get(2), inputIndices.get(3), output_index));
} else if (node.getInputs().size() == 5) {
// Example: RangeBasedReIndex A[row_l:row_u, col_l:col_u]
instructionsInMapper.add(node.getInstructions(inputIndices.get(0), inputIndices.get(1), inputIndices.get(2), inputIndices.get(3), inputIndices.get(4), output_index));
} else if (node.getInputs().size() == 7) {
// Example: RangeBasedReIndex A[row_l:row_u, col_l:col_u] = B
instructionsInMapper.add(node.getInstructions(inputIndices.get(0), inputIndices.get(1), inputIndices.get(2), inputIndices.get(3), inputIndices.get(4), inputIndices.get(5), inputIndices.get(6), output_index));
} else
throw new LopsException("Node with " + node.getInputs().size() + " inputs is not supported in dag.java.");
if (DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers.add(node._beginLine);
}
return output_index;
}
return -1;
}
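getMapperInstructions assigns output indices in a post-order traversal: each node first resolves the indices of its inputs, then takes a fresh index from start_index and emits an instruction string keyed by its input arity. A toy illustration of that index-assignment pattern, using a hypothetical Node class rather than SystemML's Lop/Dag types:

import java.util.*;

/** Toy sketch (hypothetical types, not SystemML's Dag): post-order index assignment over a DAG. */
public class MapperIndexSketch {
    static class Node {
        final String name;
        final List<Node> inputs;
        Node(String name, Node... in) { this.name = name; this.inputs = Arrays.asList(in); }
    }

    static int assign(Node node, Set<Node> execNodes, Map<Node, Integer> nodeIndexMapping,
                      int[] nextIndex, List<String> instructions) {
        if (nodeIndexMapping.containsKey(node))      // already processed: reuse its index
            return nodeIndexMapping.get(node);
        if (!execNodes.contains(node))               // not part of this job
            return -1;
        List<Integer> inputIndices = new ArrayList<>();
        for (Node in : node.inputs)                  // resolve inputs first (post-order)
            inputIndices.add(assign(in, execNodes, nodeIndexMapping, nextIndex, instructions));
        int outputIndex = nextIndex[0]++;            // fresh output index, like start_index[0]++
        nodeIndexMapping.put(node, outputIndex);
        instructions.add(node.name + inputIndices + " -> " + outputIndex);
        return outputIndex;
    }

    public static void main(String[] args) {
        Node a = new Node("A"), b = new Node("B"), plus = new Node("b(+)", a, b);
        List<String> instr = new ArrayList<>();
        assign(plus, new HashSet<>(Arrays.asList(a, b, plus)), new HashMap<>(), new int[] { 0 }, instr);
        instr.forEach(System.out::println);          // A[] -> 0, B[] -> 1, b(+)[0, 1] -> 2
    }
}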