Use of org.apache.sysml.runtime.matrix.data.MatrixBlock in project incubator-systemml by apache.
The class TernaryInstruction, method processInstruction.
@Override
public void processInstruction(Class<? extends MatrixValue> valueClass, CachedValueMap cachedValues, IndexedMatrixValue tempValue, IndexedMatrixValue zeroInput, int blockRowFactor, int blockColFactor) {
    // resolve inputs: matrix operands come from the cache, scalar operands from the preloaded m1/m2/m3
    MatrixBlock lm1 = input1.isMatrix() ? (MatrixBlock) cachedValues.getFirst(ixinput1).getValue() : m1;
    MatrixBlock lm2 = input2.isMatrix() ? (MatrixBlock) cachedValues.getFirst(ixinput2).getValue() : m2;
    MatrixBlock lm3 = input3.isMatrix() ? (MatrixBlock) cachedValues.getFirst(ixinput3).getValue() : m3;
    // take the block indexes from the first input that is a matrix
    MatrixIndexes ixin = input1.isMatrix() ? cachedValues.getFirst(ixinput1).getIndexes() :
        input2.isMatrix() ? cachedValues.getFirst(ixinput2).getIndexes() :
        cachedValues.getFirst(ixinput3).getIndexes();
    // prepare output
    IndexedMatrixValue out = new IndexedMatrixValue(new MatrixIndexes(), new MatrixBlock());
    out.getIndexes().setIndexes(ixin);
    // process instruction
    TernaryOperator op = (TernaryOperator) optr;
    lm1.ternaryOperations(op, lm2, lm3, (MatrixBlock) out.getValue());
    // put the output value in the cache
    cachedValues.add(ixoutput, out);
}
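For context, the block-local work here is an element-wise combination of three aligned blocks. The sketch below is a minimal, self-contained illustration using plain double[][] arrays and assumed plus-mult (+*) semantics (out = A + B * C); the class and method names are hypothetical and do not reflect the actual MatrixBlock.ternaryOperations API.

// Minimal sketch of an element-wise ternary block operation, assuming dense
// double[][] blocks and plus-mult semantics (out = a + b * c). The real
// MatrixBlock.ternaryOperations dispatches on the TernaryOperator instead;
// this is an illustration only.
public class TernaryBlockSketch {
    static double[][] plusMult(double[][] a, double[][] b, double[][] c) {
        int rows = a.length, cols = a[0].length;
        double[][] out = new double[rows][cols];
        for (int i = 0; i < rows; i++)
            for (int j = 0; j < cols; j++)
                out[i][j] = a[i][j] + b[i][j] * c[i][j];
        return out;
    }

    public static void main(String[] args) {
        double[][] a = {{1, 2}, {3, 4}};
        double[][] b = {{1, 1}, {1, 1}};
        double[][] c = {{10, 20}, {30, 40}};
        double[][] r = plusMult(a, b, c);
        System.out.println(r[0][0] + " " + r[1][1]); // 11.0 44.0
    }
}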
Use of org.apache.sysml.runtime.matrix.data.MatrixBlock in project incubator-systemml by apache.
The class AggregateTernarySPInstruction, method processInstruction.
@Override
public void processInstruction(ExecutionContext ec) {
    SparkExecutionContext sec = (SparkExecutionContext) ec;
    // get inputs
    MatrixCharacteristics mcIn = sec.getMatrixCharacteristics(input1.getName());
    JavaPairRDD<MatrixIndexes, MatrixBlock> in1 = sec.getBinaryBlockRDDHandleForVariable(input1.getName());
    JavaPairRDD<MatrixIndexes, MatrixBlock> in2 = sec.getBinaryBlockRDDHandleForVariable(input2.getName());
    // third input is either a matrix or the literal 1
    JavaPairRDD<MatrixIndexes, MatrixBlock> in3 = input3.isLiteral() ? null :
        sec.getBinaryBlockRDDHandleForVariable(input3.getName());
    // execute aggregate ternary operation
    AggregateTernaryOperator aggop = (AggregateTernaryOperator) _optr;
    JavaPairRDD<MatrixIndexes, MatrixBlock> out = null;
    if (in3 != null) {
        // 3 inputs
        out = in1.join(in2).join(in3).mapToPair(new RDDAggregateTernaryFunction(aggop));
    } else {
        // 2 inputs (third is literal 1)
        out = in1.join(in2).mapToPair(new RDDAggregateTernaryFunction2(aggop));
    }
    // aggregate partial results
    if (aggop.indexFn instanceof ReduceAll) {
        // tak+*: aggregate and create scalar output (no lineage because scalar)
        MatrixBlock tmp = RDDAggregateUtils.sumStable(out.values());
        DoubleObject ret = new DoubleObject(tmp.getValue(0, 0));
        sec.setVariable(output.getName(), ret);
    } else if (mcIn.dimsKnown() && mcIn.getCols() <= mcIn.getColsPerBlock()) {
        // tack+*, single block: aggregate locally and drop the correction
        MatrixBlock ret = RDDAggregateUtils.aggStable(out, aggop.aggOp);
        ret.dropLastRowsOrColumns(aggop.aggOp.correctionLocation);
        // put output block into symbol table (no lineage because single block);
        // this also includes implicit maintenance of matrix characteristics
        sec.setMatrixOutput(output.getName(), ret, getExtendedOpcode());
    } else {
        // tack+*, multi block: aggregate by key and drop the correction
        out = RDDAggregateUtils.aggByKeyStable(out, aggop.aggOp, false);
        out = out.mapValues(new AggregateDropCorrectionFunction(aggop.aggOp));
        // put output RDD handle into symbol table
        updateUnaryAggOutputMatrixCharacteristics(sec, aggop.indexFn);
        sec.setRDDHandleForVariable(output.getName(), out);
        sec.addLineageRDD(output.getName(), input1.getName());
        sec.addLineageRDD(output.getName(), input2.getName());
        if (in3 != null)
            sec.addLineageRDD(output.getName(), input3.getName());
    }
}
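The tak+* case above can be read as sum(A * B * C): each joined block triple contributes a partial sum, and the partial sums are then folded into a single scalar. Below is a minimal sketch of that two-stage pattern with plain arrays standing in for blocks and RDDs; all names are illustrative, not the SystemML API.

// Minimal sketch of the tak+* pattern (sum over the element-wise product of
// three matrices): each "block" contributes a partial sum, and the partial
// sums are then reduced to one scalar, analogous to the map + sumStable steps.
import java.util.Arrays;
import java.util.List;

public class AggregateTernarySketch {
    // per-block partial aggregate, analogous to the mapToPair step
    static double partialSum(double[] a, double[] b, double[] c) {
        double sum = 0;
        for (int i = 0; i < a.length; i++)
            sum += a[i] * b[i] * c[i];
        return sum;
    }

    public static void main(String[] args) {
        // three "matrices", each split into two aligned blocks
        List<double[]> aBlocks = Arrays.asList(new double[]{1, 2}, new double[]{3, 4});
        List<double[]> bBlocks = Arrays.asList(new double[]{1, 1}, new double[]{1, 1});
        List<double[]> cBlocks = Arrays.asList(new double[]{2, 2}, new double[]{2, 2});
        // global aggregation of the partial sums
        double total = 0;
        for (int k = 0; k < aBlocks.size(); k++)
            total += partialSum(aBlocks.get(k), bBlocks.get(k), cBlocks.get(k));
        System.out.println(total); // 2*(1+2+3+4) = 20.0
    }
}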
Use of org.apache.sysml.runtime.matrix.data.MatrixBlock in project incubator-systemml by apache.
The class AppendGSPInstruction, method processInstruction.
@Override
public void processInstruction(ExecutionContext ec) {
    // general case append (map-extend, aggregate)
    SparkExecutionContext sec = (SparkExecutionContext) ec;
    checkBinaryAppendInputCharacteristics(sec, _cbind, false, false);
    MatrixCharacteristics mc1 = sec.getMatrixCharacteristics(input1.getName());
    MatrixCharacteristics mc2 = sec.getMatrixCharacteristics(input2.getName());
    JavaPairRDD<MatrixIndexes, MatrixBlock> in1 = sec.getBinaryBlockRDDHandleForVariable(input1.getName());
    JavaPairRDD<MatrixIndexes, MatrixBlock> in2 = sec.getBinaryBlockRDDHandleForVariable(input2.getName());
    JavaPairRDD<MatrixIndexes, MatrixBlock> out = null;
    // general case: requires shifting and merging and hence incurs a significant performance hit
    JavaPairRDD<MatrixIndexes, MatrixBlock> shifted_in2 = in2.flatMapToPair(new ShiftMatrix(mc1, mc2, _cbind));
    out = in1.cogroup(shifted_in2).mapToPair(new MergeWithShiftedBlocks(mc1, mc2, _cbind));
    // put output RDD handle into symbol table
    updateBinaryAppendOutputMatrixCharacteristics(sec, _cbind);
    sec.setRDDHandleForVariable(output.getName(), out);
    sec.addLineageRDD(output.getName(), input1.getName());
    sec.addLineageRDD(output.getName(), input2.getName());
}
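The key idea in the general append is index shifting: blocks of the second input keep their row indexes but are moved to column-block positions after the first input (for cbind), and blocks that straddle an output block boundary are split and later merged. The sketch below illustrates only the simple shift, under the assumption that the left input's column count is a multiple of the block size so no merging is needed; the class and helper names are hypothetical.

// Minimal sketch of the index shifting behind a general cbind append: blocks
// of the right-hand input keep their row index but move to new column-block
// positions after the left-hand input. This assumes the left input's column
// count is a multiple of the block size, so no block has to be split and
// merged (the general case handled by ShiftMatrix/MergeWithShiftedBlocks).
public class CbindShiftSketch {
    static long shiftedColumnBlockIndex(long colBlockIx, long clenLeft, int blockSize) {
        long leftColBlocks = (clenLeft + blockSize - 1) / blockSize; // ceil(clenLeft / blockSize)
        return colBlockIx + leftColBlocks;
    }

    public static void main(String[] args) {
        // left matrix has 3000 columns, block size 1000 -> 3 column blocks;
        // column-block 2 of the right matrix lands at column-block index 2 + 3 = 5
        System.out.println(shiftedColumnBlockIndex(2, 3000, 1000)); // 5
    }
}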
Use of org.apache.sysml.runtime.matrix.data.MatrixBlock in project incubator-systemml by apache.
The class BinUaggChainSPInstruction, method processInstruction.
@Override
public void processInstruction(ExecutionContext ec) {
    SparkExecutionContext sec = (SparkExecutionContext) ec;
    // get input
    JavaPairRDD<MatrixIndexes, MatrixBlock> in = sec.getBinaryBlockRDDHandleForVariable(input1.getName());
    // execute the fused binary + unary-aggregate chain block-locally
    JavaPairRDD<MatrixIndexes, MatrixBlock> out = in.mapValues(new RDDBinUaggChainFunction(_bOp, _uaggOp));
    // set output RDD
    updateUnaryOutputMatrixCharacteristics(sec);
    sec.setRDDHandleForVariable(output.getName(), out);
    sec.addLineageRDD(output.getName(), input1.getName());
}
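A bin-uagg chain fuses a unary aggregate with a binary operation, e.g. a pattern like X / rowSums(X), so it can be evaluated block-locally (a simple mapValues, no shuffle) as long as each block holds complete rows. The sketch below shows that idea on a dense double[][] block; it is an illustration under that assumption, not the RDDBinUaggChainFunction implementation.

// Minimal sketch of a fused binary + unary-aggregate chain such as
// X / rowSums(X), evaluated block-locally on a dense block. This only works
// without shuffling when each block spans all columns (single column block),
// which is the situation the bin-uagg chain instruction targets.
public class BinUaggChainSketch {
    static double[][] divideByRowSums(double[][] block) {
        int rows = block.length, cols = block[0].length;
        double[][] out = new double[rows][cols];
        for (int i = 0; i < rows; i++) {
            double rowSum = 0;
            for (int j = 0; j < cols; j++)
                rowSum += block[i][j];
            for (int j = 0; j < cols; j++)
                out[i][j] = (rowSum != 0) ? block[i][j] / rowSum : 0;
        }
        return out;
    }

    public static void main(String[] args) {
        double[][] x = {{1, 3}, {2, 2}};
        double[][] r = divideByRowSums(x);
        System.out.println(r[0][0] + " " + r[0][1]); // 0.25 0.75
    }
}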
Use of org.apache.sysml.runtime.matrix.data.MatrixBlock in project incubator-systemml by apache.
The class BinarySPInstruction, method processMatrixMatrixBinaryInstruction.
/**
* Common binary matrix-matrix process instruction
*
* @param ec execution context
*/
protected void processMatrixMatrixBinaryInstruction(ExecutionContext ec) {
    SparkExecutionContext sec = (SparkExecutionContext) ec;
    // sanity check dimensions
    checkMatrixMatrixBinaryCharacteristics(sec);
    updateBinaryOutputMatrixCharacteristics(sec);
    // get input RDDs
    JavaPairRDD<MatrixIndexes, MatrixBlock> in1 = sec.getBinaryBlockRDDHandleForVariable(input1.getName());
    JavaPairRDD<MatrixIndexes, MatrixBlock> in2 = sec.getBinaryBlockRDDHandleForVariable(input2.getName());
    MatrixCharacteristics mc1 = sec.getMatrixCharacteristics(input1.getName());
    MatrixCharacteristics mc2 = sec.getMatrixCharacteristics(input2.getName());
    MatrixCharacteristics mcOut = sec.getMatrixCharacteristics(output.getName());
    BinaryOperator bop = (BinaryOperator) _optr;
    // vector replication if required (mv or outer operations)
    boolean rowvector = (mc2.getRows() == 1 && mc1.getRows() > 1);
    long numRepLeft = getNumReplicas(mc1, mc2, true);
    long numRepRight = getNumReplicas(mc1, mc2, false);
    if (numRepLeft > 1)
        in1 = in1.flatMapToPair(new ReplicateVectorFunction(false, numRepLeft));
    if (numRepRight > 1)
        in2 = in2.flatMapToPair(new ReplicateVectorFunction(rowvector, numRepRight));
    // choose a join partitioning: reuse an existing hash partitioning if present,
    // otherwise derive the number of partitions from the inputs and the output size
    int numPrefPart = SparkUtils.isHashPartitioned(in1) ? in1.getNumPartitions() :
        SparkUtils.isHashPartitioned(in2) ? in2.getNumPartitions() :
        Math.min(in1.getNumPartitions() + in2.getNumPartitions(),
            2 * SparkUtils.getNumPreferredPartitions(mcOut));
    // execute binary operation
    JavaPairRDD<MatrixIndexes, MatrixBlock> out = in1.join(in2, numPrefPart).mapValues(new MatrixMatrixBinaryOpFunction(bop));
    // set output RDD
    sec.setRDDHandleForVariable(output.getName(), out);
    sec.addLineageRDD(output.getName(), input1.getName());
    sec.addLineageRDD(output.getName(), input2.getName());
}
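The replication step is what makes the subsequent join line up: a vector stored as a single block per row (or column) is emitted once per column-block (or row-block) index of the matrix, so every matrix block finds a partner under the same (rowBlock, colBlock) key. The sketch below shows that key fan-out with plain lists; the names and the 1-based block indexing are assumptions for illustration, not the ReplicateVectorFunction code.

// Minimal sketch of vector replication for a matrix-vector join: a column
// vector block in row-block r is emitted once per column-block index, so a
// join by (rowBlockIx, colBlockIx) matches every matrix block in that row.
import java.util.ArrayList;
import java.util.List;

public class ReplicateVectorSketch {
    // one (rowBlockIx, colBlockIx) key per replicated copy of a vector block
    static List<long[]> replicateKeys(long rowBlockIx, long numColBlocks) {
        List<long[]> keys = new ArrayList<>();
        for (long j = 1; j <= numColBlocks; j++)
            keys.add(new long[]{rowBlockIx, j});
        return keys;
    }

    public static void main(String[] args) {
        // a column-vector block in row-block 3, replicated across 4 column blocks
        for (long[] k : replicateKeys(3, 4))
            System.out.println("(" + k[0] + "," + k[1] + ")"); // (3,1) ... (3,4)
    }
}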