Use of org.apache.sysml.runtime.matrix.operators.RightScalarOperator in project incubator-systemml by apache.
In class BasicScalarOperationsTest, method runScalarOperationsTest.
/**
 * Runs a matrix-scalar multiply (cell-wise X * 7 via RightScalarOperator) on both
 * the uncompressed and the optionally compressed block and compares the results.
 *
 * @param sptype   sparsity type of the generated input (DENSE, SPARSE, EMPTY)
 * @param vtype    value type of the generated input
 * @param compress whether to compress the CompressedMatrixBlock before the operation
 */
private void runScalarOperationsTest(SparsityType sptype, ValueType vtype, boolean compress) {
    try {
        //prepare sparsity for input data
        double sparsity = -1;
        switch (sptype) {
            case DENSE:
                sparsity = sparsity1;
                break;
            case SPARSE:
                sparsity = sparsity2;
                break;
            case EMPTY:
                sparsity = sparsity3;
                break;
        }

        //generate input data
        double min = (vtype == ValueType.CONST) ? 10 : -10;
        double[][] input = TestUtils.generateTestMatrix(rows, cols, min, 10, sparsity, 7);
        if (vtype == ValueType.RAND_ROUND_OLE || vtype == ValueType.RAND_ROUND_DDC) {
            CompressedMatrixBlock.ALLOW_DDC_ENCODING = (vtype == ValueType.RAND_ROUND_DDC);
            input = TestUtils.round(input);
        }
        MatrixBlock mb = DataConverter.convertToMatrixBlock(input);

        //compress given matrix block
        CompressedMatrixBlock cmb = new CompressedMatrixBlock(mb);
        if (compress)
            cmb.compress();

        //matrix-scalar uncompressed
        ScalarOperator sop = new RightScalarOperator(Multiply.getMultiplyFnObject(), 7);
        MatrixBlock ret1 = (MatrixBlock) mb.scalarOperations(sop, new MatrixBlock());

        //matrix-scalar compressed
        MatrixBlock ret2 = (MatrixBlock) cmb.scalarOperations(sop, new MatrixBlock());
        if (compress)
            ret2 = ((CompressedMatrixBlock) ret2).decompress();

        //compare result with input
        double[][] d1 = DataConverter.convertToDoubleMatrix(ret1);
        double[][] d2 = DataConverter.convertToDoubleMatrix(ret2);
        TestUtils.compareMatrices(d1, d2, rows, cols, 0.0000001);
    }
    catch (Exception ex) {
        throw new RuntimeException(ex);
    }
    finally {
        CompressedMatrixBlock.ALLOW_DDC_ENCODING = true;
    }
}
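In the test above, the RightScalarOperator binds the constant 7 as the right operand of Multiply, so each cell is computed as x * 7. The distinction from LeftScalarOperator only matters for non-commutative functions. The following minimal sketch is not part of the test; it assumes that LeftScalarOperator, Minus.getMinusFnObject(), and MatrixBlock.quickGetValue are available alongside the APIs already used above.

// Hedged sketch: right- vs. left-bound scalar for a non-commutative function.
double[][] data = { { 1, 2 }, { 3, 4 } };
MatrixBlock X = DataConverter.convertToMatrixBlock(data);

// X - 5: the scalar is the right operand of the value function (assumed Minus singleton)
ScalarOperator rsub = new RightScalarOperator(Minus.getMinusFnObject(), 5);
MatrixBlock r1 = (MatrixBlock) X.scalarOperations(rsub, new MatrixBlock());

// 5 - X: the scalar is the left operand (assumed LeftScalarOperator counterpart)
ScalarOperator lsub = new LeftScalarOperator(Minus.getMinusFnObject(), 5);
MatrixBlock r2 = (MatrixBlock) X.scalarOperations(lsub, new MatrixBlock());

System.out.println(r1.quickGetValue(0, 0)); // expected -4.0 (1 - 5)
System.out.println(r2.quickGetValue(0, 0)); // expected  4.0 (5 - 1)

Because multiplication is commutative, the Right vs. Left choice does not change the result of the test above; it does matter for subtraction, division, and power.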
Use of org.apache.sysml.runtime.matrix.operators.RightScalarOperator in project incubator-systemml by apache.
In class BuiltinBinarySPInstruction, method parseInstruction.
public static BuiltinBinarySPInstruction parseInstruction(String str) throws DMLRuntimeException {
    CPOperand in1 = new CPOperand("", ValueType.UNKNOWN, DataType.UNKNOWN);
    CPOperand in2 = new CPOperand("", ValueType.UNKNOWN, DataType.UNKNOWN);
    CPOperand out = new CPOperand("", ValueType.UNKNOWN, DataType.UNKNOWN);
    String opcode = null;
    boolean isBroadcast = false;
    VectorType vtype = null;
    ValueFunction func = null;

    if (str.startsWith("SPARK" + Lop.OPERAND_DELIMITOR + "map")) {
        //map builtin function
        String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
        InstructionUtils.checkNumFields(parts, 5);
        opcode = parts[0];
        in1.split(parts[1]);
        in2.split(parts[2]);
        out.split(parts[3]);
        func = Builtin.getBuiltinFnObject(opcode.substring(3));
        vtype = VectorType.valueOf(parts[5]);
        isBroadcast = true;
    }
    else {
        //default builtin function
        opcode = parseBinaryInstruction(str, in1, in2, out);
        func = Builtin.getBuiltinFnObject(opcode);
    }

    //sanity check value function
    if (func == null)
        throw new DMLRuntimeException("Failed to create builtin value function for opcode: " + opcode);

    //determine the appropriate instruction based on the operand data types
    if (in1.getDataType() != in2.getDataType()) {
        //MATRIX-SCALAR
        return new MatrixScalarBuiltinSPInstruction(new RightScalarOperator(func, 0), in1, in2, out, opcode, str);
    }
    else {
        //MATRIX-MATRIX
        if (isBroadcast)
            return new MatrixBVectorBuiltinSPInstruction(new BinaryOperator(func), in1, in2, out, vtype, opcode, str);
        else
            return new MatrixMatrixBuiltinSPInstruction(new BinaryOperator(func), in1, in2, out, opcode, str);
    }
}
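In the MATRIX-SCALAR branch, the builtin value function is wrapped in a RightScalarOperator with a placeholder constant of 0; the actual scalar operand is presumably bound when the instruction executes. The sketch below is not taken from this class; it assumes that "max" is a valid builtin name for Builtin.getBuiltinFnObject and reuses the MatrixBlock and DataConverter APIs from the test snippets, to show what such an operator computes once a concrete constant is set.

// Hedged sketch: cell-wise max(x, 0.5) through a RightScalarOperator.
ValueFunction fn = Builtin.getBuiltinFnObject("max"); // assumed builtin name
ScalarOperator sop = new RightScalarOperator(fn, 0.5);

double[][] data = { { -1, 0.2 }, { 0.7, 2 } };
MatrixBlock X = DataConverter.convertToMatrixBlock(data);
MatrixBlock Y = (MatrixBlock) X.scalarOperations(sop, new MatrixBlock());

System.out.println(Y.quickGetValue(0, 0)); // expected 0.5 (max(-1, 0.5))
System.out.println(Y.quickGetValue(1, 1)); // expected 2.0 (max(2, 0.5))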
Use of org.apache.sysml.runtime.matrix.operators.RightScalarOperator in project incubator-systemml by apache.
In class BasicScalarOperationsSparseUnsafeTest, method runScalarOperationsTest.
/**
 * Runs a sparse-unsafe matrix-scalar plus (cell-wise X + 7 via RightScalarOperator) on
 * both the uncompressed and the optionally compressed block and compares the results.
 *
 * @param sptype   sparsity type of the generated input (DENSE, SPARSE, EMPTY)
 * @param vtype    value type of the generated input
 * @param compress whether to compress the CompressedMatrixBlock before the operation
 */
private void runScalarOperationsTest(SparsityType sptype, ValueType vtype, boolean compress) {
    try {
        //prepare sparsity for input data
        double sparsity = -1;
        switch (sptype) {
            case DENSE:
                sparsity = sparsity1;
                break;
            case SPARSE:
                sparsity = sparsity2;
                break;
            case EMPTY:
                sparsity = sparsity3;
                break;
        }

        //generate input data
        double min = (vtype == ValueType.CONST) ? 10 : -10;
        double[][] input = TestUtils.generateTestMatrix(rows, cols, min, 10, sparsity, 7);
        if (vtype == ValueType.RAND_ROUND_OLE || vtype == ValueType.RAND_ROUND_DDC) {
            CompressedMatrixBlock.ALLOW_DDC_ENCODING = (vtype == ValueType.RAND_ROUND_DDC);
            input = TestUtils.round(input);
        }
        MatrixBlock mb = DataConverter.convertToMatrixBlock(input);

        //compress given matrix block
        CompressedMatrixBlock cmb = new CompressedMatrixBlock(mb);
        if (compress)
            cmb.compress();

        //matrix-scalar uncompressed
        ScalarOperator sop = new RightScalarOperator(Plus.getPlusFnObject(), 7);
        MatrixBlock ret1 = (MatrixBlock) mb.scalarOperations(sop, new MatrixBlock());

        //matrix-scalar compressed
        MatrixBlock ret2 = (MatrixBlock) cmb.scalarOperations(sop, new MatrixBlock());
        if (compress)
            ret2 = ((CompressedMatrixBlock) ret2).decompress();

        //compare result with input
        double[][] d1 = DataConverter.convertToDoubleMatrix(ret1);
        double[][] d2 = DataConverter.convertToDoubleMatrix(ret2);
        TestUtils.compareMatrices(d1, d2, rows, cols, 0.0000001);
    }
    catch (Exception ex) {
        throw new RuntimeException(ex);
    }
    finally {
        CompressedMatrixBlock.ALLOW_DDC_ENCODING = true;
    }
}
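This variant differs from BasicScalarOperationsTest only in the value function: Plus instead of Multiply. Adding a non-zero constant is sparse-unsafe because it also changes zero cells, which is what this test exercises against compressed blocks. The minimal sketch below is not part of the test; it assumes MatrixBlock.quickGetValue and MatrixBlock.getNonZeros are available alongside the APIs already used above.

// Hedged sketch: a non-zero plus turns zero cells into non-zeros (sparse-unsafe).
double[][] data = { { 0, 2 }, { 0, 0 } };
MatrixBlock X = DataConverter.convertToMatrixBlock(data);

ScalarOperator sop = new RightScalarOperator(Plus.getPlusFnObject(), 7);
MatrixBlock Y = (MatrixBlock) X.scalarOperations(sop, new MatrixBlock());

System.out.println(Y.quickGetValue(0, 0)); // expected 7.0: a former zero cell is now non-zero
System.out.println(Y.quickGetValue(0, 1)); // expected 9.0 (2 + 7)
System.out.println(Y.getNonZeros());       // expected 4: the result block is fully dense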