use of org.apache.sysml.parser.Expression.DataType in project incubator-systemml by apache.
the class BinaryOp method optFindExecType.
@Override
protected ExecType optFindExecType() {
    checkAndSetForcedPlatform();
    ExecType REMOTE = OptimizerUtils.isSparkExecutionMode() ? ExecType.SPARK : ExecType.MR;
    DataType dt1 = getInput().get(0).getDataType();
    DataType dt2 = getInput().get(1).getDataType();
    if (_etypeForced != null) {
        _etype = _etypeForced;
    } else {
        if (OptimizerUtils.isMemoryBasedOptLevel()) {
            _etype = findExecTypeByMemEstimate();
        } else {
            _etype = null;
            if (dt1 == DataType.MATRIX && dt2 == DataType.MATRIX) {
                // choose CP if the dimensions of both inputs are below threshold,
                // OR if both inputs are vectors
                if ((getInput().get(0).areDimsBelowThreshold() && getInput().get(1).areDimsBelowThreshold())
                    || (getInput().get(0).isVector() && getInput().get(1).isVector())) {
                    _etype = ExecType.CP;
                }
            } else if (dt1 == DataType.MATRIX && dt2 == DataType.SCALAR) {
                if (getInput().get(0).areDimsBelowThreshold() || getInput().get(0).isVector()) {
                    _etype = ExecType.CP;
                }
            } else if (dt1 == DataType.SCALAR && dt2 == DataType.MATRIX) {
                if (getInput().get(1).areDimsBelowThreshold() || getInput().get(1).isVector()) {
                    _etype = ExecType.CP;
                }
            } else {
                _etype = ExecType.CP;
            }
            // if no CP condition applied
            if (_etype == null)
                _etype = REMOTE;
        }
        // check for valid CP dimensions and matrix size
        checkAndSetInvalidCPDimsAndSize();
    }
    // spark-specific decision refinement (execute unary scalar w/ spark input and
    // single parent also in spark because it's likely cheap and reduces intermediates)
    if (_etype == ExecType.CP && _etypeForced != ExecType.CP
        && getDataType().isMatrix() && (dt1.isScalar() || dt2.isScalar())
        && supportsMatrixScalarOperations() // scalar operations
        && !(getInput().get(dt1.isScalar() ? 1 : 0) instanceof DataOp) // input is not checkpoint
        && getInput().get(dt1.isScalar() ? 1 : 0).getParent().size() == 1 // unary scalar is only parent
        && !HopRewriteUtils.isSingleBlock(getInput().get(dt1.isScalar() ? 1 : 0)) // single block triggered exec
        && getInput().get(dt1.isScalar() ? 1 : 0).optFindExecType() == ExecType.SPARK) {
        // pull unary scalar operation into spark
        _etype = ExecType.SPARK;
    }
    // mark for recompile (forever)
    setRequiresRecompileIfNecessary();
    // ensure cp exec type for single-node operations
    if (op == OpOp2.SOLVE) {
        if (isGPUEnabled())
            _etype = ExecType.GPU;
        else
            _etype = ExecType.CP;
    }
    return _etype;
}
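
The cascade above mixes forced platforms, memory-based estimates, and dimension thresholds. As a reading aid, here is a minimal, self-contained sketch of just the non-forced, non-memory-based branch; ExecKind, Operand, and DIM_THRESHOLD are hypothetical stand-ins for the Hop/OptimizerUtils machinery, not SystemML types.

// Minimal, self-contained illustration of the CP-vs-remote cascade above.
// ExecKind, Operand, and DIM_THRESHOLD are hypothetical stand-ins, not SystemML types.
public class ExecTypeSketch {
    enum ExecKind { CP, REMOTE }

    static final long DIM_THRESHOLD = 1000; // stand-in for the CP dimension threshold

    static class Operand {
        final boolean isMatrix;
        final long rows, cols;
        Operand(boolean isMatrix, long rows, long cols) {
            this.isMatrix = isMatrix; this.rows = rows; this.cols = cols;
        }
        boolean dimsBelowThreshold() { return rows <= DIM_THRESHOLD && cols <= DIM_THRESHOLD; }
        boolean isVector() { return isMatrix && (rows == 1 || cols == 1); }
    }

    // Mirrors the matrix-matrix / matrix-scalar / scalar-scalar cases of optFindExecType.
    static ExecKind chooseExecType(Operand in1, Operand in2) {
        if (in1.isMatrix && in2.isMatrix) {
            if ((in1.dimsBelowThreshold() && in2.dimsBelowThreshold())
                || (in1.isVector() && in2.isVector()))
                return ExecKind.CP;
        } else if (in1.isMatrix) {
            if (in1.dimsBelowThreshold() || in1.isVector())
                return ExecKind.CP;
        } else if (in2.isMatrix) {
            if (in2.dimsBelowThreshold() || in2.isVector())
                return ExecKind.CP;
        } else {
            return ExecKind.CP; // scalar-scalar always runs in CP
        }
        return ExecKind.REMOTE; // no CP condition applied
    }

    public static void main(String[] args) {
        Operand bigMatrix = new Operand(true, 1_000_000, 500);
        Operand scalar = new Operand(false, 0, 0);
        System.out.println(chooseExecType(bigMatrix, scalar)); // REMOTE
    }
}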
use of org.apache.sysml.parser.Expression.DataType in project incubator-systemml by apache.
the class ProgramConverter method serializeDataObject.
public static String serializeDataObject(String key, Data dat) {
    // SCHEMA: <name>|<datatype>|<valuetype>|value
    // (scalars are serialized by value, matrices by filename)
    StringBuilder sb = new StringBuilder();
    // prepare data for serialization
    String name = key;
    DataType datatype = dat.getDataType();
    ValueType valuetype = dat.getValueType();
    String value = null;
    String[] matrixMetaData = null;
    switch (datatype) {
        case SCALAR:
            ScalarObject so = (ScalarObject) dat;
            // name = so.getName();
            value = so.getStringValue();
            break;
        case MATRIX:
            MatrixObject mo = (MatrixObject) dat;
            MetaDataFormat md = (MetaDataFormat) dat.getMetaData();
            MatrixCharacteristics mc = md.getMatrixCharacteristics();
            value = mo.getFileName();
            PartitionFormat partFormat = (mo.getPartitionFormat() != null) ?
                new PartitionFormat(mo.getPartitionFormat(), mo.getPartitionSize()) : PartitionFormat.NONE;
            matrixMetaData = new String[9];
            matrixMetaData[0] = String.valueOf(mc.getRows());
            matrixMetaData[1] = String.valueOf(mc.getCols());
            matrixMetaData[2] = String.valueOf(mc.getRowsPerBlock());
            matrixMetaData[3] = String.valueOf(mc.getColsPerBlock());
            matrixMetaData[4] = String.valueOf(mc.getNonZeros());
            matrixMetaData[5] = InputInfo.inputInfoToString(md.getInputInfo());
            matrixMetaData[6] = OutputInfo.outputInfoToString(md.getOutputInfo());
            matrixMetaData[7] = String.valueOf(partFormat);
            matrixMetaData[8] = String.valueOf(mo.getUpdateType());
            break;
        default:
            throw new DMLRuntimeException("Unable to serialize datatype " + datatype);
    }
    // serialize data
    sb.append(name);
    sb.append(DATA_FIELD_DELIM);
    sb.append(datatype);
    sb.append(DATA_FIELD_DELIM);
    sb.append(valuetype);
    sb.append(DATA_FIELD_DELIM);
    sb.append(value);
    if (matrixMetaData != null)
        for (int i = 0; i < matrixMetaData.length; i++) {
            sb.append(DATA_FIELD_DELIM);
            sb.append(matrixMetaData[i]);
        }
    return sb.toString();
}
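
For reference, the schema in the comment above yields a flat, delimiter-separated record. The following sketch shows roughly what a scalar would serialize to and how such a record could be split back apart; the '|' delimiter and the helper names are illustrative stand-ins, not the actual ProgramConverter.DATA_FIELD_DELIM constant or API.

// Hypothetical round-trip of the <name>|<datatype>|<valuetype>|<value> record above.
// DELIM is an illustrative stand-in for ProgramConverter.DATA_FIELD_DELIM.
public class DataRecordSketch {
    static final String DELIM = "|";

    static String serializeScalar(String name, String valueType, String value) {
        return String.join(DELIM, name, "SCALAR", valueType, value);
    }

    public static void main(String[] args) {
        String rec = serializeScalar("x", "DOUBLE", "7.0");
        System.out.println(rec); // x|SCALAR|DOUBLE|7.0

        // Matrices append further metadata fields (rows, cols, block sizes, nnz, formats, ...),
        // so a parser should split without an upper limit on the number of fields.
        String[] fields = rec.split(java.util.regex.Pattern.quote(DELIM), -1);
        System.out.println(fields[1] + " named '" + fields[0] + "' = " + fields[3]);
    }
}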
use of org.apache.sysml.parser.Expression.DataType in project incubator-systemml by apache.
the class ArithmeticBinaryGPUInstruction method parseInstruction.
public static ArithmeticBinaryGPUInstruction parseInstruction(String str) {
    String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
    InstructionUtils.checkNumFields(parts, 3);
    String opcode = parts[0];
    CPOperand in1 = new CPOperand(parts[1]);
    CPOperand in2 = new CPOperand(parts[2]);
    CPOperand out = new CPOperand(parts[3]);
    DataType dt1 = in1.getDataType();
    DataType dt2 = in2.getDataType();
    DataType dt3 = out.getDataType();
    Operator operator = (dt1 != dt2) ?
        InstructionUtils.parseScalarBinaryOperator(opcode, (dt1 == DataType.SCALAR)) :
        InstructionUtils.parseBinaryOperator(opcode);
    if (dt1 == DataType.MATRIX && dt2 == DataType.MATRIX && dt3 == DataType.MATRIX) {
        return new MatrixMatrixArithmeticGPUInstruction(operator, in1, in2, out, opcode, str);
    } else if (dt3 == DataType.MATRIX
        && ((dt1 == DataType.SCALAR && dt2 == DataType.MATRIX) || (dt1 == DataType.MATRIX && dt2 == DataType.SCALAR))) {
        return new ScalarMatrixArithmeticGPUInstruction(operator, in1, in2, out, opcode, str);
    } else
        throw new DMLRuntimeException("Unsupported GPU ArithmeticInstruction.");
}
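
The parse logic assumes a fixed layout: an opcode followed by three typed operands, with mixed input data types selecting the scalar-matrix variant. Below is a minimal sketch of that dispatch rule; the separators and operand encoding are made up for illustration and are not SystemML's actual Instruction/Lop format.

// Hypothetical instruction layout: opcode, then operands encoded as name.DATATYPE.VALUETYPE.
// The separators used here are illustrative, not SystemML's Instruction/Lop constants.
public class GpuInstructionSketch {
    public static void main(String[] args) {
        String inst = "+ a.MATRIX.DOUBLE b.SCALAR.DOUBLE c.MATRIX.DOUBLE";
        String[] parts = inst.split(" ");
        String opcode = parts[0];
        // Mirrors the dt1/dt2 check in parseInstruction: mixed types => scalar-matrix variant.
        String dt1 = parts[1].split("\\.")[1];
        String dt2 = parts[2].split("\\.")[1];
        boolean scalarVariant = !dt1.equals(dt2);
        System.out.println(opcode + " -> " + (scalarVariant
            ? "ScalarMatrixArithmeticGPUInstruction"
            : "MatrixMatrixArithmeticGPUInstruction"));
    }
}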
use of org.apache.sysml.parser.Expression.DataType in project incubator-systemml by apache.
the class ScalarInstruction method isFirstArgumentScalar.
private static boolean isFirstArgumentScalar(String inst) {
    // get first argument
    String[] parts = InstructionUtils.getInstructionPartsWithValueType(inst);
    String arg1 = parts[1];
    // get data type of first argument
    String[] subparts = arg1.split(Lop.VALUETYPE_PREFIX);
    DataType dt = DataType.valueOf(subparts[1]);
    return (dt == DataType.SCALAR);
}
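
Each operand token therefore encodes its own data type. The sketch below illustrates the split-and-check idea with '·' as a placeholder separator between name, data type, and value type; the real separator is whatever Lop.VALUETYPE_PREFIX defines, so treat the token format here as an assumption.

// Hypothetical operand token of the form <name><sep><datatype><sep><valuetype>.
// The separator below is a placeholder; SystemML defines its own in Lop.VALUETYPE_PREFIX.
public class OperandTypeSketch {
    enum DataType { SCALAR, MATRIX, FRAME }

    static boolean isScalarOperand(String operand, String sep) {
        String[] subparts = operand.split(java.util.regex.Pattern.quote(sep));
        return DataType.valueOf(subparts[1]) == DataType.SCALAR;
    }

    public static void main(String[] args) {
        System.out.println(isScalarOperand("k·SCALAR·INT", "·"));    // true
        System.out.println(isScalarOperand("X·MATRIX·DOUBLE", "·")); // false
    }
}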
use of org.apache.sysml.parser.Expression.DataType in project incubator-systemml by apache.
the class TemplateCell method isValidOperation.
protected static boolean isValidOperation(Hop hop) {
    // prepare indicators for binary operations
    boolean isBinaryMatrixScalar = false;
    boolean isBinaryMatrixVector = false;
    boolean isBinaryMatrixMatrix = false;
    if (hop instanceof BinaryOp && hop.getDataType().isMatrix()) {
        Hop left = hop.getInput().get(0);
        Hop right = hop.getInput().get(1);
        DataType ldt = left.getDataType();
        DataType rdt = right.getDataType();
        isBinaryMatrixScalar = (ldt.isScalar() || rdt.isScalar());
        isBinaryMatrixVector = hop.dimsKnown()
            && ((ldt.isMatrix() && TemplateUtils.isVectorOrScalar(right))
                || (rdt.isMatrix() && TemplateUtils.isVectorOrScalar(left)));
        isBinaryMatrixMatrix = hop.dimsKnown() && HopRewriteUtils.isEqualSize(left, right)
            && ldt.isMatrix() && rdt.isMatrix();
    }
    // prepare indicators for ternary operations
    boolean isTernaryVectorScalarVector = false;
    boolean isTernaryMatrixScalarMatrixDense = false;
    boolean isTernaryIfElse = (HopRewriteUtils.isTernary(hop, OpOp3.IFELSE) && hop.getDataType().isMatrix());
    if (hop instanceof TernaryOp && hop.getInput().size() == 3 && hop.dimsKnown()
        && HopRewriteUtils.checkInputDataTypes(hop, DataType.MATRIX, DataType.SCALAR, DataType.MATRIX)) {
        Hop left = hop.getInput().get(0);
        Hop right = hop.getInput().get(2);
        isTernaryVectorScalarVector = TemplateUtils.isVector(left) && TemplateUtils.isVector(right);
        isTernaryMatrixScalarMatrixDense = HopRewriteUtils.isEqualSize(left, right)
            && !HopRewriteUtils.isSparse(left) && !HopRewriteUtils.isSparse(right);
    }
    // check supported unary, binary, ternary operations
    return hop.getDataType() == DataType.MATRIX && TemplateUtils.isOperationSupported(hop)
        && (hop instanceof UnaryOp || isBinaryMatrixScalar || isBinaryMatrixVector || isBinaryMatrixMatrix
            || isTernaryVectorScalarVector || isTernaryMatrixScalarMatrixDense || isTernaryIfElse
            || (hop instanceof ParameterizedBuiltinOp && ((ParameterizedBuiltinOp) hop).getOp() == ParamBuiltinOp.REPLACE));
}
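
The binary indicators above reduce to simple shape tests: one side scalar, one side a vector, or both sides of equal size. Here is a plain-dimension restatement of those tests, with a hypothetical Shape class standing in for a Hop with known dimensions rather than the actual Hop/TemplateUtils API.

// Plain-dimension restatement of the binary indicators in isValidOperation.
// Shape is a hypothetical stand-in for a Hop with known dimensions.
public class CellTemplateShapeSketch {
    static class Shape {
        final long rows, cols; // 0x0 denotes a scalar
        Shape(long rows, long cols) { this.rows = rows; this.cols = cols; }
        boolean isScalar() { return rows == 0 && cols == 0; }
        boolean isMatrix() { return !isScalar(); }
        boolean isVectorOrScalar() { return isScalar() || rows == 1 || cols == 1; }
    }

    static boolean isBinaryMatrixScalar(Shape l, Shape r) {
        return l.isScalar() || r.isScalar();
    }
    static boolean isBinaryMatrixVector(Shape l, Shape r) {
        return (l.isMatrix() && r.isVectorOrScalar()) || (r.isMatrix() && l.isVectorOrScalar());
    }
    static boolean isBinaryMatrixMatrix(Shape l, Shape r) {
        return l.isMatrix() && r.isMatrix() && l.rows == r.rows && l.cols == r.cols;
    }

    public static void main(String[] args) {
        Shape X = new Shape(1000, 10), v = new Shape(1000, 1), s = new Shape(0, 0);
        System.out.println(isBinaryMatrixScalar(X, s)); // true
        System.out.println(isBinaryMatrixVector(X, v)); // true
        System.out.println(isBinaryMatrixMatrix(X, v)); // false
    }
}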