use of org.apache.sysml.runtime.controlprogram.caching.FrameObject in project incubator-systemml by apache.
the class ExecutionContext method releaseFrameInput.
/**
 * Unpins a currently pinned frame variable.
 *
 * @param varName variable name
 * @throws DMLRuntimeException if DMLRuntimeException occurs
 */
public void releaseFrameInput(String varName) throws DMLRuntimeException {
    FrameObject fo = getFrameObject(varName);
    fo.release();
}
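For orientation, here is a minimal, hedged sketch of the pin/unpin pairing this method completes: getFrameInput pins the frame variable and returns its FrameBlock, and releaseFrameInput unpins it once the block is no longer needed. The wrapper method and variable names below are illustrative, not taken from the project, and the imports assume the usual package locations in this code base.

import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysml.runtime.matrix.data.FrameBlock;

//illustrative helper: pin a frame input, read from it, and unpin it again
public static int countFrameRows(ExecutionContext ec, String varName) throws DMLRuntimeException {
    FrameBlock fb = ec.getFrameInput(varName); //pins the underlying FrameObject
    try {
        return fb.getNumRows(); //any read-only access while pinned
    } finally {
        ec.releaseFrameInput(varName); //unpins, making the frame evictable again
    }
}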
use of org.apache.sysml.runtime.controlprogram.caching.FrameObject in project incubator-systemml by apache.
the class FrameEvictionTest method runFrameEvictionTest.
/**
 * Runs a frame eviction round trip and compares the restored frame against the original data.
 *
 * @param schema value types of the generated frame columns
 * @param sparse if true, generate sparse input data
 * @param defaultMeta if true, keep default column metadata; otherwise set custom column names
 * @param force if true, force eviction through the lazy write buffer
 */
private void runFrameEvictionTest(ValueType[] schema, boolean sparse, boolean defaultMeta, boolean force) {
    try {
        //data generation
        double sparsity = sparse ? sparsity2 : sparsity1;
        double[][] A = getRandomMatrix(rows, schema.length, -10, 10, sparsity, 765);
        MatrixBlock mA = DataConverter.convertToMatrixBlock(A);
        FrameBlock fA = DataConverter.convertToFrameBlock(mA, schema);
        //create non-default column names
        if (!defaultMeta) {
            String[] colnames = new String[schema.length];
            for (int i = 0; i < schema.length; i++) colnames[i] = "Custom_name_" + i;
            fA.setColumnNames(colnames);
        }
        //setup caching
        CacheableData.initCaching("tmp_frame_eviction_test");
        //create frame object
        MatrixCharacteristics mc = new MatrixCharacteristics(rows, schema.length, -1, -1, -1);
        MatrixFormatMetaData meta = new MatrixFormatMetaData(mc, OutputInfo.BinaryBlockOutputInfo, InputInfo.BinaryBlockInputInfo);
        FrameObject fo = new FrameObject("fA", meta, schema);
        fo.acquireModify(fA);
        fo.release();
        //evict frame and clear in-memory reference
        if (force)
            LazyWriteBuffer.forceEviction();
        Method clearfo = CacheableData.class.getDeclaredMethod("clearCache", new Class[] {});
        //make method public
        clearfo.setAccessible(true);
        clearfo.invoke(fo, new Object[] {});
        //read frame through buffer pool (if forced, this is a read from disk
        //otherwise deserialization or simple reference depending on schema)
        FrameBlock fA2 = fo.acquireRead();
        fo.release();
        //compare frames
        String[][] sA = DataConverter.convertToStringFrame(fA);
        String[][] sA2 = DataConverter.convertToStringFrame(fA2);
        TestUtils.compareFrames(sA, sA2, rows, schema.length);
    } catch (Exception ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    }
}
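The essence of the lifecycle this test exercises, as a hedged sketch: pin the frame object for write with acquireModify, release it so it becomes evictable, then pin it again for read with acquireRead, which transparently restores the block from the buffer pool or from disk. Caching is assumed to be initialized (as with CacheableData.initCaching above); the wrapper method is illustrative and omits the reflective cache clearing and forced eviction, and the imports assume the usual package locations.

import org.apache.sysml.parser.Expression.ValueType;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.caching.FrameObject;
import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
import org.apache.sysml.runtime.matrix.MatrixFormatMetaData;
import org.apache.sysml.runtime.matrix.data.FrameBlock;
import org.apache.sysml.runtime.matrix.data.InputInfo;
import org.apache.sysml.runtime.matrix.data.OutputInfo;

//illustrative write/read round trip through the buffer pool
public static FrameBlock writeAndReadBack(FrameBlock fb, ValueType[] schema, String fname) throws DMLRuntimeException {
    MatrixCharacteristics mc = new MatrixCharacteristics(fb.getNumRows(), schema.length, -1, -1, -1);
    MatrixFormatMetaData meta = new MatrixFormatMetaData(mc, OutputInfo.BinaryBlockOutputInfo, InputInfo.BinaryBlockInputInfo);
    FrameObject fo = new FrameObject(fname, meta, schema);
    fo.acquireModify(fb); //pin for write and hand over the in-memory block
    fo.release(); //unpin; the block may now be evicted by the buffer pool
    FrameBlock restored = fo.acquireRead(); //pin for read (memory hit or restore from disk)
    fo.release();
    return restored;
}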
use of org.apache.sysml.runtime.controlprogram.caching.FrameObject in project incubator-systemml by apache.
the class MRJobInstruction method extractInputMatrices.
/**
 * Extracts input variables with MATRIX data type, and stores references to
 * corresponding matrix objects in <code>inputMatrices</code>. Also, stores
 * the data types in <code>inputDataTypes</code>.
 *
 * @param ec execution context
 * @return array of matrix objects
 */
public MatrixObject[] extractInputMatrices(ExecutionContext ec) {
    ArrayList<MatrixObject> inputmat = new ArrayList<MatrixObject>();
    inputDataTypes = new DataType[inputVars.length];
    for (int i = 0; i < inputVars.length; i++) {
        Data d = ec.getVariable(inputVars[i]);
        inputDataTypes[i] = d.getDataType();
        if (d.getDataType() == DataType.MATRIX) {
            inputmat.add((MatrixObject) d);
        } else if (d.getDataType() == DataType.FRAME) {
            //FIXME conversion from frame to matrix object (meta data only) to adhere to
            //the given matrix-based mr job submission framework
            FrameObject fo = (FrameObject) d;
            MatrixObject mo = new MatrixObject(fo.getValueType(), fo.getFileName(), fo.getMetaData());
            mo.setFileFormatProperties(fo.getFileFormatProperties());
            inputmat.add(mo);
        }
    }
    inputMatrices = inputmat.toArray(new MatrixObject[inputmat.size()]);
    // populate auxiliary data structures
    populateInputs();
    return inputMatrices;
}
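A hedged usage sketch of the call site: the instruction's inputs are resolved against the execution context right before job submission, with FRAME variables wrapped as meta-data-only MatrixObject instances so they fit the matrix-based MR path. The wrapper method below is illustrative only, and the imports assume the usual package locations.

import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysml.runtime.instructions.MRJobInstruction;

//illustrative call site: resolve inputs ahead of MR job submission
public static void logJobInputs(MRJobInstruction inst, ExecutionContext ec) {
    MatrixObject[] inputs = inst.extractInputMatrices(ec); //MATRIX vars directly, FRAME vars as meta-data-only wrappers
    for (MatrixObject mo : inputs)
        System.out.println("MR job input: " + mo.getFileName());
}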
use of org.apache.sysml.runtime.controlprogram.caching.FrameObject in project incubator-systemml by apache.
the class ParameterizedBuiltinCPInstruction method processInstruction.
@Override
public void processInstruction(ExecutionContext ec) throws DMLRuntimeException {
    String opcode = getOpcode();
    ScalarObject sores = null;
    if (opcode.equalsIgnoreCase("cdf")) {
        SimpleOperator op = (SimpleOperator) _optr;
        double result = op.fn.execute(params);
        sores = new DoubleObject(result);
        ec.setScalarOutput(output.getName(), sores);
    } else if (opcode.equalsIgnoreCase("invcdf")) {
        SimpleOperator op = (SimpleOperator) _optr;
        double result = op.fn.execute(params);
        sores = new DoubleObject(result);
        ec.setScalarOutput(output.getName(), sores);
    } else if (opcode.equalsIgnoreCase("groupedagg")) {
        // acquire locks
        MatrixBlock target = ec.getMatrixInput(params.get(Statement.GAGG_TARGET));
        MatrixBlock groups = ec.getMatrixInput(params.get(Statement.GAGG_GROUPS));
        MatrixBlock weights = null;
        if (params.get(Statement.GAGG_WEIGHTS) != null)
            weights = ec.getMatrixInput(params.get(Statement.GAGG_WEIGHTS));
        int ngroups = -1;
        if (params.get(Statement.GAGG_NUM_GROUPS) != null) {
            ngroups = (int) Double.parseDouble(params.get(Statement.GAGG_NUM_GROUPS));
        }
        // compute the result
        //num threads
        int k = Integer.parseInt(params.get("k"));
        MatrixBlock soresBlock = groups.groupedAggOperations(target, weights, new MatrixBlock(), ngroups, _optr, k);
        ec.setMatrixOutput(output.getName(), soresBlock);
        // release locks
        target = groups = weights = null;
        ec.releaseMatrixInput(params.get(Statement.GAGG_TARGET));
        ec.releaseMatrixInput(params.get(Statement.GAGG_GROUPS));
        if (params.get(Statement.GAGG_WEIGHTS) != null)
            ec.releaseMatrixInput(params.get(Statement.GAGG_WEIGHTS));
    } else if (opcode.equalsIgnoreCase("rmempty")) {
        // acquire locks
        MatrixBlock target = ec.getMatrixInput(params.get("target"));
        MatrixBlock select = params.containsKey("select") ? ec.getMatrixInput(params.get("select")) : null;
        // compute the result
        String margin = params.get("margin");
        MatrixBlock soresBlock = null;
        if (margin.equals("rows"))
            soresBlock = target.removeEmptyOperations(new MatrixBlock(), true, select);
        else if (margin.equals("cols"))
            soresBlock = target.removeEmptyOperations(new MatrixBlock(), false, select);
        else
throw new DMLRuntimeException("Unspupported margin identifier '" + margin + "'.");
        //release locks
        ec.setMatrixOutput(output.getName(), soresBlock);
        ec.releaseMatrixInput(params.get("target"));
        if (params.containsKey("select"))
            ec.releaseMatrixInput(params.get("select"));
    } else if (opcode.equalsIgnoreCase("replace")) {
        // acquire locks
        MatrixBlock target = ec.getMatrixInput(params.get("target"));
        // compute the result
        double pattern = Double.parseDouble(params.get("pattern"));
        double replacement = Double.parseDouble(params.get("replacement"));
        MatrixBlock ret = (MatrixBlock) target.replaceOperations(new MatrixBlock(), pattern, replacement);
        //release locks
        ec.setMatrixOutput(output.getName(), ret);
        ec.releaseMatrixInput(params.get("target"));
    } else if (opcode.equalsIgnoreCase("rexpand")) {
        // acquire locks
        MatrixBlock target = ec.getMatrixInput(params.get("target"));
        // compute the result
        double maxVal = Double.parseDouble(params.get("max"));
        boolean dirVal = params.get("dir").equals("rows");
        boolean cast = Boolean.parseBoolean(params.get("cast"));
        boolean ignore = Boolean.parseBoolean(params.get("ignore"));
        int numThreads = Integer.parseInt(params.get("k"));
        MatrixBlock ret = (MatrixBlock) target.rexpandOperations(new MatrixBlock(), maxVal, dirVal, cast, ignore, numThreads);
        //release locks
        ec.setMatrixOutput(output.getName(), ret);
        ec.releaseMatrixInput(params.get("target"));
    } else if (opcode.equalsIgnoreCase("transform")) {
        FrameObject fo = ec.getFrameObject(params.get("target"));
        MatrixObject out = ec.getMatrixObject(output.getName());
        try {
            JobReturn jt = DataTransform.cpDataTransform(this, new FrameObject[] { fo }, new MatrixObject[] { out });
            out.updateMatrixCharacteristics(jt.getMatrixCharacteristics(0));
        } catch (Exception e) {
            throw new DMLRuntimeException(e);
        }
    } else if (opcode.equalsIgnoreCase("transformapply")) {
        //acquire locks
        FrameBlock data = ec.getFrameInput(params.get("target"));
        FrameBlock meta = ec.getFrameInput(params.get("meta"));
        String[] colNames = data.getColumnNames();
        //compute transformapply
        Encoder encoder = EncoderFactory.createEncoder(params.get("spec"), colNames, data.getNumColumns(), meta);
        MatrixBlock mbout = encoder.apply(data, new MatrixBlock(data.getNumRows(), data.getNumColumns(), false));
        //release locks
        ec.setMatrixOutput(output.getName(), mbout);
        ec.releaseFrameInput(params.get("target"));
        ec.releaseFrameInput(params.get("meta"));
    } else if (opcode.equalsIgnoreCase("transformdecode")) {
        //acquire locks
        MatrixBlock data = ec.getMatrixInput(params.get("target"));
        FrameBlock meta = ec.getFrameInput(params.get("meta"));
        String[] colnames = meta.getColumnNames();
        //compute transformdecode
        Decoder decoder = DecoderFactory.createDecoder(getParameterMap().get("spec"), colnames, null, meta);
        FrameBlock fbout = decoder.decode(data, new FrameBlock(decoder.getSchema()));
        //release locks
        ec.setFrameOutput(output.getName(), fbout);
        ec.releaseMatrixInput(params.get("target"));
        ec.releaseFrameInput(params.get("meta"));
    } else if (opcode.equalsIgnoreCase("transformmeta")) {
        //get input spec and path
        String spec = getParameterMap().get("spec");
        String path = getParameterMap().get(ParameterizedBuiltinFunctionExpression.TF_FN_PARAM_MTD);
        String delim = getParameterMap().containsKey("sep") ? getParameterMap().get("sep") : TfUtils.TXMTD_SEP;
        //execute transform meta data read
        FrameBlock meta = null;
        try {
            meta = TfMetaUtils.readTransformMetaDataFromFile(spec, path, delim);
        } catch (Exception ex) {
            throw new DMLRuntimeException(ex);
        }
        //release locks
        ec.setFrameOutput(output.getName(), meta);
    } else if (opcode.equalsIgnoreCase("toString")) {
        //handle input parameters
        int rows = (getParam("rows") != null) ? Integer.parseInt(getParam("rows")) : TOSTRING_MAXROWS;
        int cols = (getParam("cols") != null) ? Integer.parseInt(getParam("cols")) : TOSTRING_MAXCOLS;
        int decimal = (getParam("decimal") != null) ? Integer.parseInt(getParam("decimal")) : TOSTRING_DECIMAL;
        boolean sparse = (getParam("sparse") != null) ? Boolean.parseBoolean(getParam("sparse")) : TOSTRING_SPARSE;
        String separator = (getParam("sep") != null) ? getParam("sep") : TOSTRING_SEPARATOR;
        String lineseparator = (getParam("linesep") != null) ? getParam("linesep") : TOSTRING_LINESEPARATOR;
        //get input matrix/frame and convert to string
        CacheableData<?> data = ec.getCacheableData(getParam("target"));
        String out = null;
        if (data instanceof MatrixObject) {
            MatrixBlock matrix = (MatrixBlock) data.acquireRead();
            out = DataConverter.toString(matrix, sparse, separator, lineseparator, rows, cols, decimal);
        } else if (data instanceof FrameObject) {
            FrameBlock frame = (FrameBlock) data.acquireRead();
            out = DataConverter.toString(frame, sparse, separator, lineseparator, rows, cols, decimal);
        } else {
            throw new DMLRuntimeException("toString only converts matrix or frames to string");
        }
        ec.releaseCacheableData(getParam("target"));
        ec.setScalarOutput(output.getName(), new StringObject(out));
    } else {
        throw new DMLRuntimeException("Unknown opcode : " + opcode);
    }
}
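As a hedged sketch of the transformapply/transformdecode pair above, detached from the instruction and its pinning logic: an encoder built from the transform specification and the metadata frame turns a FrameBlock into a numeric MatrixBlock, and a decoder maps such a matrix back into a frame. The wrapper method is illustrative and the imports assume the usual package locations in this code base.

import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.matrix.data.FrameBlock;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.transform.decode.Decoder;
import org.apache.sysml.runtime.transform.decode.DecoderFactory;
import org.apache.sysml.runtime.transform.encode.Encoder;
import org.apache.sysml.runtime.transform.encode.EncoderFactory;

//illustrative apply/decode over a frame, mirroring the two opcode branches above
public static FrameBlock applyAndDecode(String spec, FrameBlock data, FrameBlock meta) throws DMLRuntimeException {
    Encoder encoder = EncoderFactory.createEncoder(spec, data.getColumnNames(), data.getNumColumns(), meta);
    MatrixBlock encoded = encoder.apply(data, new MatrixBlock(data.getNumRows(), data.getNumColumns(), false));
    Decoder decoder = DecoderFactory.createDecoder(spec, meta.getColumnNames(), null, meta);
    return decoder.decode(encoded, new FrameBlock(decoder.getSchema()));
}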
use of org.apache.sysml.runtime.controlprogram.caching.FrameObject in project incubator-systemml by apache.
the class SparkExecutionContext method getBroadcastForFrameVariable.
@SuppressWarnings("unchecked")
public PartitionedBroadcast<FrameBlock> getBroadcastForFrameVariable(String varname) throws DMLRuntimeException {
    long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;
    FrameObject fo = getFrameObject(varname);
    PartitionedBroadcast<FrameBlock> bret = null;
    //reuse existing broadcast handle
    if (fo.getBroadcastHandle() != null && fo.getBroadcastHandle().isValid()) {
        bret = fo.getBroadcastHandle().getBroadcast();
    }
    //create new broadcast handle (never created, evicted)
    if (bret == null) {
        //account for overwritten invalid broadcast (e.g., evicted)
        if (fo.getBroadcastHandle() != null)
            CacheableData.addBroadcastSize(-fo.getBroadcastHandle().getSize());
        //obtain meta data for frame
        int bclen = (int) fo.getNumColumns();
        int brlen = OptimizerUtils.getDefaultFrameSize();
        //create partitioned frame block and release memory consumed by input
        FrameBlock mb = fo.acquireRead();
        PartitionedBlock<FrameBlock> pmb = new PartitionedBlock<FrameBlock>(mb, brlen, bclen);
        fo.release();
        //determine coarse-grained partitioning
        int numPerPart = PartitionedBroadcast.computeBlocksPerPartition(fo.getNumRows(), fo.getNumColumns(), brlen, bclen);
        int numParts = (int) Math.ceil((double) pmb.getNumRowBlocks() * pmb.getNumColumnBlocks() / numPerPart);
        Broadcast<PartitionedBlock<FrameBlock>>[] ret = new Broadcast[numParts];
        //create coarse-grained partitioned broadcasts
        if (numParts > 1) {
            for (int i = 0; i < numParts; i++) {
                int offset = i * numPerPart;
                int numBlks = Math.min(numPerPart, pmb.getNumRowBlocks() * pmb.getNumColumnBlocks() - offset);
                PartitionedBlock<FrameBlock> tmp = pmb.createPartition(offset, numBlks, new FrameBlock());
                ret[i] = getSparkContext().broadcast(tmp);
            }
        } else {
            //single partition
            ret[0] = getSparkContext().broadcast(pmb);
        }
        bret = new PartitionedBroadcast<FrameBlock>(ret);
        BroadcastObject<FrameBlock> bchandle = new BroadcastObject<FrameBlock>(bret, varname, OptimizerUtils.estimatePartitionedSizeExactSparsity(fo.getMatrixCharacteristics()));
        fo.setBroadcastHandle(bchandle);
        CacheableData.addBroadcastSize(bchandle.getSize());
    }
    if (DMLScript.STATISTICS) {
        Statistics.accSparkBroadCastTime(System.nanoTime() - t0);
        Statistics.incSparkBroadcastCount(1);
    }
    return bret;
}