Usage of org.apache.sysml.runtime.controlprogram.parfor.ResultMergeRemoteSpark in the Apache incubator-systemml project.
From class ParForProgramBlock, method createResultMerge:
/**
 * Instantiates the result-merge implementation selected by {@code prm},
 * sized according to the degree of parallelism available on the cluster.
 *
 * @param prm   requested result-merge strategy
 * @param out   output matrix the worker results are merged into
 * @param in    per-worker result matrices to merge
 * @param fname target file name used by the file-based merge variants
 * @param accum accumulation flag forwarded to the merge implementation
 * @param ec    execution context, required by the Spark merge variant
 * @return the instantiated result-merge operator
 */
private ResultMerge createResultMerge(PResultMerge prm, MatrixObject out, MatrixObject[] in, String fname, boolean accum, ExecutionContext ec) {
    // derive the maximum map/reduce degree of parallelism
    int mapSlots;
    int redSlots;
    if (OptimizerUtils.isSparkExecutionMode()) {
        // Spark: identical parallelism on the map and reduce side
        mapSlots = (int) SparkExecutionContext.getDefaultParallelism(true);
        redSlots = mapSlots;
    } else {
        mapSlots = InfrastructureAnalyzer.getRemoteParallelMapTasks();
        redSlots = Math.min(ConfigurationManager.getNumReducers(),
            InfrastructureAnalyzer.getRemoteParallelReduceTasks());
        // on YARN clusters, correct the slot counts w.r.t. available cores
        if (InfrastructureAnalyzer.isYarnEnabled()) {
            mapSlots = (int) Math.max(mapSlots, YarnClusterAnalyzer.getNumCores());
            redSlots = (int) Math.max(redSlots, YarnClusterAnalyzer.getNumCores() / 2);
        }
    }
    int numMap = Math.max(_numThreads, mapSlots);
    int numRed = redSlots;
    // instantiate the requested merge strategy
    switch (prm) {
        case LOCAL_MEM:
            return new ResultMergeLocalMemory(out, in, fname, accum);
        case LOCAL_FILE:
            return new ResultMergeLocalFile(out, in, fname, accum);
        case LOCAL_AUTOMATIC:
            return new ResultMergeLocalAutomatic(out, in, fname, accum);
        case REMOTE_MR:
            return new ResultMergeRemoteMR(out, in, fname, accum, _ID,
                numMap, numRed, WRITE_REPLICATION_FACTOR, MAX_RETRYS_ON_ERROR, ALLOW_REUSE_MR_JVMS);
        case REMOTE_SPARK:
            return new ResultMergeRemoteSpark(out, in, fname, accum, ec, numMap, numRed);
        default:
            throw new DMLRuntimeException("Undefined result merge: '" + prm.toString() + "'.");
    }
}
Usage of org.apache.sysml.runtime.controlprogram.parfor.ResultMergeRemoteSpark in the Apache systemml project.
From class ParForProgramBlock, method createResultMerge:
/**
 * Builds the result-merge operator for the given merge type, with the
 * map/reduce degree of parallelism derived from the cluster configuration.
 *
 * @param prm   result-merge strategy to instantiate
 * @param out   merge target matrix
 * @param in    worker result matrices
 * @param fname file name for the file-based merge variants
 * @param accum accumulation flag forwarded to the merge implementation
 * @param ec    execution context, used by the Spark merge variant
 * @return the created result-merge operator
 */
private ResultMerge createResultMerge(PResultMerge prm, MatrixObject out, MatrixObject[] in, String fname, boolean accum, ExecutionContext ec) {
    // determine the maximum degree of parallelism for map and reduce tasks
    int dopMap, dopRed;
    if (OptimizerUtils.isSparkExecutionMode()) {
        dopMap = (int) SparkExecutionContext.getDefaultParallelism(true);
        dopRed = dopMap; // same parallelism on map and reduce side
    } else {
        int configuredReducers = ConfigurationManager.getNumReducers();
        dopMap = InfrastructureAnalyzer.getRemoteParallelMapTasks();
        dopRed = Math.min(configuredReducers,
            InfrastructureAnalyzer.getRemoteParallelReduceTasks());
        // yarn correction: never go below the available core counts
        if (InfrastructureAnalyzer.isYarnEnabled()) {
            dopMap = (int) Math.max(dopMap, YarnClusterAnalyzer.getNumCores());
            dopRed = (int) Math.max(dopRed, YarnClusterAnalyzer.getNumCores() / 2);
        }
    }
    final int numMap = Math.max(_numThreads, dopMap);
    final int numRed = dopRed;
    // dispatch on the requested merge strategy
    final ResultMerge merge;
    switch (prm) {
        case LOCAL_MEM:
            merge = new ResultMergeLocalMemory(out, in, fname, accum);
            break;
        case LOCAL_FILE:
            merge = new ResultMergeLocalFile(out, in, fname, accum);
            break;
        case LOCAL_AUTOMATIC:
            merge = new ResultMergeLocalAutomatic(out, in, fname, accum);
            break;
        case REMOTE_MR:
            merge = new ResultMergeRemoteMR(out, in, fname, accum, _ID, numMap, numRed,
                WRITE_REPLICATION_FACTOR, MAX_RETRYS_ON_ERROR, ALLOW_REUSE_MR_JVMS);
            break;
        case REMOTE_SPARK:
            merge = new ResultMergeRemoteSpark(out, in, fname, accum, ec, numMap, numRed);
            break;
        default:
            throw new DMLRuntimeException("Undefined result merge: '" + prm.toString() + "'.");
    }
    return merge;
}
Usage of org.apache.sysml.runtime.controlprogram.parfor.ResultMergeRemoteSpark in the Apache incubator-systemml project.
From class ParForProgramBlock, method createResultMerge (older variant without the accumulation flag):
/**
 * Creates the result-merge implementation for the requested merge type,
 * sizing it by the cluster's available map/reduce parallelism.
 *
 * @param prm   result-merge strategy to create
 * @param out   output matrix to merge into
 * @param in    worker result matrices to merge
 * @param fname target file name for the file-based merge variants
 * @param ec    execution context, consumed by the Spark merge variant
 * @return the instantiated result-merge operator
 * @throws DMLRuntimeException if {@code prm} is not a known merge type
 */
private ResultMerge createResultMerge(PResultMerge prm, MatrixObject out, MatrixObject[] in, String fname, ExecutionContext ec) throws DMLRuntimeException {
    // compute the maximum map/reduce parallelism
    int parMap;
    int parRed;
    if (OptimizerUtils.isSparkExecutionMode()) {
        // Spark mode: equal parallelism for map and reduce
        parMap = (int) SparkExecutionContext.getDefaultParallelism(true);
        parRed = parMap;
    } else {
        parMap = InfrastructureAnalyzer.getRemoteParallelMapTasks();
        parRed = Math.min(ConfigurationManager.getNumReducers(),
            InfrastructureAnalyzer.getRemoteParallelReduceTasks());
        // yarn clusters: raise slot counts to the available core counts
        if (InfrastructureAnalyzer.isYarnEnabled()) {
            parMap = (int) Math.max(parMap, YarnClusterAnalyzer.getNumCores());
            parRed = (int) Math.max(parRed, YarnClusterAnalyzer.getNumCores() / 2);
        }
    }
    int numMap = Math.max(_numThreads, parMap);
    int numRed = parRed;
    // construct the requested merge implementation
    switch (prm) {
        case LOCAL_MEM:
            return new ResultMergeLocalMemory(out, in, fname);
        case LOCAL_FILE:
            return new ResultMergeLocalFile(out, in, fname);
        case LOCAL_AUTOMATIC:
            return new ResultMergeLocalAutomatic(out, in, fname);
        case REMOTE_MR:
            return new ResultMergeRemoteMR(out, in, fname, _ID,
                numMap, numRed, WRITE_REPLICATION_FACTOR, MAX_RETRYS_ON_ERROR, ALLOW_REUSE_MR_JVMS);
        case REMOTE_SPARK:
            return new ResultMergeRemoteSpark(out, in, fname, ec, numMap, numRed);
        default:
            throw new DMLRuntimeException("Undefined result merge: '" + prm.toString() + "'.");
    }
}
Aggregations