Use of org.apache.ignite.internal.processors.hadoop.planner.HadoopDefaultMapReducePlan in project ignite by apache.
From the class HadoopTestRoundRobinMrPlanner, method preparePlan:
/** {@inheritDoc} */
@Override public HadoopMapReducePlan preparePlan(HadoopJob job, Collection<ClusterNode> top,
    @Nullable HadoopMapReducePlan oldPlan) throws IgniteCheckedException {
    if (top.isEmpty())
        throw new IllegalArgumentException("Topology is empty");

    // Has at least one element.
    Iterator<ClusterNode> it = top.iterator();

    Map<UUID, Collection<HadoopInputSplit>> mappers = new HashMap<>();

    // Distribute input splits across topology nodes in round-robin order.
    for (HadoopInputSplit block : job.input()) {
        ClusterNode node = it.next();

        Collection<HadoopInputSplit> nodeBlocks = mappers.get(node.id());

        if (nodeBlocks == null) {
            nodeBlocks = new ArrayList<>();

            mappers.put(node.id(), nodeBlocks);
        }

        nodeBlocks.add(block);

        // Wrap around to the start of the topology once it is exhausted.
        if (!it.hasNext())
            it = top.iterator();
    }

    // Reducer indices 0..reducers()-1 are all assigned to the next node in round-robin order.
    int[] rdc = new int[job.reducers()];

    for (int i = 0; i < rdc.length; i++)
        rdc[i] = i;

    return new HadoopDefaultMapReducePlan(mappers, Collections.singletonMap(it.next().id(), rdc));
}
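For reference, the round-robin assignment used above can be shown in isolation. The following is a minimal, self-contained sketch, not Ignite code: it uses plain UUIDs in place of ClusterNode IDs and String placeholders in place of HadoopInputSplit, and the class name RoundRobinSketch is illustrative only. It demonstrates how splits cycle over the node list and what the resulting node-to-splits map looks like.

import java.util.*;

/** Standalone illustration (hypothetical simplified types): round-robin split distribution. */
public class RoundRobinSketch {
    public static void main(String[] args) {
        // Stand-ins for ClusterNode.id() values.
        List<UUID> nodes = Arrays.asList(UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID());

        // Stand-ins for HadoopInputSplit instances.
        List<String> splits = Arrays.asList("split-0", "split-1", "split-2", "split-3", "split-4");

        Map<UUID, Collection<String>> mappers = new HashMap<>();

        Iterator<UUID> it = nodes.iterator();

        for (String split : splits) {
            UUID node = it.next();

            mappers.computeIfAbsent(node, k -> new ArrayList<>()).add(split);

            // Wrap around once every node has received a split in this pass.
            if (!it.hasNext())
                it = nodes.iterator();
        }

        // With 5 splits over 3 nodes, two nodes end up with 2 splits and one with 1.
        mappers.forEach((node, assigned) -> System.out.println(node + " -> " + assigned));
    }
}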
Use of org.apache.ignite.internal.processors.hadoop.planner.HadoopDefaultMapReducePlan in project ignite by apache.
From the class IgniteHadoopWeightedMapReducePlanner, method preparePlan:
/** {@inheritDoc} */
@Override public HadoopMapReducePlan preparePlan(HadoopJob job, Collection<ClusterNode> nodes,
    @Nullable HadoopMapReducePlan oldPlan) throws IgniteCheckedException {
    List<HadoopInputSplit> splits = HadoopCommonUtils.sortInputSplits(job.input());

    int reducerCnt = job.reducers();

    if (reducerCnt < 0)
        throw new IgniteCheckedException("Number of reducers must be non-negative, actual: " + reducerCnt);

    // Build the planner's view of the topology.
    HadoopMapReducePlanTopology top = topology(nodes);

    // Assign mappers to nodes, then distribute reducer indices across nodes.
    Mappers mappers = assignMappers(splits, top);

    Map<UUID, int[]> reducers = assignReducers(splits, top, mappers, reducerCnt);

    return new HadoopDefaultMapReducePlan(mappers.nodeToSplits, reducers);
}
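Both planners end by constructing HadoopDefaultMapReducePlan from two maps: node ID to the input splits that node will map, and node ID to the reducer indices that node will run. The sketch below illustrates the shape of those two structures for a two-node plan with three reducers; it uses String placeholders instead of HadoopInputSplit, and the class name PlanShapeSketch and the chosen assignments are illustrative only, not the output of assignMappers or assignReducers.

import java.util.*;

/** Illustrative sketch of the two structures a HadoopDefaultMapReducePlan is built from. */
public class PlanShapeSketch {
    public static void main(String[] args) {
        UUID nodeA = UUID.randomUUID();
        UUID nodeB = UUID.randomUUID();

        // Node ID -> input splits to map on that node (String stands in for HadoopInputSplit).
        Map<UUID, Collection<String>> mappers = new HashMap<>();
        mappers.put(nodeA, Arrays.asList("split-0", "split-1"));
        mappers.put(nodeB, Collections.singletonList("split-2"));

        // Node ID -> reducer indices to run on that node. Each index 0..reducerCnt-1 appears
        // exactly once across all nodes (the round-robin planner above puts them all on one node;
        // here they are spread over two nodes purely for illustration).
        Map<UUID, int[]> reducers = new HashMap<>();
        reducers.put(nodeA, new int[] {0, 2});
        reducers.put(nodeB, new int[] {1});

        System.out.println("Mapper assignment: " + mappers);

        for (Map.Entry<UUID, int[]> e : reducers.entrySet())
            System.out.println("Reducers on " + e.getKey() + ": " + Arrays.toString(e.getValue()));
    }
}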