Use of org.apache.flink.runtime.operators.DriverStrategy in the Apache Flink project.
From the class JobGraphGenerator, method createDualInputVertex.
/**
 * Translates a two-input optimizer plan node into a job graph vertex.
 *
 * <p>The vertex is configured with the node's resources, its user code, the selected
 * runtime driver strategy, and any driver/pair comparators the strategy requires.
 *
 * @param node the dual-input plan node to translate
 * @return the newly created job vertex
 * @throws CompilerException if the translation fails
 */
private JobVertex createDualInputVertex(DualInputPlanNode node) throws CompilerException {
    final String taskName = node.getNodeName();
    final DriverStrategy driverStrategy = node.getDriverStrategy();
    final JobVertex vertex = new JobVertex(taskName);
    final TaskConfig config = new TaskConfig(vertex.getConfiguration());
    vertex.setResources(node.getMinResources(), node.getPreferredResources());
    // nodes on the dynamic path inside an iteration run the iteration-aware task variant
    final boolean onIterationDynamicPath = this.currentIteration != null && node.isOnDynamicPath();
    vertex.setInvokableClass(onIterationDynamicPath ? IterationIntermediateTask.class : BatchTask.class);
    // attach the user code and its parameters
    config.setStubWrapper(node.getProgramOperator().getUserCodeWrapper());
    config.setStubParameters(node.getProgramOperator().getParameters());
    // configure the runtime driver for the chosen strategy
    config.setDriver(driverStrategy.getDriverClass());
    config.setDriverStrategy(driverStrategy);
    // comparators are optional; set only the ones the node provides
    if (node.getComparator1() != null) {
        config.setDriverComparator(node.getComparator1(), 0);
    }
    if (node.getComparator2() != null) {
        config.setDriverComparator(node.getComparator2(), 1);
    }
    if (node.getPairComparator() != null) {
        config.setDriverPairComparator(node.getPairComparator());
    }
    // memory, file handles, etc.
    assignDriverResources(node, config);
    return vertex;
}
Use of org.apache.flink.runtime.operators.DriverStrategy in the Apache Flink project.
From the class HashJoinBuildFirstProperties, method instantiate.
/**
 * Instantiates the plan node for a hybrid hash join that builds on the first input.
 *
 * <p>If the first (build-side) input is static while the second (probe-side) input is on the
 * dynamic path of an iteration, the cached build side is re-used across iterations: the cache
 * flag is consumed here and the CACHED strategy variant is selected.
 *
 * @param in1 the channel feeding the first (build) input
 * @param in2 the channel feeding the second (probe) input
 * @param node the two-input node being instantiated
 * @return the dual-input plan node carrying the selected driver strategy
 */
@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    DriverStrategy strategy;
    if (!in1.isOnDynamicPath() && in2.isOnDynamicPath()) {
        // sanity check that the first input is cached and remove that cache
        if (!in1.getTempMode().isCached()) {
            throw new CompilerException("No cache at point where static and dynamic parts meet.");
        }
        in1.setTempMode(in1.getTempMode().makeNonCached());
        strategy = DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED;
    } else {
        strategy = DriverStrategy.HYBRIDHASH_BUILD_FIRST;
    }
    // "Join (" with a space, consistent with the build-second variant's display name
    return new DualInputPlanNode(node, "Join (" + node.getOperator().getName() + ")", in1, in2, strategy, this.keys1, this.keys2);
}
Use of org.apache.flink.runtime.operators.DriverStrategy in the Apache Flink project.
From the class HashJoinBuildSecondProperties, method instantiate.
/**
 * Instantiates the plan node for a hybrid hash join that builds on the second input.
 *
 * <p>If the second (build-side) input is static while the first (probe-side) input is on the
 * dynamic path of an iteration, the cached build side is re-used across iterations: the cache
 * flag is consumed here and the CACHED strategy variant is selected.
 *
 * @param in1 the channel feeding the first (probe) input
 * @param in2 the channel feeding the second (build) input
 * @param node the two-input node being instantiated
 * @return the dual-input plan node carrying the selected driver strategy
 */
@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
DriverStrategy strategy;
if (!in2.isOnDynamicPath() && in1.isOnDynamicPath()) {
// sanity check that the second (build-side) input is cached, and remove that cache
if (!in2.getTempMode().isCached()) {
throw new CompilerException("No cache at point where static and dynamic parts meet.");
}
in2.setTempMode(in2.getTempMode().makeNonCached());
strategy = DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED;
} else {
strategy = DriverStrategy.HYBRIDHASH_BUILD_SECOND;
}
return new DualInputPlanNode(node, "Join (" + node.getOperator().getName() + ")", in1, in2, strategy, this.keys1, this.keys2);
}
Use of org.apache.flink.runtime.operators.DriverStrategy in the Apache Flink project.
From the class JobGraphGenerator, method createSingleInputVertex.
// ------------------------------------------------------------------------
// Methods for creating individual vertices
// ------------------------------------------------------------------------
/**
 * Translates a single-input optimizer plan node into either a new job vertex, or a chained
 * task hung onto its predecessor's vertex.
 *
 * <p>Chaining is only possible when the driver strategy supports a chained variant and the
 * connection to the predecessor is a plain forward with no local strategy, matching
 * parallelism, a single outgoing channel on the predecessor, and no broadcast inputs.
 * Several iteration-related cases additionally forbid chaining (see inline comments).
 *
 * @param node the single-input plan node to translate
 * @return the created job vertex, or {@code null} if the node was registered as a chained task
 * @throws CompilerException if the translation fails
 */
private JobVertex createSingleInputVertex(SingleInputPlanNode node) throws CompilerException {
final String taskName = node.getNodeName();
final DriverStrategy ds = node.getDriverStrategy();
// check, whether chaining is possible
boolean chaining;
{
Channel inConn = node.getInput();
PlanNode pred = inConn.getSource();
chaining = ds.getPushChainDriverClass() != null && // first op after union is stand-alone, because union is merged
!(pred instanceof NAryUnionPlanNode) && // partial solution merges anyways
!(pred instanceof BulkPartialSolutionPlanNode) && // workset merges anyways
!(pred instanceof WorksetPlanNode) && // cannot chain with iteration heads currently
!(pred instanceof IterationPlanNode) && inConn.getShipStrategy() == ShipStrategyType.FORWARD && inConn.getLocalStrategy() == LocalStrategy.NONE && pred.getOutgoingChannels().size() == 1 && node.getParallelism() == pred.getParallelism() && node.getBroadcastInputs().isEmpty();
// in a tail
// cannot chain onto the solution-set delta or next-workset producers of a workset iteration
if (this.currentIteration != null && this.currentIteration instanceof WorksetIterationPlanNode && node.getOutgoingChannels().size() > 0) {
WorksetIterationPlanNode wspn = (WorksetIterationPlanNode) this.currentIteration;
if (wspn.getSolutionSetDeltaPlanNode() == pred || wspn.getNextWorkSetPlanNode() == pred) {
chaining = false;
}
}
// cannot chain the nodes that produce the next workset in a bulk iteration if a termination criterion follows
if (this.currentIteration != null && this.currentIteration instanceof BulkIterationPlanNode) {
BulkIterationPlanNode wspn = (BulkIterationPlanNode) this.currentIteration;
if (node == wspn.getRootOfTerminationCriterion() && wspn.getRootOfStepFunction() == pred) {
chaining = false;
} else if (node.getOutgoingChannels().size() > 0 && (wspn.getRootOfStepFunction() == pred || wspn.getRootOfTerminationCriterion() == pred)) {
chaining = false;
}
}
}
final JobVertex vertex;
final TaskConfig config;
if (chaining) {
// chained task: no vertex of its own; its config lives in a fresh Configuration and is
// attached to the predecessor's vertex later
vertex = null;
config = new TaskConfig(new Configuration());
this.chainedTasks.put(node, new TaskInChain(node, ds.getPushChainDriverClass(), config, taskName));
} else {
// create task vertex
vertex = new JobVertex(taskName);
vertex.setResources(node.getMinResources(), node.getPreferredResources());
// nodes on the dynamic path inside an iteration run the iteration-aware task variant
vertex.setInvokableClass((this.currentIteration != null && node.isOnDynamicPath()) ? IterationIntermediateTask.class : BatchTask.class);
config = new TaskConfig(vertex.getConfiguration());
config.setDriver(ds.getDriverClass());
}
// set user code
config.setStubWrapper(node.getProgramOperator().getUserCodeWrapper());
config.setStubParameters(node.getProgramOperator().getParameters());
// set the driver strategy
config.setDriverStrategy(ds);
// the strategy dictates how many comparators the driver needs
for (int i = 0; i < ds.getNumRequiredComparators(); i++) {
config.setDriverComparator(node.getComparator(i), i);
}
// assign memory, file-handles, etc.
assignDriverResources(node, config);
return vertex;
}
Aggregations