Use of org.apache.flink.table.planner.plan.nodes.exec.ExecNode in project flink by apache.
In class InputOrderCalculator, method calculateAllAncestors:
private static Set<ExecNode<?>> calculateAllAncestors(ExecNode<?> node) {
    Set<ExecNode<?>> ret = new HashSet<>();
    AbstractExecNodeExactlyOnceVisitor visitor =
            new AbstractExecNodeExactlyOnceVisitor() {
                @Override
                protected void visitNode(ExecNode<?> node) {
                    // record the node, then continue through its inputs;
                    // the exactly-once visitor ensures each node is recorded only once
                    ret.add(node);
                    visitInputs(node);
                }
            };
    node.accept(visitor);
    return ret;
}
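The method relies on the exactly-once traversal contract of AbstractExecNodeExactlyOnceVisitor: even in a diamond-shaped plan, every reachable node is added to the set once, so the result is the node itself plus all of its transitive inputs. Below is a minimal, self-contained sketch of the same ancestor-collection idea; PlanNode and AncestorExample are hypothetical stand-ins, not Flink classes.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Hypothetical stand-in for ExecNode: a DAG node that knows its inputs.
final class PlanNode {
    final String name;
    final List<PlanNode> inputs = new ArrayList<>();

    PlanNode(String name, PlanNode... ins) {
        this.name = name;
        inputs.addAll(List.of(ins));
    }
}

public class AncestorExample {
    // Collect the node itself plus all transitive inputs, visiting each node once.
    static Set<PlanNode> calculateAllAncestors(PlanNode node) {
        Set<PlanNode> visited = new HashSet<>();
        collect(node, visited);
        return visited;
    }

    private static void collect(PlanNode node, Set<PlanNode> visited) {
        if (!visited.add(node)) {
            return; // already seen; this is what the "exactly once" visitor guarantees
        }
        for (PlanNode input : node.inputs) {
            collect(input, visited);
        }
    }

    public static void main(String[] args) {
        PlanNode source = new PlanNode("source");
        PlanNode calcA = new PlanNode("calcA", source);
        PlanNode calcB = new PlanNode("calcB", source);
        PlanNode join = new PlanNode("join", calcA, calcB);
        // Prints 4: the shared "source" of the diamond is counted only once.
        System.out.println(calculateAllAncestors(join).size());
    }
}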
Use of org.apache.flink.table.planner.plan.nodes.exec.ExecNode in project flink by apache.
In class MultipleInputNodeCreationProcessor, method createStreamMultipleInputNode:
private StreamExecMultipleInput createStreamMultipleInputNode(
        MultipleInputGroup group, List<Tuple3<ExecNode<?>, InputProperty, ExecEdge>> inputs) {
    ExecNode<?> rootNode = group.root.execNode;
    List<ExecNode<?>> inputNodes = new ArrayList<>();
    for (Tuple3<ExecNode<?>, InputProperty, ExecEdge> tuple3 : inputs) {
        inputNodes.add(tuple3.f0);
    }

    String description =
            ExecNodeUtil.getMultipleInputDescription(rootNode, inputNodes, new ArrayList<>());
    StreamExecMultipleInput multipleInput =
            new StreamExecMultipleInput(
                    inputNodes.stream()
                            .map(i -> InputProperty.DEFAULT)
                            .collect(Collectors.toList()),
                    rootNode,
                    description);

    List<ExecEdge> inputEdges = new ArrayList<>(inputNodes.size());
    for (ExecNode<?> inputNode : inputNodes) {
        inputEdges.add(ExecEdge.builder().source(inputNode).target(multipleInput).build());
    }
    multipleInput.setInputEdges(inputEdges);
    return multipleInput;
}
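The essential step is the re-wiring at the end: every node feeding the group from outside becomes a direct input of the new multiple input node, connected through freshly built edges. A small sketch of that "wrap and re-wire" pattern, using hypothetical DagNode/DagEdge classes rather than Flink's ExecNode/ExecEdge:

import java.util.ArrayList;
import java.util.List;

// Hypothetical DAG types; they only illustrate the re-wiring idea.
class DagNode {
    final String name;
    final List<DagEdge> inputEdges = new ArrayList<>();

    DagNode(String name) {
        this.name = name;
    }
}

class DagEdge {
    final DagNode source;
    final DagNode target;

    DagEdge(DagNode source, DagNode target) {
        this.source = source;
        this.target = target;
    }
}

public class RewireExample {
    // Create a merged node whose inputs are exactly the given external input nodes.
    static DagNode mergeUnder(String name, List<DagNode> externalInputs) {
        DagNode merged = new DagNode(name);
        for (DagNode input : externalInputs) {
            // one new edge per external input, pointing at the merged node
            merged.inputEdges.add(new DagEdge(input, merged));
        }
        return merged;
    }

    public static void main(String[] args) {
        List<DagNode> inputs = List.of(new DagNode("scanA"), new DagNode("scanB"));
        DagNode multipleInput = mergeUnder("multipleInput", inputs);
        System.out.println(multipleInput.inputEdges.size()); // 2
    }
}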
Use of org.apache.flink.table.planner.plan.nodes.exec.ExecNode in project flink by apache.
In class MultipleInputNodeCreationProcessor, method wrapExecNodes:
// --------------------------------------------------------------------------------
// Wrapping and Sorting
// --------------------------------------------------------------------------------

private List<ExecNodeWrapper> wrapExecNodes(List<ExecNode<?>> rootNodes) {
    Map<ExecNode<?>, ExecNodeWrapper> wrapperMap = new HashMap<>();
    AbstractExecNodeExactlyOnceVisitor visitor =
            new AbstractExecNodeExactlyOnceVisitor() {
                @Override
                protected void visitNode(ExecNode<?> node) {
                    ExecNodeWrapper wrapper =
                            wrapperMap.computeIfAbsent(node, k -> new ExecNodeWrapper(node));
                    for (ExecEdge inputEdge : node.getInputEdges()) {
                        ExecNode<?> inputNode = inputEdge.getSource();
                        ExecNodeWrapper inputWrapper =
                                wrapperMap.computeIfAbsent(
                                        inputNode, k -> new ExecNodeWrapper(inputNode));
                        wrapper.inputs.add(inputWrapper);
                        inputWrapper.outputs.add(wrapper);
                    }
                    visitInputs(node);
                }
            };
    rootNodes.forEach(s -> s.accept(visitor));

    List<ExecNodeWrapper> rootWrappers = new ArrayList<>();
    for (ExecNode<?> root : rootNodes) {
        ExecNodeWrapper rootWrapper = wrapperMap.get(root);
        Preconditions.checkNotNull(rootWrapper, "Root node is not wrapped. This is a bug.");
        rootWrappers.add(rootWrapper);
    }
    return rootWrappers;
}
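wrapExecNodes builds a bidirectional wrapper graph on top of the ExecNode DAG: computeIfAbsent guarantees exactly one wrapper per node, and every edge is mirrored as an inputs/outputs pair on the two wrappers, so later passes can walk both downstream and upstream. A self-contained sketch of that idea, with hypothetical OpNode/OpNodeWrapper types standing in for ExecNode/ExecNodeWrapper:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Hypothetical DAG node that only knows its inputs.
class OpNode {
    final String name;
    final List<OpNode> inputs = new ArrayList<>();

    OpNode(String name, OpNode... ins) {
        this.name = name;
        inputs.addAll(List.of(ins));
    }
}

// Wrapper that additionally records outputs, so the graph can be walked in both directions.
class OpNodeWrapper {
    final OpNode node;
    final List<OpNodeWrapper> inputs = new ArrayList<>();
    final List<OpNodeWrapper> outputs = new ArrayList<>();

    OpNodeWrapper(OpNode node) {
        this.node = node;
    }
}

public class WrapExample {
    static Map<OpNode, OpNodeWrapper> wrap(List<OpNode> roots) {
        Map<OpNode, OpNodeWrapper> wrapperMap = new HashMap<>();
        Set<OpNode> visited = new HashSet<>();
        for (OpNode root : roots) {
            visit(root, wrapperMap, visited);
        }
        return wrapperMap;
    }

    private static void visit(
            OpNode node, Map<OpNode, OpNodeWrapper> wrapperMap, Set<OpNode> visited) {
        if (!visited.add(node)) {
            return; // each node is wrapped and wired exactly once
        }
        OpNodeWrapper wrapper = wrapperMap.computeIfAbsent(node, OpNodeWrapper::new);
        for (OpNode input : node.inputs) {
            OpNodeWrapper inputWrapper = wrapperMap.computeIfAbsent(input, OpNodeWrapper::new);
            wrapper.inputs.add(inputWrapper);
            inputWrapper.outputs.add(wrapper);
        }
        for (OpNode input : node.inputs) {
            visit(input, wrapperMap, visited);
        }
    }

    public static void main(String[] args) {
        OpNode source = new OpNode("source");
        OpNode sink = new OpNode("sink", source);
        Map<OpNode, OpNodeWrapper> map = wrap(List.of(sink));
        System.out.println(map.get(source).outputs.size()); // 1: the sink consumes the source
    }
}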
Use of org.apache.flink.table.planner.plan.nodes.exec.ExecNode in project flink by apache.
In class MultipleInputNodeCreationProcessor, method process:
@Override
public ExecNodeGraph process(ExecNodeGraph execGraph, ProcessorContext context) {
    if (!isStreaming) {
        // As multiple input nodes use function calls to deliver records between sub-operators,
        // we cannot rely on network buffers to buffer records not yet ready to be read,
        // so only BLOCKING dam behavior is safe here.
        // If a conflict is detected under this stricter constraint,
        // we add a PIPELINED exchange to mark that its input and output nodes cannot be merged
        // into the same multiple input node.
        InputPriorityConflictResolver resolver =
                new InputPriorityConflictResolver(
                        execGraph.getRootNodes(),
                        InputProperty.DamBehavior.BLOCKING,
                        StreamExchangeMode.PIPELINED,
                        context.getPlanner().getConfiguration());
        resolver.detectAndResolve();
    }

    List<ExecNodeWrapper> rootWrappers = wrapExecNodes(execGraph.getRootNodes());
    // sort all nodes in topological order, sinks come first and sources come last
    List<ExecNodeWrapper> orderedWrappers = topologicalSort(rootWrappers);
    // group nodes into multiple input groups
    createMultipleInputGroups(orderedWrappers);
    // apply optimizations to remove unnecessary nodes out of multiple input groups
    optimizeMultipleInputGroups(orderedWrappers, context);
    // create the real multiple input nodes
    List<ExecNode<?>> newRootNodes = createMultipleInputNodes(rootWrappers);
    return new ExecNodeGraph(newRootNodes);
}
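The comment on topologicalSort states the key invariant: wrappers are ordered so that every node appears before all of its inputs (sinks first, sources last), which the grouping and optimization passes then walk in order. A hedged sketch of such an ordering using Kahn's algorithm over a hypothetical wrapper graph with the same inputs/outputs links as in the wrapping sketch above (StepWrapper and TopoSortExample are illustrative names, not Flink classes):

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical wrapper with bidirectional links, as in the wrapping step above.
class StepWrapper {
    final String name;
    final List<StepWrapper> inputs = new ArrayList<>();
    final List<StepWrapper> outputs = new ArrayList<>();

    StepWrapper(String name) {
        this.name = name;
    }

    void addInput(StepWrapper input) {
        inputs.add(input);
        input.outputs.add(this);
    }
}

public class TopoSortExample {
    // Order nodes so that each node precedes all of its inputs: sinks first, sources last.
    static List<StepWrapper> topologicalSort(List<StepWrapper> all) {
        Map<StepWrapper, Integer> pendingOutputs = new HashMap<>();
        Deque<StepWrapper> ready = new ArrayDeque<>();
        for (StepWrapper w : all) {
            pendingOutputs.put(w, w.outputs.size());
            if (w.outputs.isEmpty()) {
                ready.add(w); // sinks have no consumers and can be emitted immediately
            }
        }
        List<StepWrapper> ordered = new ArrayList<>();
        while (!ready.isEmpty()) {
            StepWrapper w = ready.poll();
            ordered.add(w);
            for (StepWrapper input : w.inputs) {
                int remaining = pendingOutputs.merge(input, -1, Integer::sum);
                if (remaining == 0) {
                    ready.add(input); // all consumers emitted, so the input may follow
                }
            }
        }
        return ordered;
    }

    public static void main(String[] args) {
        StepWrapper source = new StepWrapper("source");
        StepWrapper calc = new StepWrapper("calc");
        StepWrapper sink = new StepWrapper("sink");
        calc.addInput(source);
        sink.addInput(calc);
        // Prints sink, calc, source
        topologicalSort(List.of(source, calc, sink)).forEach(w -> System.out.println(w.name));
    }
}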
Use of org.apache.flink.table.planner.plan.nodes.exec.ExecNode in project flink by apache.
In class MultipleInputNodeCreationProcessor, method createBatchMultipleInputNode:
private BatchExecMultipleInput createBatchMultipleInputNode(
        MultipleInputGroup group, List<Tuple3<ExecNode<?>, InputProperty, ExecEdge>> inputs) {
    // first calculate the input orders using InputPriorityConflictResolver
    Set<ExecNode<?>> inputSet = new HashSet<>();
    for (Tuple3<ExecNode<?>, InputProperty, ExecEdge> tuple3 : inputs) {
        inputSet.add(tuple3.f0);
    }
    InputOrderCalculator calculator =
            new InputOrderCalculator(
                    group.root.execNode, inputSet, InputProperty.DamBehavior.BLOCKING);
    Map<ExecNode<?>, Integer> inputOrderMap = calculator.calculate();

    // then create input rels and edges with the input orders
    ExecNode<?> rootNode = group.root.execNode;
    List<ExecNode<?>> inputNodes = new ArrayList<>();
    List<InputProperty> inputProperties = new ArrayList<>();
    List<ExecEdge> originalEdges = new ArrayList<>();
    for (Tuple3<ExecNode<?>, InputProperty, ExecEdge> tuple3 : inputs) {
        ExecNode<?> inputNode = tuple3.f0;
        InputProperty originalInputEdge = tuple3.f1;
        ExecEdge edge = tuple3.f2;
        inputNodes.add(inputNode);
        inputProperties.add(
                InputProperty.builder()
                        .requiredDistribution(originalInputEdge.getRequiredDistribution())
                        .damBehavior(originalInputEdge.getDamBehavior())
                        .priority(inputOrderMap.get(inputNode))
                        .build());
        originalEdges.add(edge);
    }

    String description =
            ExecNodeUtil.getMultipleInputDescription(rootNode, inputNodes, inputProperties);
    BatchExecMultipleInput multipleInput =
            new BatchExecMultipleInput(inputProperties, rootNode, originalEdges, description);

    List<ExecEdge> inputEdges = new ArrayList<>(inputNodes.size());
    for (ExecNode<?> inputNode : inputNodes) {
        inputEdges.add(ExecEdge.builder().source(inputNode).target(multipleInput).build());
    }
    multipleInput.setInputEdges(inputEdges);
    return multipleInput;
}
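The difference from the streaming variant is that each input keeps its original required distribution and dam behavior while its priority is replaced with the order computed by InputOrderCalculator, which records the order in which the inputs are expected to be consumed. A small sketch of that "copy the property, override the priority" step; InputSpec and PriorityExample are hypothetical illustrations, not InputProperty itself, and the assumption that a smaller number means "read earlier" is only for this sketch.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical immutable value class standing in for InputProperty.
final class InputSpec {
    final String requiredDistribution;
    final String damBehavior;
    final int priority;

    InputSpec(String requiredDistribution, String damBehavior, int priority) {
        this.requiredDistribution = requiredDistribution;
        this.damBehavior = damBehavior;
        this.priority = priority;
    }

    // Copy all fields but replace the priority, mirroring the builder-based copy above.
    InputSpec withPriority(int newPriority) {
        return new InputSpec(requiredDistribution, damBehavior, newPriority);
    }
}

public class PriorityExample {
    public static void main(String[] args) {
        // Hypothetical inputs keyed by name, plus calculated input orders
        // (assumption for this sketch: a smaller number means "read earlier").
        Map<String, InputSpec> originals = new HashMap<>();
        originals.put("buildSide", new InputSpec("HASH", "BLOCKING", 0));
        originals.put("probeSide", new InputSpec("HASH", "PIPELINED", 0));

        Map<String, Integer> inputOrder = Map.of("buildSide", 0, "probeSide", 1);

        List<InputSpec> merged = new ArrayList<>();
        originals.forEach((name, spec) -> merged.add(spec.withPriority(inputOrder.get(name))));

        merged.forEach(s -> System.out.println(s.damBehavior + " / priority " + s.priority));
    }
}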