Example use of org.apache.flink.table.planner.plan.nodes.exec.ExecNode in the Apache Flink project.
From class InputOrderCalculatorTest, method testCalculateInputOrderWithUnaffectedRelatedBoundaries:
@Test
public void testCalculateInputOrderWithUnaffectedRelatedBoundaries() {
    // Legend: P0/P1 = InputProperty.DamBehavior.PIPELINED with priority 0/1,
    //         B0    = InputProperty.DamBehavior.BLOCKING with priority 0.
    //
    // 0 --(P0)-> 1 -------(B0)-----> 2 -(P0)-\
    //  \          \--(B0)-> 3 -(P1)-/         4
    //   \--(B0)-> 5 -------(P1)-----> 6 -(P0)-/
    //             7 ---------(B0)----/
    TestingBatchExecNode[] node = new TestingBatchExecNode[8];
    for (int idx = 0; idx < node.length; idx++) {
        node[idx] = new TestingBatchExecNode("TestingBatchExecNode" + idx);
    }
    // Wire up the DAG exactly as drawn above.
    node[1].addInput(node[0]);
    node[2].addInput(
            node[1],
            InputProperty.builder().damBehavior(InputProperty.DamBehavior.BLOCKING).build());
    node[2].addInput(node[3], InputProperty.builder().priority(1).build());
    node[3].addInput(
            node[1],
            InputProperty.builder().damBehavior(InputProperty.DamBehavior.BLOCKING).build());
    node[4].addInput(node[2]);
    node[4].addInput(node[6]);
    node[5].addInput(
            node[0],
            InputProperty.builder().damBehavior(InputProperty.DamBehavior.BLOCKING).build());
    node[6].addInput(node[5], InputProperty.builder().priority(1).build());
    node[6].addInput(
            node[7],
            InputProperty.builder().damBehavior(InputProperty.DamBehavior.BLOCKING).build());
    // Compute input orders for the boundary nodes 1, 3, 5 and 7 below root 4.
    InputOrderCalculator calculator =
            new InputOrderCalculator(
                    node[4],
                    new HashSet<>(Arrays.asList(node[1], node[3], node[5], node[7])),
                    InputProperty.DamBehavior.BLOCKING);
    Map<ExecNode<?>, Integer> result = calculator.calculate();
    // Every boundary node must receive an order; 3 and 5 come one step later.
    Assert.assertEquals(4, result.size());
    Assert.assertEquals(Integer.valueOf(0), result.get(node[1]));
    Assert.assertEquals(Integer.valueOf(1), result.get(node[3]));
    Assert.assertEquals(Integer.valueOf(1), result.get(node[5]));
    Assert.assertEquals(Integer.valueOf(0), result.get(node[7]));
}
Example use of org.apache.flink.table.planner.plan.nodes.exec.ExecNode in the Apache Flink project.
From class InputOrderCalculatorTest, method testCalculateInputOrder:
@Test
public void testCalculateInputOrder() {
    // Legend: P0/P1 = InputProperty.DamBehavior.PIPELINED with priority 0/1,
    //         B0    = InputProperty.DamBehavior.BLOCKING with priority 0.
    //
    // 0 -(P1)-> 3 -(B0)-\
    //                    6 -(B0)-\
    //           /-(P1)--/         \
    // 1 -(P1)-> 4                  8
    //           \-(B0)-\          /
    //                   7 -(P1)--/
    // 2 -(P1)-> 5 -(P1)-/
    TestingBatchExecNode[] node = new TestingBatchExecNode[9];
    for (int idx = 0; idx < node.length; idx++) {
        node[idx] = new TestingBatchExecNode("TestingBatchExecNode" + idx);
    }
    // Wire up the DAG exactly as drawn above.
    node[3].addInput(node[0], InputProperty.builder().priority(1).build());
    node[4].addInput(node[1], InputProperty.builder().priority(1).build());
    node[5].addInput(node[2], InputProperty.builder().priority(1).build());
    node[6].addInput(
            node[3],
            InputProperty.builder()
                    .damBehavior(InputProperty.DamBehavior.BLOCKING)
                    .priority(0)
                    .build());
    node[6].addInput(node[4], InputProperty.builder().priority(1).build());
    node[7].addInput(
            node[4],
            InputProperty.builder()
                    .damBehavior(InputProperty.DamBehavior.BLOCKING)
                    .priority(0)
                    .build());
    node[7].addInput(node[5], InputProperty.builder().priority(1).build());
    node[8].addInput(
            node[6],
            InputProperty.builder()
                    .damBehavior(InputProperty.DamBehavior.BLOCKING)
                    .priority(0)
                    .build());
    node[8].addInput(node[7], InputProperty.builder().priority(1).build());
    // Compute input orders for the boundary nodes 1, 3 and 5 below root 8.
    InputOrderCalculator calculator =
            new InputOrderCalculator(
                    node[8],
                    new HashSet<>(Arrays.asList(node[1], node[3], node[5])),
                    InputProperty.DamBehavior.BLOCKING);
    Map<ExecNode<?>, Integer> result = calculator.calculate();
    // The blocking chain forces the strict order 3 -> 1 -> 5.
    Assert.assertEquals(3, result.size());
    Assert.assertEquals(Integer.valueOf(0), result.get(node[3]));
    Assert.assertEquals(Integer.valueOf(1), result.get(node[1]));
    Assert.assertEquals(Integer.valueOf(2), result.get(node[5]));
}
Example use of org.apache.flink.table.planner.plan.nodes.exec.ExecNode in the Apache Flink project.
From class InputPriorityGraphGeneratorTest, method testCalculatePipelinedAncestors:
@Test
public void testCalculatePipelinedAncestors() {
    // Legend: P = InputProperty.DamBehavior.PIPELINED,
    //         E = InputProperty.DamBehavior.END_INPUT.
    //
    // 0 ------P----> 1 -E--> 2
    //  \-----P----> 3 -P---/
    //  4 -E-> 5 -P-/      /
    //  6 -----E----------/
    TestingBatchExecNode[] node = new TestingBatchExecNode[7];
    for (int idx = 0; idx < node.length; idx++) {
        node[idx] = new TestingBatchExecNode("TestingBatchExecNode" + idx);
    }
    // Wire up the DAG exactly as drawn above.
    node[1].addInput(node[0]);
    node[2].addInput(
            node[1],
            InputProperty.builder().damBehavior(InputProperty.DamBehavior.END_INPUT).build());
    node[2].addInput(node[3]);
    node[3].addInput(node[0]);
    node[3].addInput(node[5]);
    node[3].addInput(
            node[6],
            InputProperty.builder().damBehavior(InputProperty.DamBehavior.END_INPUT).build());
    node[5].addInput(
            node[4],
            InputProperty.builder().damBehavior(InputProperty.DamBehavior.END_INPUT).build());
    TestingInputPriorityConflictResolver resolver =
            new TestingInputPriorityConflictResolver(
                    Collections.singletonList(node[2]),
                    Collections.emptySet(),
                    InputProperty.DamBehavior.END_INPUT);
    // Only nodes reachable through purely pipelined edges count as ancestors:
    // 0 (via 3) and 5 (via 3); END_INPUT edges cut off 1, 4 and 6.
    List<ExecNode<?>> ancestors = resolver.calculatePipelinedAncestors(node[2]);
    Assert.assertEquals(2, ancestors.size());
    Assert.assertTrue(ancestors.contains(node[0]));
    Assert.assertTrue(ancestors.contains(node[5]));
}
Example use of org.apache.flink.table.planner.plan.nodes.exec.ExecNode in the Apache Flink project.
From class JsonPlanGraph, method convertToExecNodeGraph:
/**
 * Rebuilds an {@link ExecNodeGraph} from this deserialized JSON plan: indexes the nodes by id,
 * reconnects the edges, and returns a graph rooted at the nodes without outgoing edges.
 */
ExecNodeGraph convertToExecNodeGraph() {
    // Index every node by its id, rejecting duplicate ids.
    Map<Integer, ExecNode<?>> nodeById = new HashMap<>();
    for (ExecNode<?> node : nodes) {
        int nodeId = node.getId();
        ExecNode<?> previous = nodeById.put(nodeId, node);
        if (previous != null) {
            throw new TableException(String.format("The id: %s is not unique for ExecNode: %s.\nplease check it.", nodeId, node.getDescription()));
        }
    }
    // Materialize the JSON edges, grouping them by target (inputs) and source (outputs).
    Map<Integer, List<ExecEdge>> inputEdgesById = new HashMap<>();
    Map<Integer, List<ExecEdge>> outputEdgesById = new HashMap<>();
    for (JsonPlanEdge jsonEdge : edges) {
        ExecNode<?> source = nodeById.get(jsonEdge.getSourceId());
        if (source == null) {
            throw new TableException(String.format("Source node id: %s is not found in nodes.", jsonEdge.getSourceId()));
        }
        ExecNode<?> target = nodeById.get(jsonEdge.getTargetId());
        if (target == null) {
            throw new TableException(String.format("Target node id: %s is not found in nodes.", jsonEdge.getTargetId()));
        }
        ExecEdge edge =
                ExecEdge.builder()
                        .source(source)
                        .target(target)
                        .shuffle(jsonEdge.getShuffle())
                        .exchangeMode(jsonEdge.getExchangeMode())
                        .build();
        inputEdgesById.computeIfAbsent(target.getId(), unused -> new ArrayList<>()).add(edge);
        outputEdgesById.computeIfAbsent(source.getId(), unused -> new ArrayList<>()).add(edge);
    }
    // Attach input edges to every node; nodes with no outgoing edges are the roots.
    List<ExecNode<?>> rootNodes = new ArrayList<>();
    for (ExecNode<?> node : nodeById.values()) {
        node.setInputEdges(inputEdgesById.getOrDefault(node.getId(), new ArrayList<>()));
        if (!outputEdgesById.containsKey(node.getId())) {
            rootNodes.add(node);
        }
    }
    return new ExecNodeGraph(flinkVersion, rootNodes);
}
Example use of org.apache.flink.table.planner.plan.nodes.exec.ExecNode in the Apache Flink project.
From class JsonPlanGraph, method fromExecNodeGraph:
/**
 * Flattens an {@link ExecNodeGraph} into its JSON-serializable form: all nodes in topological
 * order plus all edges, failing fast if any node id is duplicated.
 */
static JsonPlanGraph fromExecNodeGraph(ExecNodeGraph execGraph) {
    final List<ExecNode<?>> collectedNodes = new ArrayList<>();
    final List<JsonPlanEdge> collectedEdges = new ArrayList<>();
    final Set<Integer> seenIds = new HashSet<>();
    // Identity set for fast "already visited" checks while traversing.
    final Set<ExecNode<?>> visited = Sets.newIdentityHashSet();
    // Depth-first post-order: inputs are emitted before the node itself,
    // which yields a topological ordering of the collected nodes.
    final ExecNodeVisitor visitor =
            new ExecNodeVisitorImpl() {
                @Override
                public void visit(ExecNode<?> node) {
                    if (visited.contains(node)) {
                        return;
                    }
                    super.visitInputs(node);
                    final int id = node.getId();
                    if (seenIds.contains(id)) {
                        throw new TableException(String.format("The id: %s is not unique for ExecNode: %s.\nplease check it.", id, node.getDescription()));
                    }
                    collectedNodes.add(node);
                    seenIds.add(id);
                    visited.add(node);
                    for (ExecEdge inputEdge : node.getInputEdges()) {
                        collectedEdges.add(JsonPlanEdge.fromExecEdge(inputEdge));
                    }
                }
            };
    execGraph.getRootNodes().forEach(visitor::visit);
    // Sanity check: one unique id per collected node.
    checkArgument(collectedNodes.size() == seenIds.size());
    return new JsonPlanGraph(execGraph.getFlinkVersion(), collectedNodes, collectedEdges);
}
Aggregations