Example usage of org.apache.flink.table.planner.plan.nodes.exec.TestingBatchExecNode from the Apache Flink project: class InputPriorityConflictResolverTest, method testDetectAndResolve.
@Test
public void testDetectAndResolve() {
    // Legend: P = DamBehavior.PIPELINED, E = DamBehavior.END_INPUT;
    // "P100" means PIPELINED with input priority 100.
    //
    // Topology under test (node 7 is the multi-input sink):
    //   0 -(P0)-> 1 -(P0)-> 7          1 -(P0)-> 2 -(P0)-> 7
    //   0 -(P0)-> 3 -(P1)-> 7          0 -(P0)-> 4 -(P10)-> 7
    //   8 -(P0)-> 4                    8 -(P0)-> 5
    //   0 -(E0)-> 5 -(P10)-> 7         6 -(P100)-> 7
    final TestingBatchExecNode[] graph = new TestingBatchExecNode[9];
    for (int idx = 0; idx < graph.length; idx++) {
        graph[idx] = new TestingBatchExecNode("TestingBatchExecNode" + idx);
    }
    graph[1].addInput(graph[0], InputProperty.builder().priority(0).build());
    graph[2].addInput(graph[1], InputProperty.builder().priority(0).build());
    graph[3].addInput(graph[0], InputProperty.builder().priority(0).build());
    graph[4].addInput(graph[8], InputProperty.builder().priority(0).build());
    graph[4].addInput(graph[0], InputProperty.builder().priority(0).build());
    graph[5].addInput(graph[8], InputProperty.builder().priority(0).build());
    graph[5].addInput(
            graph[0],
            InputProperty.builder()
                    .damBehavior(InputProperty.DamBehavior.END_INPUT)
                    .priority(0)
                    .build());
    graph[7].addInput(graph[1], InputProperty.builder().priority(0).build());
    graph[7].addInput(graph[2], InputProperty.builder().priority(0).build());
    graph[7].addInput(graph[3], InputProperty.builder().priority(1).build());
    graph[7].addInput(graph[4], InputProperty.builder().priority(10).build());
    graph[7].addInput(graph[5], InputProperty.builder().priority(10).build());
    graph[7].addInput(graph[6], InputProperty.builder().priority(100).build());

    final InputPriorityConflictResolver resolver =
            new InputPriorityConflictResolver(
                    Collections.singletonList(graph[7]),
                    InputProperty.DamBehavior.END_INPUT,
                    StreamExchangeMode.BATCH,
                    new Configuration());
    resolver.detectAndResolve();

    // Inputs 0 and 1 are conflict-free and stay untouched.
    Assert.assertEquals(graph[1], graph[7].getInputNodes().get(0));
    Assert.assertEquals(graph[2], graph[7].getInputNodes().get(1));
    // Inputs 2 and 3 conflict with higher-priority inputs, so a BATCH
    // exchange must be inserted between the source node and node 7.
    Assert.assertTrue(graph[7].getInputNodes().get(2) instanceof BatchExecExchange);
    Assert.assertEquals(
            Optional.of(StreamExchangeMode.BATCH),
            ((BatchExecExchange) graph[7].getInputNodes().get(2)).getRequiredExchangeMode());
    Assert.assertEquals(
            graph[3], graph[7].getInputNodes().get(2).getInputEdges().get(0).getSource());
    Assert.assertTrue(graph[7].getInputNodes().get(3) instanceof BatchExecExchange);
    Assert.assertEquals(
            Optional.of(StreamExchangeMode.BATCH),
            ((BatchExecExchange) graph[7].getInputNodes().get(3)).getRequiredExchangeMode());
    Assert.assertEquals(
            graph[4], graph[7].getInputNodes().get(3).getInputEdges().get(0).getSource());
    // Input 4 already dams its pipeline (E0 upstream) and input 5 has the
    // highest priority, so neither needs an exchange.
    Assert.assertEquals(graph[5], graph[7].getInputNodes().get(4));
    Assert.assertEquals(graph[6], graph[7].getInputNodes().get(5));
}
Example usage of org.apache.flink.table.planner.plan.nodes.exec.TestingBatchExecNode from the Apache Flink project: class InputOrderCalculatorTest, method testCalculateInputOrderWithLoop.
@Test(expected = IllegalStateException.class)
public void testCalculateInputOrderWithLoop() {
    // Wiring the same source into the sink twice with different priorities
    // creates a priority conflict the calculator cannot order, so
    // calculate() must fail with IllegalStateException.
    final TestingBatchExecNode source = new TestingBatchExecNode("TestingBatchExecNode0");
    final TestingBatchExecNode sink = new TestingBatchExecNode("TestingBatchExecNode1");
    sink.addInput(source, InputProperty.builder().priority(0).build());
    sink.addInput(source, InputProperty.builder().priority(1).build());
    new InputOrderCalculator(sink, Collections.emptySet(), InputProperty.DamBehavior.BLOCKING)
            .calculate();
}
Example usage of org.apache.flink.table.planner.plan.nodes.exec.TestingBatchExecNode from the Apache Flink project: class InputOrderCalculatorTest, method testCalculateInputOrderWithUnaffectedRelatedBoundaries.
@Test
public void testCalculateInputOrderWithUnaffectedRelatedBoundaries() {
    // Legend: P = DamBehavior.PIPELINED, B = DamBehavior.BLOCKING;
    // "P1" means PIPELINED with input priority 1.
    //
    // Edges into node 4 (the target):
    //   0 -(P0)-> 1 -(B0)-> 2 -(P0)-> 4      1 -(B0)-> 3 -(P1)-> 2
    //   0 -(B0)-> 5 -(P1)-> 6 -(P0)-> 4      7 -(B0)-> 6
    final TestingBatchExecNode[] n = new TestingBatchExecNode[8];
    int idx = 0;
    while (idx < n.length) {
        n[idx] = new TestingBatchExecNode("TestingBatchExecNode" + idx);
        idx++;
    }
    n[1].addInput(n[0]);
    n[2].addInput(
            n[1], InputProperty.builder().damBehavior(InputProperty.DamBehavior.BLOCKING).build());
    n[2].addInput(n[3], InputProperty.builder().priority(1).build());
    n[3].addInput(
            n[1], InputProperty.builder().damBehavior(InputProperty.DamBehavior.BLOCKING).build());
    n[4].addInput(n[2]);
    n[4].addInput(n[6]);
    n[5].addInput(
            n[0], InputProperty.builder().damBehavior(InputProperty.DamBehavior.BLOCKING).build());
    n[6].addInput(n[5], InputProperty.builder().priority(1).build());
    n[6].addInput(
            n[7], InputProperty.builder().damBehavior(InputProperty.DamBehavior.BLOCKING).build());

    final InputOrderCalculator calculator =
            new InputOrderCalculator(
                    n[4],
                    new HashSet<>(Arrays.asList(n[1], n[3], n[5], n[7])),
                    InputProperty.DamBehavior.BLOCKING);
    final Map<ExecNode<?>, Integer> result = calculator.calculate();

    // Only the four declared boundary nodes receive an input order.
    Assert.assertEquals(4, result.size());
    Assert.assertEquals(0, result.get(n[1]).intValue());
    Assert.assertEquals(1, result.get(n[3]).intValue());
    Assert.assertEquals(1, result.get(n[5]).intValue());
    Assert.assertEquals(0, result.get(n[7]).intValue());
}
Example usage of org.apache.flink.table.planner.plan.nodes.exec.TestingBatchExecNode from the Apache Flink project: class InputOrderCalculatorTest, method testCalculateInputOrder.
@Test
public void testCalculateInputOrder() {
    // Legend: P = DamBehavior.PIPELINED, B = DamBehavior.BLOCKING;
    // "P1" means PIPELINED with input priority 1.
    //
    // Edges into node 8 (the target):
    //   0 -(P1)-> 3 -(B0)-> 6 -(B0)-> 8
    //   1 -(P1)-> 4 -(P1)-> 6,  4 -(B0)-> 7 -(P1)-> 8
    //   2 -(P1)-> 5 -(P1)-> 7
    final TestingBatchExecNode[] dag = new TestingBatchExecNode[9];
    for (int k = 0; k < dag.length; k++) {
        dag[k] = new TestingBatchExecNode("TestingBatchExecNode" + k);
    }
    dag[3].addInput(dag[0], InputProperty.builder().priority(1).build());
    dag[4].addInput(dag[1], InputProperty.builder().priority(1).build());
    dag[5].addInput(dag[2], InputProperty.builder().priority(1).build());
    dag[6].addInput(
            dag[3],
            InputProperty.builder()
                    .damBehavior(InputProperty.DamBehavior.BLOCKING)
                    .priority(0)
                    .build());
    dag[6].addInput(dag[4], InputProperty.builder().priority(1).build());
    dag[7].addInput(
            dag[4],
            InputProperty.builder()
                    .damBehavior(InputProperty.DamBehavior.BLOCKING)
                    .priority(0)
                    .build());
    dag[7].addInput(dag[5], InputProperty.builder().priority(1).build());
    dag[8].addInput(
            dag[6],
            InputProperty.builder()
                    .damBehavior(InputProperty.DamBehavior.BLOCKING)
                    .priority(0)
                    .build());
    dag[8].addInput(dag[7], InputProperty.builder().priority(1).build());

    final InputOrderCalculator calculator =
            new InputOrderCalculator(
                    dag[8],
                    new HashSet<>(Arrays.asList(dag[1], dag[3], dag[5])),
                    InputProperty.DamBehavior.BLOCKING);
    final Map<ExecNode<?>, Integer> result = calculator.calculate();

    // The three boundary nodes must be ordered 3 -> 1 -> 5 so that each
    // blocking input finishes before the pipelined reads that depend on it.
    Assert.assertEquals(3, result.size());
    Assert.assertEquals(0, result.get(dag[3]).intValue());
    Assert.assertEquals(1, result.get(dag[1]).intValue());
    Assert.assertEquals(2, result.get(dag[5]).intValue());
}
Example usage of org.apache.flink.table.planner.plan.nodes.exec.TestingBatchExecNode from the Apache Flink project: class InputPriorityGraphGeneratorTest, method testCalculatePipelinedAncestors.
@Test
public void testCalculatePipelinedAncestors() {
    // Legend: P = DamBehavior.PIPELINED, E = DamBehavior.END_INPUT.
    //
    // Edges into node 2 (the query node):
    //   0 -(P)-> 1 -(E)-> 2        0 -(P)-> 3 -(P)-> 2
    //   4 -(E)-> 5 -(P)-> 3        6 -(E)-> 3
    final TestingBatchExecNode[] v = new TestingBatchExecNode[7];
    int j = 0;
    while (j < v.length) {
        v[j] = new TestingBatchExecNode("TestingBatchExecNode" + j);
        j++;
    }
    v[1].addInput(v[0]);
    v[2].addInput(
            v[1],
            InputProperty.builder().damBehavior(InputProperty.DamBehavior.END_INPUT).build());
    v[2].addInput(v[3]);
    v[3].addInput(v[0]);
    v[3].addInput(v[5]);
    v[3].addInput(
            v[6],
            InputProperty.builder().damBehavior(InputProperty.DamBehavior.END_INPUT).build());
    v[5].addInput(
            v[4],
            InputProperty.builder().damBehavior(InputProperty.DamBehavior.END_INPUT).build());

    final TestingInputPriorityConflictResolver resolver =
            new TestingInputPriorityConflictResolver(
                    Collections.singletonList(v[2]),
                    Collections.emptySet(),
                    InputProperty.DamBehavior.END_INPUT);
    final List<ExecNode<?>> ancestors = resolver.calculatePipelinedAncestors(v[2]);

    // Only sources reachable through purely pipelined edges qualify:
    // node 0 (via 3) and node 5 (via 3); E edges cut off 1, 4 and 6.
    Assert.assertEquals(2, ancestors.size());
    Assert.assertTrue(ancestors.contains(v[0]));
    Assert.assertTrue(ancestors.contains(v[5]));
}
Aggregations