Use of org.apache.flink.runtime.jobmanager.scheduler.Scheduler in project flink by apache: class PointwisePatternTest, method testLowToHigh.
private void testLowToHigh(int lowDop, int highDop) throws Exception {
	if (highDop < lowDop) {
		throw new IllegalArgumentException();
	}
	final int factor = highDop / lowDop;
	final int delta = highDop % lowDop == 0 ? 0 : 1;

	JobVertex v1 = new JobVertex("vertex1");
	JobVertex v2 = new JobVertex("vertex2");
	v1.setParallelism(lowDop);
	v2.setParallelism(highDop);
	v1.setInvokableClass(AbstractInvokable.class);
	v2.setInvokableClass(AbstractInvokable.class);
	v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);

	List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2));

	// jobId, jobName and cfg are fields of the enclosing test class
	ExecutionGraph eg = new ExecutionGraph(
		TestingUtils.defaultExecutor(),
		TestingUtils.defaultExecutor(),
		jobId,
		jobName,
		cfg,
		new SerializedValue<>(new ExecutionConfig()),
		AkkaUtils.getDefaultTimeout(),
		new NoRestartStrategy(),
		new Scheduler(TestingUtils.defaultExecutionContext()));

	try {
		eg.attachJobGraph(ordered);
	}
	catch (JobException e) {
		e.printStackTrace();
		fail("Job failed with exception: " + e.getMessage());
	}

	ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());

	// count how many consumer subtasks read each producer partition
	int[] timesUsed = new int[lowDop];
	for (ExecutionVertex ev : target.getTaskVertices()) {
		assertEquals(1, ev.getNumberOfInputs());
		ExecutionEdge[] inEdges = ev.getInputEdges(0);
		assertEquals(1, inEdges.length);
		timesUsed[inEdges[0].getSource().getPartitionNumber()]++;
	}

	// every partition must be read either 'factor' or 'factor + delta' times
	for (int used : timesUsed) {
		assertTrue(used >= factor && used <= factor + delta);
	}
}
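The invariant this helper checks can be reproduced in isolation. Below is a minimal standalone sketch (not Flink's actual edge-assignment code; the class name and the sourceFor helper are hypothetical) of an even low-to-high POINTWISE assignment: each of highDop consumer subtasks reads exactly one of lowDop producer partitions, and every partition is reused either highDop / lowDop times or one more.

// Standalone illustration of the low-to-high invariant (run with -ea to enable asserts).
public class LowToHighSketch {

	// hypothetical helper: which source partition consumer subtask 'i' would read
	static int sourceFor(int i, int lowDop, int highDop) {
		return (int) (((long) i * lowDop) / highDop);
	}

	public static void main(String[] args) {
		int lowDop = 3, highDop = 7;
		int[] timesUsed = new int[lowDop];
		for (int i = 0; i < highDop; i++) {
			timesUsed[sourceFor(i, lowDop, highDop)]++;
		}
		int factor = highDop / lowDop;                 // 2 for 3 -> 7
		int delta = highDop % lowDop == 0 ? 0 : 1;     // 1 for 3 -> 7
		for (int used : timesUsed) {
			// each partition is read either 2 or 3 times for 3 -> 7
			assert used >= factor && used <= factor + delta;
		}
		System.out.println(java.util.Arrays.toString(timesUsed)); // prints [3, 2, 2]
	}
}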
Use of org.apache.flink.runtime.jobmanager.scheduler.Scheduler in project flink by apache: class PointwisePatternTest, method testNTo7N.
@Test
public void testNTo7N() throws Exception {
	final int N = 11;

	JobVertex v1 = new JobVertex("vertex1");
	JobVertex v2 = new JobVertex("vertex2");
	v1.setParallelism(N);
	v2.setParallelism(7 * N);
	v1.setInvokableClass(AbstractInvokable.class);
	v2.setInvokableClass(AbstractInvokable.class);
	v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);

	List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2));

	ExecutionGraph eg = new ExecutionGraph(
		TestingUtils.defaultExecutor(),
		TestingUtils.defaultExecutor(),
		jobId,
		jobName,
		cfg,
		new SerializedValue<>(new ExecutionConfig()),
		AkkaUtils.getDefaultTimeout(),
		new NoRestartStrategy(),
		new Scheduler(TestingUtils.defaultExecutionContext()));

	try {
		eg.attachJobGraph(ordered);
	}
	catch (JobException e) {
		e.printStackTrace();
		fail("Job failed with exception: " + e.getMessage());
	}

	ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());

	// each consumer subtask reads exactly one partition: subtask i reads partition i / 7
	for (ExecutionVertex ev : target.getTaskVertices()) {
		assertEquals(1, ev.getNumberOfInputs());
		ExecutionEdge[] inEdges = ev.getInputEdges(0);
		assertEquals(1, inEdges.length);
		assertEquals(ev.getParallelSubtaskIndex() / 7, inEdges[0].getSource().getPartitionNumber());
	}
}
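In the exact-multiple case the mapping is fully deterministic. With N = 11 producers and 7 * N = 77 consumers, subtasks 0 through 6 all read partition 0, subtasks 7 through 13 read partition 1, and in general subtask i reads partition i / 7 (integer division), which is exactly what the final assertEquals checks.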
Use of org.apache.flink.runtime.jobmanager.scheduler.Scheduler in project flink by apache: class PointwisePatternTest, method test2NToN.
@Test
public void test2NToN() throws Exception {
	final int N = 17;

	JobVertex v1 = new JobVertex("vertex1");
	JobVertex v2 = new JobVertex("vertex2");
	v1.setParallelism(2 * N);
	v2.setParallelism(N);
	v1.setInvokableClass(AbstractInvokable.class);
	v2.setInvokableClass(AbstractInvokable.class);
	v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);

	List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2));

	ExecutionGraph eg = new ExecutionGraph(
		TestingUtils.defaultExecutor(),
		TestingUtils.defaultExecutor(),
		jobId,
		jobName,
		cfg,
		new SerializedValue<>(new ExecutionConfig()),
		AkkaUtils.getDefaultTimeout(),
		new NoRestartStrategy(),
		new Scheduler(TestingUtils.defaultExecutionContext()));

	try {
		eg.attachJobGraph(ordered);
	}
	catch (JobException e) {
		e.printStackTrace();
		fail("Job failed with exception: " + e.getMessage());
	}

	ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());

	// subtask i reads the two consecutive partitions 2*i and 2*i + 1
	for (ExecutionVertex ev : target.getTaskVertices()) {
		assertEquals(1, ev.getNumberOfInputs());
		ExecutionEdge[] inEdges = ev.getInputEdges(0);
		assertEquals(2, inEdges.length);
		assertEquals(ev.getParallelSubtaskIndex() * 2, inEdges[0].getSource().getPartitionNumber());
		assertEquals(ev.getParallelSubtaskIndex() * 2 + 1, inEdges[1].getSource().getPartitionNumber());
	}
}
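This is the reverse direction. With 2 * N producers and N consumers, each consumer subtask gets two input edges, and subtask i reads the consecutive partitions 2 * i and 2 * i + 1, so every one of the 2N partitions is consumed exactly once.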
Use of org.apache.flink.runtime.jobmanager.scheduler.Scheduler in project flink by apache: class PointwisePatternTest, method test3NToN.
@Test
public void test3NToN() throws Exception {
	final int N = 17;

	JobVertex v1 = new JobVertex("vertex1");
	JobVertex v2 = new JobVertex("vertex2");
	v1.setParallelism(3 * N);
	v2.setParallelism(N);
	v1.setInvokableClass(AbstractInvokable.class);
	v2.setInvokableClass(AbstractInvokable.class);
	v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);

	List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2));

	ExecutionGraph eg = new ExecutionGraph(
		TestingUtils.defaultExecutor(),
		TestingUtils.defaultExecutor(),
		jobId,
		jobName,
		cfg,
		new SerializedValue<>(new ExecutionConfig()),
		AkkaUtils.getDefaultTimeout(),
		new NoRestartStrategy(),
		new Scheduler(TestingUtils.defaultExecutionContext()));

	try {
		eg.attachJobGraph(ordered);
	}
	catch (JobException e) {
		e.printStackTrace();
		fail("Job failed with exception: " + e.getMessage());
	}

	ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());

	// subtask i reads the three consecutive partitions 3*i, 3*i + 1 and 3*i + 2
	for (ExecutionVertex ev : target.getTaskVertices()) {
		assertEquals(1, ev.getNumberOfInputs());
		ExecutionEdge[] inEdges = ev.getInputEdges(0);
		assertEquals(3, inEdges.length);
		assertEquals(ev.getParallelSubtaskIndex() * 3, inEdges[0].getSource().getPartitionNumber());
		assertEquals(ev.getParallelSubtaskIndex() * 3 + 1, inEdges[1].getSource().getPartitionNumber());
		assertEquals(ev.getParallelSubtaskIndex() * 3 + 2, inEdges[2].getSource().getPartitionNumber());
	}
}
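The two tests above assert the mirror-image grouping for exact multiples in the high-to-low direction. A minimal standalone sketch of that grouping (again not Flink code; the class name and the partitionsFor helper are hypothetical):

// Standalone illustration of the k-to-1 grouping for k = highDop / lowDop.
public class HighToLowSketch {

	// hypothetical helper: the partitions consumer subtask 'i' would read
	static int[] partitionsFor(int i, int k) {
		int[] parts = new int[k];
		for (int j = 0; j < k; j++) {
			parts[j] = k * i + j;
		}
		return parts;
	}

	public static void main(String[] args) {
		int k = 3; // 3N -> N, as in test3NToN
		for (int i = 0; i < 4; i++) {
			System.out.println("subtask " + i + " reads "
				+ java.util.Arrays.toString(partitionsFor(i, k)));
		}
		// subtask 0 reads [0, 1, 2], subtask 1 reads [3, 4, 5], ...
	}
}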
Use of org.apache.flink.runtime.jobmanager.scheduler.Scheduler in project flink by apache: class TerminalStateDeadlockTest, method testProvokeDeadlock.
// ------------------------------------------------------------------------
@Test
public void testProvokeDeadlock() {
	try {
		final JobID jobId = resource.getJobID();
		final JobVertexID vid1 = new JobVertexID();
		final JobVertexID vid2 = new JobVertexID();

		final List<JobVertex> vertices;
		{
			JobVertex v1 = new JobVertex("v1", vid1);
			JobVertex v2 = new JobVertex("v2", vid2);
			v1.setParallelism(1);
			v2.setParallelism(1);
			v1.setInvokableClass(DummyInvokable.class);
			v2.setInvokableClass(DummyInvokable.class);
			vertices = Arrays.asList(v1, v2);
		}

		final Scheduler scheduler = new Scheduler(TestingUtils.defaultExecutionContext());
		final Executor executor = Executors.newFixedThreadPool(4);

		// try a lot!
		for (int i = 0; i < 20000; i++) {
			final TestExecGraph eg = new TestExecGraph(jobId);
			eg.attachJobGraph(vertices);

			final Execution e1 = eg.getJobVertex(vid1).getTaskVertices()[0].getCurrentExecutionAttempt();
			final Execution e2 = eg.getJobVertex(vid2).getTaskVertices()[0].getCurrentExecutionAttempt();

			initializeExecution(e1);
			initializeExecution(e2);

			// force the graph into FAILING via reflection, then let both
			// executions report 'canceling complete' concurrently
			execGraphStateField.set(eg, JobStatus.FAILING);
			execGraphSlotProviderField.set(eg, scheduler);

			Runnable r1 = new Runnable() {
				@Override
				public void run() {
					e1.cancelingComplete();
				}
			};
			Runnable r2 = new Runnable() {
				@Override
				public void run() {
					e2.cancelingComplete();
				}
			};

			executor.execute(r1);
			executor.execute(r2);

			// if the deadlock occurs, this wait never returns
			eg.waitTillDone();
		}
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
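The test stresses a race in the terminal-state transition: the graph is forced into FAILING, both executions then call cancelingComplete() concurrently from a shared thread pool, and eg.waitTillDone() only returns if the transition to the terminal state completes, so a single hanging iteration out of the 20000 exposes the deadlock. The reflective Field objects used above could be set up roughly as follows (a sketch; the private ExecutionGraph field names "state" and "slotProvider" are assumptions for this Flink version):

import java.lang.reflect.Field;

// inside TerminalStateDeadlockTest: reflective access to private
// ExecutionGraph members, since no public API allows forcing this state
private final Field execGraphStateField;
private final Field execGraphSlotProviderField;

public TerminalStateDeadlockTest() throws NoSuchFieldException {
	// assumption: the private fields are named "state" and "slotProvider"
	execGraphStateField = ExecutionGraph.class.getDeclaredField("state");
	execGraphStateField.setAccessible(true);

	execGraphSlotProviderField = ExecutionGraph.class.getDeclaredField("slotProvider");
	execGraphSlotProviderField.setAccessible(true);
}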