Example usage of org.apache.flink.core.testutils.OneShotLatch in the Apache Flink project: class FutureUtilsTest, method testComposeAfterwardsSecondExceptional.
// Verifies FutureUtils.composeAfterwards when the compose function itself returns an
// exceptionally completed future: the compose action must only run once the input
// future completes, and the composed future must fail with that same exception.
@Test
public void testComposeAfterwardsSecondExceptional() throws InterruptedException {
final CompletableFuture<Void> inputFuture = new CompletableFuture<>();
final OneShotLatch composeLatch = new OneShotLatch();
final FlinkException testException = new FlinkException("Test exception");
final CompletableFuture<Void> composeFuture = FutureUtils.composeAfterwards(inputFuture, () -> {
composeLatch.trigger();
return FutureUtils.completedExceptionally(testException);
});
// nothing may have run before the input future completes
assertThat(composeLatch.isTriggered(), is(false));
assertThat(composeFuture.isDone(), is(false));
inputFuture.complete(null);
assertThat(composeLatch.isTriggered(), is(true));
assertThat(composeFuture.isDone(), is(true));
// check that the composed future IS completed exceptionally with the test exception
// (the original comment claimed the opposite of what is asserted below)
try {
composeFuture.get();
fail("Expected an exceptional completion");
} catch (ExecutionException ee) {
assertThat(ExceptionUtils.stripExecutionException(ee), is(testException));
}
}
Example usage of org.apache.flink.core.testutils.OneShotLatch in the Apache Flink project: class FutureUtilsTest, method testComposeAfterwardsFirstExceptional.
/**
 * Verifies {@code FutureUtils.composeAfterwards} when the input future fails: the
 * compose action must still be executed once the input completes, and the input's
 * exception must be propagated through the composed future.
 */
@Test
public void testComposeAfterwardsFirstExceptional() throws InterruptedException {
final CompletableFuture<Void> input = new CompletableFuture<>();
final OneShotLatch latch = new OneShotLatch();
final FlinkException expected = new FlinkException("Test exception");
final CompletableFuture<Void> result = FutureUtils.composeAfterwards(input, () -> {
latch.trigger();
return CompletableFuture.completedFuture(null);
});
// before the input completes, neither the compose action nor the result may have fired
assertThat(latch.isTriggered(), is(false));
assertThat(result.isDone(), is(false));
input.completeExceptionally(expected);
// the compose action ran and the composed future completed
assertThat(latch.isTriggered(), is(true));
assertThat(result.isDone(), is(true));
// the composed future must carry the input future's exception
try {
result.get();
fail("Expected an exceptional completion");
} catch (ExecutionException ee) {
assertThat(ExceptionUtils.stripExecutionException(ee), is(expected));
}
}
Example usage of org.apache.flink.core.testutils.OneShotLatch in the Apache Flink project: class ArrowSourceFunctionTestBase, method testParallelProcessing.
// Runs two ArrowSourceFunction instances concurrently (parallelism 2, subtask indices
// 0 and 1) and checks that, together, they emit exactly the full test data set.
// Both sources share one latch and one counter: the latch triggers once the combined
// element count reaches the expected total.
@Test
public void testParallelProcessing() throws Exception {
Tuple2<List<RowData>, Integer> testData = getTestData();
// first source instance: subtask 0 of 2
final ArrowSourceFunction arrowSourceFunction = createTestArrowSourceFunction(testData.f0, testData.f1);
final AbstractStreamOperatorTestHarness<RowData> testHarness = new AbstractStreamOperatorTestHarness(new StreamSource<>(arrowSourceFunction), 2, 2, 0);
testHarness.open();
// error[0] / error[1] capture any Throwable thrown inside each runner thread
final Throwable[] error = new Throwable[2];
final OneShotLatch latch = new OneShotLatch();
final AtomicInteger numOfEmittedElements = new AtomicInteger(0);
// synchronized list: both source threads append concurrently
final List<RowData> results = Collections.synchronizedList(new ArrayList<>());
// run the source asynchronously
Thread runner = new Thread(() -> {
try {
arrowSourceFunction.run(new DummySourceContext<RowData>() {
@Override
public void collect(RowData element) {
// copy the element — presumably RowData instances are reused by the source; verify
results.add(typeSerializer.copy(element));
// trigger once ALL elements (from both subtasks combined) have been emitted
if (numOfEmittedElements.incrementAndGet() == testData.f0.size()) {
latch.trigger();
}
}
});
} catch (Throwable t) {
error[0] = t;
}
});
runner.start();
// second source instance: subtask 1 of 2, sharing the same latch/counter/results
final ArrowSourceFunction arrowSourceFunction2 = createTestArrowSourceFunction(testData.f0, testData.f1);
final AbstractStreamOperatorTestHarness<RowData> testHarness2 = new AbstractStreamOperatorTestHarness(new StreamSource<>(arrowSourceFunction2), 2, 2, 1);
testHarness2.open();
// run the source asynchronously
Thread runner2 = new Thread(() -> {
try {
arrowSourceFunction2.run(new DummySourceContext<RowData>() {
@Override
public void collect(RowData element) {
results.add(typeSerializer.copy(element));
if (numOfEmittedElements.incrementAndGet() == testData.f0.size()) {
latch.trigger();
}
}
});
} catch (Throwable t) {
error[1] = t;
}
});
runner2.start();
// wait for the combined element count to be reached (await returns immediately
// if the latch was already triggered, so the isTriggered() pre-check is just a shortcut)
if (!latch.isTriggered()) {
latch.await();
}
runner.join();
runner2.join();
testHarness.close();
testHarness2.close();
// fail if either source thread threw
Assert.assertNull(error[0]);
Assert.assertNull(error[1]);
Assert.assertEquals(testData.f0.size(), numOfEmittedElements.get());
// order-insensitive comparison of emitted elements against the expected data
checkElementsEquals(results, testData.f0);
}
Example usage of org.apache.flink.core.testutils.OneShotLatch in the Apache Flink project: class CliFrontendStopWithSavepointTest, method testStopOnlyWithMaxWM.
/**
 * Passing only the {@code -d} flag must request advancing to the max watermark
 * (draining) while leaving the savepoint directory unset.
 */
@Test
public void testStopOnlyWithMaxWM() throws Exception {
final JobID jobId = new JobID();
final String[] args = { "-d", jobId.toString() };
final OneShotLatch invoked = new OneShotLatch();
final TestingClusterClient<String> client = new TestingClusterClient<>();
client.setStopWithSavepointFunction((id, drain, targetDirectory, formatType) -> {
// the CLI must forward the job id, request draining, and pass no directory
assertThat(id, is(jobId));
assertThat(drain, is(true));
assertNull(targetDirectory);
invoked.trigger();
return CompletableFuture.completedFuture(targetDirectory);
});
final MockedCliFrontend frontend = new MockedCliFrontend(client);
frontend.stop(args);
// ensure the stop-with-savepoint call actually happened
invoked.await();
}
Example usage of org.apache.flink.core.testutils.OneShotLatch in the Apache Flink project: class CliFrontendStopWithSavepointTest, method testStopWithDefaultSavepointDir.
/**
 * Stopping with no flags must request neither draining nor an explicit savepoint
 * directory (the cluster-side default directory applies).
 */
@Test
public void testStopWithDefaultSavepointDir() throws Exception {
final JobID jobId = new JobID();
final String[] args = { jobId.toString() };
final OneShotLatch invoked = new OneShotLatch();
final TestingClusterClient<String> client = new TestingClusterClient<>();
client.setStopWithSavepointFunction((id, drain, targetDirectory, formatType) -> {
// no -d flag: draining off; no path argument: directory null
assertThat(id, is(jobId));
assertThat(drain, is(false));
assertNull(targetDirectory);
invoked.trigger();
return CompletableFuture.completedFuture(targetDirectory);
});
final MockedCliFrontend frontend = new MockedCliFrontend(client);
frontend.stop(args);
// ensure the stop-with-savepoint call actually happened
invoked.await();
}
Aggregations