Use of org.apache.flink.api.connector.source.mocks.MockSourceSplit in project flink by apache.
The class SourceCoordinatorContextTest, method testCallableInterruptedDuringShutdownDoNotFailJob.
@Test
public void testCallableInterruptedDuringShutdownDoNotFailJob() throws InterruptedException {
    AtomicReference<Throwable> expectedError = new AtomicReference<>(null);
    ManuallyTriggeredScheduledExecutorService manualWorkerExecutor =
            new ManuallyTriggeredScheduledExecutorService();
    ManuallyTriggeredScheduledExecutorService manualCoordinatorExecutor =
            new ManuallyTriggeredScheduledExecutorService();
    SourceCoordinatorContext<MockSourceSplit> testingContext =
            new SourceCoordinatorContext<>(
                    manualCoordinatorExecutor,
                    manualWorkerExecutor,
                    new SourceCoordinatorProvider.CoordinatorExecutorThreadFactory(
                            TEST_OPERATOR_ID.toHexString(), getClass().getClassLoader()),
                    operatorCoordinatorContext,
                    new MockSourceSplitSerializer(),
                    splitSplitAssignmentTracker);
    testingContext.callAsync(
            () -> {
                throw new InterruptedException();
            },
            (ignored, e) -> {
                if (e != null) {
                    expectedError.set(e);
                    throw new RuntimeException(e);
                }
            });
    manualWorkerExecutor.triggerAll();
    testingContext.close();
    manualCoordinatorExecutor.triggerAll();
    assertTrue(expectedError.get() instanceof InterruptedException);
    assertFalse(operatorCoordinatorContext.isJobFailed());
}
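The manual executors are what make this test deterministic: work handed to a ManuallyTriggeredScheduledExecutorService is only queued, so the context can be closed before the error handler runs on the coordinator executor. Below is a minimal standalone sketch of that behavior (illustrative names, not part of the Flink tests; it assumes the ManuallyTriggeredScheduledExecutorService test utility from flink-core's test jar).

import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.flink.core.testutils.ManuallyTriggeredScheduledExecutorService;

class ManualExecutorSketch {
    static void demo() {
        ManuallyTriggeredScheduledExecutorService executor =
                new ManuallyTriggeredScheduledExecutorService();
        AtomicBoolean ran = new AtomicBoolean(false);

        // The task is only queued; nothing runs yet.
        executor.execute(() -> ran.set(true));
        assert !ran.get();

        // triggerAll() drains the queue on the calling thread.
        executor.triggerAll();
        assert ran.get();
    }
}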
Use of org.apache.flink.api.connector.source.mocks.MockSourceSplit in project flink by apache.
The class SourceCoordinatorTest, method testBlockOnClose.
@Test
public void testBlockOnClose() throws Exception {
    // It is possible that the split enumerator submits some heavy-duty work to the
    // coordinator executor which blocks the coordinator closure.
    final CountDownLatch latch = new CountDownLatch(1);
    try (final MockSplitEnumeratorContext<MockSourceSplit> enumeratorContext =
                    new MockSplitEnumeratorContext<>(1);
            final MockSplitEnumerator splitEnumerator =
                    new MockSplitEnumerator(1, enumeratorContext) {
                        @Override
                        public void handleSourceEvent(int subtaskId, SourceEvent sourceEvent) {
                            context.callAsync(
                                    () -> 1L,
                                    (ignored, t) -> {
                                        latch.countDown();
                                        // Submit a callable that will never return.
                                        try {
                                            Thread.sleep(Long.MAX_VALUE);
                                        } catch (InterruptedException e) {
                                            throw new RuntimeException(e);
                                        }
                                    });
                        }
                    };
            final SourceCoordinator<?, ?> coordinator =
                    new SourceCoordinator<>(
                            OPERATOR_NAME,
                            new EnumeratorCreatingSource<>(() -> splitEnumerator),
                            context,
                            new CoordinatorStoreImpl())) {
        coordinator.start();
        coordinator.handleEventFromOperator(1, new SourceEventWrapper(new SourceEvent() {}));
        // Wait until the coordinator executor blocks.
        latch.await();
        CompletableFuture<?> future =
                ComponentClosingUtils.closeAsyncWithTimeout(
                        "testBlockOnClose",
                        (ThrowingRunnable<Exception>) coordinator::close,
                        Duration.ofMillis(1));
        future.exceptionally(
                        e -> {
                            assertTrue(e instanceof TimeoutException);
                            return null;
                        })
                .get();
        waitUtil(
                splitEnumerator::closed,
                Duration.ofSeconds(5),
                "Split enumerator was not closed in 5 seconds.");
    }
}
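The close path in this test goes through ComponentClosingUtils.closeAsyncWithTimeout, which turns a blocking close() into a future that completes exceptionally on timeout. A hedged sketch of that pattern follows, assuming the (name, ThrowingRunnable, Duration) signature used in the test; the component name and close body are placeholders, not Flink code.

// Illustrative only: the close body below blocks forever to force the timeout.
CompletableFuture<?> closeFuture =
        ComponentClosingUtils.closeAsyncWithTimeout(
                "my-component",
                (ThrowingRunnable<Exception>) () -> Thread.sleep(Long.MAX_VALUE),
                Duration.ofMillis(10));
closeFuture.whenComplete(
        (ignored, error) -> {
            // On timeout the future completes with a TimeoutException; the caller
            // decides whether to escalate or just log it.
            if (error instanceof TimeoutException) {
                System.out.println("close timed out: " + error.getMessage());
            }
        });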
Use of org.apache.flink.api.connector.source.mocks.MockSourceSplit in project flink by apache.
The class SourceCoordinatorTest, method testFailJobWhenExceptionThrownFromStart.
@Test
public void testFailJobWhenExceptionThrownFromStart() throws Exception {
    final RuntimeException failureReason = new RuntimeException("Artificial Exception");
    try (final MockSplitEnumeratorContext<MockSourceSplit> enumeratorContext =
                    new MockSplitEnumeratorContext<>(1);
            final SplitEnumerator<MockSourceSplit, Set<MockSourceSplit>> splitEnumerator =
                    new MockSplitEnumerator(1, enumeratorContext) {
                        @Override
                        public void start() {
                            throw failureReason;
                        }
                    };
            final SourceCoordinator<?, ?> coordinator =
                    new SourceCoordinator<>(
                            OPERATOR_NAME,
                            new EnumeratorCreatingSource<>(() -> splitEnumerator),
                            context,
                            new CoordinatorStoreImpl(),
                            WatermarkAlignmentParams.WATERMARK_ALIGNMENT_DISABLED)) {
        coordinator.start();
        waitUtil(
                () -> operatorCoordinatorContext.isJobFailed(),
                Duration.ofSeconds(10),
                "The job should have failed due to the artificial exception.");
        assertEquals(failureReason, operatorCoordinatorContext.getJobFailureReason());
    }
}
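The final assertions hold because a failure thrown from the enumerator's start() is routed to the operator coordinator context instead of being thrown from coordinator.start(). A minimal sketch of that reporting pattern is shown below (not the actual SourceCoordinator implementation; enumerator and operatorCoordinatorContext stand in for the real fields).

// Illustrative only: report the failure so the job fails with the original cause.
try {
    enumerator.start();
} catch (Throwable t) {
    // OperatorCoordinator.Context#failJob makes the failure visible to the
    // scheduler; the mock context records it for getJobFailureReason().
    operatorCoordinatorContext.failJob(t);
}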
Use of org.apache.flink.api.connector.source.mocks.MockSourceSplit in project flink by apache.
The class CoordinatorTestUtils, method verifyAssignment.
/**
 * Checks that the actual assignment matches the expected split IDs, in order.
 */
static void verifyAssignment(
        List<String> expectedSplitIds, Collection<MockSourceSplit> actualAssignment) {
    assertEquals(expectedSplitIds.size(), actualAssignment.size());
    int i = 0;
    for (MockSourceSplit split : actualAssignment) {
        assertEquals(expectedSplitIds.get(i++), split.splitId());
    }
}
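A hypothetical call site for verifyAssignment (splits and IDs invented for illustration); MockSourceSplit.splitId() is expected to return the numeric split ID as a string.

// Two mock splits assigned in order; the check passes because sizes and IDs match.
List<MockSourceSplit> actual =
        Arrays.asList(new MockSourceSplit(0), new MockSourceSplit(1));
verifyAssignment(Arrays.asList("0", "1"), actual);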
Use of org.apache.flink.api.connector.source.mocks.MockSourceSplit in project flink by apache.
The class MultipleInputStreamTaskTest, method addSourceRecords.
static void addSourceRecords(
        StreamTaskMailboxTestHarness<String> testHarness,
        int sourceId,
        Boundedness boundedness,
        int... records) throws Exception {
    OperatorID sourceOperatorID = getSourceOperatorID(testHarness, sourceId);
    // Prepare the source split and assign it to the source reader.
    MockSourceSplit split =
            new MockSourceSplit(
                    0, 0, boundedness == Boundedness.BOUNDED ? records.length : Integer.MAX_VALUE);
    for (int record : records) {
        split.addRecord(record);
    }
    // Assign the split to the source reader.
    AddSplitEvent<MockSourceSplit> addSplitEvent =
            new AddSplitEvent<>(
                    Collections.singletonList(split), new MockSourceSplitSerializer());
    testHarness.getStreamTask()
            .dispatchOperatorEvent(sourceOperatorID, new SerializedValue<>(addSplitEvent));
}
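A hypothetical invocation from a test (the harness construction is elided; the source index and record values are made up): feed three records into source input 1 of an already-built StreamTaskMailboxTestHarness<String>.

// Illustrative only: with Boundedness.BOUNDED the split ends after the last
// record, so the reader eventually reaches end of input for this split.
addSourceRecords(testHarness, 1, Boundedness.BOUNDED, 42, 43, 44);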