Usage of org.apache.flink.api.connector.source.mocks.MockSourceSplit in project flink by apache.
From class SourceOperatorEventTimeTest, method createTestOperator.
// ------------------------------------------------------------------------
// test setup helpers
// ------------------------------------------------------------------------
private static <T> SourceOperator<T, MockSourceSplit> createTestOperator(SourceReader<T, MockSourceSplit> reader, WatermarkStrategy<T> watermarkStrategy, ProcessingTimeService timeService, boolean emitProgressiveWatermarks) throws Exception {
    // In-memory operator state backend backing the state initialization context.
    final OperatorStateStore stateStore = new MemoryStateBackend().createOperatorStateBackend(new MockEnvironmentBuilder().build(), "test-operator", Collections.emptyList(), new CloseableRegistry());
    final StateInitializationContext initContext = new StateInitializationContextImpl(null, stateStore, null, null, null);

    // Operator under test, wired to the given reader / watermark strategy / time service.
    final SourceOperator<T, MockSourceSplit> operator = new TestingSourceOperator<>(reader, watermarkStrategy, timeService, emitProgressiveWatermarks);

    // Mock task environment used only to satisfy setup(); output records go to a throwaway list.
    final StreamMockEnvironment mockEnvironment = new StreamMockEnvironment(new Configuration(), new Configuration(), new ExecutionConfig(), 1L, new MockInputSplitProvider(), 1, new TestTaskStateManager());
    operator.setup(new SourceOperatorStreamTask<Integer>(mockEnvironment), new MockStreamConfig(new Configuration(), 1), new MockOutput<>(new ArrayList<>()));

    // Drive the operator through its normal lifecycle before handing it to the test.
    operator.initializeState(initContext);
    operator.open();
    return operator;
}
Usage of org.apache.flink.api.connector.source.mocks.MockSourceSplit in project flink by apache.
From class SourceOperatorTest, method testHandleAddSplitsEvent.
@Test
public void testHandleAddSplitsEvent() throws Exception {
    // Bring the operator to a running state before delivering the event.
    operator.initializeState(context.createStateContext());
    operator.open();

    // Deliver an AddSplitEvent carrying a single additional split.
    final MockSourceSplit addedSplit = new MockSourceSplit(2);
    operator.handleOperatorEvent(new AddSplitEvent<>(Collections.singletonList(addedSplit), new MockSourceSplitSerializer()));

    // The reader should now hold the initial mock split plus the newly added one.
    assertEquals(Arrays.asList(SourceOperatorTestContext.MOCK_SPLIT, addedSplit), mockSourceReader.getAssignedSplits());
}
Usage of org.apache.flink.api.connector.source.mocks.MockSourceSplit in project flink by apache.
From class SourceCoordinatorTest, method testSubtaskFailedAndRevertUncompletedAssignments.
@Test
public void testSubtaskFailedAndRevertUncompletedAssignments() throws Exception {
// Start the source and seed the enumerator with 6 splits.
sourceReady();
addTestingSplitSet(6);
// two splits pending for checkpoint 100
registerReader(0);
getEnumerator().executeAssignOneSplit(0);
getEnumerator().executeAssignOneSplit(0);
sourceCoordinator.checkpointCoordinator(100L, new CompletableFuture<>());
// One more split added and assigned after checkpoint 100; it falls into checkpoint 101.
getEnumerator().addNewSplits(new MockSourceSplit(6));
getEnumerator().executeAssignOneSplit(0);
sourceCoordinator.checkpointCoordinator(101L, new CompletableFuture<>());
// check the state.
waitForCoordinatorToProcessActions();
// 7 splits total, 3 assigned so far -> 4 remain unassigned.
assertEquals(4, getEnumerator().getUnassignedSplits().size());
// Every assignment has been captured by a checkpoint, so nothing is uncheckpointed.
assertTrue(splitSplitAssignmentTracker.uncheckpointedAssignments().isEmpty());
// Checkpoint 100 holds splits "0" and "1" for subtask 0; checkpoint 101 holds split "2".
verifyAssignment(Arrays.asList("0", "1"), splitSplitAssignmentTracker.assignmentsByCheckpointId().get(100L).get(0));
verifyAssignment(Collections.singletonList("2"), splitSplitAssignmentTracker.assignmentsByCheckpointId(101L).get(0));
// none of the checkpoints is confirmed, we fail and revert to the previous one
sourceCoordinator.subtaskFailed(0, null);
sourceCoordinator.subtaskReset(0, 99L);
waitForCoordinatorToProcessActions();
assertFalse("Reader 0 should have been unregistered.", context.registeredReaders().containsKey(0));
// The tracker should have reverted all the splits assignment to reader 0.
for (Map<Integer, ?> assignment : splitSplitAssignmentTracker.assignmentsByCheckpointId().values()) {
assertFalse("Assignment in uncompleted checkpoint should have been reverted.", assignment.containsKey(0));
}
assertFalse(splitSplitAssignmentTracker.uncheckpointedAssignments().containsKey(0));
// The split enumerator should now contain the splits that used to be
// assigned to reader 0.
assertEquals(7, getEnumerator().getUnassignedSplits().size());
}
Usage of org.apache.flink.api.connector.source.mocks.MockSourceSplit in project flink by apache.
From class SourceCoordinatorTest, method testErrorThrownFromSplitEnumerator.
@Test
public void testErrorThrownFromSplitEnumerator() throws Exception {
// An Error (not an Exception) thrown from enumerator code must fail the whole job.
final Error error = new Error("Test Error");
// Anonymous enumerator that rethrows the artificial Error on any source event.
try (final MockSplitEnumeratorContext<MockSourceSplit> enumeratorContext = new MockSplitEnumeratorContext<>(1);
final SplitEnumerator<MockSourceSplit, Set<MockSourceSplit>> splitEnumerator = new MockSplitEnumerator(1, enumeratorContext) {
@Override
public void handleSourceEvent(int subtaskId, SourceEvent sourceEvent) {
throw error;
}
};
final SourceCoordinator<?, ?> coordinator = new SourceCoordinator<>(OPERATOR_NAME, new EnumeratorCreatingSource<>(() -> splitEnumerator), context, new CoordinatorStoreImpl(), WatermarkAlignmentParams.WATERMARK_ALIGNMENT_DISABLED)) {
coordinator.start();
// Trigger the failure path by sending an (empty) source event from subtask 1.
coordinator.handleEventFromOperator(1, new SourceEventWrapper(new SourceEvent() {
}));
// The coordinator processes events asynchronously; poll until the job is marked failed.
waitUtil(() -> operatorCoordinatorContext.isJobFailed(), Duration.ofSeconds(10), "The job should have failed due to the artificial exception.");
// The exact Error instance must be surfaced as the failure cause.
assertEquals(error, operatorCoordinatorContext.getJobFailureReason());
}
}
Usage of org.apache.flink.api.connector.source.mocks.MockSourceSplit in project flink by apache.
From class CoordinatorTestUtils, method getSplitsAssignment.
/**
* Create a SplitsAssignment. The assignments looks like following: Subtask 0: Splits {0}
* Subtask 1: Splits {1, 2} Subtask 2: Splits {3, 4, 5}
*/
static SplitsAssignment<MockSourceSplit> getSplitsAssignment(int numSubtasks, int startingSplitId) {
    final Map<Integer, List<MockSourceSplit>> assignmentMap = new HashMap<>();
    int nextSplitId = startingSplitId;
    for (int subtask = 0; subtask < numSubtasks; subtask++) {
        // Subtask i receives i + 1 splits, with split ids handed out sequentially.
        final List<MockSourceSplit> splitsForSubtask = new ArrayList<>(subtask + 1);
        while (splitsForSubtask.size() <= subtask) {
            splitsForSubtask.add(new MockSourceSplit(nextSplitId++));
        }
        assignmentMap.put(subtask, splitsForSubtask);
    }
    return new SplitsAssignment<>(assignmentMap);
}
Aggregations