Use of org.apache.flink.connector.testutils.source.reader.TestingReaderContext in project flink by apache.
The class SourceReaderBaseTest, method createReaderAndAwaitAvailable.
// ------------------------------------------------------------------------
// Testing Setup Helpers
// ------------------------------------------------------------------------
private static <E> SourceReader<E, ?> createReaderAndAwaitAvailable(
        final String splitId, final RecordsWithSplitIds<E> records) throws Exception {

    final FutureCompletingBlockingQueue<RecordsWithSplitIds<E>> elementsQueue =
            new FutureCompletingBlockingQueue<>();

    final SourceReader<E, TestingSourceSplit> reader =
            new SingleThreadMultiplexSourceReaderBase<E, E, TestingSourceSplit, TestingSourceSplit>(
                    elementsQueue,
                    () -> new TestingSplitReader<>(records),
                    new PassThroughRecordEmitter<>(),
                    new Configuration(),
                    new TestingReaderContext()) {

                @Override
                public void notifyCheckpointComplete(long checkpointId) {}

                @Override
                protected void onSplitFinished(Map<String, TestingSourceSplit> finishedSplitIds) {}

                @Override
                protected TestingSourceSplit initializedState(TestingSourceSplit split) {
                    return split;
                }

                @Override
                protected TestingSourceSplit toSplitType(String splitId, TestingSourceSplit splitState) {
                    return splitState;
                }
            };

    reader.start();

    final List<TestingSourceSplit> splits =
            Collections.singletonList(new TestingSourceSplit(splitId));
    reader.addSplits(splits);
    reader.isAvailable().get();

    return reader;
}
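For orientation, a minimal usage sketch of this helper, assuming a prepared RecordsWithSplitIds batch named recordsBatch (hypothetical) and the getEmittedRecords() accessor on TestingReaderOutput; the record type and the polling loop are illustrative, not taken from the original test:
// Hypothetical usage of the helper above; recordsBatch is assumed to hold Integer elements.
SourceReader<Integer, ?> reader = createReaderAndAwaitAvailable("split-1", recordsBatch);
TestingReaderOutput<Integer> output = new TestingReaderOutput<>();
// Drain whatever the single fetch produced; emitted elements accumulate in the output.
while (reader.pollNext(output) == InputStatus.MORE_AVAILABLE) {
    // keep polling until the reader reports NOTHING_AVAILABLE or END_OF_INPUT
}
List<Integer> emitted = output.getEmittedRecords();
reader.close();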
Use of org.apache.flink.connector.testutils.source.reader.TestingReaderContext in project flink by apache.
The class SourceReaderBaseTest, method createReader.
// ---------------- helper methods -----------------
@Override
protected MockSourceReader createReader() {
    FutureCompletingBlockingQueue<RecordsWithSplitIds<int[]>> elementsQueue =
            new FutureCompletingBlockingQueue<>();
    MockSplitReader mockSplitReader =
            MockSplitReader.newBuilder()
                    .setNumRecordsPerSplitPerFetch(2)
                    .setBlockingFetch(true)
                    .build();
    return new MockSourceReader(
            elementsQueue, () -> mockSplitReader, getConfig(), new TestingReaderContext());
}
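The TestingReaderContext handed to the reader here is what lets a test observe reader-to-coordinator traffic. A brief illustrative sketch, assuming the getSentEvents() and getNumSplitRequests() accessors on the test utility and reusing the names mockSplitReader and getConfig() from the helper above:
// Illustrative only: keep a handle on the context instead of creating it inline.
TestingReaderContext context = new TestingReaderContext();
FutureCompletingBlockingQueue<RecordsWithSplitIds<int[]>> elementsQueue =
        new FutureCompletingBlockingQueue<>();
MockSourceReader reader =
        new MockSourceReader(elementsQueue, () -> mockSplitReader, getConfig(), context);
reader.start();
// ... drive the reader with splits and pollNext(...) as in the tests shown here ...
// Afterwards the context can be queried for what the reader sent to the coordinator.
List<SourceEvent> sentEvents = context.getSentEvents();
int numSplitRequests = context.getNumSplitRequests();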
Use of org.apache.flink.connector.testutils.source.reader.TestingReaderContext in project flink by apache.
The class SourceReaderBaseTest, method testMultipleSplitsWithSeparatedFinishedRecord.
@Test
void testMultipleSplitsWithSeparatedFinishedRecord() throws Exception {
    FutureCompletingBlockingQueue<RecordsWithSplitIds<int[]>> elementsQueue =
            new FutureCompletingBlockingQueue<>();
    MockSplitReader mockSplitReader =
            MockSplitReader.newBuilder()
                    .setNumRecordsPerSplitPerFetch(2)
                    .setSeparatedFinishedRecord(true)
                    .setBlockingFetch(false)
                    .build();
    MockSourceReader reader =
            new MockSourceReader(
                    elementsQueue, () -> mockSplitReader, getConfig(), new TestingReaderContext());
    reader.start();

    List<MockSourceSplit> splits =
            Arrays.asList(getSplit(0, 10, Boundedness.BOUNDED), getSplit(1, 10, Boundedness.BOUNDED));
    reader.addSplits(splits);
    reader.notifyNoMoreSplits();

    while (true) {
        InputStatus status = reader.pollNext(new TestingReaderOutput<>());
        if (status == InputStatus.END_OF_INPUT) {
            break;
        }
        if (status == InputStatus.NOTHING_AVAILABLE) {
            reader.isAvailable().get();
        }
    }
}
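The getSplit(...) helper is referenced above but not shown in this excerpt. A hedged reconstruction, assuming MockSourceSplit's (splitId, startIndex, endIndex) constructor and its addRecord(int) method; the record values added by the real helper in SourceReaderBaseTest may differ:
// Hedged sketch of the getSplit(...) helper; not copied from the original test class.
private MockSourceSplit getSplit(int splitId, int numRecords, Boundedness boundedness) {
    MockSourceSplit mockSplit;
    if (boundedness == Boundedness.BOUNDED) {
        // Bounded split: indexes [0, numRecords) cover the whole split.
        mockSplit = new MockSourceSplit(splitId, 0, numRecords);
    } else {
        mockSplit = new MockSourceSplit(splitId);
    }
    for (int j = 0; j < numRecords; j++) {
        mockSplit.addRecord(splitId * 10 + j);
    }
    return mockSplit;
}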
Use of org.apache.flink.connector.testutils.source.reader.TestingReaderContext in project flink by apache.
The class HybridSourceReaderTest, method testReaderRecovery.
@Test
public void testReaderRecovery() throws Exception {
    TestingReaderContext readerContext = new TestingReaderContext();
    TestingReaderOutput<Integer> readerOutput = new TestingReaderOutput<>();
    MockBaseSource source = new MockBaseSource(1, 1, Boundedness.BOUNDED);

    HybridSourceReader<Integer> reader = new HybridSourceReader<>(readerContext);
    reader.start();
    assertAndClearSourceReaderFinishedEvent(readerContext, -1);
    reader.handleSourceEvents(new SwitchSourceEvent(0, source, false));

    MockSourceSplit mockSplit = new MockSourceSplit(0, 0, Integer.MAX_VALUE);
    SwitchedSources switchedSources = new SwitchedSources();
    switchedSources.put(0, source);
    HybridSourceSplit hybridSplit = HybridSourceSplit.wrapSplit(mockSplit, 0, switchedSources);
    reader.addSplits(Collections.singletonList(hybridSplit));

    List<HybridSourceSplit> snapshot = reader.snapshotState(0);
    Assert.assertThat(snapshot, Matchers.contains(hybridSplit));

    // reader recovery
    readerContext.clearSentEvents();
    reader = new HybridSourceReader<>(readerContext);
    reader.addSplits(snapshot);
    Assert.assertNull(currentReader(reader));

    reader.start();
    Assert.assertNull(currentReader(reader));
    assertAndClearSourceReaderFinishedEvent(readerContext, -1);
    reader.handleSourceEvents(new SwitchSourceEvent(0, source, false));
    Assert.assertNotNull(currentReader(reader));
    Assert.assertThat(reader.snapshotState(1), Matchers.contains(hybridSplit));

    reader.close();
}
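The assertAndClearSourceReaderFinishedEvent helper is outside this excerpt. A hedged sketch of what it plausibly checks, assuming the getSentEvents()/clearSentEvents() accessors on TestingReaderContext and a sourceIndex() accessor on SourceReaderFinishedEvent; the actual helper in HybridSourceReaderTest may differ:
// Hedged reconstruction: verifies that exactly one SourceReaderFinishedEvent with the
// expected source index was sent through the reader context, then clears the recorded events.
private static void assertAndClearSourceReaderFinishedEvent(
        TestingReaderContext readerContext, int sourceIndex) {
    List<SourceEvent> events = readerContext.getSentEvents();
    Assert.assertEquals(1, events.size());
    SourceReaderFinishedEvent event = (SourceReaderFinishedEvent) events.get(0);
    Assert.assertEquals(sourceIndex, event.sourceIndex());
    readerContext.clearSentEvents();
}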
Use of org.apache.flink.connector.testutils.source.reader.TestingReaderContext in project flink by apache.
The class HybridSourceReaderTest, method testDefaultMethodDelegation.
@Test
public void testDefaultMethodDelegation() throws Exception {
    TestingReaderContext readerContext = new TestingReaderContext();
    TestingReaderOutput<Integer> readerOutput = new TestingReaderOutput<>();
    MockBaseSource source =
            new MockBaseSource(1, 1, Boundedness.BOUNDED) {
                @Override
                public SourceReader<Integer, MockSourceSplit> createReader(
                        SourceReaderContext readerContext) {
                    return Mockito.spy(super.createReader(readerContext));
                }
            };

    HybridSourceReader<Integer> reader = new HybridSourceReader<>(readerContext);
    reader.start();
    assertAndClearSourceReaderFinishedEvent(readerContext, -1);
    reader.handleSourceEvents(new SwitchSourceEvent(0, source, false));
    SourceReader<Integer, MockSourceSplit> underlyingReader = currentReader(reader);

    reader.notifyCheckpointComplete(1);
    Mockito.verify(underlyingReader).notifyCheckpointComplete(1);

    reader.notifyCheckpointAborted(1);
    Mockito.verify(underlyingReader).notifyCheckpointAborted(1);

    reader.close();
}
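The currentReader(...) helper used in both HybridSourceReaderTest excerpts is also not shown; it reaches into HybridSourceReader to fetch the currently active delegate. A hedged sketch using plain reflection, where the field name "currentReader" is an assumption about the reader's internal layout and the real helper may use a test utility instead:
// Hedged reconstruction: reads the active delegate reader via reflection.
@SuppressWarnings("unchecked")
private static SourceReader<Integer, MockSourceSplit> currentReader(
        HybridSourceReader<Integer> reader) throws Exception {
    java.lang.reflect.Field field = HybridSourceReader.class.getDeclaredField("currentReader");
    field.setAccessible(true);
    return (SourceReader<Integer, MockSourceSplit>) field.get(reader);
}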