Use of org.apache.flink.api.connector.source.SourceReaderContext in project flink by apache.
From the class HybridSourceReaderTest, method testReader.
@Test
public void testReader() throws Exception {
    TestingReaderContext readerContext = new TestingReaderContext();
    TestingReaderOutput<Integer> readerOutput = new TestingReaderOutput<>();
    MockBaseSource source = new MockBaseSource(1, 1, Boundedness.BOUNDED);
    // 2 underlying readers to exercise switch
    SourceReader<Integer, MockSourceSplit> mockSplitReader1 = source.createReader(readerContext);
    SourceReader<Integer, MockSourceSplit> mockSplitReader2 = source.createReader(readerContext);
    HybridSourceReader<Integer> reader = new HybridSourceReader<>(readerContext);
    Assert.assertThat(readerContext.getSentEvents(), Matchers.emptyIterable());
    reader.start();
    assertAndClearSourceReaderFinishedEvent(readerContext, -1);
    Assert.assertNull(currentReader(reader));
    Assert.assertEquals(InputStatus.NOTHING_AVAILABLE, reader.pollNext(readerOutput));
    Source source1 =
            new MockSource(null, 0) {
                @Override
                public SourceReader<Integer, MockSourceSplit> createReader(
                        SourceReaderContext readerContext) {
                    return mockSplitReader1;
                }
            };
    reader.handleSourceEvents(new SwitchSourceEvent(0, source1, false));
    MockSourceSplit mockSplit = new MockSourceSplit(0, 0, 1);
    mockSplit.addRecord(0);
    SwitchedSources switchedSources = new SwitchedSources();
    switchedSources.put(0, source);
    HybridSourceSplit hybridSplit = HybridSourceSplit.wrapSplit(mockSplit, 0, switchedSources);
    reader.addSplits(Collections.singletonList(hybridSplit));
    // drain splits
    InputStatus status = reader.pollNext(readerOutput);
    while (readerOutput.getEmittedRecords().isEmpty() || status == InputStatus.MORE_AVAILABLE) {
        status = reader.pollNext(readerOutput);
        Thread.sleep(10);
    }
    Assert.assertThat(readerOutput.getEmittedRecords(), Matchers.contains(0));
    reader.pollNext(readerOutput);
    Assert.assertEquals(
            "before notifyNoMoreSplits",
            InputStatus.NOTHING_AVAILABLE,
            reader.pollNext(readerOutput));
    reader.notifyNoMoreSplits();
    reader.pollNext(readerOutput);
    assertAndClearSourceReaderFinishedEvent(readerContext, 0);
    Assert.assertEquals(
            "reader before switch source event", mockSplitReader1, currentReader(reader));
    Source source2 =
            new MockSource(null, 0) {
                @Override
                public SourceReader<Integer, MockSourceSplit> createReader(
                        SourceReaderContext readerContext) {
                    return mockSplitReader2;
                }
            };
    reader.handleSourceEvents(new SwitchSourceEvent(1, source2, true));
    Assert.assertEquals(
            "reader after switch source event", mockSplitReader2, currentReader(reader));
    reader.notifyNoMoreSplits();
    Assert.assertEquals(
            "reader 1 after notifyNoMoreSplits",
            InputStatus.END_OF_INPUT,
            reader.pollNext(readerOutput));
    reader.close();
}
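The helper assertAndClearSourceReaderFinishedEvent used above is not part of this excerpt. A plausible sketch of it, assuming TestingReaderContext exposes getSentEvents() (seen above) together with a clearSentEvents() method, and that SourceReaderFinishedEvent carries the index of the finished source, would be:

// Sketch only: the real helper lives in HybridSourceReaderTest; clearSentEvents()
// on TestingReaderContext and sourceIndex() on SourceReaderFinishedEvent are assumptions.
private static void assertAndClearSourceReaderFinishedEvent(
        TestingReaderContext context, int sourceIndex) {
    Assert.assertEquals(1, context.getSentEvents().size());
    SourceReaderFinishedEvent event =
            (SourceReaderFinishedEvent) context.getSentEvents().get(0);
    Assert.assertEquals(sourceIndex, event.sourceIndex());
    context.clearSentEvents();
}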
From the class PulsarSourceReaderTestBase, method sourceReader.
private PulsarSourceReaderBase<Integer> sourceReader(
        boolean autoAcknowledgementEnabled, SubscriptionType subscriptionType) {
    Configuration configuration = operator().config();
    configuration.set(PULSAR_MAX_FETCH_RECORDS, 1);
    configuration.set(PULSAR_MAX_FETCH_TIME, 1000L);
    configuration.set(PULSAR_SUBSCRIPTION_NAME, randomAlphabetic(10));
    configuration.set(PULSAR_SUBSCRIPTION_TYPE, subscriptionType);
    if (autoAcknowledgementEnabled
            || configuration.get(PULSAR_SUBSCRIPTION_TYPE) == SubscriptionType.Shared) {
        configuration.set(PULSAR_ENABLE_AUTO_ACKNOWLEDGE_MESSAGE, true);
    }
    PulsarDeserializationSchema<Integer> deserializationSchema = pulsarSchema(Schema.INT32);
    SourceReaderContext context = new TestingReaderContext();
    try {
        deserializationSchema.open(
                new PulsarDeserializationSchemaInitializationContext(context),
                mock(SourceConfiguration.class));
    } catch (Exception e) {
        fail("Error while opening deserializationSchema");
    }
    SourceConfiguration sourceConfiguration = new SourceConfiguration(configuration);
    return (PulsarSourceReaderBase<Integer>)
            PulsarSourceReaderFactory.create(context, deserializationSchema, sourceConfiguration);
}
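A typical call site for this factory method in a test could look like the following sketch (the argument values are illustrative):

// Sketch: obtain a reader for an exclusive subscription with auto-acknowledgement
// enabled, exercise it, and release its resources afterwards.
PulsarSourceReaderBase<Integer> reader = sourceReader(true, SubscriptionType.Exclusive);
try {
    reader.start();
    // ... add splits and poll records here ...
} finally {
    reader.close();
}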
From the class SourceOperator, method initReader.
/**
* Initializes the reader. The code from this method should ideally happen in the constructor or
* in the operator factory even. It has to happen here at a slightly later stage, because of the
* lazy metric initialization.
*
* <p>Calling this method explicitly is an optional way to have the reader initialization a bit
* earlier than in open(), as needed by the {@link
* org.apache.flink.streaming.runtime.tasks.SourceOperatorStreamTask}
*
* <p>This code should move to the constructor once the metric groups are available at task
* setup time.
*/
public void initReader() throws Exception {
    if (sourceReader != null) {
        return;
    }
    final int subtaskIndex = getRuntimeContext().getIndexOfThisSubtask();

    final SourceReaderContext context =
            new SourceReaderContext() {
                @Override
                public SourceReaderMetricGroup metricGroup() {
                    return sourceMetricGroup;
                }

                @Override
                public Configuration getConfiguration() {
                    return configuration;
                }

                @Override
                public String getLocalHostName() {
                    return localHostname;
                }

                @Override
                public int getIndexOfSubtask() {
                    return subtaskIndex;
                }

                @Override
                public void sendSplitRequest() {
                    operatorEventGateway.sendEventToCoordinator(
                            new RequestSplitEvent(getLocalHostName()));
                }

                @Override
                public void sendSourceEventToCoordinator(SourceEvent event) {
                    operatorEventGateway.sendEventToCoordinator(new SourceEventWrapper(event));
                }

                @Override
                public UserCodeClassLoader getUserCodeClassLoader() {
                    return new UserCodeClassLoader() {
                        @Override
                        public ClassLoader asClassLoader() {
                            return getRuntimeContext().getUserCodeClassLoader();
                        }

                        @Override
                        public void registerReleaseHookIfAbsent(
                                String releaseHookName, Runnable releaseHook) {
                            getRuntimeContext()
                                    .registerUserCodeClassLoaderReleaseHookIfAbsent(
                                            releaseHookName, releaseHook);
                        }
                    };
                }
            };

    sourceReader = readerFactory.apply(context);
}
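The context assembled here is only consumed through readerFactory.apply(context) on the last line; the factory is essentially a function from SourceReaderContext to SourceReader, typically the Source's own createReader method. A minimal sketch of that wiring, assuming a hypothetical mySource and split type MySplit:

// Sketch: a reader factory built from a Source (mySource and MySplit are hypothetical).
FunctionWithException<SourceReaderContext, SourceReader<Integer, MySplit>, Exception>
        readerFactory = mySource::createReader;

// Later, with the SourceReaderContext built in initReader():
SourceReader<Integer, MySplit> sourceReader = readerFactory.apply(context);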
From the class HybridSourceReaderTest, method testDefaultMethodDelegation.
@Test
public void testDefaultMethodDelegation() throws Exception {
    TestingReaderContext readerContext = new TestingReaderContext();
    TestingReaderOutput<Integer> readerOutput = new TestingReaderOutput<>();
    MockBaseSource source =
            new MockBaseSource(1, 1, Boundedness.BOUNDED) {
                @Override
                public SourceReader<Integer, MockSourceSplit> createReader(
                        SourceReaderContext readerContext) {
                    return Mockito.spy(super.createReader(readerContext));
                }
            };
    HybridSourceReader<Integer> reader = new HybridSourceReader<>(readerContext);
    reader.start();
    assertAndClearSourceReaderFinishedEvent(readerContext, -1);
    reader.handleSourceEvents(new SwitchSourceEvent(0, source, false));
    SourceReader<Integer, MockSourceSplit> underlyingReader = currentReader(reader);
    reader.notifyCheckpointComplete(1);
    Mockito.verify(underlyingReader).notifyCheckpointComplete(1);
    reader.notifyCheckpointAborted(1);
    Mockito.verify(underlyingReader).notifyCheckpointAborted(1);
    reader.close();
}
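Both HybridSourceReader tests drive source switching manually via SwitchSourceEvent. In an actual job the same mechanism is set up declaratively with HybridSource; a minimal sketch, assuming two already-configured child sources named historicalSource and liveSource and an existing StreamExecutionEnvironment env:

// Sketch: compose two sources into a HybridSource and attach it to a job
// (historicalSource, liveSource and env are assumed to exist).
HybridSource<Integer> hybridSource =
        HybridSource.builder(historicalSource)
                .addSource(liveSource)
                .build();

DataStream<Integer> stream =
        env.fromSource(hybridSource, WatermarkStrategy.noWatermarks(), "hybrid-source");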