Use of org.apache.flink.streaming.connectors.kinesis.metrics.ShardConsumerMetricsReporter in project flink by apache.
From class ShardConsumerTest, method testCorrectNumOfCollectedRecordsAndUpdatedStateWithAggregatedRecordsWithSubSequenceStartingNumber.
@Test
public void testCorrectNumOfCollectedRecordsAndUpdatedStateWithAggregatedRecordsWithSubSequenceStartingNumber() throws Exception {
    SequenceNumber sequenceNumber = new SequenceNumber("0", 5);
    KinesisProxyInterface kinesis = spy(FakeKinesisBehavioursFactory.aggregatedRecords(1, 10, 5));
    // Expecting to start consuming from the last sub-sequence number.
    // The fake serves 5 batches of 1 aggregated record, each with 10 child records.
    // The last consumed message was sub-sequence 5 (the 6th of 10, zero based), so sub-sequences
    // 6, 7, 8 and 9 of the first aggregated record remain: 5 * 1 * 10 - 6 = 44.
    ShardConsumerMetricsReporter metrics =
            assertNumberOfMessagesReceivedFromKinesis(44, kinesis, sequenceNumber);
    assertEquals(1, metrics.getNumberOfAggregatedRecords());
    assertEquals(10, metrics.getNumberOfDeaggregatedRecords());
    verify(kinesis).getShardIterator(any(), eq("AT_SEQUENCE_NUMBER"), eq("0"));
}
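The expected count of 44 is easy to misread. The following hypothetical helper (not part of the Flink test suite; all names are illustrative) spells out the arithmetic from the comments above:

// Hypothetical helper, illustrating how the expected message count of 44 is derived.
static int expectedRemainingChildRecords(
        int batches, int aggregatedPerBatch, int childPerAggregated, int lastConsumedSubSequence) {
    int totalChildRecords = batches * aggregatedPerBatch * childPerAggregated; // 5 * 1 * 10 = 50
    int alreadyConsumed = lastConsumedSubSequence + 1;                         // sub-sequences 0..5 = 6 records
    return totalChildRecords - alreadyConsumed;                                // 50 - 6 = 44
}
// expectedRemainingChildRecords(5, 1, 10, 5) == 44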
Use of org.apache.flink.streaming.connectors.kinesis.metrics.ShardConsumerMetricsReporter in project flink by apache.
From class ShardConsumerTest, method testCorrectNumOfCollectedRecordsAndUpdatedStateWithAggregatedRecords.
@Test
public void testCorrectNumOfCollectedRecordsAndUpdatedStateWithAggregatedRecords() throws Exception {
    KinesisProxyInterface kinesis = spy(FakeKinesisBehavioursFactory.aggregatedRecords(3, 5, 10));
    // Expecting to receive all messages:
    // 10 batches of 3 aggregated records, each with 5 child records.
    // 10 * 3 * 5 = 150
    ShardConsumerMetricsReporter metrics =
            assertNumberOfMessagesReceivedFromKinesis(150, kinesis, fakeSequenceNumber());
    assertEquals(3, metrics.getNumberOfAggregatedRecords());
    assertEquals(15, metrics.getNumberOfDeaggregatedRecords());
    verify(kinesis).getShardIterator(any(), eq("AFTER_SEQUENCE_NUMBER"), eq("fakeStartingState"));
}
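This test and several others on this page start from fakeSequenceNumber(), a ShardConsumerTest helper whose body is not shown here. Judging from the verify call above, which expects an AFTER_SEQUENCE_NUMBER iterator at "fakeStartingState", it presumably looks roughly like the following sketch (an assumption, not the verbatim Flink source):

// Assumed shape of the test helper, inferred from the verify(...) expectation above.
private static SequenceNumber fakeSequenceNumber() {
    return new SequenceNumber("fakeStartingState");
}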
Use of org.apache.flink.streaming.connectors.kinesis.metrics.ShardConsumerMetricsReporter in project flink by apache.
From class ShardConsumerFanOutTest, method testMillisBehindReported.
@Test
public void testMillisBehindReported() throws Exception {
    SingleShardFanOutKinesisV2 kinesis =
            FakeKinesisFanOutBehavioursFactory.boundedShard().withMillisBehindLatest(123L).build();
    // The fake behaviour defaults to 10 messages
    ShardConsumerMetricsReporter metrics =
            assertNumberOfMessagesReceivedFromKinesis(10, kinesis, fakeSequenceNumber());
    assertEquals(123L, metrics.getMillisBehindLatest());
}
Use of org.apache.flink.streaming.connectors.kinesis.metrics.ShardConsumerMetricsReporter in project flink by apache.
From class ShardConsumerTestUtils, method assertNumberOfMessagesReceivedFromKinesis.
public static ShardConsumerMetricsReporter assertNumberOfMessagesReceivedFromKinesis(
        final int expectedNumberOfMessages, final RecordPublisherFactory recordPublisherFactory,
        final SequenceNumber startingSequenceNumber, final Properties consumerProperties,
        final SequenceNumber expectedLastProcessedSequenceNum, final AbstractMetricGroup metricGroup)
        throws InterruptedException {
    ShardConsumerMetricsReporter shardMetricsReporter = new ShardConsumerMetricsReporter(metricGroup);
    StreamShardHandle fakeToBeConsumedShard = getMockStreamShard("fakeStream", 0);
    LinkedList<KinesisStreamShardState> subscribedShardsStateUnderTest = new LinkedList<>();
    subscribedShardsStateUnderTest.add(new KinesisStreamShardState(
            KinesisDataFetcher.convertToStreamShardMetadata(fakeToBeConsumedShard),
            fakeToBeConsumedShard, startingSequenceNumber));
    TestSourceContext<String> sourceContext = new TestSourceContext<>();
    KinesisDeserializationSchemaWrapper<String> deserializationSchema =
            new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema());
    TestableKinesisDataFetcher<String> fetcher = new TestableKinesisDataFetcher<>(
            Collections.singletonList("fakeStream"), sourceContext, consumerProperties,
            deserializationSchema, 10, 2, new AtomicReference<>(), subscribedShardsStateUnderTest,
            KinesisDataFetcher.createInitialSubscribedStreamsToLastDiscoveredShardsState(
                    Collections.singletonList("fakeStream")),
            Mockito.mock(KinesisProxyInterface.class), Mockito.mock(KinesisProxyV2Interface.class));
    final StreamShardHandle shardHandle = subscribedShardsStateUnderTest.get(0).getStreamShardHandle();
    final SequenceNumber lastProcessedSequenceNum =
            subscribedShardsStateUnderTest.get(0).getLastProcessedSequenceNum();
    final StartingPosition startingPosition =
            AWSUtil.getStartingPosition(lastProcessedSequenceNum, consumerProperties);
    final RecordPublisher recordPublisher = recordPublisherFactory.create(
            startingPosition, fetcher.getConsumerConfiguration(), metricGroup, shardHandle);
    int shardIndex = fetcher.registerNewSubscribedShardState(subscribedShardsStateUnderTest.get(0));
    new ShardConsumer<>(fetcher, recordPublisher, shardIndex, shardHandle,
            lastProcessedSequenceNum, shardMetricsReporter, deserializationSchema).run();
    assertEquals(expectedNumberOfMessages, sourceContext.getCollectedOutputs().size());
    assertEquals(expectedLastProcessedSequenceNum,
            subscribedShardsStateUnderTest.get(0).getLastProcessedSequenceNum());
    return shardMetricsReporter;
}
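For orientation, a hypothetical call site for this full overload is sketched below; pollingFactory(), testConfig(), and metricGroup() are placeholder helpers standing in for whatever RecordPublisherFactory, Properties, and AbstractMetricGroup a real test would supply, and the numbers are illustrative:

// Hypothetical usage sketch only; pollingFactory(), testConfig() and metricGroup() are placeholders.
ShardConsumerMetricsReporter reporter = assertNumberOfMessagesReceivedFromKinesis(
        150,                                 // expected number of collected messages
        pollingFactory(),                    // RecordPublisherFactory backed by a fake Kinesis proxy
        fakeSequenceNumber(),                // starting sequence number for the single test shard
        testConfig(),                        // consumer Properties
        new SequenceNumber("expected-last"), // expected last processed sequence number after the run
        metricGroup());                      // AbstractMetricGroup that the reporter wraps

The helper itself performs the message-count and last-sequence-number assertions, so the caller only needs to inspect the returned reporter for metric-specific checks such as getMillisBehindLatest().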
Use of org.apache.flink.streaming.connectors.kinesis.metrics.ShardConsumerMetricsReporter in project flink by apache.
From class ShardConsumerTest, method testMetricsReporting.
@Test
public void testMetricsReporting() throws Exception {
    KinesisProxyInterface kinesis =
            FakeKinesisBehavioursFactory.totalNumOfRecordsAfterNumOfGetRecordsCalls(500, 5, 500);
    ShardConsumerMetricsReporter metrics =
            assertNumberOfMessagesReceivedFromKinesis(500, kinesis, fakeSequenceNumber());
    assertEquals(500, metrics.getMillisBehindLatest());
}
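If the arguments follow the factory method's name, totalNumOfRecordsAfterNumOfGetRecordsCalls(500, 5, 500) serves 500 records spread over 5 GetRecords calls and reports a constant lag of 500 ms, which is what the two assertions pin down. A purely illustrative breakdown (local variable names are not Flink API):

// Illustrative only: how the fake's arguments appear to line up with the assertions above.
int totalRecords = 500;        // matches the 500 messages expected from the shard
int getRecordsCalls = 5;       // the records are spread over five GetRecords responses
long millisBehindLatest = 500; // reported by the fake, hence getMillisBehindLatest() == 500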