Search in sources :

Example 16 with StreamShardHandle

use of org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle in project flink by apache.

The class KinesisProxyTest, method testGetShardList.

@Test
public void testGetShardList() throws Exception {
    // Four shards that the mocked Kinesis client will return, split across two pages.
    List<String> shardIds = Arrays.asList("shardId-000000000000", "shardId-000000000001", "shardId-000000000002", "shardId-000000000003");
    String nextToken = "NextToken";
    String fakeStreamName = "fake-stream";
    List<Shard> shards = shardIds.stream().map(shardId -> new Shard().withShardId(shardId)).collect(Collectors.toList());
    AmazonKinesis mockClient = mock(AmazonKinesis.class);
    KinesisProxy kinesisProxy = getProxy(mockClient);
    // First page carries a next-token; second page ends pagination with a null token.
    ListShardsResult responseWithMoreData = new ListShardsResult().withShards(shards.subList(0, 2)).withNextToken(nextToken);
    ListShardsResult responseFinal = new ListShardsResult().withShards(shards.subList(2, shards.size())).withNextToken(null);
    doReturn(responseWithMoreData).when(mockClient).listShards(argThat(initialListShardsRequestMatcher()));
    doReturn(responseFinal).when(mockClient).listShards(argThat(listShardsNextToken(nextToken)));
    HashMap<String, String> streamHashMap = createInitialSubscribedStreamsToLastDiscoveredShardsState(Arrays.asList(fakeStreamName));
    GetShardListResult shardListResult = kinesisProxy.getShardList(streamHashMap);
    // FIX: was assertEquals(actual, true) — assertTrue states the intent directly and
    // avoids the reversed (expected, actual) order.
    Assert.assertTrue(shardListResult.hasRetrievedShards());
    Set<String> expectedStreams = new HashSet<>();
    expectedStreams.add(fakeStreamName);
    // FIX: JUnit's assertEquals takes (expected, actual); the original reversed them,
    // which yields misleading failure messages.
    Assert.assertEquals(expectedStreams, shardListResult.getStreamsWithRetrievedShards());
    List<StreamShardHandle> actualShardList = shardListResult.getRetrievedShardListOfStream(fakeStreamName);
    assertThat(actualShardList, hasSize(4));
    // Expected handles are derived from the same shard-order generator used above.
    List<StreamShardHandle> expectedStreamShard = new ArrayList<>();
    for (int i = 0; i < 4; i++) {
        StreamShardHandle shardHandle = new StreamShardHandle(fakeStreamName, new Shard().withShardId(KinesisShardIdGenerator.generateFromShardOrder(i)));
        expectedStreamShard.add(shardHandle);
    }
    // FIX: use the statically imported (non-deprecated) assertThat consistently, and the
    // zero-length-array toArray idiom.
    assertThat(actualShardList, containsInAnyOrder(expectedStreamShard.toArray(new StreamShardHandle[0])));
}
Also used : Shard(com.amazonaws.services.kinesis.model.Shard) Arrays(java.util.Arrays) MutableInt(org.apache.commons.lang3.mutable.MutableInt) ConsumerConfigConstants(org.apache.flink.streaming.connectors.kinesis.config.ConsumerConfigConstants) IsIterableContainingInAnyOrder.containsInAnyOrder(org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder) ClientConfigurationFactory(com.amazonaws.ClientConfigurationFactory) InetAddress(java.net.InetAddress) MockitoHamcrest.argThat(org.mockito.hamcrest.MockitoHamcrest.argThat) KinesisShardIdGenerator(org.apache.flink.streaming.connectors.kinesis.testutils.KinesisShardIdGenerator) Mockito.doReturn(org.mockito.Mockito.doReturn) ListShardsResult(com.amazonaws.services.kinesis.model.ListShardsResult) GetRecordsResult(com.amazonaws.services.kinesis.model.GetRecordsResult) AmazonServiceException(com.amazonaws.AmazonServiceException) AmazonKinesis(com.amazonaws.services.kinesis.AmazonKinesis) StreamShardHandle(org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle) Set(java.util.Set) Collectors(java.util.stream.Collectors) IsCollectionWithSize.hasSize(org.hamcrest.collection.IsCollectionWithSize.hasSize) Matchers.any(org.mockito.Matchers.any) AWSConfigConstants(org.apache.flink.streaming.connectors.kinesis.config.AWSConfigConstants) List(java.util.List) Assert.assertFalse(org.junit.Assert.assertFalse) Mockito.mock(org.mockito.Mockito.mock) IntStream(java.util.stream.IntStream) Whitebox(org.powermock.reflect.Whitebox) ListShardsRequest(com.amazonaws.services.kinesis.model.ListShardsRequest) ProvisionedThroughputExceededException(com.amazonaws.services.kinesis.model.ProvisionedThroughputExceededException) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) TypeSafeDiagnosingMatcher(org.hamcrest.TypeSafeDiagnosingMatcher) HashSet(java.util.HashSet) Answer(org.mockito.stubbing.Answer) InvocationOnMock(org.mockito.invocation.InvocationOnMock) 
ConnectTimeoutException(org.apache.http.conn.ConnectTimeoutException) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Description(org.hamcrest.Description) Properties(java.util.Properties) Assert.assertTrue(org.junit.Assert.assertTrue) ErrorType(com.amazonaws.AmazonServiceException.ErrorType) Test(org.junit.Test) UnknownHostException(java.net.UnknownHostException) Mockito(org.mockito.Mockito) SdkClientException(com.amazonaws.SdkClientException) ClientConfiguration(com.amazonaws.ClientConfiguration) AmazonKinesisClient(com.amazonaws.services.kinesis.AmazonKinesisClient) AWSUtil(org.apache.flink.streaming.connectors.kinesis.util.AWSUtil) AmazonKinesisException(com.amazonaws.services.kinesis.model.AmazonKinesisException) Assert(org.junit.Assert) ExpiredIteratorException(com.amazonaws.services.kinesis.model.ExpiredIteratorException) HttpHost(org.apache.http.HttpHost) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) ListShardsResult(com.amazonaws.services.kinesis.model.ListShardsResult) ArrayList(java.util.ArrayList) StreamShardHandle(org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle) Shard(com.amazonaws.services.kinesis.model.Shard) AmazonKinesis(com.amazonaws.services.kinesis.AmazonKinesis) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 17 with StreamShardHandle

use of org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle in project flink by apache.

The class KinesisProxyTest, method testGetShardListWithNewShardsOnSecondRun.

@Test
public void testGetShardListWithNewShardsOnSecondRun() throws Exception {
    // given: two shards returned in a single (un-paginated) response
    List<String> shardIds = Arrays.asList(KinesisShardIdGenerator.generateFromShardOrder(0), KinesisShardIdGenerator.generateFromShardOrder(1));
    String fakeStreamName = "fake-stream";
    List<Shard> shards = shardIds.stream().map(shardId -> new Shard().withShardId(shardId)).collect(Collectors.toList());
    AmazonKinesis mockClient = mock(AmazonKinesis.class);
    KinesisProxy kinesisProxy = getProxy(mockClient);
    ListShardsResult responseFirst = new ListShardsResult().withShards(shards).withNextToken(null);
    doReturn(responseFirst).when(mockClient).listShards(argThat(initialListShardsRequestMatcher()));
    HashMap<String, String> streamHashMap = createInitialSubscribedStreamsToLastDiscoveredShardsState(Collections.singletonList(fakeStreamName));
    // when
    GetShardListResult shardListResult = kinesisProxy.getShardList(streamHashMap);
    // then
    Assert.assertTrue(shardListResult.hasRetrievedShards());
    Set<String> expectedStreams = new HashSet<>();
    expectedStreams.add(fakeStreamName);
    // FIX: JUnit's assertEquals takes (expected, actual); the original reversed them.
    Assert.assertEquals(expectedStreams, shardListResult.getStreamsWithRetrievedShards());
    List<StreamShardHandle> actualShardList = shardListResult.getRetrievedShardListOfStream(fakeStreamName);
    // FIX: use the statically imported MatcherAssert.assertThat instead of the
    // deprecated Assert.assertThat (same behavior, non-deprecated API).
    assertThat(actualShardList, hasSize(2));
    List<StreamShardHandle> expectedStreamShard = IntStream.range(0, actualShardList.size()).mapToObj(i -> new StreamShardHandle(fakeStreamName, new Shard().withShardId(KinesisShardIdGenerator.generateFromShardOrder(i)))).collect(Collectors.toList());
    assertThat(actualShardList, containsInAnyOrder(expectedStreamShard.toArray(new StreamShardHandle[0])));
    // given new shards: the discovery state in streamHashMap now points past shard 1,
    // so only the newly created shard 2 should be returned on the second run
    ListShardsResult responseSecond = new ListShardsResult().withShards(new Shard().withShardId(KinesisShardIdGenerator.generateFromShardOrder(2))).withNextToken(null);
    doReturn(responseSecond).when(mockClient).listShards(argThat(initialListShardsRequestMatcher()));
    // when new shards
    GetShardListResult newShardListResult = kinesisProxy.getShardList(streamHashMap);
    // then new shards
    Assert.assertTrue(newShardListResult.hasRetrievedShards());
    Assert.assertEquals(expectedStreams, newShardListResult.getStreamsWithRetrievedShards());
    List<StreamShardHandle> newActualShardList = newShardListResult.getRetrievedShardListOfStream(fakeStreamName);
    assertThat(newActualShardList, hasSize(1));
    List<StreamShardHandle> newExpectedStreamShard = Collections.singletonList(new StreamShardHandle(fakeStreamName, new Shard().withShardId(KinesisShardIdGenerator.generateFromShardOrder(2))));
    assertThat(newActualShardList, containsInAnyOrder(newExpectedStreamShard.toArray(new StreamShardHandle[0])));
}
Also used : Shard(com.amazonaws.services.kinesis.model.Shard) Arrays(java.util.Arrays) MutableInt(org.apache.commons.lang3.mutable.MutableInt) ConsumerConfigConstants(org.apache.flink.streaming.connectors.kinesis.config.ConsumerConfigConstants) IsIterableContainingInAnyOrder.containsInAnyOrder(org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder) ClientConfigurationFactory(com.amazonaws.ClientConfigurationFactory) InetAddress(java.net.InetAddress) MockitoHamcrest.argThat(org.mockito.hamcrest.MockitoHamcrest.argThat) KinesisShardIdGenerator(org.apache.flink.streaming.connectors.kinesis.testutils.KinesisShardIdGenerator) Mockito.doReturn(org.mockito.Mockito.doReturn) ListShardsResult(com.amazonaws.services.kinesis.model.ListShardsResult) GetRecordsResult(com.amazonaws.services.kinesis.model.GetRecordsResult) AmazonServiceException(com.amazonaws.AmazonServiceException) AmazonKinesis(com.amazonaws.services.kinesis.AmazonKinesis) StreamShardHandle(org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle) Set(java.util.Set) Collectors(java.util.stream.Collectors) IsCollectionWithSize.hasSize(org.hamcrest.collection.IsCollectionWithSize.hasSize) Matchers.any(org.mockito.Matchers.any) AWSConfigConstants(org.apache.flink.streaming.connectors.kinesis.config.AWSConfigConstants) List(java.util.List) Assert.assertFalse(org.junit.Assert.assertFalse) Mockito.mock(org.mockito.Mockito.mock) IntStream(java.util.stream.IntStream) Whitebox(org.powermock.reflect.Whitebox) ListShardsRequest(com.amazonaws.services.kinesis.model.ListShardsRequest) ProvisionedThroughputExceededException(com.amazonaws.services.kinesis.model.ProvisionedThroughputExceededException) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) TypeSafeDiagnosingMatcher(org.hamcrest.TypeSafeDiagnosingMatcher) HashSet(java.util.HashSet) Answer(org.mockito.stubbing.Answer) InvocationOnMock(org.mockito.invocation.InvocationOnMock) 
ConnectTimeoutException(org.apache.http.conn.ConnectTimeoutException) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Description(org.hamcrest.Description) Properties(java.util.Properties) Assert.assertTrue(org.junit.Assert.assertTrue) ErrorType(com.amazonaws.AmazonServiceException.ErrorType) Test(org.junit.Test) UnknownHostException(java.net.UnknownHostException) Mockito(org.mockito.Mockito) SdkClientException(com.amazonaws.SdkClientException) ClientConfiguration(com.amazonaws.ClientConfiguration) AmazonKinesisClient(com.amazonaws.services.kinesis.AmazonKinesisClient) AWSUtil(org.apache.flink.streaming.connectors.kinesis.util.AWSUtil) AmazonKinesisException(com.amazonaws.services.kinesis.model.AmazonKinesisException) Assert(org.junit.Assert) ExpiredIteratorException(com.amazonaws.services.kinesis.model.ExpiredIteratorException) HttpHost(org.apache.http.HttpHost) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) ListShardsResult(com.amazonaws.services.kinesis.model.ListShardsResult) StreamShardHandle(org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle) Shard(com.amazonaws.services.kinesis.model.Shard) AmazonKinesis(com.amazonaws.services.kinesis.AmazonKinesis) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 18 with StreamShardHandle

use of org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle in project flink by apache.

the class FlinkKinesisConsumerMigrationTest method writeSnapshot.

// ------------------------------------------------------------------------
@SuppressWarnings("unchecked")
private void writeSnapshot(String path, HashMap<StreamShardMetadata, SequenceNumber> state) throws Exception {
    // Build the handles that initial shard discovery would have produced for the
    // given state, each starting at sequence number "1".
    final List<StreamShardHandle> initialDiscoveryShards = new ArrayList<>(state.size());
    for (StreamShardMetadata shardMetadata : state.keySet()) {
        Shard shard = new Shard();
        shard.setShardId(shardMetadata.getShardId());
        SequenceNumberRange sequenceNumberRange = new SequenceNumberRange();
        sequenceNumberRange.withStartingSequenceNumber("1");
        shard.setSequenceNumberRange(sequenceNumberRange);
        initialDiscoveryShards.add(new StreamShardHandle(shardMetadata.getStreamName(), shard));
    }
    final TestFetcher<String> fetcher = new TestFetcher<>(Collections.singletonList(TEST_STREAM_NAME), new TestSourceContext<>(), new TestRuntimeContext(true, 1, 0), TestUtils.getStandardProperties(), new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema()), state, initialDiscoveryShards);
    final DummyFlinkKinesisConsumer<String> consumer = new DummyFlinkKinesisConsumer<>(fetcher, new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema()));
    StreamSource<String, DummyFlinkKinesisConsumer<String>> consumerOperator = new StreamSource<>(consumer);
    final AbstractStreamOperatorTestHarness<String> testHarness = new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
    testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
    testHarness.setup();
    testHarness.open();
    final AtomicReference<Throwable> error = new AtomicReference<>();
    // run the source asynchronously
    Thread runner = new Thread() {

        @Override
        public void run() {
            try {
                consumer.run(new TestSourceContext<>());
            } catch (Throwable t) {
                t.printStackTrace();
                error.set(t);
            }
        }
    };
    runner.start();
    fetcher.waitUntilRun();
    // Snapshot under the checkpoint lock, mirroring how checkpoints are taken at runtime.
    final OperatorSubtaskState snapshot;
    synchronized (testHarness.getCheckpointLock()) {
        snapshot = testHarness.snapshot(0L, 0L);
    }
    OperatorSnapshotUtil.writeStateHandle(snapshot, path);
    consumerOperator.close();
    runner.join();
    // FIX: surface failures from the source thread. Previously the captured Throwable
    // was stored but never checked, so a failing run would still write a snapshot and
    // silently pass.
    if (error.get() != null) {
        throw new AssertionError("Error while running the consumer source", error.get());
    }
}
Also used : SequenceNumberRange(com.amazonaws.services.kinesis.model.SequenceNumberRange) StreamSource(org.apache.flink.streaming.api.operators.StreamSource) ArrayList(java.util.ArrayList) TestRuntimeContext(org.apache.flink.streaming.connectors.kinesis.testutils.TestRuntimeContext) AtomicReference(java.util.concurrent.atomic.AtomicReference) StreamShardMetadata(org.apache.flink.streaming.connectors.kinesis.model.StreamShardMetadata) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) AbstractStreamOperatorTestHarness(org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness) StreamShardHandle(org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle) SimpleStringSchema(org.apache.flink.api.common.serialization.SimpleStringSchema) Shard(com.amazonaws.services.kinesis.model.Shard)

Example 19 with StreamShardHandle

use of org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle in project flink by apache.

the class FlinkKinesisConsumerMigrationTest method testRestoreWithEmptyState.

@Test
public void testRestoreWithEmptyState() throws Exception {
    // Recreate the handles that initial shard discovery would have produced for the
    // shards recorded in TEST_STATE, each starting at sequence number "1".
    final List<StreamShardHandle> initialDiscoveryShards = new ArrayList<>(TEST_STATE.size());
    for (StreamShardMetadata metadata : TEST_STATE.keySet()) {
        final SequenceNumberRange startingRange = new SequenceNumberRange();
        startingRange.withStartingSequenceNumber("1");
        final Shard discoveredShard = new Shard();
        discoveredShard.setShardId(metadata.getShardId());
        discoveredShard.setSequenceNumberRange(startingRange);
        initialDiscoveryShards.add(new StreamShardHandle(metadata.getStreamName(), discoveredShard));
    }
    final TestFetcher<String> fetcher =
            new TestFetcher<>(
                    Collections.singletonList(TEST_STREAM_NAME),
                    new TestSourceContext<>(),
                    new TestRuntimeContext(true, 1, 0),
                    TestUtils.getStandardProperties(),
                    new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema()),
                    null,
                    initialDiscoveryShards);
    final DummyFlinkKinesisConsumer<String> consumerFunction =
            new DummyFlinkKinesisConsumer<>(fetcher, new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema()));
    final StreamSource<String, DummyFlinkKinesisConsumer<String>> consumerOperator =
            new StreamSource<>(consumerFunction);
    final AbstractStreamOperatorTestHarness<String> testHarness =
            new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
    testHarness.setup();
    // Restore from the migration snapshot that contains no consumer state.
    testHarness.initializeState(OperatorSnapshotUtil.getResourceFilename("kinesis-consumer-migration-test-flink" + testMigrateVersion + "-empty-snapshot"));
    testHarness.open();
    consumerFunction.run(new TestSourceContext<>());
    // assert that no state was restored
    assertTrue(consumerFunction.getRestoredState().isEmpty());
    // although the restore state is empty, the fetcher should still have been registered the
    // initial discovered shard;
    // furthermore, the discovered shard should be considered a newly created shard while the
    // job wasn't running,
    // and therefore should be consumed from the earliest sequence number
    final KinesisStreamShardState subscribedState = fetcher.getSubscribedShardsState().get(0);
    assertEquals(TEST_STREAM_NAME, subscribedState.getStreamShardHandle().getStreamName());
    assertEquals(TEST_SHARD_ID, subscribedState.getStreamShardHandle().getShard().getShardId());
    assertFalse(subscribedState.getStreamShardHandle().isClosed());
    assertEquals(SentinelSequenceNumber.SENTINEL_EARLIEST_SEQUENCE_NUM.get(), subscribedState.getLastProcessedSequenceNum());
    consumerOperator.close();
    consumerOperator.cancel();
}
Also used : SequenceNumberRange(com.amazonaws.services.kinesis.model.SequenceNumberRange) StreamSource(org.apache.flink.streaming.api.operators.StreamSource) ArrayList(java.util.ArrayList) TestRuntimeContext(org.apache.flink.streaming.connectors.kinesis.testutils.TestRuntimeContext) StreamShardMetadata(org.apache.flink.streaming.connectors.kinesis.model.StreamShardMetadata) AbstractStreamOperatorTestHarness(org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness) StreamShardHandle(org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle) SimpleStringSchema(org.apache.flink.api.common.serialization.SimpleStringSchema) KinesisStreamShardState(org.apache.flink.streaming.connectors.kinesis.model.KinesisStreamShardState) Shard(com.amazonaws.services.kinesis.model.Shard) Test(org.junit.Test)

Example 20 with StreamShardHandle

use of org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle in project flink by apache.

the class FlinkKinesisConsumerMigrationTest method testRestore.

@Test
public void testRestore() throws Exception {
    // Recreate the handles that initial shard discovery would have produced for the
    // shards recorded in TEST_STATE, each starting at sequence number "1".
    final List<StreamShardHandle> initialDiscoveryShards = new ArrayList<>(TEST_STATE.size());
    for (StreamShardMetadata shardMetadata : TEST_STATE.keySet()) {
        Shard shard = new Shard();
        shard.setShardId(shardMetadata.getShardId());
        SequenceNumberRange sequenceNumberRange = new SequenceNumberRange();
        sequenceNumberRange.withStartingSequenceNumber("1");
        shard.setSequenceNumberRange(sequenceNumberRange);
        initialDiscoveryShards.add(new StreamShardHandle(shardMetadata.getStreamName(), shard));
    }
    final TestFetcher<String> fetcher = new TestFetcher<>(Collections.singletonList(TEST_STREAM_NAME), new TestSourceContext<>(), new TestRuntimeContext(true, 1, 0), TestUtils.getStandardProperties(), new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema()), null, initialDiscoveryShards);
    final DummyFlinkKinesisConsumer<String> consumerFunction = new DummyFlinkKinesisConsumer<>(fetcher, new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema()));
    StreamSource<String, DummyFlinkKinesisConsumer<String>> consumerOperator = new StreamSource<>(consumerFunction);
    final AbstractStreamOperatorTestHarness<String> testHarness = new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
    testHarness.setup();
    // Restore from the non-empty migration snapshot for the version under test.
    testHarness.initializeState(OperatorSnapshotUtil.getResourceFilename("kinesis-consumer-migration-test-flink" + testMigrateVersion + "-snapshot"));
    testHarness.open();
    consumerFunction.run(new TestSourceContext<>());
    // assert that state is correctly restored
    // FIX: assertNotNull states the intent directly instead of assertNotEquals(null, ...).
    Assert.assertNotNull(consumerFunction.getRestoredState());
    assertEquals(1, consumerFunction.getRestoredState().size());
    assertEquals(TEST_STATE, removeEquivalenceWrappers(consumerFunction.getRestoredState()));
    assertEquals(1, fetcher.getSubscribedShardsState().size());
    assertEquals(TEST_SEQUENCE_NUMBER, fetcher.getSubscribedShardsState().get(0).getLastProcessedSequenceNum());
    // The restored shard state must match the shard recorded in the snapshot and must
    // resume from the exact sequence number that was checkpointed.
    KinesisStreamShardState restoredShardState = fetcher.getSubscribedShardsState().get(0);
    assertEquals(TEST_STREAM_NAME, restoredShardState.getStreamShardHandle().getStreamName());
    assertEquals(TEST_SHARD_ID, restoredShardState.getStreamShardHandle().getShard().getShardId());
    assertFalse(restoredShardState.getStreamShardHandle().isClosed());
    assertEquals(TEST_SEQUENCE_NUMBER, restoredShardState.getLastProcessedSequenceNum());
    consumerOperator.close();
    consumerOperator.cancel();
}
Also used : SequenceNumberRange(com.amazonaws.services.kinesis.model.SequenceNumberRange) StreamSource(org.apache.flink.streaming.api.operators.StreamSource) ArrayList(java.util.ArrayList) TestRuntimeContext(org.apache.flink.streaming.connectors.kinesis.testutils.TestRuntimeContext) StreamShardMetadata(org.apache.flink.streaming.connectors.kinesis.model.StreamShardMetadata) AbstractStreamOperatorTestHarness(org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness) StreamShardHandle(org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle) SimpleStringSchema(org.apache.flink.api.common.serialization.SimpleStringSchema) KinesisStreamShardState(org.apache.flink.streaming.connectors.kinesis.model.KinesisStreamShardState) Shard(com.amazonaws.services.kinesis.model.Shard) Test(org.junit.Test)

Aggregations

StreamShardHandle (org.apache.flink.streaming.connectors.kinesis.model.StreamShardHandle)30 Shard (com.amazonaws.services.kinesis.model.Shard)22 Test (org.junit.Test)19 ArrayList (java.util.ArrayList)18 HashMap (java.util.HashMap)16 KinesisStreamShardState (org.apache.flink.streaming.connectors.kinesis.model.KinesisStreamShardState)16 SequenceNumber (org.apache.flink.streaming.connectors.kinesis.model.SequenceNumber)15 SimpleStringSchema (org.apache.flink.api.common.serialization.SimpleStringSchema)14 Properties (java.util.Properties)12 Map (java.util.Map)10 StreamShardMetadata (org.apache.flink.streaming.connectors.kinesis.model.StreamShardMetadata)10 SequenceNumberRange (com.amazonaws.services.kinesis.model.SequenceNumberRange)9 SentinelSequenceNumber (org.apache.flink.streaming.connectors.kinesis.model.SentinelSequenceNumber)9 LinkedList (java.util.LinkedList)8 TestableKinesisDataFetcher (org.apache.flink.streaming.connectors.kinesis.testutils.TestableKinesisDataFetcher)7 ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor)6 OperatorStateStore (org.apache.flink.api.common.state.OperatorStateStore)6 Tuple2 (org.apache.flink.api.java.tuple.Tuple2)6 Configuration (org.apache.flink.configuration.Configuration)6 StateInitializationContext (org.apache.flink.runtime.state.StateInitializationContext)6