
Example 1 with Repartitioned

Use of org.apache.kafka.streams.kstream.Repartitioned in the apache/kafka project.

From the class KStreamRepartitionIntegrationTest, method shouldCreateOnlyOneRepartitionTopicWhenRepartitionIsFollowedByGroupByKey.

@Test
public void shouldCreateOnlyOneRepartitionTopicWhenRepartitionIsFollowedByGroupByKey() throws Exception {
    final String repartitionName = "new-partitions";
    final long timestamp = System.currentTimeMillis();
    sendEvents(timestamp, Arrays.asList(new KeyValue<>(1, "A"), new KeyValue<>(2, "B")));
    final StreamsBuilder builder = new StreamsBuilder();
    final Repartitioned<String, String> repartitioned = Repartitioned.<String, String>as(repartitionName)
        .withKeySerde(Serdes.String())
        .withValueSerde(Serdes.String())
        .withNumberOfPartitions(1);
    // repartition() followed by groupByKey() should reuse the explicit repartition topic
    // instead of creating a second one for the aggregation.
    builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
        .selectKey((key, value) -> key.toString())
        .repartition(repartitioned)
        .groupByKey()
        .count()
        .toStream()
        .to(outputTopic);
    startStreams(builder);
    final String topology = builder.build().describe().toString();
    validateReceivedMessages(new StringDeserializer(), new LongDeserializer(), Arrays.asList(new KeyValue<>("1", 1L), new KeyValue<>("2", 1L)));
    assertTrue(topicExists(toRepartitionTopicName(repartitionName)));
    assertEquals(1, countOccurrencesInTopology(topology, "Sink: .*-repartition"));
}
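The single-repartition-topic assertion does not need a running cluster: Topology#describe() already contains the repartition sink once the topology is built. The test's countOccurrencesInTopology and toRepartitionTopicName helpers are not shown on this page, so the following is a minimal, self-contained sketch that counts the matches directly; the topic names "input" and "output" are placeholders, not taken from the test.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Repartitioned;

public class SingleRepartitionTopicSketch {

    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        final Repartitioned<String, String> repartitioned = Repartitioned.<String, String>as("new-partitions")
            .withKeySerde(Serdes.String())
            .withValueSerde(Serdes.String())
            .withNumberOfPartitions(1);

        // Same shape as the test: explicit repartition() followed by groupByKey().count().
        builder.stream("input", Consumed.with(Serdes.Integer(), Serdes.String()))
            .selectKey((key, value) -> key.toString())
            .repartition(repartitioned)
            .groupByKey()
            .count()
            .toStream()
            .to("output");

        // Count repartition sinks in the topology description, mirroring the test's assertion.
        final String topology = builder.build().describe().toString();
        final Matcher matcher = Pattern.compile("Sink: .*-repartition").matcher(topology);
        int occurrences = 0;
        while (matcher.find()) {
            occurrences++;
        }
        System.out.println("repartition sinks: " + occurrences); // expected: 1
    }
}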

Example 2 with Repartitioned

Use of org.apache.kafka.streams.kstream.Repartitioned in the apache/kafka project.

From the class KStreamRepartitionIntegrationTest, method shouldDeductNumberOfPartitionsFromRepartitionOperation.

@Test
public void shouldDeductNumberOfPartitionsFromRepartitionOperation() throws Exception {
    final String topicBMapperName = "topic-b-mapper";
    final int topicBNumberOfPartitions = 6;
    final String inputTopicRepartitionName = "join-repartition-test";
    final int inputTopicRepartitionedNumOfPartitions = 3;
    final long timestamp = System.currentTimeMillis();
    CLUSTER.createTopic(topicB, topicBNumberOfPartitions, 1);
    final List<KeyValue<Integer, String>> expectedRecords = Arrays.asList(new KeyValue<>(1, "A"), new KeyValue<>(2, "B"));
    sendEvents(timestamp, expectedRecords);
    sendEvents(topicB, timestamp, expectedRecords);
    final StreamsBuilder builder = new StreamsBuilder();
    final Repartitioned<Integer, String> inputTopicRepartitioned = Repartitioned.<Integer, String>as(inputTopicRepartitionName)
        .withNumberOfPartitions(inputTopicRepartitionedNumOfPartitions);
    // The topic-b side gives no explicit partition count; its repartition topic
    // should inherit the count from the other side of the join.
    final KStream<Integer, String> topicBStream = builder.stream(topicB, Consumed.with(Serdes.Integer(), Serdes.String()))
        .map(KeyValue::new, Named.as(topicBMapperName));
    builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
        .repartition(inputTopicRepartitioned)
        .join(topicBStream, (value1, value2) -> value2, JoinWindows.of(Duration.ofSeconds(10)))
        .to(outputTopic);
    builder.build(streamsConfiguration);
    startStreams(builder);
    assertEquals(inputTopicRepartitionedNumOfPartitions, getNumberOfPartitionsForTopic(toRepartitionTopicName(inputTopicRepartitionName)));
    assertEquals(inputTopicRepartitionedNumOfPartitions, getNumberOfPartitionsForTopic(toRepartitionTopicName(topicBMapperName)));
    validateReceivedMessages(new IntegerDeserializer(), new StringDeserializer(), expectedRecords);
}
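Only the inputTopic side of the join specifies a partition count here; the repartition topic that map() forces on the topic-b side has none, and at startup Kafka Streams sizes it to match the join partner, which is what the two assertEquals calls verify. A topology-only sketch of the same pattern follows; the topic names are placeholders, and the derived partition count is decided at runtime, so it is not visible in describe().

import java.time.Duration;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.JoinWindows;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Named;
import org.apache.kafka.streams.kstream.Repartitioned;

public class DerivedPartitionCountSketch {

    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();

        // Explicit partition count on one side of the join only.
        final Repartitioned<Integer, String> leftRepartitioned = Repartitioned.<Integer, String>as("join-repartition-test")
            .withNumberOfPartitions(3);

        // map() forces a repartition of the right side, but no count is given;
        // at runtime Streams sizes that topic to match the join partner.
        final KStream<Integer, String> right = builder.stream("topic-b", Consumed.with(Serdes.Integer(), Serdes.String()))
            .map(KeyValue::new, Named.as("topic-b-mapper"));

        builder.stream("input", Consumed.with(Serdes.Integer(), Serdes.String()))
            .repartition(leftRepartitioned)
            .join(right, (leftValue, rightValue) -> rightValue, JoinWindows.of(Duration.ofSeconds(10)))
            .to("output");

        System.out.println(builder.build().describe());
    }
}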

Example 3 with Repartitioned

Use of org.apache.kafka.streams.kstream.Repartitioned in the apache/kafka project.

From the class KStreamRepartitionIntegrationTest, method shouldGoThroughRebalancingCorrectly.

@Test
public void shouldGoThroughRebalancingCorrectly() throws Exception {
    final String repartitionName = "rebalancing-test";
    final long timestamp = System.currentTimeMillis();
    sendEvents(timestamp, Arrays.asList(new KeyValue<>(1, "A"), new KeyValue<>(2, "B")));
    final StreamsBuilder builder = new StreamsBuilder();
    final Repartitioned<String, String> repartitioned = Repartitioned.<String, String>as(repartitionName)
        .withKeySerde(Serdes.String())
        .withValueSerde(Serdes.String())
        .withNumberOfPartitions(2);
    builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
        .selectKey((key, value) -> key.toString())
        .repartition(repartitioned)
        .groupByKey()
        .count()
        .toStream()
        .to(outputTopic);
    startStreams(builder);
    // Start a second instance with its own state directory to force a rebalance.
    final Properties streamsToCloseConfigs = new Properties();
    streamsToCloseConfigs.putAll(streamsConfiguration);
    streamsToCloseConfigs.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath() + "-2");
    final KafkaStreams kafkaStreamsToClose = startStreams(builder, streamsToCloseConfigs);
    validateReceivedMessages(new StringDeserializer(), new LongDeserializer(), Arrays.asList(new KeyValue<>("1", 1L), new KeyValue<>("2", 1L)));
    kafkaStreamsToClose.close();
    sendEvents(timestamp, Arrays.asList(new KeyValue<>(1, "C"), new KeyValue<>(2, "D")));
    validateReceivedMessages(new StringDeserializer(), new LongDeserializer(), Arrays.asList(new KeyValue<>("1", 2L), new KeyValue<>("2", 2L)));
    final String repartitionTopicName = toRepartitionTopicName(repartitionName);
    assertTrue(topicExists(repartitionTopicName));
    assertEquals(2, getNumberOfPartitionsForTopic(repartitionTopicName));
}
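The rebalance is driven by running a second KafkaStreams instance on the same application.id with a separate state directory and then closing it. The test's startStreams helper is not reproduced on this page; a minimal stand-in, assuming a placeholder application id and bootstrap address, could look like this.

import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.test.TestUtils;

public class SecondInstanceSketch {

    static KafkaStreams startSecondInstance(final StreamsBuilder builder, final Properties baseConfig) {
        // Copy the shared configuration but give this instance its own state directory,
        // so two instances of the same application can run side by side and rebalance.
        final Properties props = new Properties();
        props.putAll(baseConfig);
        props.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath() + "-2");

        final KafkaStreams streams = new KafkaStreams(builder.build(), props);
        streams.start();
        return streams;
    }

    static Properties baseConfig() {
        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "rebalancing-sketch");      // placeholder
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");       // placeholder
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.IntegerSerde.class);
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
        return props;
    }
}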

Example 4 with Repartitioned

Use of org.apache.kafka.streams.kstream.Repartitioned in the apache/kafka project.

From the class KStreamRepartitionIntegrationTest, method shouldDoProperJoiningWhenNumberOfPartitionsAreValidWhenUsingRepartitionOperation.

@Test
public void shouldDoProperJoiningWhenNumberOfPartitionsAreValidWhenUsingRepartitionOperation() throws Exception {
    final String topicBRepartitionedName = "topic-b-scale-up";
    final String inputTopicRepartitionedName = "input-topic-scale-up";
    final long timestamp = System.currentTimeMillis();
    CLUSTER.createTopic(topicB, 1, 1);
    final List<KeyValue<Integer, String>> expectedRecords = Arrays.asList(new KeyValue<>(1, "A"), new KeyValue<>(2, "B"));
    sendEvents(timestamp, expectedRecords);
    sendEvents(topicB, timestamp, expectedRecords);
    final StreamsBuilder builder = new StreamsBuilder();
    // Scale both join inputs up to four partitions so that they stay co-partitioned.
    final Repartitioned<Integer, String> inputTopicRepartitioned = Repartitioned.<Integer, String>as(inputTopicRepartitionedName)
        .withNumberOfPartitions(4);
    final Repartitioned<Integer, String> topicBRepartitioned = Repartitioned.<Integer, String>as(topicBRepartitionedName)
        .withNumberOfPartitions(4);
    final KStream<Integer, String> topicBStream = builder.stream(topicB, Consumed.with(Serdes.Integer(), Serdes.String()))
        .repartition(topicBRepartitioned);
    builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
        .repartition(inputTopicRepartitioned)
        .join(topicBStream, (value1, value2) -> value2, JoinWindows.of(Duration.ofSeconds(10)))
        .to(outputTopic);
    startStreams(builder);
    assertEquals(4, getNumberOfPartitionsForTopic(toRepartitionTopicName(topicBRepartitionedName)));
    assertEquals(4, getNumberOfPartitionsForTopic(toRepartitionTopicName(inputTopicRepartitionedName)));
    validateReceivedMessages(new IntegerDeserializer(), new StringDeserializer(), expectedRecords);
}
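The getNumberOfPartitionsForTopic helper behind these assertions is not reproduced on this page; a minimal stand-in based on the AdminClient, with a placeholder bootstrap address, might look like this.

import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.TopicDescription;

public class PartitionCountSketch {

    static int getNumberOfPartitionsForTopic(final String topic)
        throws ExecutionException, InterruptedException {
        final Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder
        try (AdminClient adminClient = AdminClient.create(props)) {
            // Describe the topic and return how many partitions it currently has.
            final TopicDescription description = adminClient.describeTopics(Collections.singleton(topic))
                .values()
                .get(topic)
                .get();
            return description.partitions().size();
        }
    }
}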

Example 5 with Repartitioned

Use of org.apache.kafka.streams.kstream.Repartitioned in the apache/kafka project.

From the class KStreamRepartitionIntegrationTest, method shouldThrowAnExceptionWhenNumberOfPartitionsOfRepartitionOperationDoNotMatchSourceTopicWhenJoining.

@Test
public void shouldThrowAnExceptionWhenNumberOfPartitionsOfRepartitionOperationDoNotMatchSourceTopicWhenJoining() throws InterruptedException {
    final int topicBNumberOfPartitions = 6;
    final String inputTopicRepartitionName = "join-repartition-test";
    final AtomicReference<Throwable> expectedThrowable = new AtomicReference<>();
    final int inputTopicRepartitionedNumOfPartitions = 2;
    CLUSTER.createTopic(topicB, topicBNumberOfPartitions, 1);
    final StreamsBuilder builder = new StreamsBuilder();
    final Repartitioned<Integer, String> inputTopicRepartitioned = Repartitioned.<Integer, String>as(inputTopicRepartitionName)
        .withNumberOfPartitions(inputTopicRepartitionedNumOfPartitions);
    final KStream<Integer, String> topicBStream = builder.stream(topicB, Consumed.with(Serdes.Integer(), Serdes.String()));
    // Repartitioning to 2 partitions while topic-b has 6 violates the join's
    // co-partitioning requirement, so startup is expected to fail.
    builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
        .repartition(inputTopicRepartitioned)
        .join(topicBStream, (value1, value2) -> value2, JoinWindows.of(Duration.ofSeconds(10)))
        .to(outputTopic);
    builder.build(streamsConfiguration);
    startStreams(builder, REBALANCING, ERROR, (t, e) -> expectedThrowable.set(e));
    final String expectedMsg = String.format(
        "Number of partitions [%s] of repartition topic [%s] doesn't match number of partitions [%s] of the source topic.",
        inputTopicRepartitionedNumOfPartitions,
        toRepartitionTopicName(inputTopicRepartitionName),
        topicBNumberOfPartitions);
    assertNotNull(expectedThrowable.get());
    assertTrue(expectedThrowable.get().getMessage().contains(expectedMsg));
}
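The failure surfaces asynchronously: the application moves through REBALANCING into ERROR, and the partition-mismatch exception arrives via the uncaught exception handler that startStreams registers. That helper is not shown here; a minimal stand-in, assuming the configuration Properties are supplied by the caller, could register the handler and a state listener like this.

import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;

public class ExpectStartupErrorSketch {

    static Throwable startAndAwaitError(final StreamsBuilder builder, final Properties props)
        throws InterruptedException {
        final AtomicReference<Throwable> caught = new AtomicReference<>();
        final CountDownLatch errorLatch = new CountDownLatch(1);

        final KafkaStreams streams = new KafkaStreams(builder.build(), props);
        // Capture the exception thrown by a stream thread during startup.
        streams.setUncaughtExceptionHandler((thread, exception) -> caught.set(exception));
        // Release the latch once the application lands in ERROR.
        streams.setStateListener((newState, oldState) -> {
            if (newState == KafkaStreams.State.ERROR) {
                errorLatch.countDown();
            }
        });
        streams.start();
        try {
            errorLatch.await(60, TimeUnit.SECONDS);
        } finally {
            streams.close();
        }
        return caught.get();
    }
}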
