Example use of org.apache.storm.kafka.spout.subscription.TopicFilter in the Apache Storm project.
From the class KafkaTridentSpoutOpaqueCoordinatorTest, method testCanGetPartitions.
@Test
public void testCanGetPartitions() {
    // Mock out the consumer and the filter; the filter reports exactly one partition.
    KafkaConsumer<String, String> consumerMock = mock(KafkaConsumer.class);
    TopicPartition partition = new TopicPartition("test", 0);
    TopicFilter filterMock = mock(TopicFilter.class);
    when(filterMock.getAllSubscribedPartitions(any())).thenReturn(Collections.singleton(partition));

    KafkaTridentSpoutConfig<String, String> config = SingleTopicKafkaTridentSpoutConfiguration
        .createKafkaSpoutConfigBuilder(filterMock, mock(ManualPartitioner.class), -1)
        .build();
    KafkaTridentSpoutCoordinator<String, String> coordinator =
        new KafkaTridentSpoutCoordinator<>(config, unused -> consumerMock);

    // The coordinator should ask the filter (via the mocked consumer) for the
    // subscribed partitions and hand back exactly that set for the batch.
    List<TopicPartition> batchPartitions = deserializePartitions(coordinator.getPartitionsForBatch());

    verify(filterMock).getAllSubscribedPartitions(consumerMock);
    assertThat(batchPartitions, contains(partition));
}
Example use of org.apache.storm.kafka.spout.subscription.TopicFilter in the Apache Storm project.
From the class KafkaTridentSpoutOpaqueCoordinatorTest, method testCanUpdatePartitions.
@Test
public void testCanUpdatePartitions() {
    try (SimulatedTime simulatedTime = new SimulatedTime()) {
        KafkaConsumer<String, String> consumerMock = mock(KafkaConsumer.class);
        TopicPartition firstPartition = new TopicPartition("test", 0);
        TopicPartition laterPartition = new TopicPartition("test-2", 0);
        HashSet<TopicPartition> bothPartitions = new HashSet<>();
        bothPartitions.add(firstPartition);
        bothPartitions.add(laterPartition);

        // First discovery returns only one partition; the refresh returns both.
        TopicFilter filterMock = mock(TopicFilter.class);
        when(filterMock.getAllSubscribedPartitions(any()))
            .thenReturn(Collections.singleton(firstPartition))
            .thenReturn(bothPartitions);

        KafkaTridentSpoutConfig<String, String> config = SingleTopicKafkaTridentSpoutConfiguration
            .createKafkaSpoutConfigBuilder(filterMock, mock(ManualPartitioner.class), -1)
            .build();
        KafkaTridentSpoutCoordinator<String, String> coordinator =
            new KafkaTridentSpoutCoordinator<>(config, unused -> consumerMock);

        // Initial batch should only see the first partition.
        List<TopicPartition> firstBatch = deserializePartitions(coordinator.getPartitionsForBatch());
        verify(filterMock).getAllSubscribedPartitions(consumerMock);
        assertThat(firstBatch, contains(firstPartition));

        // Advance simulated time past the refresh period so the coordinator
        // re-queries the filter, then verify the new partition shows up.
        Time.advanceTime(KafkaTridentSpoutCoordinator.TIMER_DELAY_MS + config.getPartitionRefreshPeriodMs());
        List<TopicPartition> secondBatch = deserializePartitions(coordinator.getPartitionsForBatch());
        verify(filterMock, times(2)).getAllSubscribedPartitions(consumerMock);
        assertThat(new HashSet<>(secondBatch), is(bothPartitions));
    }
}
Example use of org.apache.storm.kafka.spout.subscription.TopicFilter in the Apache Storm project.
From the class SpoutWithMockedConsumerSetupHelper, method setupSpout.
/**
 * Creates, opens and activates a KafkaSpout using a mocked consumer. The TopicFilter and ManualPartitioner should be mock objects,
 * since this method shortcircuits the TopicPartition assignment process and just calls onPartitionsAssigned on the rebalance listener.
 *
 * @param <K> The Kafka key type
 * @param <V> The Kafka value type
 * @param spoutConfig The spout config to use
 * @param topoConf The topo conf to pass to the spout
 * @param contextMock The topo context to pass to the spout
 * @param collectorMock The mocked collector to pass to the spout
 * @param consumerMock The mocked consumer
 * @param assignedPartitions The partitions to assign to this spout. The consumer will act like these partitions are assigned to it.
 * @return The spout
 */
public static <K, V> KafkaSpout<K, V> setupSpout(KafkaSpoutConfig<K, V> spoutConfig, Map<String, Object> topoConf, TopologyContext contextMock, SpoutOutputCollector collectorMock, KafkaConsumer<K, V> consumerMock, TopicPartition... assignedPartitions) {
    TopicFilter filter = spoutConfig.getTopicFilter();
    ManualPartitioner partitioner = spoutConfig.getTopicPartitioner();
    // Refuse real implementations: this helper bypasses the normal assignment
    // flow, so non-mock filter/partitioner would need elaborate stubbing.
    boolean bothMocked = mockingDetails(filter).isMock() && mockingDetails(partitioner).isMock();
    if (!bothMocked) {
        throw new IllegalStateException("Use a mocked TopicFilter and a mocked ManualPartitioner when using this method, it helps avoid complex stubbing");
    }

    Set<TopicPartition> partitionSet = new HashSet<>(Arrays.asList(assignedPartitions));

    // Short-circuit assignment: whenever the spout asks the assigner to assign
    // partitions, immediately report the requested set as assigned via the
    // rebalance listener (third argument of assignPartitions).
    TopicAssigner assignerMock = mock(TopicAssigner.class);
    doAnswer(invocation -> {
        ConsumerRebalanceListener rebalanceListener = invocation.getArgument(2);
        rebalanceListener.onPartitionsAssigned(partitionSet);
        return null;
    }).when(assignerMock).assignPartitions(any(), any(), any());
    when(consumerMock.assignment()).thenReturn(partitionSet);

    // The consumer factory always hands back the supplied mock.
    ConsumerFactory<K, V> factory = (ignoredConfig) -> consumerMock;
    KafkaSpout<K, V> spout = new KafkaSpout<>(spoutConfig, factory, assignerMock);
    spout.open(topoConf, contextMock, collectorMock);
    spout.activate();
    return spout;
}
Aggregations