Use of kafka.javaapi.consumer.ConsumerConnector in project druid by druid-io: the class KafkaLookupExtractorFactoryTest, method testStartStop.
@Test
public void testStartStop() {
    final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
    final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
    final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
    // The mocked connector hands back a single stream whose iterator blocks in hasNext().
    EasyMock.expect(consumerConnector.createMessageStreamsByFilter(
        EasyMock.anyObject(TopicFilter.class),
        EasyMock.anyInt(),
        EasyMock.eq(DEFAULT_STRING_DECODER),
        EasyMock.eq(DEFAULT_STRING_DECODER)
    )).andReturn(ImmutableList.of(kafkaStream)).once();
    EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
    EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
    EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
    EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
    cacheHandler.close();
    EasyMock.expectLastCall();
    // Record whether the thread that calls shutdown() has been interrupted.
    final AtomicBoolean threadWasInterrupted = new AtomicBoolean(false);
    consumerConnector.shutdown();
    EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            threadWasInterrupted.set(Thread.currentThread().isInterrupted());
            return null;
        }
    }).times(2);
    PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
    // Override the buildConnector seam so the factory uses the mocked connector.
    final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(
        cacheManager,
        TOPIC,
        ImmutableMap.of("zookeeper.connect", "localhost"),
        10_000L,
        false
    ) {
        @Override
        ConsumerConnector buildConnector(Properties properties) {
            return consumerConnector;
        }
    };
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.close());
    Assert.assertTrue(factory.getFuture().isDone());
    Assert.assertFalse(threadWasInterrupted.get());
    PowerMock.verify(cacheManager, cacheHandler);
}
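All three druid tests on this page call a getBlockingAnswer() helper that this excerpt does not include. A minimal sketch of what such a helper could look like, assuming its only job is to park the consumer feed thread inside hasNext() until the test tears the factory down (the name comes from the tests; the body here is illustrative, not Druid's actual implementation):

// Hypothetical sketch: an EasyMock IAnswer that keeps the feed thread
// blocked inside ConsumerIterator.hasNext() for the duration of the test.
private static IAnswer<Boolean> getBlockingAnswer() {
    return new IAnswer<Boolean>() {
        @Override
        public Boolean answer() throws Throwable {
            // Park the feed thread; factory.close() is expected to finish the
            // test long before this sleep elapses.
            Thread.sleep(60_000L);
            return false;
        }
    };
}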
Use of kafka.javaapi.consumer.ConsumerConnector in project druid by druid-io: the class KafkaLookupExtractorFactoryTest, method testStartStopStart.
@Test
public void testStartStopStart() {
    final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
    final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
    final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
    EasyMock.expect(consumerConnector.createMessageStreamsByFilter(
        EasyMock.anyObject(TopicFilter.class),
        EasyMock.anyInt(),
        EasyMock.eq(DEFAULT_STRING_DECODER),
        EasyMock.eq(DEFAULT_STRING_DECODER)
    )).andReturn(ImmutableList.of(kafkaStream)).once();
    EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
    EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
    EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
    EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
    cacheHandler.close();
    EasyMock.expectLastCall().once();
    consumerConnector.shutdown();
    EasyMock.expectLastCall().times(2);
    PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
    final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(
        cacheManager,
        TOPIC,
        ImmutableMap.of("zookeeper.connect", "localhost")
    ) {
        @Override
        ConsumerConnector buildConnector(Properties properties) {
            return consumerConnector;
        }
    };
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.close());
    // Once closed, the factory cannot be restarted.
    Assert.assertFalse(factory.start());
    PowerMock.verify(cacheManager, cacheHandler);
}
Use of kafka.javaapi.consumer.ConsumerConnector in project druid by druid-io: the class KafkaLookupExtractorFactoryTest, method testStartStartStop.
@Test
public void testStartStartStop() {
    final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
    final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
    final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
    EasyMock.expect(consumerConnector.createMessageStreamsByFilter(
        EasyMock.anyObject(TopicFilter.class),
        EasyMock.anyInt(),
        EasyMock.eq(DEFAULT_STRING_DECODER),
        EasyMock.eq(DEFAULT_STRING_DECODER)
    )).andReturn(ImmutableList.of(kafkaStream)).once();
    EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
    EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
    EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
    EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
    cacheHandler.close();
    EasyMock.expectLastCall().once();
    consumerConnector.shutdown();
    EasyMock.expectLastCall().times(3);
    PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
    final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(
        cacheManager,
        TOPIC,
        ImmutableMap.of("zookeeper.connect", "localhost"),
        10_000L,
        false
    ) {
        @Override
        ConsumerConnector buildConnector(Properties properties) {
            return consumerConnector;
        }
    };
    // start() and close() are idempotent: repeated calls still report success.
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.close());
    Assert.assertTrue(factory.close());
    PowerMock.verify(cacheManager, cacheHandler);
}
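In all three tests the package-private buildConnector(Properties) seam is overridden to inject the mock. A plausible non-test implementation of that seam, sketched from the high-level consumer API that the pinot and flink examples below use (illustrative, not Druid's exact code):

// Sketch of the seam the tests override: build a real high-level consumer
// from the supplied properties.
ConsumerConnector buildConnector(Properties properties) {
    return kafka.consumer.Consumer.createJavaConsumerConnector(
        new kafka.consumer.ConsumerConfig(properties)
    );
}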
Use of kafka.javaapi.consumer.ConsumerConnector in project pinot by linkedin: the class KafkaConsumerManager, method acquireConsumerAndIteratorForConfig.
public static ConsumerAndIterator acquireConsumerAndIteratorForConfig(KafkaHighLevelStreamProviderConfig config) {
    final ImmutableTriple<String, String, String> configKey =
        new ImmutableTriple<>(config.getTopicName(), config.getGroupId(), config.getZkString());
    synchronized (KafkaConsumerManager.class) {
        // If we have the consumer and it's not already acquired, return it; otherwise error out if it's already acquired
        if (CONSUMER_AND_ITERATOR_FOR_CONFIG_KEY.containsKey(configKey)) {
            ConsumerAndIterator consumerAndIterator = CONSUMER_AND_ITERATOR_FOR_CONFIG_KEY.get(configKey);
            if (CONSUMER_RELEASE_TIME.get(consumerAndIterator).equals(IN_USE)) {
                throw new RuntimeException("Consumer/iterator " + consumerAndIterator.getId() + " already in use!");
            } else {
                LOGGER.info("Reusing kafka consumer/iterator with id {}", consumerAndIterator.getId());
                CONSUMER_RELEASE_TIME.put(consumerAndIterator, IN_USE);
                return consumerAndIterator;
            }
        }
        LOGGER.info("Creating new kafka consumer and iterator for topic {}", config.getTopicName());
        // Create the consumer
        ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config.getKafkaConsumerConfig());
        // Create the iterator (can only be done once per consumer)
        ConsumerIterator<byte[], byte[]> iterator =
            consumer.createMessageStreams(config.getTopicMap(1)).get(config.getTopicName()).get(0).iterator();
        // Mark both the consumer and iterator as acquired
        ConsumerAndIterator consumerAndIterator = new ConsumerAndIterator(consumer, iterator);
        CONSUMER_AND_ITERATOR_FOR_CONFIG_KEY.put(configKey, consumerAndIterator);
        CONSUMER_RELEASE_TIME.put(consumerAndIterator, IN_USE);
        LOGGER.info("Created consumer/iterator with id {} for topic {}", consumerAndIterator.getId(), config.getTopicName());
        return consumerAndIterator;
    }
}
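A short usage sketch of the acquire semantics above, assuming a populated KafkaHighLevelStreamProviderConfig named config (a hypothetical variable). Only behavior visible in the method above is exercised: a second acquire for the same (topic, group, ZK) triple fails fast while the first is still marked IN_USE:

// First acquisition succeeds and marks the consumer/iterator pair as IN_USE.
ConsumerAndIterator first = KafkaConsumerManager.acquireConsumerAndIteratorForConfig(config);
// A second acquisition with the same (topic, group, ZK) triple fails fast
// while the first has not been released.
try {
    KafkaConsumerManager.acquireConsumerAndIteratorForConfig(config);
} catch (RuntimeException e) {
    // e.g. "Consumer/iterator 42 already in use!"
}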
Use of kafka.javaapi.consumer.ConsumerConnector in project flink by apache: the class KafkaConsumerTestBase, method readTopicToList.
// ------------------------------------------------------------------------
//  Debugging utilities
// ------------------------------------------------------------------------

/**
 * Read topic to list, only using Kafka code.
 */
private static List<MessageAndMetadata<byte[], byte[]>> readTopicToList(String topicName, ConsumerConfig config, final int stopAfter) {
    ConsumerConnector consumerConnector = Consumer.createJavaConsumerConnector(config);
    // We request only one stream per consumer instance. Kafka will make sure that each consumer group
    // will see each message only once.
    Map<String, Integer> topicCountMap = Collections.singletonMap(topicName, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> streams = consumerConnector.createMessageStreams(topicCountMap);
    if (streams.size() != 1) {
        throw new RuntimeException("Expected only one message stream but got " + streams.size());
    }
    List<KafkaStream<byte[], byte[]>> kafkaStreams = streams.get(topicName);
    if (kafkaStreams == null) {
        throw new RuntimeException("Requested stream not available. Available streams: " + streams.toString());
    }
    if (kafkaStreams.size() != 1) {
        throw new RuntimeException("Requested 1 stream from Kafka, but got " + kafkaStreams.size() + " streams");
    }
    LOG.info("Opening Consumer instance for topic '{}' on group '{}'", topicName, config.groupId());
    ConsumerIterator<byte[], byte[]> iteratorToRead = kafkaStreams.get(0).iterator();
    List<MessageAndMetadata<byte[], byte[]>> result = new ArrayList<>();
    int read = 0;
    while (iteratorToRead.hasNext()) {
        read++;
        result.add(iteratorToRead.next());
        if (read == stopAfter) {
            LOG.info("Read " + read + " elements");
            return result;
        }
    }
    return result;
}
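A hedged usage sketch for the helper above, assuming it is called from inside the same class (it is private static). The property keys are the standard Kafka 0.8 high-level consumer settings; the ZooKeeper address, group id, and topic name are placeholders:

Properties props = new Properties();
props.setProperty("zookeeper.connect", "localhost:2181"); // placeholder ZK quorum
props.setProperty("group.id", "debug-reader");            // hypothetical group id
props.setProperty("auto.offset.reset", "smallest");       // start from the earliest offset
List<MessageAndMetadata<byte[], byte[]>> messages =
    readTopicToList("test-topic", new ConsumerConfig(props), 100);

Note that without consumer.timeout.ms set, the high-level consumer's hasNext() blocks indefinitely once the topic is drained, so in practice stopAfter is the loop's termination condition.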