Use of kafka.consumer.KafkaStream in project voltdb by VoltDB.
The class KafkaLoader, method getConsumerExecutor.
private ExecutorService getConsumerExecutor(KafkaConsumerConnector consumer, CSVDataLoader loader) throws Exception {
    Map<String, Integer> topicCountMap = new HashMap<>();
    // generate as many threads as there are partitions defined in the Kafka config
    ExecutorService executor = Executors.newFixedThreadPool(m_config.kpartitions);
    topicCountMap.put(m_config.topic, m_config.kpartitions);
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
            consumer.m_consumer.createMessageStreams(topicCountMap);
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(m_config.topic);
    // now launch one consumer thread per partition stream
    for (final KafkaStream stream : streams) {
        KafkaConsumer bconsumer = new KafkaConsumer(stream, loader, m_config);
        executor.submit(bconsumer);
    }
    return executor;
}
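Not shown above is how the returned executor is used. A minimal, hypothetical caller sketch (the KafkaConsumerConnector constructor arguments, the m_config field names, and the shutdown handling are assumptions, not taken from the VoltDB source):

    // Hypothetical caller, for illustration only: builds the connector, fans out
    // the per-partition consumers, then waits for the load to finish.
    void processKafkaMessages(CSVDataLoader loader) throws Exception {
        // Assumption: the connector is built from a ZooKeeper address and group id held in m_config.
        KafkaConsumerConnector consumer = new KafkaConsumerConnector(m_config.zookeeper, m_config.groupid);
        ExecutorService executor = getConsumerExecutor(consumer, loader);
        executor.shutdown();
        // block until every per-partition KafkaConsumer runnable has completed
        executor.awaitTermination(365, TimeUnit.DAYS);
    }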
Use of kafka.consumer.KafkaStream in project incubator-atlas by apache.
The class KafkaNotification, method createConsumers.
@VisibleForTesting
public <T> List<NotificationConsumer<T>> createConsumers(NotificationType notificationType, int numConsumers, boolean autoCommitEnabled) {
    String topic = TOPIC_MAP.get(notificationType);
    Properties consumerProperties = getConsumerProperties(notificationType);
    List<NotificationConsumer<T>> consumers = new ArrayList<>(numConsumers);
    for (int i = 0; i < numConsumers; i++) {
        // one connector per consumer, each requesting a single stream for the topic
        ConsumerConnector consumerConnector = createConsumerConnector(consumerProperties);
        Map<String, Integer> topicCountMap = new HashMap<>();
        topicCountMap.put(topic, 1);
        StringDecoder decoder = new StringDecoder(null);
        Map<String, List<KafkaStream<String, String>>> streamsMap =
                consumerConnector.createMessageStreams(topicCountMap, decoder, decoder);
        List<KafkaStream<String, String>> kafkaConsumers = streamsMap.get(topic);
        // wrap each stream in a typed KafkaConsumer for this notification type
        for (KafkaStream stream : kafkaConsumers) {
            KafkaConsumer<T> kafkaConsumer = createKafkaConsumer(notificationType.getClassType(),
                    notificationType.getDeserializer(), stream, i, consumerConnector, autoCommitEnabled);
            consumers.add(kafkaConsumer);
        }
        consumerConnectors.add(consumerConnector);
    }
    return consumers;
}
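The createConsumerConnector helper is not part of this excerpt. With the old high-level consumer API it would plausibly delegate to kafka.consumer.Consumer.createJavaConsumerConnector; a minimal sketch, assuming the properties returned by getConsumerProperties already carry zookeeper.connect and group.id (whether ConsumerConnector here is the kafka.javaapi.consumer variant is not visible in the excerpt):

    // Sketch of a plausible helper; the actual Atlas implementation may differ.
    private ConsumerConnector createConsumerConnector(Properties consumerProperties) {
        // ConsumerConfig validates zookeeper.connect, group.id, auto-commit settings, etc.
        return kafka.consumer.Consumer.createJavaConsumerConnector(
                new kafka.consumer.ConsumerConfig(consumerProperties));
    }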
Use of kafka.consumer.KafkaStream in project incubator-atlas by apache.
The class KafkaNotificationMockTest, method testCreateConsumers.
@Test
@SuppressWarnings("unchecked")
public void testCreateConsumers() throws Exception {
    Properties properties = mock(Properties.class);
    when(properties.getProperty("entities.group.id")).thenReturn("atlas");
    final ConsumerConnector consumerConnector = mock(ConsumerConnector.class);
    Map<String, Integer> topicCountMap = new HashMap<>();
    topicCountMap.put(KafkaNotification.ATLAS_ENTITIES_TOPIC, 1);
    // stub the connector to return a single mocked stream for the entities topic
    Map<String, List<KafkaStream<String, String>>> kafkaStreamsMap = new HashMap<>();
    List<KafkaStream<String, String>> kafkaStreams = new ArrayList<>();
    KafkaStream kafkaStream = mock(KafkaStream.class);
    kafkaStreams.add(kafkaStream);
    kafkaStreamsMap.put(KafkaNotification.ATLAS_ENTITIES_TOPIC, kafkaStreams);
    when(consumerConnector.createMessageStreams(eq(topicCountMap), any(StringDecoder.class), any(StringDecoder.class)))
            .thenReturn(kafkaStreamsMap);
    final KafkaConsumer consumer1 = mock(KafkaConsumer.class);
    final KafkaConsumer consumer2 = mock(KafkaConsumer.class);
    KafkaNotification kafkaNotification = new TestKafkaNotification(properties, consumerConnector, consumer1, consumer2);
    List<NotificationConsumer<String>> consumers =
            kafkaNotification.createConsumers(NotificationInterface.NotificationType.ENTITIES, 2);
    // two consumers were requested, so the connector should be asked for streams twice
    verify(consumerConnector, times(2)).createMessageStreams(eq(topicCountMap), any(StringDecoder.class), any(StringDecoder.class));
    assertEquals(consumers.size(), 2);
    assertTrue(consumers.contains(consumer1));
    assertTrue(consumers.contains(consumer2));
}
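The TestKafkaNotification subclass is also outside this excerpt; the test only works if it overrides the factory methods used by createConsumers so the mocks are handed back instead of real Kafka objects. A hypothetical sketch (constructor and method signatures are assumptions, not taken from the Atlas test sources):

    // Hypothetical test double; the real TestKafkaNotification may differ in detail.
    static class TestKafkaNotification extends KafkaNotification {
        private final ConsumerConnector connector;
        private final List<KafkaConsumer> mockConsumers;
        private int next = 0;

        TestKafkaNotification(Properties properties, ConsumerConnector connector, KafkaConsumer... consumers) {
            super(properties); // assumed superclass constructor
            this.connector = connector;
            this.mockConsumers = Arrays.asList(consumers);
        }

        @Override
        protected ConsumerConnector createConsumerConnector(Properties consumerProperties) {
            return connector; // same mocked connector for every consumer created
        }

        @Override
        protected <T> KafkaConsumer<T> createKafkaConsumer(Class<T> type, Object deserializer, KafkaStream stream,
                                                           int index, ConsumerConnector connector, boolean autoCommit) {
            return mockConsumers.get(next++); // hand out consumer1, then consumer2
        }
    }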