Use of com.linkedin.pinot.core.realtime.impl.kafka.SimpleConsumerWrapper in project pinot by linkedin.
The class SimpleConsumerWrapperTest, method testFetchOffsets.
@Test(enabled = false)
public void testFetchOffsets() throws Exception {
  MockKafkaSimpleConsumerFactory simpleConsumerFactory = new MockKafkaSimpleConsumerFactory(
      new String[] { "abcd", "bcde" }, new int[] { 1234, 2345 }, new long[] { 12345L, 23456L },
      new long[] { 23456L, 34567L }, new int[] { 0, 1 }, "theTopic");
  SimpleConsumerWrapper consumerWrapper = SimpleConsumerWrapper.forPartitionConsumption(
      simpleConsumerFactory, "abcd:1234,bcde:2345", "clientId", "theTopic", 0, 10000L);
  // Ask for the earliest ("smallest") offset of partition 0, with a 10000 ms timeout.
  consumerWrapper.fetchPartitionOffset("smallest", 10000);
}
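A minimal sketch (not part of the Pinot test) of the same fetch with the consumer released afterwards, reusing the simpleConsumerFactory built above and mirroring the try/finally close pattern from PinotLLCRealtimeSegmentManager.getPartitionOffset below; the offset criterion and timeouts are the same placeholder values the test uses:
SimpleConsumerWrapper consumerWrapper = SimpleConsumerWrapper.forPartitionConsumption(
    simpleConsumerFactory, "abcd:1234,bcde:2345", "clientId", "theTopic", 0, 10000L);
try {
  // "smallest" asks the brokers for the earliest retained offset of partition 0.
  long earliestOffset = consumerWrapper.fetchPartitionOffset("smallest", 10000);
} catch (TimeoutException e) {
  // fetchPartitionOffset can time out against unreachable brokers, as getPartitionOffset below shows.
  throw new RuntimeException(e);
} finally {
  IOUtils.closeQuietly(consumerWrapper);
}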
The class PinotLLCRealtimeSegmentManager, method getPartitionOffset.
private long getPartitionOffset(final String topicName, final String bootstrapHosts,
    final String offsetCriteria, int partitionId) {
  SimpleConsumerWrapper kafkaConsumer = SimpleConsumerWrapper.forPartitionConsumption(
      new KafkaSimpleConsumerFactoryImpl(), bootstrapHosts, "dummyClientId", topicName, partitionId,
      KAFKA_PARTITION_OFFSET_FETCH_TIMEOUT_MILLIS);
  final long startOffset;
  try {
    startOffset = kafkaConsumer.fetchPartitionOffset(offsetCriteria, KAFKA_PARTITION_OFFSET_FETCH_TIMEOUT_MILLIS);
  } catch (TimeoutException e) {
    LOGGER.warn("Timed out when fetching partition offsets for topic {} partition {}", topicName, partitionId);
    throw new RuntimeException(e);
  } finally {
    IOUtils.closeQuietly(kafkaConsumer);
  }
  return startOffset;
}
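Because a fetch timeout surfaces to callers as a RuntimeException wrapping the TimeoutException, a caller that wants to retry has to catch it. A hypothetical sketch (the helper name, retry count, and back-off are illustrative assumptions, not Pinot code):
private long fetchStartOffsetWithRetries(String topicName, String bootstrapHosts, String offsetCriteria,
    int partitionId, int maxAttempts) throws InterruptedException {
  for (int attempt = 1; ; ++attempt) {
    try {
      return getPartitionOffset(topicName, bootstrapHosts, offsetCriteria, partitionId);
    } catch (RuntimeException e) {
      if (attempt >= maxAttempts) {
        throw e;  // Give up after maxAttempts failed fetches.
      }
      Thread.sleep(1000L);  // Back off briefly before asking the brokers again.
    }
  }
}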
The class PinotTableIdealStateBuilder, method getPartitionCount.
public static int getPartitionCount(KafkaStreamMetadata kafkaMetadata) {
  String bootstrapHosts = kafkaMetadata.getBootstrapHosts();
  if (bootstrapHosts == null || bootstrapHosts.isEmpty()) {
    throw new RuntimeException("Invalid value for " + Helix.DataSource.Realtime.Kafka.KAFKA_BROKER_LIST);
  }
  SimpleConsumerWrapper consumerWrapper = SimpleConsumerWrapper.forMetadataConsumption(
      new KafkaSimpleConsumerFactoryImpl(), kafkaMetadata.getBootstrapHosts(),
      PinotTableIdealStateBuilder.class.getSimpleName() + "-" + kafkaMetadata.getKafkaTopicName(),
      KAFKA_CONNECTION_TIMEOUT_MILLIS);
  try {
    return consumerWrapper.getPartitionCount(kafkaMetadata.getKafkaTopicName(), /*maxWaitTimeMs=*/ 5000L);
  } finally {
    IOUtils.closeQuietly(consumerWrapper);
  }
}
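The same metadata-consumption pattern also works standalone. A minimal sketch with placeholder broker list, client id, topic name, and timeouts (none of these values come from Pinot; the LOGGER is assumed to be the slf4j-style logger seen in getPartitionOffset above):
SimpleConsumerWrapper metadataConsumer = SimpleConsumerWrapper.forMetadataConsumption(
    new KafkaSimpleConsumerFactoryImpl(), "broker1:9092,broker2:9092", "partitionCountClient", 10000L);
try {
  int partitionCount = metadataConsumer.getPartitionCount("someTopic", /*maxWaitTimeMs=*/ 5000L);
  LOGGER.info("Topic someTopic has {} partitions", partitionCount);
} finally {
  IOUtils.closeQuietly(metadataConsumer);
}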
The class SimpleConsumerWrapperTest, method testFetchMessages.
@Test
public void testFetchMessages() throws Exception {
  MockKafkaSimpleConsumerFactory simpleConsumerFactory = new MockKafkaSimpleConsumerFactory(
      new String[] { "abcd", "bcde" }, new int[] { 1234, 2345 }, new long[] { 12345L, 23456L },
      new long[] { 23456L, 34567L }, new int[] { 0, 1 }, "theTopic");
  SimpleConsumerWrapper consumerWrapper = SimpleConsumerWrapper.forPartitionConsumption(
      simpleConsumerFactory, "abcd:1234,bcde:2345", "clientId", "theTopic", 0, 10000L);
  // Fetch messages for partition 0 in the offset range 12345 to 23456, with a 10000 ms timeout.
  consumerWrapper.fetchMessages(12345L, 23456L, 10000);
}
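The two calls exercised by these tests can also be chained: resolve the partition's earliest offset first, then request a window of messages starting from it. A minimal sketch reusing the consumerWrapper built in the test above, inside a method that, like the test, declares throws Exception (the window size and timeouts are placeholder assumptions):
// Resolve the earliest retained offset of partition 0, then fetch messages for the next 1000 offsets.
long startOffset = consumerWrapper.fetchPartitionOffset("smallest", 10000);
consumerWrapper.fetchMessages(startOffset, startOffset + 1000L, 10000);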
The class SimpleConsumerWrapperTest, method testGetPartitionCount.
@Test
public void testGetPartitionCount() {
  MockKafkaSimpleConsumerFactory simpleConsumerFactory = new MockKafkaSimpleConsumerFactory(
      new String[] { "abcd", "bcde" }, new int[] { 1234, 2345 }, new long[] { 12345L, 23456L },
      new long[] { 23456L, 34567L }, new int[] { 0, 1 }, "theTopic");
  SimpleConsumerWrapper consumerWrapper = SimpleConsumerWrapper.forMetadataConsumption(
      simpleConsumerFactory, "abcd:1234,bcde:2345", "clientId", 10000L);
  // The mock factory is set up with two partitions for "theTopic".
  assertEquals(consumerWrapper.getPartitionCount("theTopic", 10000L), 2);
}
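Unlike the production callers above, the tests never close the wrapper. A minimal sketch of the same assertion with that cleanup added (assuming, as the IOUtils.closeQuietly calls above imply, that SimpleConsumerWrapper is closeable):
SimpleConsumerWrapper consumerWrapper = SimpleConsumerWrapper.forMetadataConsumption(
    simpleConsumerFactory, "abcd:1234,bcde:2345", "clientId", 10000L);
try {
  assertEquals(consumerWrapper.getPartitionCount("theTopic", 10000L), 2);
} finally {
  IOUtils.closeQuietly(consumerWrapper);
}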