Use of org.apache.kafka.clients.consumer.KafkaConsumer in project kafka by apache.
The class BrokerCompatibilityTest, method loopUntilRecordReceived:
private static void loopUntilRecordReceived(final String kafka) {
    final Properties consumerProperties = new Properties();
    consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka);
    consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "broker-compatibility-consumer");
    consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerProperties);
    consumer.subscribe(Collections.singletonList(SINK_TOPIC));
    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(100);
        for (ConsumerRecord<String, String> record : records) {
            if (record.key().equals("key") && record.value().equals("value")) {
                consumer.close();
                return;
            }
        }
    }
}
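Note that poll(long) was deprecated in Kafka 2.0 in favor of poll(Duration) (KIP-266), and the bare close() above is skipped if poll throws. A minimal sketch of the same receive loop against the newer API, using try-with-resources; the SINK_TOPIC value here is a hypothetical stand-in for the test's constant:

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

final class ReceiveLoopSketch {
    // Hypothetical stand-in for the test's SINK_TOPIC constant.
    private static final String SINK_TOPIC = "brokerCompatibilitySinkTopic";

    static void loopUntilRecordReceived(final String bootstrapServers) {
        final Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "broker-compatibility-consumer");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // try-with-resources closes the consumer on every exit path, including exceptions.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList(SINK_TOPIC));
            while (true) {
                final ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (final ConsumerRecord<String, String> record : records) {
                    if ("key".equals(record.key()) && "value".equals(record.value())) {
                        return;
                    }
                }
            }
        }
    }
}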
Use of org.apache.kafka.clients.consumer.KafkaConsumer in project kafka by apache.
The class SmokeTestDriver, method verify:
public static void verify(String kafka, Map<String, Set<Integer>> allData, int maxRecordsPerKey) {
    Properties props = new Properties();
    props.put(ConsumerConfig.CLIENT_ID_CONFIG, "verifier");
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka);
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
    KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props);
    List<TopicPartition> partitions = getAllPartitions(consumer, "echo", "max", "min", "dif", "sum", "cnt", "avg", "wcnt", "tagg");
    consumer.assign(partitions);
    consumer.seekToBeginning(partitions);
    final int recordsGenerated = allData.size() * maxRecordsPerKey;
    int recordsProcessed = 0;
    // One result map per output topic, keyed by record key.
    HashMap<String, Integer> max = new HashMap<>();
    HashMap<String, Integer> min = new HashMap<>();
    HashMap<String, Integer> dif = new HashMap<>();
    HashMap<String, Long> sum = new HashMap<>();
    HashMap<String, Long> cnt = new HashMap<>();
    HashMap<String, Double> avg = new HashMap<>();
    HashMap<String, Long> wcnt = new HashMap<>();
    HashMap<String, Long> tagg = new HashMap<>();
    HashSet<String> keys = new HashSet<>();
    HashMap<String, Set<Integer>> received = new HashMap<>();
    for (String key : allData.keySet()) {
        keys.add(key);
        received.put(key, new HashSet<Integer>());
    }
    int retry = 0;
    final long start = System.currentTimeMillis();
    while (System.currentTimeMillis() - start < TimeUnit.MINUTES.toMillis(3)) {
        ConsumerRecords<byte[], byte[]> records = consumer.poll(500);
        if (records.isEmpty() && recordsProcessed >= recordsGenerated) {
            if (verifyMin(min, allData, false)
                && verifyMax(max, allData, false)
                && verifyDif(dif, allData, false)
                && verifySum(sum, allData, false)
                && verifyCnt(cnt, allData, false)
                && verifyAvg(avg, allData, false)
                && verifyTAgg(tagg, allData, false)) {
                break;
            }
            if (retry++ > MAX_RECORD_EMPTY_RETRIES) {
                break;
            }
        } else {
            for (ConsumerRecord<byte[], byte[]> record : records) {
                String key = stringSerde.deserializer().deserialize("", record.key());
                switch (record.topic()) {
                    case "echo":
                        Integer value = intSerde.deserializer().deserialize("", record.value());
                        recordsProcessed++;
                        if (recordsProcessed % 100 == 0) {
                            System.out.println("Echo records processed = " + recordsProcessed);
                        }
                        received.get(key).add(value);
                        break;
                    case "min":
                        min.put(key, intSerde.deserializer().deserialize("", record.value()));
                        break;
                    case "max":
                        max.put(key, intSerde.deserializer().deserialize("", record.value()));
                        break;
                    case "dif":
                        dif.put(key, intSerde.deserializer().deserialize("", record.value()));
                        break;
                    case "sum":
                        sum.put(key, longSerde.deserializer().deserialize("", record.value()));
                        break;
                    case "cnt":
                        cnt.put(key, longSerde.deserializer().deserialize("", record.value()));
                        break;
                    case "avg":
                        avg.put(key, doubleSerde.deserializer().deserialize("", record.value()));
                        break;
                    case "wcnt":
                        wcnt.put(key, longSerde.deserializer().deserialize("", record.value()));
                        break;
                    case "tagg":
                        tagg.put(key, longSerde.deserializer().deserialize("", record.value()));
                        break;
                    default:
                        System.out.println("unknown topic: " + record.topic());
                }
            }
        }
    }
    consumer.close();
    final long finished = System.currentTimeMillis() - start;
    System.out.println("Verification time=" + finished);
    System.out.println("-------------------");
    System.out.println("Result Verification");
    System.out.println("-------------------");
    System.out.println("recordsGenerated=" + recordsGenerated);
    System.out.println("recordsProcessed=" + recordsProcessed);
    if (recordsProcessed > recordsGenerated) {
        System.out.println("PROCESSED-MORE-THAN-GENERATED");
    } else if (recordsProcessed < recordsGenerated) {
        System.out.println("PROCESSED-LESS-THAN-GENERATED");
    }
    boolean success = allData.equals(received);
    if (success) {
        System.out.println("ALL-RECORDS-DELIVERED");
    } else {
        // Count records that were expected but never received.
        int missedCount = 0;
        for (Map.Entry<String, Set<Integer>> entry : allData.entrySet()) {
            missedCount += entry.getValue().size() - received.get(entry.getKey()).size();
        }
        System.out.println("missedRecords=" + missedCount);
    }
    success &= verifyMin(min, allData, true);
    success &= verifyMax(max, allData, true);
    success &= verifyDif(dif, allData, true);
    success &= verifySum(sum, allData, true);
    success &= verifyCnt(cnt, allData, true);
    success &= verifyAvg(avg, allData, true);
    success &= verifyTAgg(tagg, allData, true);
    System.out.println(success ? "SUCCESS" : "FAILURE");
}
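The getAllPartitions helper used above (and again in SimpleBenchmark below) is not part of this excerpt. A minimal reconstruction, assuming it only flattens the partitionsFor metadata of each topic into TopicPartition instances:

import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;

// Hypothetical reconstruction; the real helper lives elsewhere in SmokeTestDriver.
static List<TopicPartition> getAllPartitions(final KafkaConsumer<?, ?> consumer, final String... topics) {
    final List<TopicPartition> partitions = new ArrayList<>();
    for (final String topic : topics) {
        for (final PartitionInfo info : consumer.partitionsFor(topic)) {
            partitions.add(new TopicPartition(info.topic(), info.partition()));
        }
    }
    return partitions;
}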
Use of org.apache.kafka.clients.consumer.KafkaConsumer in project kafka by apache.
The class SimpleBenchmark, method consume:
public void consume(String topic) throws Exception {
    if (maybeSetupPhase(topic, "simple-benchmark-consumer-load", true)) {
        return;
    }
    Properties props = setProduceConsumeProperties("simple-benchmark-consumer");
    KafkaConsumer<Integer, byte[]> consumer = new KafkaConsumer<>(props);
    List<TopicPartition> partitions = getAllPartitions(consumer, topic);
    consumer.assign(partitions);
    consumer.seekToBeginning(partitions);
    Integer key = null;
    long startTime = System.currentTimeMillis();
    while (true) {
        ConsumerRecords<Integer, byte[]> records = consumer.poll(POLL_MS);
        if (records.isEmpty()) {
            if (processedRecords == numRecords) {
                break;
            }
        } else {
            for (ConsumerRecord<Integer, byte[]> record : records) {
                processedRecords++;
                // Integer.BYTES (4) accounts for the key's size in bytes;
                // Integer.SIZE would be its size in bits (32).
                processedBytes += record.value().length + Integer.BYTES;
                Integer recKey = record.key();
                if (key == null || key < recKey) {
                    key = recKey;
                }
                if (processedRecords == numRecords) {
                    break;
                }
            }
        }
        if (processedRecords == numRecords) {
            break;
        }
    }
    long endTime = System.currentTimeMillis();
    consumer.close();
    printResults("Consumer Performance [records/latency/rec-sec/MB-sec read]: ", endTime - startTime);
}
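printResults, processedRecords, processedBytes, numRecords, and POLL_MS are fields and helpers defined elsewhere in SimpleBenchmark and not shown here. As an illustration only, the "records/latency/rec-sec/MB-sec" label suggests throughput math along these lines; the method name and signature are hypothetical:

// Hypothetical throughput report matching the label above; assumes latencyMs > 0.
static void printThroughput(final String prefix, final long records, final long bytes, final long latencyMs) {
    final double recsPerSec = 1000.0 * records / latencyMs;
    final double megabytesPerSec = (bytes / (1024.0 * 1024.0)) / (latencyMs / 1000.0);
    System.out.println(prefix + records + "/" + latencyMs + "/"
        + String.format("%.2f", recsPerSec) + "/" + String.format("%.2f", megabytesPerSec));
}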
Use of org.apache.kafka.clients.consumer.KafkaConsumer in project druid by druid-io.
The class KafkaIndexTask, method newConsumer:
private KafkaConsumer<byte[], byte[]> newConsumer() {
    ClassLoader currCtxCl = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        final Properties props = new Properties();
        for (Map.Entry<String, String> entry : ioConfig.getConsumerProperties().entrySet()) {
            props.setProperty(entry.getKey(), entry.getValue());
        }
        props.setProperty("enable.auto.commit", "false");
        props.setProperty("auto.offset.reset", "none");
        props.setProperty("key.deserializer", ByteArrayDeserializer.class.getName());
        props.setProperty("value.deserializer", ByteArrayDeserializer.class.getName());
        return new KafkaConsumer<>(props);
    } finally {
        Thread.currentThread().setContextClassLoader(currCtxCl);
    }
}
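The save/swap/restore of the thread context classloader ensures that Kafka's reflective loading of the deserializer classes resolves against the extension's classloader rather than whatever loader the calling thread happens to carry. The same pattern can be factored into a reusable helper; the name withContextClassLoader is an assumption, not Druid API:

import java.util.concurrent.Callable;

// Hypothetical utility: run a task with the given classloader installed as the
// thread context classloader, restoring the previous loader afterwards.
static <T> T withContextClassLoader(final ClassLoader loader, final Callable<T> task) throws Exception {
    final ClassLoader previous = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(loader);
    try {
        return task.call();
    } finally {
        Thread.currentThread().setContextClassLoader(previous);
    }
}

With such a helper, newConsumer reduces to building props and returning withContextClassLoader(getClass().getClassLoader(), () -> new KafkaConsumer<>(props)).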
Use of org.apache.kafka.clients.consumer.KafkaConsumer in project druid by druid-io.
The class KafkaSupervisor, method getKafkaConsumer:
private KafkaConsumer<byte[], byte[]> getKafkaConsumer() {
    final Properties props = new Properties();
    props.setProperty("metadata.max.age.ms", "10000");
    props.setProperty("group.id", String.format("kafka-supervisor-%s", getRandomId()));
    props.putAll(ioConfig.getConsumerProperties());
    props.setProperty("enable.auto.commit", "false");
    ClassLoader currCtxCl = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        return new KafkaConsumer<>(props, new ByteArrayDeserializer(), new ByteArrayDeserializer());
    } finally {
        Thread.currentThread().setContextClassLoader(currCtxCl);
    }
}
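Here the supervisor forces a short metadata.max.age.ms and a random group.id so each instance sees fresh metadata without joining a shared consumer group, and putAll runs before the enable.auto.commit override so user-supplied properties cannot re-enable auto-commit. A minimal sketch of one way such a consumer can be used for lag reporting; endOffsets and partitionsFor are standard KafkaConsumer API, while the surrounding method is hypothetical:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;

// Hypothetical lag probe built on the consumer returned above: fetch the
// latest offset of every partition of a topic without consuming records.
static Map<TopicPartition, Long> latestOffsets(final KafkaConsumer<byte[], byte[]> consumer, final String topic) {
    final List<TopicPartition> partitions = new ArrayList<>();
    for (final PartitionInfo info : consumer.partitionsFor(topic)) {
        partitions.add(new TopicPartition(info.topic(), info.partition()));
    }
    return consumer.endOffsets(partitions);
}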