Use of kafka.javaapi.message.ByteBufferMessageSet in project storm by apache.
The class TridentKafkaEmitter, method fetchMessages.
private ByteBufferMessageSet fetchMessages(SimpleConsumer consumer, Partition partition, long offset) {
    long start = System.currentTimeMillis();
    // Fetch a batch of messages for this partition, starting at the given offset.
    ByteBufferMessageSet msgs = KafkaUtils.fetchMessages(_config, consumer, partition, offset);
    // Record the fetch latency in both the mean and max fetch-latency metrics.
    long millis = System.currentTimeMillis() - start;
    _kafkaMeanFetchLatencyMetric.update(millis);
    _kafkaMaxFetchLatencyMetric.update(millis);
    return msgs;
}
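For context, a minimal sketch of how the arguments that fetchMessages expects could be built outside the emitter is shown below. It assumes the old SimpleConsumer API and the org.apache.storm.kafka package layout; the host, port, timeouts, client id, topic name, and ZooKeeper address are placeholders for illustration, not values from the project.
import kafka.api.OffsetRequest;
import kafka.javaapi.consumer.SimpleConsumer;
import kafka.javaapi.message.ByteBufferMessageSet;
import org.apache.storm.kafka.Broker;
import org.apache.storm.kafka.KafkaConfig;
import org.apache.storm.kafka.KafkaUtils;
import org.apache.storm.kafka.Partition;
import org.apache.storm.kafka.ZkHosts;

class FetchArgumentsSketch {
    static ByteBufferMessageSet sketch() throws Exception {
        // Placeholder connection settings: host, port, socket timeout, buffer size, client id.
        SimpleConsumer consumer = new SimpleConsumer("localhost", 9092, 100000, 64 * 1024, "fetch-sketch");
        // Partition 0 of a placeholder topic, served by the broker above.
        Partition partition = new Partition(Broker.fromString("localhost:9092"), "test-topic", 0);
        // Start one message before the latest offset, as the tests further down do.
        long offset = KafkaUtils.getOffset(consumer, "test-topic", 0, OffsetRequest.LatestTime()) - 1;
        KafkaConfig config = new KafkaConfig(new ZkHosts("localhost:2181"), "test-topic");
        return KafkaUtils.fetchMessages(config, consumer, partition, offset);
    }
}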
Use of kafka.javaapi.message.ByteBufferMessageSet in project storm by apache.
The class KafkaBoltTest, method executeWithByteArrayKeyAndMessageFire.
/* test with fireAndForget option enabled */
@Test
public void executeWithByteArrayKeyAndMessageFire() {
    boolean async = true;
    boolean fireAndForget = true;
    bolt = generateDefaultSerializerBolt(async, fireAndForget, null);
    String keyString = "test-key";
    String messageString = "test-message";
    byte[] key = keyString.getBytes();
    byte[] message = messageString.getBytes();
    Tuple tuple = generateTestTuple(key, message);
    final ByteBufferMessageSet mockMsg = mockSingleMessage(key, message);
    simpleConsumer.close();
    simpleConsumer = mockSimpleConsumer(mockMsg);
    KafkaProducer<?, ?> producer = mock(KafkaProducer.class);
    // Do not invoke the callback of send(), in order to test whether the bolt handles
    // the fireAndForget option properly.
    doReturn(mock(Future.class)).when(producer).send(any(ProducerRecord.class), any(Callback.class));
    bolt.execute(tuple);
    verify(collector).ack(tuple);
    verifyMessage(keyString, messageString);
}
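The behavior being verified is that the tuple is acked even though the producer callback never fires. Below is a simplified sketch of that fire-and-forget pattern, not the KafkaBolt source; the field and variable names are placeholders introduced for illustration.
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.tuple.Tuple;

class FireAndForgetSketch {
    private KafkaProducer<byte[], byte[]> producer;
    private OutputCollector collector;
    private boolean fireAndForget;

    void emit(String topic, byte[] key, byte[] message, Tuple tuple) {
        ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(topic, key, message);
        if (fireAndForget) {
            // Fire and forget: ack immediately, ignoring the returned Future and any callback.
            producer.send(record);
            collector.ack(tuple);
        } else {
            // Otherwise ack or fail only once the producer reports the result of the send.
            producer.send(record, (metadata, exception) -> {
                if (exception == null) {
                    collector.ack(tuple);
                } else {
                    collector.fail(tuple);
                }
            });
        }
    }
}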
Use of kafka.javaapi.message.ByteBufferMessageSet in project storm by apache.
The class KafkaUtilsTest, method generateTuplesWithKeyAndKeyValueScheme.
@Test
public void generateTuplesWithKeyAndKeyValueScheme() {
    config.scheme = new KeyValueSchemeAsMultiScheme(new StringKeyValueScheme());
    config.useStartOffsetTimeIfOffsetOutOfRange = false;
    String value = "value";
    String key = "key";
    createTopicAndSendMessage(key, value);
    ByteBufferMessageSet messageAndOffsets = getLastMessage();
    for (MessageAndOffset msg : messageAndOffsets) {
        Iterable<List<Object>> lists = KafkaUtils.generateTuples(config, msg.message(), config.topic);
        // StringKeyValueScheme emits each message as a single-entry map of key to value.
        assertEquals(ImmutableMap.of(key, value), lists.iterator().next().get(0));
    }
}
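For reference, the same key/value scheme can be wired into a storm-kafka spout configuration so the spout emits the map-valued tuples the test asserts on. This is a minimal sketch under assumed settings: the ZooKeeper address, topic, zkRoot, and spout id are placeholders.
import org.apache.storm.kafka.KafkaSpout;
import org.apache.storm.kafka.KeyValueSchemeAsMultiScheme;
import org.apache.storm.kafka.SpoutConfig;
import org.apache.storm.kafka.StringKeyValueScheme;
import org.apache.storm.kafka.ZkHosts;

class KeyValueSchemeSketch {
    static KafkaSpout buildSpout() {
        // Placeholder ZooKeeper connection string, topic, zkRoot, and consumer id.
        SpoutConfig spoutConfig = new SpoutConfig(new ZkHosts("localhost:2181"), "test-topic", "/kafka", "kv-scheme-sketch");
        // Deserialize each Kafka message into a single-entry map of key to value, as in the test above.
        spoutConfig.scheme = new KeyValueSchemeAsMultiScheme(new StringKeyValueScheme());
        return new KafkaSpout(spoutConfig);
    }
}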
Use of kafka.javaapi.message.ByteBufferMessageSet in project storm by apache.
The class KafkaUtilsTest, method fetchMessage.
@Test
public void fetchMessage() throws Exception {
    String value = "test";
    createTopicAndSendMessage(value);
    long offset = KafkaUtils.getOffset(simpleConsumer, config.topic, 0, OffsetRequest.LatestTime()) - 1;
    ByteBufferMessageSet messageAndOffsets = KafkaUtils.fetchMessages(config, simpleConsumer,
            new Partition(Broker.fromString(broker.getBrokerConnectionString()), TEST_TOPIC, 0), offset);
    String message = new String(Utils.toByteArray(messageAndOffsets.iterator().next().message().payload()));
    assertThat(message, is(equalTo(value)));
}
Use of kafka.javaapi.message.ByteBufferMessageSet in project storm by apache.
The class TestUtils, method verifyMessage.
public static boolean verifyMessage(String key, String message, KafkaTestBroker broker, SimpleConsumer simpleConsumer) {
    // Fetch the most recently written message from partition 0 of the test topic.
    long lastMessageOffset = KafkaUtils.getOffset(simpleConsumer, TestUtils.TOPIC, 0, OffsetRequest.LatestTime()) - 1;
    ByteBufferMessageSet messageAndOffsets = KafkaUtils.fetchMessages(TestUtils.getKafkaConfig(broker), simpleConsumer,
            new Partition(Broker.fromString(broker.getBrokerConnectionString()), TestUtils.TOPIC, 0), lastMessageOffset);
    MessageAndOffset messageAndOffset = messageAndOffsets.iterator().next();
    Message kafkaMessage = messageAndOffset.message();
    ByteBuffer messageKeyBuffer = kafkaMessage.key();
    String keyString = null;
    String messageString = new String(Utils.toByteArray(kafkaMessage.payload()));
    // The key buffer is null for messages written without a key.
    if (messageKeyBuffer != null) {
        keyString = new String(Utils.toByteArray(messageKeyBuffer));
    }
    assertEquals(key, keyString);
    assertEquals(message, messageString);
    return true;
}
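A hedged usage sketch follows: a test that delegates its assertions to verifyMessage. It assumes the class lives in the same test package as TestUtils and KafkaTestBroker, that the broker and simpleConsumer fields are initialized in setup code that is not shown, and that the key/value pair has already been published to TestUtils.TOPIC.
import kafka.javaapi.consumer.SimpleConsumer;
import org.junit.Test;

public class VerifyMessageUsageSketch {
    // Assumed to be initialized in test setup code (for example a @Before method) not shown here.
    private KafkaTestBroker broker;
    private SimpleConsumer simpleConsumer;

    @Test
    public void roundTripsKeyAndPayload() {
        // Assumes "test-key"/"test-message" was already produced to TestUtils.TOPIC.
        TestUtils.verifyMessage("test-key", "test-message", broker, simpleConsumer);
    }
}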