Use of kafka.message.ByteBufferMessageSet in project incubator-gobblin by apache.
The class MockKafkaStream, method pushToStream:
public void pushToStream(String message) {
    int streamNo = (int) this.nextStream.incrementAndGet() % this.queues.size();
    AtomicLong offset = this.offsets.get(streamNo);
    BlockingQueue<FetchedDataChunk> queue = this.queues.get(streamNo);
    AtomicLong thisOffset = new AtomicLong(offset.incrementAndGet());

    // Wrap the UTF-8 payload in a single-message, uncompressed ByteBufferMessageSet,
    // letting the shared offset counter assign the message offset.
    List<Message> seq = Lists.newArrayList();
    seq.add(new Message(message.getBytes(Charsets.UTF_8)));
    ByteBufferMessageSet messageSet =
        new ByteBufferMessageSet(NoCompressionCodec$.MODULE$, offset, JavaConversions.asScalaBuffer(seq));

    // Hand the message set to the mocked consumer stream as a fetched chunk for this partition.
    FetchedDataChunk chunk = new FetchedDataChunk(messageSet,
        new PartitionTopicInfo("topic", streamNo, queue, thisOffset, thisOffset, new AtomicInteger(1), "clientId"),
        thisOffset.get());
    queue.add(chunk);
}
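The construction pattern above can be lifted into a small standalone helper. The sketch below is hypothetical (SingleMessageSets and forPayload are not part of the Gobblin sources); it only reuses the same Kafka 0.8-era ByteBufferMessageSet constructor, Guava, and Scala conversion calls that already appear in pushToStream.

import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

import com.google.common.base.Charsets;
import com.google.common.collect.Lists;

import kafka.message.ByteBufferMessageSet;
import kafka.message.Message;
import kafka.message.NoCompressionCodec$;
import scala.collection.JavaConversions;

// Hypothetical helper (not in the Gobblin code base) mirroring pushToStream: wrap one
// UTF-8 payload in an uncompressed ByteBufferMessageSet whose offset comes from a shared counter.
public final class SingleMessageSets {

    private SingleMessageSets() {}

    public static ByteBufferMessageSet forPayload(String payload, AtomicLong offsetCounter) {
        List<Message> messages = Lists.newArrayList(new Message(payload.getBytes(Charsets.UTF_8)));
        return new ByteBufferMessageSet(
            NoCompressionCodec$.MODULE$,                  // no compression, as in the mock stream
            offsetCounter,                                // offsets are drawn from this shared counter
            JavaConversions.asScalaBuffer(messages));
    }
}

Sharing one AtomicLong across calls keeps the assigned offsets increasing from one message set to the next, which is what the mock consumer stream relies on.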
Use of kafka.message.ByteBufferMessageSet in project graylog2-server by Graylog2.
The class KafkaJournal, method flushMessages:
private long flushMessages(List<Message> messages, long payloadSize) {
    if (messages.isEmpty()) {
        LOG.debug("No messages to flush, not trying to write an empty message set.");
        return -1L;
    }

    // Build an uncompressed ByteBufferMessageSet from the batch and append it to the
    // Kafka log (assignOffsets = true, so the log assigns the offsets).
    final ByteBufferMessageSet messageSet = new ByteBufferMessageSet(JavaConversions.asScalaBuffer(messages).toSeq());
    if (LOG.isDebugEnabled()) {
        LOG.debug("Trying to write ByteBufferMessageSet with size of {} bytes to journal", messageSet.sizeInBytes());
    }

    final LogAppendInfo appendInfo = kafkaLog.append(messageSet, true);
    long lastWriteOffset = appendInfo.lastOffset();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Wrote {} messages to journal: {} bytes (payload {} bytes), log position {} to {}",
            messages.size(), messageSet.sizeInBytes(), payloadSize, appendInfo.firstOffset(), lastWriteOffset);
    }

    writtenMessages.mark(messages.size());
    return lastWriteOffset;
}
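For context, a caller inside the same class might assemble the Message batch and handle the -1 sentinel as below. This is only a sketch under stated assumptions: flushBatch and its List<byte[]> parameter are hypothetical and not Graylog code; it reuses the LOG, Lists, Message, and flushMessages names from the surrounding KafkaJournal class.

// Hypothetical caller (not part of graylog2-server): turn each serialized payload into a
// kafka.message.Message, flush the batch, and treat -1 as "nothing was written".
private long flushBatch(List<byte[]> payloads) {
    long payloadSize = 0L;
    final List<Message> messages = Lists.newArrayListWithCapacity(payloads.size());
    for (byte[] payload : payloads) {
        payloadSize += payload.length;
        messages.add(new Message(payload));   // uncompressed, key-less message for this payload
    }

    final long lastOffset = flushMessages(messages, payloadSize);
    if (lastOffset == -1L) {
        LOG.debug("Nothing was flushed (empty batch).");
    }
    return lastOffset;
}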