Use of org.graylog2.plugin.journal.RawMessage in project graylog2-server by Graylog2.
The class AmqpConsumer, method run:
public void run() throws IOException {
    if (!isConnected()) {
        connect();
    }
    for (int i = 0; i < parallelQueues; i++) {
        final String queueName = String.format(Locale.ENGLISH, queue, i);
        channel.queueDeclare(queueName, true, false, false, null);
        if (exchangeBind) {
            channel.queueBind(queueName, exchange, routingKey);
        }
        channel.basicConsume(queueName, false, new DefaultConsumer(channel) {
            @Override
            public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException {
                long deliveryTag = envelope.getDeliveryTag();
                try {
                    totalBytesRead.addAndGet(body.length);
                    lastSecBytesReadTmp.addAndGet(body.length);
                    final RawMessage rawMessage = new RawMessage(body);
                    // TODO figure out if we want to unsubscribe after a certain time, or if simply blocking is enough here
                    if (amqpTransport.isThrottled()) {
                        amqpTransport.blockUntilUnthrottled();
                    }
                    sourceInput.processRawMessage(rawMessage);
                    channel.basicAck(deliveryTag, false);
                } catch (Exception e) {
                    LOG.error("Error while trying to process AMQP message", e);
                    if (channel.isOpen()) {
                        channel.basicNack(deliveryTag, false, requeueInvalid);
                        if (LOG.isDebugEnabled()) {
                            if (requeueInvalid) {
                                LOG.debug("Re-queue message with delivery tag {}", deliveryTag);
                            } else {
                                LOG.debug("Message with delivery tag {} not re-queued", deliveryTag);
                            }
                        }
                    }
                }
            }
        });
    }
}
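
The ordering here is the important part: the consumer waits out throttling, hands the body to the input as a RawMessage, and only then acknowledges the delivery, so a failure between processRawMessage and basicAck results in a re-delivery rather than a lost message. Below is a minimal sketch of that hand-off contract, with the transport's throttle check abstracted behind an illustrative Throttle interface; the helper names are assumptions made for the sketch, not Graylog API.

// Minimal sketch of the throttle-process-ack contract used above (illustrative, not graylog2-server code).
final class AmqpHandOffSketch {
    // Illustrative stand-in for the transport's throttle check; not a Graylog interface.
    interface Throttle {
        boolean isThrottled();
        void blockUntilUnthrottled();
    }

    static void handOff(Throttle throttle, MessageInput input, Channel channel,
                        long deliveryTag, byte[] body, boolean requeueInvalid) throws IOException {
        try {
            if (throttle.isThrottled()) {
                throttle.blockUntilUnthrottled();              // block instead of spinning while the journal is backed up
            }
            input.processRawMessage(new RawMessage(body));     // hand the raw bytes to the journal/codec pipeline
            channel.basicAck(deliveryTag, false);              // ack only after Graylog has accepted the message
        } catch (Exception e) {
            if (channel.isOpen()) {
                channel.basicNack(deliveryTag, false, requeueInvalid); // decide whether the failed delivery is re-queued
            }
        }
    }
}

Whether a failed message is retried or dropped is controlled by the requeueInvalid flag, exactly as in the debug logging above.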
Use of org.graylog2.plugin.journal.RawMessage in project graylog2-server by Graylog2.
The class KafkaTransport, method doLaunchLegacy:
private void doLaunchLegacy(final MessageInput input) {
    final Properties props = new Properties();
    props.put("group.id", configuration.getString(CK_GROUP_ID, DEFAULT_GROUP_ID));
    props.put("client.id", "gl2-" + nodeId.getShortNodeId() + "-" + input.getId());
    props.put("fetch.min.bytes", String.valueOf(configuration.getInt(CK_FETCH_MIN_BYTES)));
    props.put("fetch.wait.max.ms", String.valueOf(configuration.getInt(CK_FETCH_WAIT_MAX)));
    props.put("zookeeper.connect", configuration.getString(CK_ZOOKEEPER));
    props.put("auto.offset.reset", configuration.getString(CK_OFFSET_RESET, DEFAULT_OFFSET_RESET));
    // Default auto commit interval is 60 seconds. Reduce to 1 second to minimize message duplication
    // if something breaks.
    props.put("auto.commit.interval.ms", "1000");
    // Set a consumer timeout to avoid blocking on the consumer iterator.
    props.put("consumer.timeout.ms", "1000");
    insertCustomProperties(props);
    final int numThreads = configuration.getInt(CK_THREADS);
    final ConsumerConfig consumerConfig = new ConsumerConfig(props);
    cc = Consumer.createJavaConsumerConnector(consumerConfig);
    final TopicFilter filter = new Whitelist(configuration.getString(CK_TOPIC_FILTER));
    final List<KafkaStream<byte[], byte[]>> streams = cc.createMessageStreamsByFilter(filter, numThreads);
    // this is being used during shutdown to first stop all submitted jobs before committing the offsets back to zookeeper
    // and then shutting down the connection.
    // this is to avoid yanking away the connection from the consumer runnables
    stopLatch = new CountDownLatch(streams.size());
    for (final KafkaStream<byte[], byte[]> stream : streams) {
        executor.submit(new Runnable() {
            @Override
            public void run() {
                final ConsumerIterator<byte[], byte[]> consumerIterator = stream.iterator();
                boolean retry;
                do {
                    retry = false;
                    try {
                        // noinspection WhileLoopReplaceableByForEach
                        while (consumerIterator.hasNext()) {
                            if (paused) {
                                // we try not to spin here, so we wait until the lifecycle goes back to running.
                                LOG.debug("Message processing is paused, blocking until message processing is turned back on.");
                                Uninterruptibles.awaitUninterruptibly(pausedLatch);
                            }
                            // check for being stopped before actually getting the message, otherwise we could end up losing that message
                            if (stopped) {
                                break;
                            }
                            if (isThrottled()) {
                                blockUntilUnthrottled();
                            }
                            // process the message, this will immediately mark the message as having been processed. this gets tricky
                            // if we get an exception about processing it down below.
                            final MessageAndMetadata<byte[], byte[]> message = consumerIterator.next();
                            final byte[] bytes = message.message();
                            // it is possible that the message is null
                            if (bytes == null) {
                                continue;
                            }
                            totalBytesRead.addAndGet(bytes.length);
                            lastSecBytesReadTmp.addAndGet(bytes.length);
                            final RawMessage rawMessage = new RawMessage(bytes);
                            input.processRawMessage(rawMessage);
                        }
                    } catch (ConsumerTimeoutException e) {
                        // Happens when there is nothing to consume, retry to check again.
                        retry = true;
                    } catch (Exception e) {
                        LOG.error("Kafka consumer error, stopping consumer thread.", e);
                    }
                } while (retry && !stopped);
                // explicitly commit our offsets when stopping.
                // this might trigger a couple of times, but it won't hurt
                cc.commitOffsets();
                stopLatch.countDown();
            }
        });
    }
}
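
The stopLatch only makes sense together with the shutdown path: stopping first flips the stopped flag, then waits for every consumer runnable to commit its offsets and count the latch down, and only afterwards closes the connector. A rough sketch of that counterpart follows; it illustrates the latch protocol and is not the actual KafkaTransport shutdown code.

// Illustrative shutdown counterpart to the latch protocol above; not the actual KafkaTransport code.
// Assumes the stopped, stopLatch and cc fields referenced in doLaunchLegacy().
private void stopConsumersSketch() throws InterruptedException {
    stopped = true;                          // each runnable checks this flag before taking the next message
    stopLatch.await(10, TimeUnit.SECONDS);   // wait for the runnables to commit their offsets and count down
    cc.shutdown();                           // only now is it safe to tear down the consumer connection
}

Because offsets are also auto-committed every second (auto.commit.interval.ms above), the worst case after a crash is roughly one second of re-delivered messages, which is the duplication the inline comment warns about.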
Use of org.graylog2.plugin.journal.RawMessage in project graylog2-server by Graylog2.
The class RandomMessageTransport, method produceRawMessage:
@Override
protected RawMessage produceRawMessage(MessageInput input) {
    final byte[] payload;
    try {
        final FakeHttpRawMessageGenerator.GeneratorState state = generator.generateState();
        payload = objectMapper.writeValueAsBytes(state);
        final RawMessage raw = new RawMessage(payload);
        Thread.sleep(rateDeviation(sleepMs, maxSleepDeviation, rand));
        return raw;
    } catch (JsonProcessingException e) {
        log.error("Unable to serialize generator state", e);
    } catch (InterruptedException ignored) {
    }
    return null;
}
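
produceRawMessage serializes the generated state with Jackson, wraps the resulting bytes in a RawMessage, and sleeps a jittered interval to control the emission rate. The exact jitter computation lives in rateDeviation; a helper along these lines reproduces the idea (illustrative only, not the actual FakeHttpRawMessageGenerator implementation).

// Illustrative jitter helper, not the actual FakeHttpRawMessageGenerator.rateDeviation() implementation:
// returns a sleep time of baseMs plus or minus up to deviationPercent percent.
static long jitteredSleepMs(int baseMs, int deviationPercent, Random rand) {
    final double maxDeviation = baseMs * (deviationPercent / 100.0);
    final double offset = (rand.nextDouble() * 2.0 - 1.0) * maxDeviation; // uniform in [-maxDeviation, +maxDeviation]
    return Math.max(0L, Math.round(baseMs + offset));
}

Returning null, whether after a serialization error or an interrupt, simply means "no message this round"; the GeneratorTransport run loop in the next example skips null results.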
Use of org.graylog2.plugin.journal.RawMessage in project graylog2-server by Graylog2.
The class GeneratorTransport, method doLaunch:
@Override
public void doLaunch(final MessageInput input) throws MisfireException {
    generatorService = new AbstractExecutionThreadService() {
        Thread runThread;

        @Override
        protected void run() throws Exception {
            while (isRunning()) {
                if (isThrottled()) {
                    blockUntilUnthrottled();
                }
                final RawMessage rawMessage = GeneratorTransport.this.produceRawMessage(input);
                if (rawMessage != null) {
                    input.processRawMessage(rawMessage);
                }
            }
        }

        @Override
        protected void startUp() throws Exception {
            runThread = Thread.currentThread();
        }

        @Override
        protected void triggerShutdown() {
            runThread.interrupt();
        }
    };
    generatorService.startAsync();
}
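
startUp records the run thread so that triggerShutdown can interrupt it, which is what lets a produceRawMessage blocked in Thread.sleep (as in the previous example) return promptly when the input is stopped. A matching shutdown path would stop the Guava service and wait for the loop to exit; the sketch below is illustrative and may differ from the real GeneratorTransport.

// Illustrative shutdown counterpart (signature and exact behaviour assumed, not taken from the source):
// stopping the service invokes triggerShutdown(), which interrupts the run thread.
protected void doStop() {
    if (generatorService != null) {
        generatorService.stopAsync().awaitTerminated();
    }
}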
Use of org.graylog2.plugin.journal.RawMessage in project graylog2-server by Graylog2.
The class MessageResource, method decodeMessage:
private Message decodeMessage(Codec codec, ResolvableInetSocketAddress remoteAddress, RawMessage rawMessage) {
    Message message;
    try {
        message = codec.decode(rawMessage);
    } catch (Exception e) {
        throw new BadRequestException("Could not decode message");
    }
    if (message == null) {
        throw new BadRequestException("Could not decode message");
    }
    // Ensure the decoded Message has a source, otherwise creating a ResultMessage will fail
    if (isNullOrEmpty(message.getSource())) {
        final String address = InetAddresses.toAddrString(remoteAddress.getAddress());
        message.setSource(address);
    }
    // Override source
    final Configuration configuration = codec.getConfiguration();
    if (configuration.stringIsSet(Codec.Config.CK_OVERRIDE_SOURCE)) {
        message.setSource(configuration.getString(Codec.Config.CK_OVERRIDE_SOURCE));
    }
    return message;
}
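
decodeMessage runs an already-built RawMessage through the codec and, if the codec did not set a source, falls back to the caller's remote address. Building that RawMessage is up to the caller; the sketch below shows one way to do it, assuming a RawMessage(byte[], ResolvableInetSocketAddress) constructor and a ResolvableInetSocketAddress.wrap(...) factory (both are assumptions about the graylog2-server API, to be checked against the actual source).

// Illustrative only: builds a RawMessage that carries the sender's address so that
// decodeMessage() can fall back to it when the codec leaves the source empty.
// Assumes new RawMessage(byte[], ResolvableInetSocketAddress) and ResolvableInetSocketAddress.wrap().
static RawMessage toRawMessage(String payload, InetSocketAddress sender) {
    final ResolvableInetSocketAddress remoteAddress = ResolvableInetSocketAddress.wrap(sender);
    return new RawMessage(payload.getBytes(StandardCharsets.UTF_8), remoteAddress);
}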