
Example 51 with RawMessage

Use of org.graylog2.plugin.journal.RawMessage in the project graylog2-server by Graylog2.

In the class AmqpConsumer, the method run():

public void run() throws IOException {
    if (!isConnected()) {
        connect();
    }
    for (int i = 0; i < parallelQueues; i++) {
        final String queueName = String.format(Locale.ENGLISH, queue, i);
        channel.queueDeclare(queueName, true, false, false, null);
        if (exchangeBind) {
            channel.queueBind(queueName, exchange, routingKey);
        }
        channel.basicConsume(queueName, false, new DefaultConsumer(channel) {

            @Override
            public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException {
                long deliveryTag = envelope.getDeliveryTag();
                try {
                    totalBytesRead.addAndGet(body.length);
                    lastSecBytesReadTmp.addAndGet(body.length);
                    final RawMessage rawMessage = new RawMessage(body);
                    // TODO figure out if we want to unsubscribe after a certain time, or if simply blocking is enough here
                    if (amqpTransport.isThrottled()) {
                        amqpTransport.blockUntilUnthrottled();
                    }
                    sourceInput.processRawMessage(rawMessage);
                    channel.basicAck(deliveryTag, false);
                } catch (Exception e) {
                    LOG.error("Error while trying to process AMQP message", e);
                    if (channel.isOpen()) {
                        channel.basicNack(deliveryTag, false, requeueInvalid);
                        if (LOG.isDebugEnabled()) {
                            if (requeueInvalid) {
                                LOG.debug("Re-queue message with delivery tag {}", deliveryTag);
                            } else {
                                LOG.debug("Message with delivery tag {} not re-queued", deliveryTag);
                            }
                        }
                    }
                }
            }
        });
    }
}
Also used : DefaultConsumer(com.rabbitmq.client.DefaultConsumer) AMQP(com.rabbitmq.client.AMQP) IOException(java.io.IOException) Envelope(com.rabbitmq.client.Envelope) RawMessage(org.graylog2.plugin.journal.RawMessage) TimeoutException(java.util.concurrent.TimeoutException) KeyManagementException(java.security.KeyManagementException) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException)
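
For reference, here is the acknowledgement decision from handleDelivery pulled out into a standalone helper. This is a minimal sketch: the class and method names are illustrative, and only the Channel calls (basicAck, basicNack, isOpen) come from the example above.

import java.io.IOException;
import com.rabbitmq.client.Channel;

// Illustrative helper, not part of graylog2-server.
final class AmqpAckSketch {

    // Mirrors AmqpConsumer.handleDelivery: ack a successfully processed delivery,
    // otherwise nack it and requeue only when requeueInvalid is configured.
    static void ackOrRequeue(Channel channel, long deliveryTag, boolean processed, boolean requeueInvalid) throws IOException {
        if (processed) {
            // second argument false: acknowledge this delivery only, not all up to the tag
            channel.basicAck(deliveryTag, false);
        } else if (channel.isOpen()) {
            channel.basicNack(deliveryTag, false, requeueInvalid);
        }
    }
}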

Example 52 with RawMessage

Use of org.graylog2.plugin.journal.RawMessage in the project graylog2-server by Graylog2.

In the class KafkaTransport, the method doLaunchLegacy():

private void doLaunchLegacy(final MessageInput input) {
    final Properties props = new Properties();
    props.put("group.id", configuration.getString(CK_GROUP_ID, DEFAULT_GROUP_ID));
    props.put("client.id", "gl2-" + nodeId.getShortNodeId() + "-" + input.getId());
    props.put("fetch.min.bytes", String.valueOf(configuration.getInt(CK_FETCH_MIN_BYTES)));
    props.put("fetch.wait.max.ms", String.valueOf(configuration.getInt(CK_FETCH_WAIT_MAX)));
    props.put("zookeeper.connect", configuration.getString(CK_ZOOKEEPER));
    props.put("auto.offset.reset", configuration.getString(CK_OFFSET_RESET, DEFAULT_OFFSET_RESET));
    // Default auto commit interval is 60 seconds. Reduce to 1 second to minimize message duplication
    // if something breaks.
    props.put("auto.commit.interval.ms", "1000");
    // Set a consumer timeout to avoid blocking on the consumer iterator.
    props.put("consumer.timeout.ms", "1000");
    insertCustomProperties(props);
    final int numThreads = configuration.getInt(CK_THREADS);
    final ConsumerConfig consumerConfig = new ConsumerConfig(props);
    cc = Consumer.createJavaConsumerConnector(consumerConfig);
    final TopicFilter filter = new Whitelist(configuration.getString(CK_TOPIC_FILTER));
    final List<KafkaStream<byte[], byte[]>> streams = cc.createMessageStreamsByFilter(filter, numThreads);
    // this is being used during shutdown to first stop all submitted jobs before committing the offsets back to zookeeper
    // and then shutting down the connection.
    // this is to avoid yanking away the connection from the consumer runnables
    stopLatch = new CountDownLatch(streams.size());
    for (final KafkaStream<byte[], byte[]> stream : streams) {
        executor.submit(new Runnable() {

            @Override
            public void run() {
                final ConsumerIterator<byte[], byte[]> consumerIterator = stream.iterator();
                boolean retry;
                do {
                    retry = false;
                    try {
                        // noinspection WhileLoopReplaceableByForEach
                        while (consumerIterator.hasNext()) {
                            if (paused) {
                                // we try not to spin here, so we wait until the lifecycle goes back to running.
                                LOG.debug("Message processing is paused, blocking until message processing is turned back on.");
                                Uninterruptibles.awaitUninterruptibly(pausedLatch);
                            }
                            // check for being stopped before actually getting the message, otherwise we could end up losing that message
                            if (stopped) {
                                break;
                            }
                            if (isThrottled()) {
                                blockUntilUnthrottled();
                            }
                            // process the message, this will immediately mark the message as having been processed. this gets tricky
                            // if we get an exception about processing it down below.
                            final MessageAndMetadata<byte[], byte[]> message = consumerIterator.next();
                            final byte[] bytes = message.message();
                            // it is possible that the message is null
                            if (bytes == null) {
                                continue;
                            }
                            totalBytesRead.addAndGet(bytes.length);
                            lastSecBytesReadTmp.addAndGet(bytes.length);
                            final RawMessage rawMessage = new RawMessage(bytes);
                            input.processRawMessage(rawMessage);
                        }
                    } catch (ConsumerTimeoutException e) {
                        // Happens when there is nothing to consume, retry to check again.
                        retry = true;
                    } catch (Exception e) {
                        LOG.error("Kafka consumer error, stopping consumer thread.", e);
                    }
                } while (retry && !stopped);
                // explicitly commit our offsets when stopping.
                // this might trigger a couple of times, but it won't hurt
                cc.commitOffsets();
                stopLatch.countDown();
            }
        });
    }
}
Also used : TopicFilter(org.graylog.shaded.kafka09.consumer.TopicFilter) MessageAndMetadata(org.graylog.shaded.kafka09.message.MessageAndMetadata) KafkaStream(org.graylog.shaded.kafka09.consumer.KafkaStream) Properties(java.util.Properties) CountDownLatch(java.util.concurrent.CountDownLatch) KafkaException(org.apache.kafka.common.KafkaException) ConsumerTimeoutException(org.graylog.shaded.kafka09.consumer.ConsumerTimeoutException) WakeupException(org.apache.kafka.common.errors.WakeupException) AuthorizationException(org.apache.kafka.common.errors.AuthorizationException) IOException(java.io.IOException) InvalidOffsetException(org.apache.kafka.clients.consumer.InvalidOffsetException) ConsumerIterator(org.graylog.shaded.kafka09.consumer.ConsumerIterator) Whitelist(org.graylog.shaded.kafka09.consumer.Whitelist) ConsumerConfig(org.graylog.shaded.kafka09.consumer.ConsumerConfig) RawMessage(org.graylog2.plugin.journal.RawMessage)
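
Stripped of the Kafka plumbing, the core of the consumer loop above is the handoff of raw bytes into the journal. Below is a minimal sketch of that step, assuming a MessageInput is available; the class name, method name, and the single counter field are illustrative stand-ins for the transport's metrics.

import java.util.concurrent.atomic.AtomicLong;
import org.graylog2.plugin.inputs.MessageInput;
import org.graylog2.plugin.journal.RawMessage;

// Illustrative helper, not part of graylog2-server.
final class RawMessageHandoffSketch {

    private final AtomicLong totalBytesRead = new AtomicLong();

    void handle(MessageInput input, byte[] bytes) {
        if (bytes == null) {
            // the Kafka iterator can yield null payloads; skip them as the transport does
            return;
        }
        totalBytesRead.addAndGet(bytes.length);
        // wrap the bytes and journal them for later decoding by the input's codec
        input.processRawMessage(new RawMessage(bytes));
    }
}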

Example 53 with RawMessage

Use of org.graylog2.plugin.journal.RawMessage in the project graylog2-server by Graylog2.

In the class RandomMessageTransport, the method produceRawMessage():

@Override
protected RawMessage produceRawMessage(MessageInput input) {
    final byte[] payload;
    try {
        final FakeHttpRawMessageGenerator.GeneratorState state = generator.generateState();
        payload = objectMapper.writeValueAsBytes(state);
        final RawMessage raw = new RawMessage(payload);
        Thread.sleep(rateDeviation(sleepMs, maxSleepDeviation, rand));
        return raw;
    } catch (JsonProcessingException e) {
        log.error("Unable to serialize generator state", e);
    } catch (InterruptedException ignored) {
    }
    return null;
}
Also used : FakeHttpRawMessageGenerator(org.graylog2.inputs.random.generators.FakeHttpRawMessageGenerator) RawMessage(org.graylog2.plugin.journal.RawMessage) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException)
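
The same serialize-then-wrap step can be shown in isolation. A minimal sketch, assuming any Jackson-serializable payload object; the helper name fromObject is illustrative, and returning null mirrors produceRawMessage's "nothing produced" contract.

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.graylog2.plugin.journal.RawMessage;

// Illustrative helper, not part of graylog2-server.
final class JsonRawMessageSketch {

    private static final ObjectMapper MAPPER = new ObjectMapper();

    static RawMessage fromObject(Object payload) {
        try {
            // serialize the payload to JSON bytes and wrap them as the raw journal message
            return new RawMessage(MAPPER.writeValueAsBytes(payload));
        } catch (JsonProcessingException e) {
            return null;
        }
    }
}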

Example 54 with RawMessage

Use of org.graylog2.plugin.journal.RawMessage in the project graylog2-server by Graylog2.

In the class GeneratorTransport, the method doLaunch():

@Override
public void doLaunch(final MessageInput input) throws MisfireException {
    generatorService = new AbstractExecutionThreadService() {

        Thread runThread;

        @Override
        protected void run() throws Exception {
            while (isRunning()) {
                if (isThrottled()) {
                    blockUntilUnthrottled();
                }
                final RawMessage rawMessage = GeneratorTransport.this.produceRawMessage(input);
                if (rawMessage != null) {
                    input.processRawMessage(rawMessage);
                }
            }
        }

        @Override
        protected void startUp() throws Exception {
            runThread = Thread.currentThread();
        }

        @Override
        protected void triggerShutdown() {
            runThread.interrupt();
        }
    };
    generatorService.startAsync();
}
Also used : AbstractExecutionThreadService(com.google.common.util.concurrent.AbstractExecutionThreadService) RawMessage(org.graylog2.plugin.journal.RawMessage) MisfireException(org.graylog2.plugin.inputs.MisfireException)
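
A minimal sketch of what a concrete produceRawMessage could look like for the service above. The fixed payload and class name are illustrative only; the real subclasses of GeneratorTransport (such as RandomMessageTransport in Example 53) supply their own payloads.

import java.nio.charset.StandardCharsets;
import org.graylog2.plugin.inputs.MessageInput;
import org.graylog2.plugin.journal.RawMessage;

// Illustrative helper, not part of graylog2-server; the real code would extend GeneratorTransport.
final class FixedPayloadGeneratorSketch {

    protected RawMessage produceRawMessage(MessageInput input) {
        // run() wraps each non-null result in input.processRawMessage; returning null simply skips a cycle
        return new RawMessage("generated payload".getBytes(StandardCharsets.UTF_8));
    }
}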

Example 55 with RawMessage

Use of org.graylog2.plugin.journal.RawMessage in the project graylog2-server by Graylog2.

In the class MessageResource, the method decodeMessage():

private Message decodeMessage(Codec codec, ResolvableInetSocketAddress remoteAddress, RawMessage rawMessage) {
    Message message;
    try {
        message = codec.decode(rawMessage);
    } catch (Exception e) {
        throw new BadRequestException("Could not decode message");
    }
    if (message == null) {
        throw new BadRequestException("Could not decode message");
    }
    // Ensure the decoded Message has a source, otherwise creating a ResultMessage will fail
    if (isNullOrEmpty(message.getSource())) {
        final String address = InetAddresses.toAddrString(remoteAddress.getAddress());
        message.setSource(address);
    }
    // Override source
    final Configuration configuration = codec.getConfiguration();
    if (configuration.stringIsSet(Codec.Config.CK_OVERRIDE_SOURCE)) {
        message.setSource(configuration.getString(Codec.Config.CK_OVERRIDE_SOURCE));
    }
    return message;
}
Also used : ResultMessage(org.graylog2.indexer.results.ResultMessage) RawMessage(org.graylog2.plugin.journal.RawMessage) Message(org.graylog2.plugin.Message) Configuration(org.graylog2.plugin.configuration.Configuration) BadRequestException(javax.ws.rs.BadRequestException) ForbiddenException(javax.ws.rs.ForbiddenException) IOException(java.io.IOException) NotFoundException(javax.ws.rs.NotFoundException) DocumentNotFoundException(org.graylog2.indexer.messages.DocumentNotFoundException)
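
On the caller side, the pattern is to build a RawMessage from the raw request bytes and hand it to the codec, falling back to the remote address when the decoded message carries no source. A minimal sketch under that assumption; the Codec import path and the helper names are assumptions, while the decode, getSource, and setSource calls come from the example.

import org.graylog2.plugin.Message;
import org.graylog2.plugin.ResolvableInetSocketAddress;
import org.graylog2.plugin.inputs.codecs.Codec;
import org.graylog2.plugin.journal.RawMessage;

// Illustrative helper, not part of graylog2-server.
final class DecodeSketch {

    Message decode(Codec codec, ResolvableInetSocketAddress remoteAddress, byte[] body) {
        final RawMessage raw = new RawMessage(body);
        // the codec may return null for undecodable input; decodeMessage above turns that into a 400
        final Message message = codec.decode(raw);
        if (message != null && (message.getSource() == null || message.getSource().isEmpty())) {
            // same fallback decodeMessage applies before building a ResultMessage
            message.setSource(remoteAddress.getAddress().getHostAddress());
        }
        return message;
    }
}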

Aggregations

RawMessage (org.graylog2.plugin.journal.RawMessage): 59
Test (org.junit.Test): 35
Message (org.graylog2.plugin.Message): 23
InetSocketAddress (java.net.InetSocketAddress): 13
IOException (java.io.IOException): 7
Nullable (javax.annotation.Nullable): 7
MappedMessage (org.graylog.plugins.cef.parser.MappedMessage): 6
ResolvableInetSocketAddress (org.graylog2.plugin.ResolvableInetSocketAddress): 6
DateTime (org.joda.time.DateTime): 5
Configuration (org.graylog2.plugin.configuration.Configuration): 4
ByteBuf (io.netty.buffer.ByteBuf): 3
URL (java.net.URL): 3
ZonedDateTime (java.time.ZonedDateTime): 3
Timer (com.codahale.metrics.Timer): 2
List (java.util.List): 2
Map (java.util.Map): 2
Properties (java.util.Properties): 2
CountDownLatch (java.util.concurrent.CountDownLatch): 2
NotFoundException (javax.ws.rs.NotFoundException): 2
DocumentNotFoundException (org.graylog2.indexer.messages.DocumentNotFoundException): 2