
Example 1 with Pipeline

Use of org.graylog.plugins.pipelineprocessor.ast.Pipeline in project graylog2-server by Graylog2.

From the class PipelineInterpreterTest, method testMetrics.

@Test
@SuppressForbidden("Allow using default thread factory")
public void testMetrics() {
    final RuleMetricsConfigService ruleMetricsConfigService = mock(RuleMetricsConfigService.class);
    when(ruleMetricsConfigService.get()).thenReturn(RuleMetricsConfigDto.createDefault());
    final ClusterEventBus clusterEventBus = new ClusterEventBus("cluster-event-bus", Executors.newSingleThreadExecutor());
    final RuleService ruleService = new InMemoryRuleService(clusterEventBus);
    ruleService.save(RuleDao.create("abc", "title", "description",
            "rule \"match_all\"\n" +
            "when true\n" +
            "then\n" +
            "end",
            Tools.nowUTC(), null));
    final PipelineService pipelineService = new InMemoryPipelineService(new ClusterEventBus());
    pipelineService.save(PipelineDao.create("cde", "title", "description",
            "pipeline \"pipeline\"\n" +
            "stage 0 match all\n" +
            "    rule \"match_all\";\n" +
            "stage 1 match all\n" +
            "    rule \"match_all\";\n" +
            "end\n",
            Tools.nowUTC(), null));
    final PipelineStreamConnectionsService pipelineStreamConnectionsService = new InMemoryPipelineStreamConnectionsService(clusterEventBus);
    pipelineStreamConnectionsService.save(PipelineConnections.create(null, DEFAULT_STREAM_ID, Collections.singleton("cde")));
    final FunctionRegistry functionRegistry = new FunctionRegistry(Collections.emptyMap());
    final PipelineRuleParser parser = new PipelineRuleParser(functionRegistry);
    final MetricRegistry metricRegistry = new MetricRegistry();
    final ConfigurationStateUpdater stateUpdater = new ConfigurationStateUpdater(ruleService,
            pipelineService,
            pipelineStreamConnectionsService,
            parser,
            ruleMetricsConfigService,
            metricRegistry,
            Executors.newScheduledThreadPool(1),
            mock(EventBus.class),
            (currentPipelines, streamPipelineConnections, ruleMetricsConfig) ->
                    new PipelineInterpreter.State(currentPipelines, streamPipelineConnections, ruleMetricsConfig, new MetricRegistry(), 1, true));
    final PipelineInterpreter interpreter = new PipelineInterpreter(mock(MessageQueueAcknowledger.class), metricRegistry, stateUpdater);
    interpreter.process(messageInDefaultStream("", ""));
    final SortedMap<String, Meter> meters = metricRegistry.getMeters((name, metric) -> name.startsWith(name(Pipeline.class, "cde")) || name.startsWith(name(Rule.class, "abc")));
    assertThat(meters.keySet()).containsExactlyInAnyOrder(
            name(Pipeline.class, "cde", "executed"),
            name(Pipeline.class, "cde", "stage", "0", "executed"),
            name(Pipeline.class, "cde", "stage", "1", "executed"),
            name(Rule.class, "abc", "executed"),
            name(Rule.class, "abc", "cde", "0", "executed"),
            name(Rule.class, "abc", "cde", "1", "executed"),
            name(Rule.class, "abc", "matched"),
            name(Rule.class, "abc", "cde", "0", "matched"),
            name(Rule.class, "abc", "cde", "1", "matched"),
            name(Rule.class, "abc", "not-matched"),
            name(Rule.class, "abc", "cde", "0", "not-matched"),
            name(Rule.class, "abc", "cde", "1", "not-matched"),
            name(Rule.class, "abc", "failed"),
            name(Rule.class, "abc", "cde", "0", "failed"),
            name(Rule.class, "abc", "cde", "1", "failed"));
    assertThat(meters.get(name(Pipeline.class, "cde", "executed")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Pipeline.class, "cde", "stage", "0", "executed")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Pipeline.class, "cde", "stage", "1", "executed")).getCount()).isEqualTo(1L);
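    // the "match_all" rule is referenced from both stages, so its rule-level meters
    // count two executions while each per-stage meter counts one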
    assertThat(meters.get(name(Rule.class, "abc", "executed")).getCount()).isEqualTo(2L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "0", "executed")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "1", "executed")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Rule.class, "abc", "matched")).getCount()).isEqualTo(2L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "0", "matched")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "1", "matched")).getCount()).isEqualTo(1L);
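    // the rule always matches, so the not-matched and failed meters stay at zero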
    assertThat(meters.get(name(Rule.class, "abc", "not-matched")).getCount()).isEqualTo(0L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "0", "not-matched")).getCount()).isEqualTo(0L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "1", "not-matched")).getCount()).isEqualTo(0L);
    assertThat(meters.get(name(Rule.class, "abc", "failed")).getCount()).isEqualTo(0L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "0", "failed")).getCount()).isEqualTo(0L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "1", "failed")).getCount()).isEqualTo(0L);
}
Also used: InMemoryRuleService(org.graylog.plugins.pipelineprocessor.db.memory.InMemoryRuleService) PipelineStreamConnectionsService(org.graylog.plugins.pipelineprocessor.db.PipelineStreamConnectionsService) MongoDbPipelineStreamConnectionsService(org.graylog.plugins.pipelineprocessor.db.mongodb.MongoDbPipelineStreamConnectionsService) InMemoryPipelineStreamConnectionsService(org.graylog.plugins.pipelineprocessor.db.memory.InMemoryPipelineStreamConnectionsService) MessageQueueAcknowledger(org.graylog2.shared.messageq.MessageQueueAcknowledger) Meter(com.codahale.metrics.Meter) MetricRegistry(com.codahale.metrics.MetricRegistry) PipelineRuleParser(org.graylog.plugins.pipelineprocessor.parser.PipelineRuleParser) ClusterEventBus(org.graylog2.events.ClusterEventBus) EventBus(com.google.common.eventbus.EventBus) InMemoryPipelineService(org.graylog.plugins.pipelineprocessor.db.memory.InMemoryPipelineService) RuleMetricsConfigService(org.graylog.plugins.pipelineprocessor.db.RuleMetricsConfigService) Pipeline(org.graylog.plugins.pipelineprocessor.ast.Pipeline) FunctionRegistry(org.graylog.plugins.pipelineprocessor.parser.FunctionRegistry) PipelineService(org.graylog.plugins.pipelineprocessor.db.PipelineService) MongoDbPipelineService(org.graylog.plugins.pipelineprocessor.db.mongodb.MongoDbPipelineService) RuleService(org.graylog.plugins.pipelineprocessor.db.RuleService) MongoDbRuleService(org.graylog.plugins.pipelineprocessor.db.mongodb.MongoDbRuleService) Rule(org.graylog.plugins.pipelineprocessor.ast.Rule) Test(org.junit.Test) SuppressForbidden(org.graylog2.shared.SuppressForbidden)
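
The test above relies on a messageInDefaultStream helper that is not part of this excerpt. Below is a minimal sketch of what such a helper could look like, assuming it builds an org.graylog2.plugin.Message and routes it to a mocked org.graylog2.plugin.streams.Stream with the default stream id so that the "cde" pipeline connection applies; the helper's name, signature, and mock-based setup are assumptions, not code taken from the Graylog sources.

private Message messageInDefaultStream(String message, String source) {
    // build a plain message and attach it to the default stream (assumed setup)
    final Message msg = new Message(message, source, Tools.nowUTC());
    final Stream defaultStream = mock(Stream.class);
    when(defaultStream.getId()).thenReturn(DEFAULT_STREAM_ID);
    msg.addStream(defaultStream);
    return msg;
}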

Example 2 with Pipeline

Use of org.graylog.plugins.pipelineprocessor.ast.Pipeline in project graylog2-server by Graylog2.

From the class PipelineResourceTest, method shouldParseAPipelineSuccessfully.

@Test
public void shouldParseAPipelineSuccessfully() {
    final PipelineSource pipelineSource = PipelineSource.builder()
            .source("pipeline \"Graylog Git Pipline\"\n" +
                    "stage 0 match either\n" +
                    "rule \"geo loc of dev\"\n" +
                    "rule \"open source dev\"\n" +
                    "end")
            .stages(Collections.emptyList())
            .title("Graylog Git Pipeline")
            .build();
    final SortedSet<Stage> stages = ImmutableSortedSet.of(Stage.builder()
            .stage(0)
            .ruleReferences(ImmutableList.of("geo loc of dev", "open source dev"))
            .match(Stage.Match.EITHER)
            .build());
    final List<StageSource> expectedStages = ImmutableList.of(StageSource.create(0, Stage.Match.EITHER, ImmutableList.of("geo loc of dev", "open source dev")));
    final Pipeline pipeline = Pipeline.builder().name("Graylog Git Pipeline").stages(stages).build();
    when(pipelineRuleParser.parsePipeline(pipelineSource.id(), pipelineSource.source())).thenReturn(pipeline);
    final PipelineSource result = this.pipelineResource.parse(pipelineSource);
    verify(pipelineRuleParser).parsePipeline(pipelineSource.id(), pipelineSource.source());
    assertThat(result.source()).isEqualTo(pipelineSource.source());
    assertThat(result.stages()).isEqualTo(expectedStages);
}
Also used: ImmutableSortedSet(com.google.common.collect.ImmutableSortedSet) SortedSet(java.util.SortedSet) Pipeline(org.graylog.plugins.pipelineprocessor.ast.Pipeline) Test(org.junit.Test)

Example 3 with Pipeline

Use of org.graylog.plugins.pipelineprocessor.ast.Pipeline in project graylog2-server by Graylog2.

From the class PipelineResource, method parse.

@ApiOperation(value = "Parse a processing pipeline without saving it")
@POST
@Path("/parse")
@NoAuditEvent("only used to parse a pipeline, no changes made in the system")
public PipelineSource parse(@ApiParam(name = "pipeline", required = true) @NotNull PipelineSource pipelineSource) throws ParseException {
    final Pipeline pipeline;
    try {
        pipeline = pipelineRuleParser.parsePipeline(pipelineSource.id(), pipelineSource.source());
    } catch (ParseException e) {
        throw new BadRequestException(Response.status(Response.Status.BAD_REQUEST).entity(e.getErrors()).build());
    }
    final DateTime now = DateTime.now(DateTimeZone.UTC);
    return PipelineSource.builder()
            .title(pipeline.name())
            .description(pipelineSource.description())
            .source(pipelineSource.source())
            .stages(pipeline.stages().stream()
                    .map(stage -> StageSource.create(stage.stage(), stage.match(), stage.ruleReferences()))
                    .collect(Collectors.toList()))
            .createdAt(now)
            .modifiedAt(now)
            .build();
}
Also used: DateTimeZone(org.joda.time.DateTimeZone) PathParam(javax.ws.rs.PathParam) Produces(javax.ws.rs.Produces) SearchQueryParser(org.graylog2.search.SearchQueryParser) GET(javax.ws.rs.GET) ParseException(org.graylog.plugins.pipelineprocessor.parser.ParseException) LoggerFactory(org.slf4j.LoggerFactory) Path(javax.ws.rs.Path) ApiParam(io.swagger.annotations.ApiParam) ArrayList(java.util.ArrayList) Inject(javax.inject.Inject) ApiOperation(io.swagger.annotations.ApiOperation) PaginatedList(org.graylog2.database.PaginatedList) RequiresPermissions(org.apache.shiro.authz.annotation.RequiresPermissions) MediaType(javax.ws.rs.core.MediaType) Lists(com.google.common.collect.Lists) QueryParam(javax.ws.rs.QueryParam) PipelineService(org.graylog.plugins.pipelineprocessor.db.PipelineService) Consumes(javax.ws.rs.Consumes) SearchQueryField(org.graylog2.search.SearchQueryField) AuditEvent(org.graylog2.audit.jersey.AuditEvent) PluginRestResource(org.graylog2.plugin.rest.PluginRestResource) DefaultValue(javax.ws.rs.DefaultValue) PipelineRuleParser(org.graylog.plugins.pipelineprocessor.parser.PipelineRuleParser) BadRequestException(javax.ws.rs.BadRequestException) Api(io.swagger.annotations.Api) SearchQuery(org.graylog2.search.SearchQuery) NotFoundException(org.graylog2.database.NotFoundException) DELETE(javax.ws.rs.DELETE) NoAuditEvent(org.graylog2.audit.jersey.NoAuditEvent) Pipeline(org.graylog.plugins.pipelineprocessor.ast.Pipeline) Logger(org.slf4j.Logger) POST(javax.ws.rs.POST) ImmutableMap(com.google.common.collect.ImmutableMap) Predicate(java.util.function.Predicate) Collection(java.util.Collection) DateTime(org.joda.time.DateTime) RestResource(org.graylog2.shared.rest.resources.RestResource) PipelineDao(org.graylog.plugins.pipelineprocessor.db.PipelineDao) NotNull(javax.validation.constraints.NotNull) PipelineProcessorAuditEventTypes(org.graylog.plugins.pipelineprocessor.audit.PipelineProcessorAuditEventTypes) Collectors(java.util.stream.Collectors) List(java.util.List) Response(javax.ws.rs.core.Response) PaginatedPipelineService(org.graylog.plugins.pipelineprocessor.db.PaginatedPipelineService) PUT(javax.ws.rs.PUT) PaginatedResponse(org.graylog2.rest.models.PaginatedResponse) RequiresAuthentication(org.apache.shiro.authz.annotation.RequiresAuthentication)
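
A detail worth calling out in parse() is the error contract: when the rule parser rejects the source, the ParseException is converted into a 400 Bad Request whose entity is the parser's error list. The following hypothetical test sketch exercises that path; it assumes the mocked PipelineRuleParser and pipelineResource fixture from Example 2, and that ParseException can be constructed from a set of parse errors, so treat it as an illustration rather than code from the repository.

@Test
public void shouldReturnBadRequestForInvalidPipelineSource() {
    final PipelineSource invalid = PipelineSource.builder()
            .title("broken")
            // assumed to be rejected by the real parser: no stages and no end marker
            .source("pipeline \"broken\"")
            .stages(Collections.emptyList())
            .build();
    // assumption: ParseException carries the parser's errors and is thrown on invalid source
    when(pipelineRuleParser.parsePipeline(invalid.id(), invalid.source()))
            .thenThrow(new ParseException(Collections.emptySet()));
    assertThatThrownBy(() -> pipelineResource.parse(invalid))
            .isInstanceOf(BadRequestException.class);
}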

Example 4 with Pipeline

Use of org.graylog.plugins.pipelineprocessor.ast.Pipeline in project graylog2-server by Graylog2.

From the class PipelineResource, method createFromParser.

@ApiOperation(value = "Create a processing pipeline from source")
@POST
@RequiresPermissions(PipelineRestPermissions.PIPELINE_CREATE)
@AuditEvent(type = PipelineProcessorAuditEventTypes.PIPELINE_CREATE)
public PipelineSource createFromParser(@ApiParam(name = "pipeline", required = true) @NotNull PipelineSource pipelineSource) throws ParseException {
    final Pipeline pipeline;
    try {
        pipeline = pipelineRuleParser.parsePipeline(pipelineSource.id(), pipelineSource.source());
    } catch (ParseException e) {
        throw new BadRequestException(Response.status(Response.Status.BAD_REQUEST).entity(e.getErrors()).build());
    }
    final DateTime now = DateTime.now(DateTimeZone.UTC);
    final PipelineDao pipelineDao = PipelineDao.builder()
            .title(pipeline.name())
            .description(pipelineSource.description())
            .source(pipelineSource.source())
            .createdAt(now)
            .modifiedAt(now)
            .build();
    final PipelineDao save = pipelineService.save(pipelineDao);
    log.debug("Created new pipeline {}", save);
    return PipelineSource.fromDao(pipelineRuleParser, save);
}
Also used: BadRequestException(javax.ws.rs.BadRequestException) PipelineDao(org.graylog.plugins.pipelineprocessor.db.PipelineDao) ParseException(org.graylog.plugins.pipelineprocessor.parser.ParseException) DateTime(org.joda.time.DateTime) Pipeline(org.graylog.plugins.pipelineprocessor.ast.Pipeline) RequiresPermissions(org.apache.shiro.authz.annotation.RequiresPermissions) POST(javax.ws.rs.POST) ApiOperation(io.swagger.annotations.ApiOperation) AuditEvent(org.graylog2.audit.jersey.AuditEvent) NoAuditEvent(org.graylog2.audit.jersey.NoAuditEvent)

Example 5 with Pipeline

Use of org.graylog.plugins.pipelineprocessor.ast.Pipeline in project graylog2-server by Graylog2.

From the class PipelineInterpreter, method process.

/**
 * Evaluates all pipelines that apply to the given messages, based on the current stream routing
 * of the messages.
 *
 * Processing loops over each individual message (passed in or created by pipelines) until its
 * set of streams no longer changes. No cycle detection is performed.
 *
 * @param messages            the messages to process through the pipelines
 * @param interpreterListener a listener which gets called for each processing stage (e.g. to
 *                            trace execution)
 * @param state               the pipeline/stage/rule/stream connection state to use during
 *                            processing
 * @return the processed messages
 */
public Messages process(Messages messages, InterpreterListener interpreterListener, State state) {
    interpreterListener.startProcessing();
    // message id + stream id
    final Set<Tuple2<String, String>> processingBlacklist = Sets.newHashSet();
    final List<Message> toProcess = Lists.newArrayList(messages);
    final List<Message> fullyProcessed = Lists.newArrayListWithExpectedSize(toProcess.size());
    while (!toProcess.isEmpty()) {
        final MessageCollection currentSet = new MessageCollection(toProcess);
        // we'll add them back below
        toProcess.clear();
        for (Message message : currentSet) {
            final String msgId = message.getId();
            // this makes a copy of the list, which is mutated later in updateStreamBlacklist
            // it serves as a worklist, to keep track of which <msg, stream> tuples need to be re-run again
            final Set<String> initialStreamIds = message.getStreams().stream().map(Stream::getId).collect(Collectors.toSet());
            final ImmutableSet<Pipeline> pipelinesToRun = selectPipelines(interpreterListener, processingBlacklist, message, initialStreamIds, state.getStreamPipelineConnections());
            toProcess.addAll(processForResolvedPipelines(message, msgId, pipelinesToRun, interpreterListener, state));
            // add each processed message-stream combination to the blacklist set and figure out if the processing
            // has added a stream to the message, in which case we need to cycle and determine whether to process
            // its pipeline connections, too
            boolean addedStreams = updateStreamBlacklist(processingBlacklist, message, initialStreamIds);
            potentiallyDropFilteredMessage(message);
            // go to 1 and iterate over all messages again until no more streams are being assigned
            if (!addedStreams || message.getFilterOut()) {
                log.debug("[{}] no new streams matches or dropped message, not running again", msgId);
                fullyProcessed.add(message);
            } else {
                // process again, we've added a stream
                log.debug("[{}] new streams assigned, running again for those streams", msgId);
                toProcess.add(message);
            }
        }
    }
    interpreterListener.finishProcessing();
    // 7. return the processed messages
    return new MessageCollection(fullyProcessed);
}
Also used: MessageCollection(org.graylog2.plugin.MessageCollection) Message(org.graylog2.plugin.Message) Tuple2(org.jooq.lambda.tuple.Tuple2) Pipeline(org.graylog.plugins.pipelineprocessor.ast.Pipeline)
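
The loop above delegates its bookkeeping to updateStreamBlacklist, which is not shown in this excerpt. Based on the comments in process(), a rough sketch of its contract could look like the following; the signature, the Tuple.tuple factory from org.jooq.lambda.tuple, and the exact logic are assumptions, and the real Graylog implementation may differ.

private boolean updateStreamBlacklist(Set<Tuple2<String, String>> processingBlacklist,
                                      Message message,
                                      Set<String> initialStreamIds) {
    // remember every <message id, stream id> combination we have already processed,
    // so that selectPipelines can skip those connections on the next iteration
    for (String streamId : initialStreamIds) {
        processingBlacklist.add(Tuple.tuple(message.getId(), streamId));
    }
    // the pipelines added a stream if the message's current streams are no longer
    // covered by the set it started this iteration with
    return message.getStreams().stream()
            .map(Stream::getId)
            .anyMatch(streamId -> !initialStreamIds.contains(streamId));
}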

Aggregations

Pipeline (org.graylog.plugins.pipelineprocessor.ast.Pipeline): 18 usages
Stage (org.graylog.plugins.pipelineprocessor.ast.Stage): 8 usages
PipelineDao (org.graylog.plugins.pipelineprocessor.db.PipelineDao): 6 usages
Test (org.junit.Test): 6 usages
PipelineService (org.graylog.plugins.pipelineprocessor.db.PipelineService): 5 usages
RuleDao (org.graylog.plugins.pipelineprocessor.db.RuleDao): 5 usages
ParseException (org.graylog.plugins.pipelineprocessor.parser.ParseException): 5 usages
EntityDescriptor (org.graylog2.contentpacks.model.entities.EntityDescriptor): 5 usages
Collection (java.util.Collection): 4 usages
Collectors (java.util.stream.Collectors): 4 usages
Inject (javax.inject.Inject): 4 usages
PipelineStreamConnectionsService (org.graylog.plugins.pipelineprocessor.db.PipelineStreamConnectionsService): 4 usages
PipelineRuleParser (org.graylog.plugins.pipelineprocessor.parser.PipelineRuleParser): 4 usages
PipelineConnections (org.graylog.plugins.pipelineprocessor.rest.PipelineConnections): 4 usages
DateTime (org.joda.time.DateTime): 4 usages
JsonNode (com.fasterxml.jackson.databind.JsonNode): 3 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 3 usages
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 3 usages
ImmutableSet (com.google.common.collect.ImmutableSet): 3 usages
Graph (com.google.common.graph.Graph): 3 usages