Use of org.graylog.plugins.pipelineprocessor.parser.PipelineRuleParser in project graylog2-server by Graylog2.
The class BaseParserTest, method setup():
@Before
public void setup() {
    parser = new PipelineRuleParser(functionRegistry);
    // reset shared state before every test
    actionsTriggered.set(false);
}
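Concrete parser tests in subclasses build on this fixture. A minimal sketch of such a test, assuming the parser exposes a parseRule(String source, boolean silent) overload that throws ParseException on invalid input and that the resulting Rule exposes name() (both assumptions, not shown in the snippet above):
@Test
public void parsesTrivialRule() {
    // Assumes PipelineRuleParser#parseRule(String, boolean) and Rule#name(); verify against the actual API.
    final String source = "rule \"noop\"\n" +
            "when true\n" +
            "then\n" +
            "end";
    final Rule rule = parser.parseRule(source, false); // silent = false: let parse errors propagate
    assertThat(rule.name()).isEqualTo("noop");
}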
Use of org.graylog.plugins.pipelineprocessor.parser.PipelineRuleParser in project graylog2-server by Graylog2.
The class PipelineInterpreterTest, method testMetrics():
@Test
@SuppressForbidden("Allow using default thread factory")
public void testMetrics() {
    final RuleMetricsConfigService ruleMetricsConfigService = mock(RuleMetricsConfigService.class);
    when(ruleMetricsConfigService.get()).thenReturn(RuleMetricsConfigDto.createDefault());
    final ClusterEventBus clusterEventBus = new ClusterEventBus("cluster-event-bus", Executors.newSingleThreadExecutor());
    final RuleService ruleService = new InMemoryRuleService(clusterEventBus);
    ruleService.save(RuleDao.create("abc", "title", "description",
            "rule \"match_all\"\n" + "when true\n" + "then\n" + "end", Tools.nowUTC(), null));
    final PipelineService pipelineService = new InMemoryPipelineService(new ClusterEventBus());
    pipelineService.save(PipelineDao.create("cde", "title", "description",
            "pipeline \"pipeline\"\n" + "stage 0 match all\n" + " rule \"match_all\";\n" +
            "stage 1 match all\n" + " rule \"match_all\";\n" + "end\n", Tools.nowUTC(), null));
    final PipelineStreamConnectionsService pipelineStreamConnectionsService = new InMemoryPipelineStreamConnectionsService(clusterEventBus);
    pipelineStreamConnectionsService.save(PipelineConnections.create(null, DEFAULT_STREAM_ID, Collections.singleton("cde")));
    final FunctionRegistry functionRegistry = new FunctionRegistry(Collections.emptyMap());
    final PipelineRuleParser parser = new PipelineRuleParser(functionRegistry);
    final MetricRegistry metricRegistry = new MetricRegistry();
    final ConfigurationStateUpdater stateUpdater = new ConfigurationStateUpdater(ruleService, pipelineService,
            pipelineStreamConnectionsService, parser, ruleMetricsConfigService, metricRegistry,
            Executors.newScheduledThreadPool(1), mock(EventBus.class),
            (currentPipelines, streamPipelineConnections, ruleMetricsConfig) ->
                    new PipelineInterpreter.State(currentPipelines, streamPipelineConnections, ruleMetricsConfig,
                            new MetricRegistry(), 1, true));
    final PipelineInterpreter interpreter = new PipelineInterpreter(mock(MessageQueueAcknowledger.class), metricRegistry, stateUpdater);
    interpreter.process(messageInDefaultStream("", ""));
    final SortedMap<String, Meter> meters = metricRegistry.getMeters((name, metric) ->
            name.startsWith(name(Pipeline.class, "cde")) || name.startsWith(name(Rule.class, "abc")));
    assertThat(meters.keySet()).containsExactlyInAnyOrder(
            name(Pipeline.class, "cde", "executed"),
            name(Pipeline.class, "cde", "stage", "0", "executed"),
            name(Pipeline.class, "cde", "stage", "1", "executed"),
            name(Rule.class, "abc", "executed"),
            name(Rule.class, "abc", "cde", "0", "executed"),
            name(Rule.class, "abc", "cde", "1", "executed"),
            name(Rule.class, "abc", "matched"),
            name(Rule.class, "abc", "cde", "0", "matched"),
            name(Rule.class, "abc", "cde", "1", "matched"),
            name(Rule.class, "abc", "not-matched"),
            name(Rule.class, "abc", "cde", "0", "not-matched"),
            name(Rule.class, "abc", "cde", "1", "not-matched"),
            name(Rule.class, "abc", "failed"),
            name(Rule.class, "abc", "cde", "0", "failed"),
            name(Rule.class, "abc", "cde", "1", "failed"));
    assertThat(meters.get(name(Pipeline.class, "cde", "executed")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Pipeline.class, "cde", "stage", "0", "executed")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Pipeline.class, "cde", "stage", "1", "executed")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Rule.class, "abc", "executed")).getCount()).isEqualTo(2L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "0", "executed")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "1", "executed")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Rule.class, "abc", "matched")).getCount()).isEqualTo(2L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "0", "matched")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "1", "matched")).getCount()).isEqualTo(1L);
    assertThat(meters.get(name(Rule.class, "abc", "not-matched")).getCount()).isEqualTo(0L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "0", "not-matched")).getCount()).isEqualTo(0L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "1", "not-matched")).getCount()).isEqualTo(0L);
    assertThat(meters.get(name(Rule.class, "abc", "failed")).getCount()).isEqualTo(0L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "0", "failed")).getCount()).isEqualTo(0L);
    assertThat(meters.get(name(Rule.class, "abc", "cde", "1", "failed")).getCount()).isEqualTo(0L);
}
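Because the single rule is referenced from both stage 0 and stage 1, processing one message drives the rule-level "executed" and "matched" meters to 2 while every per-stage meter stays at 1. The test also relies on a messageInDefaultStream helper that is defined elsewhere in the test class; a hypothetical reconstruction, assuming Graylog's Message(String, String, DateTime) constructor and Message#addStream(Stream):
// Hypothetical helper; the real implementation lives elsewhere in PipelineInterpreterTest and may differ.
private Message messageInDefaultStream(String message, String source) {
    final Message msg = new Message(message, source, Tools.nowUTC());
    final Stream defaultStream = mock(Stream.class);
    when(defaultStream.getId()).thenReturn(DEFAULT_STREAM_ID);
    msg.addStream(defaultStream);
    return msg;
}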
Use of org.graylog.plugins.pipelineprocessor.parser.PipelineRuleParser in project graylog2-server by Graylog2.
The class RuleResource, method getPage():
@GET
@Path("/paginated")
@ApiOperation(value = "Get a paginated list of pipeline rules")
@Produces(MediaType.APPLICATION_JSON)
@RequiresPermissions(PipelineRestPermissions.PIPELINE_RULE_READ)
public PaginatedResponse<RuleSource> getPage(@ApiParam(name = "page") @QueryParam("page") @DefaultValue("1") int page,
                                             @ApiParam(name = "per_page") @QueryParam("per_page") @DefaultValue("50") int perPage,
                                             @ApiParam(name = "query") @QueryParam("query") @DefaultValue("") String query,
                                             @ApiParam(name = "sort", value = "The field to sort the result on", required = true, allowableValues = "title,description,id") @DefaultValue(RuleDao.FIELD_TITLE) @QueryParam("sort") String sort,
                                             @ApiParam(name = "order", value = "The sort direction", allowableValues = "asc, desc") @DefaultValue("asc") @QueryParam("order") String order) {
    SearchQuery searchQuery;
    try {
        searchQuery = searchQueryParser.parse(query);
    } catch (IllegalArgumentException e) {
        throw new BadRequestException("Invalid argument in search query: " + e.getMessage());
    }
    final PaginatedList<RuleDao> result = paginatedRuleService.findPaginated(searchQuery, page, perPage, sort, order);
    final List<RuleSource> ruleSourceList = result.stream()
            .map(dao -> RuleSource.fromDao(pipelineRuleParser, dao))
            .collect(Collectors.toList());
    final PaginatedList<RuleSource> rules = new PaginatedList<>(ruleSourceList,
            result.pagination().total(), result.pagination().page(), result.pagination().perPage());
    return PaginatedResponse.create("rules", rules, prepareContextForPaginatedResponse(result.delegate()));
}
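The parser is handed into RuleSource.fromDao so that each returned rule can carry any parse errors found in its stored source. A minimal sketch of what that conversion might look like, assuming a parseRule(String id, String source, boolean silent) overload, ParseException#getErrors(), and an AutoValue-style builder (all assumptions, not copied from the Graylog source):
// Sketch only: the parser call and builder names here are assumptions, not the actual Graylog implementation.
public static RuleSource fromDao(PipelineRuleParser parser, RuleDao dao) {
    Set<ParseError> errors = null;
    try {
        parser.parseRule(dao.id(), dao.source(), false);   // validate the stored rule source
    } catch (ParseException e) {
        errors = e.getErrors();                            // surface parse errors alongside the rule
    }
    return builder().id(dao.id()).title(dao.title()).source(dao.source()).errors(errors).build();
}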
Use of org.graylog.plugins.pipelineprocessor.parser.PipelineRuleParser in project graylog2-server by Graylog2.
The class PipelineResource, method getPage():
@GET
@Path("/paginated")
@ApiOperation(value = "Get a paginated list of pipelines")
@Produces(MediaType.APPLICATION_JSON)
public PaginatedResponse<PipelineSource> getPage(@ApiParam(name = "page") @QueryParam("page") @DefaultValue("1") int page,
                                                 @ApiParam(name = "per_page") @QueryParam("per_page") @DefaultValue("50") int perPage,
                                                 @ApiParam(name = "query") @QueryParam("query") @DefaultValue("") String query,
                                                 @ApiParam(name = "sort", value = "The field to sort the result on", required = true, allowableValues = "title,description,id") @DefaultValue(PipelineDao.FIELD_TITLE) @QueryParam("sort") String sort,
                                                 @ApiParam(name = "order", value = "The sort direction", allowableValues = "asc, desc") @DefaultValue("asc") @QueryParam("order") String order) {
    SearchQuery searchQuery;
    try {
        searchQuery = searchQueryParser.parse(query);
    } catch (IllegalArgumentException e) {
        throw new BadRequestException("Invalid argument in search query: " + e.getMessage());
    }
    Predicate<PipelineDao> filter = dao -> isPermitted(PipelineRestPermissions.PIPELINE_READ, dao.id());
    final PaginatedList<PipelineDao> result = paginatedPipelineService.findPaginated(searchQuery, filter, page, perPage, sort, order);
    final List<PipelineSource> pipelineList = result.stream()
            .map(dao -> PipelineSource.fromDao(pipelineRuleParser, dao))
            .collect(Collectors.toList());
    final PaginatedList<PipelineSource> pipelines = new PaginatedList<>(pipelineList,
            result.pagination().total(), result.pagination().page(), result.pagination().perPage());
    return PaginatedResponse.create("pipelines", pipelines);
}
Use of org.graylog.plugins.pipelineprocessor.parser.PipelineRuleParser in project graylog2-server by Graylog2.
The class PipelineInterpreterTest, method createPipelineInterpreter():
@SuppressForbidden("Allow using default thread factory")
private PipelineInterpreter createPipelineInterpreter(RuleService ruleService, PipelineService pipelineService,
                                                      Map<String, Function<?>> functions) {
    final RuleMetricsConfigService ruleMetricsConfigService = mock(RuleMetricsConfigService.class);
    when(ruleMetricsConfigService.get()).thenReturn(RuleMetricsConfigDto.createDefault());
    final PipelineStreamConnectionsService pipelineStreamConnectionsService = mock(MongoDbPipelineStreamConnectionsService.class);
    final Set<String> pipelineIds = pipelineService.loadAll().stream().map(PipelineDao::id).collect(Collectors.toSet());
    final PipelineConnections pipelineConnections = PipelineConnections.create("p1", DEFAULT_STREAM_ID, pipelineIds);
    when(pipelineStreamConnectionsService.loadAll()).thenReturn(Collections.singleton(pipelineConnections));
    final FunctionRegistry functionRegistry = new FunctionRegistry(functions);
    final PipelineRuleParser parser = new PipelineRuleParser(functionRegistry);
    final ConfigurationStateUpdater stateUpdater = new ConfigurationStateUpdater(ruleService, pipelineService,
            pipelineStreamConnectionsService, parser, ruleMetricsConfigService, new MetricRegistry(),
            Executors.newScheduledThreadPool(1), mock(EventBus.class),
            (currentPipelines, streamPipelineConnections, ruleMetricsConfig) ->
                    new PipelineInterpreter.State(currentPipelines, streamPipelineConnections, ruleMetricsConfig,
                            new MetricRegistry(), 1, true));
    return new PipelineInterpreter(messageQueueAcknowledger, new MetricRegistry(), stateUpdater);
}
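Callers of this helper first populate the in-memory rule and pipeline services and then process a message, much as testMetrics does above. A condensed, illustrative sketch (the rule and pipeline sources are placeholders):
// Illustrative caller, mirroring the wiring used in testMetrics above.
final ClusterEventBus bus = new ClusterEventBus("cluster-event-bus", Executors.newSingleThreadExecutor());
final RuleService ruleService = new InMemoryRuleService(bus);
ruleService.save(RuleDao.create("abc", "title", "description",
        "rule \"match_all\"\nwhen true\nthen\nend", Tools.nowUTC(), null));
final PipelineService pipelineService = new InMemoryPipelineService(bus);
pipelineService.save(PipelineDao.create("p1", "title", "description",
        "pipeline \"pipeline\"\nstage 0 match all\n rule \"match_all\";\nend\n", Tools.nowUTC(), null));
final PipelineInterpreter interpreter =
        createPipelineInterpreter(ruleService, pipelineService, Collections.emptyMap());
interpreter.process(messageInDefaultStream("hello", "source"));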