use of org.graylog2.plugin.inputs.Extractor.Result in project graylog2-server by Graylog2.
the class SearchesTest method testFieldStats.
@Test
@UsingDataSet(loadStrategy = LoadStrategyEnum.CLEAN_INSERT)
public void testFieldStats() throws Exception {
    FieldStatsResult result = searches.fieldStats("n", "*",
            AbsoluteRange.create(new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC),
                                 new DateTime(2015, 1, 2, 0, 0, DateTimeZone.UTC)));

    assertThat(result.getSearchHits()).hasSize(10);
    assertThat(result.getCount()).isEqualTo(8);
    assertThat(result.getMin()).isEqualTo(1.0);
    assertThat(result.getMax()).isEqualTo(4.0);
    assertThat(result.getMean()).isEqualTo(2.375);
    assertThat(result.getSum()).isEqualTo(19.0);
    assertThat(result.getSumOfSquares()).isEqualTo(53.0);
    assertThat(result.getVariance()).isEqualTo(0.984375);
    assertThat(result.getStdDeviation()).isEqualTo(0.9921567416492215);
}
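The expected values are internally consistent: with a count of 8, a sum of 19.0 and a sum of squares of 53.0, the mean is 19 / 8 = 2.375 and the variance is 53 / 8 - 2.375^2 = 0.984375, whose square root is the asserted standard deviation. A minimal sketch of that arithmetic (plain Java, not part of the test fixture):

double count = 8;
double sum = 19.0;
double sumOfSquares = 53.0;
double mean = sum / count;                             // 2.375
double variance = sumOfSquares / count - mean * mean;  // 6.625 - 5.640625 = 0.984375
double stdDeviation = Math.sqrt(variance);             // ~0.9921567416492215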
use of org.graylog2.plugin.inputs.Extractor.Result in project graylog2-server by Graylog2.
the class GelfChunkAggregatorTest method tooManyChunks.
@Test
public void tooManyChunks() {
    final ChannelBuffer[] chunks = createChunkedMessage(129 * 1024, 1024);
    int i = 1;
    for (final ChannelBuffer chunk : chunks) {
        final CodecAggregator.Result result = aggregator.addChunk(chunk);
        if (i == 129) {
            assertFalse("Message invalidated (chunk #" + i + ")", result.isValid());
            assertNull("Message discarded (chunk #" + i + ")", result.getMessage());
        } else {
            assertTrue("Incomplete message valid (chunk #" + i + ")", result.isValid());
            assertNull("Message not complete (chunk #" + i + ")", result.getMessage());
        }
        i++;
    }
}
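The GELF chunked format allows at most 128 chunks per message, so splitting a 129 KiB payload into 1 KiB chunks yields exactly one chunk too many; the aggregator therefore invalidates and drops the message when the 129th chunk arrives. A quick sanity check of that count (illustrative only, assuming createChunkedMessage slices the payload into chunkSize-byte pieces):

int payloadSize = 129 * 1024;
int chunkSize = 1024;
int chunkCount = (payloadSize + chunkSize - 1) / chunkSize;  // 129, one over the 128-chunk GELF limit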
use of org.graylog2.plugin.inputs.Extractor.Result in project graylog2-server by Graylog2.
the class GelfChunkAggregatorTest method manyChunks.
@Test
public void manyChunks() {
    // creates 5 chunks
    final ChannelBuffer[] chunks = createChunkedMessage(4096 + 512, 1024);
    int i = 0;
    for (final ChannelBuffer chunk : chunks) {
        i++;
        final CodecAggregator.Result result = aggregator.addChunk(chunk);
        assertTrue(result.isValid());
        if (i == 5) {
            assertNotNull("message should've been assembled from chunks", result.getMessage());
            assertEquals(1, counterValueNamed(metricRegistry, COMPLETE_MESSAGES));
            assertEquals(5, counterValueNamed(metricRegistry, CHUNK_COUNTER));
            assertEquals(0, counterValueNamed(metricRegistry, WAITING_MESSAGES));
            assertEquals(0, counterValueNamed(metricRegistry, EXPIRED_CHUNKS));
            assertEquals(0, counterValueNamed(metricRegistry, EXPIRED_MESSAGES));
            assertEquals(0, counterValueNamed(metricRegistry, DUPLICATE_CHUNKS));
        } else {
            assertNull("chunks not complete", result.getMessage());
            assertEquals("message not complete yet", 0, counterValueNamed(metricRegistry, COMPLETE_MESSAGES));
            assertEquals(i, counterValueNamed(metricRegistry, CHUNK_COUNTER));
            assertEquals("one message waiting", 1, counterValueNamed(metricRegistry, WAITING_MESSAGES));
            assertEquals(0, counterValueNamed(metricRegistry, EXPIRED_CHUNKS));
            assertEquals(0, counterValueNamed(metricRegistry, EXPIRED_MESSAGES));
            assertEquals(0, counterValueNamed(metricRegistry, DUPLICATE_CHUNKS));
        }
    }
}
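The 4096 + 512 = 4608 byte payload split into 1024-byte chunks produces ceil(4608 / 1024) = 5 chunks, which is why the message is only assembled and COMPLETE_MESSAGES only increments on i == 5. The assertions read Dropwizard Metrics counters through a counterValueNamed helper defined by the test class; a hedged sketch of what such a helper could look like (an assumption about the helper, not the test's actual implementation):

import com.codahale.metrics.Counter;
import com.codahale.metrics.MetricRegistry;

static long counterValueNamed(MetricRegistry metricRegistry, String counterName) {
    // Look the counter up by name and treat a missing counter as zero.
    final Counter counter = metricRegistry.getCounters().get(counterName);
    return counter == null ? 0 : counter.getCount();
}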
use of org.graylog2.plugin.inputs.Extractor.Result in project graylog2-server by Graylog2.
the class KeywordSearchResource method searchKeyword.
@GET
@Timed
@ApiOperation(value = "Message search with keyword as timerange.", notes = "Search for messages in a timerange defined by a keyword like \"yesterday\" or \"2 weeks ago to wednesday\".")
@Produces(MediaType.APPLICATION_JSON)
@ApiResponses(value = { @ApiResponse(code = 400, message = "Invalid keyword provided.") })
public SearchResponse searchKeyword(
        @ApiParam(name = "query", value = "Query (Lucene syntax)", required = true) @QueryParam("query") @NotEmpty String query,
        @ApiParam(name = "keyword", value = "Range keyword", required = true) @QueryParam("keyword") @NotEmpty String keyword,
        @ApiParam(name = "limit", value = "Maximum number of messages to return.", required = false) @QueryParam("limit") int limit,
        @ApiParam(name = "offset", value = "Offset", required = false) @QueryParam("offset") int offset,
        @ApiParam(name = "filter", value = "Filter", required = false) @QueryParam("filter") String filter,
        @ApiParam(name = "fields", value = "Comma separated list of fields to return", required = false) @QueryParam("fields") String fields,
        @ApiParam(name = "sort", value = "Sorting (field:asc / field:desc)", required = false) @QueryParam("sort") String sort,
        @ApiParam(name = "decorate", value = "Run decorators on search result", required = false) @QueryParam("decorate") @DefaultValue("true") boolean decorate) {
    checkSearchPermission(filter, RestPermissions.SEARCHES_KEYWORD);

    final List<String> fieldList = parseOptionalFields(fields);
    final Sorting sorting = buildSorting(sort);
    final TimeRange timeRange = buildKeywordTimeRange(keyword);
    final SearchesConfig searchesConfig = SearchesConfig.builder().query(query).filter(filter).fields(fieldList).range(timeRange).limit(limit).offset(offset).sorting(sorting).build();
    final Optional<String> streamId = Searches.extractStreamId(filter);

    try {
        return buildSearchResponse(searches.search(searchesConfig), timeRange, decorate, streamId);
    } catch (SearchPhaseExecutionException e) {
        throw createRequestExceptionForParseFailure(query, e);
    }
}
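Before the search runs, buildKeywordTimeRange turns the natural-language keyword (for example "yesterday") into a concrete TimeRange and answers with HTTP 400 if the keyword cannot be parsed, matching the @ApiResponses declaration above. A hedged sketch of what such a helper typically does (assuming a KeywordRange.create(String) factory and a restrictTimeRange guard on the surrounding resource class; not necessarily the exact implementation):

private TimeRange buildKeywordTimeRange(String keyword) {
    try {
        // Parse keywords such as "yesterday" or "2 weeks ago to wednesday" into a time range.
        return restrictTimeRange(KeywordRange.create(keyword));
    } catch (InvalidRangeParametersException e) {
        // Surfaces as the documented 400 "Invalid keyword provided." response.
        throw new BadRequestException("Invalid timerange parameters provided", e);
    }
}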
use of org.graylog2.plugin.inputs.Extractor.Result in project graylog2-server by Graylog2.
the class StreamResource method create.
@POST
@Timed
@ApiOperation(value = "Create a stream")
@RequiresPermissions(RestPermissions.STREAMS_CREATE)
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@AuditEvent(type = AuditEventTypes.STREAM_CREATE)
public Response create(@ApiParam(name = "JSON body", required = true) final CreateStreamRequest cr) throws ValidationException {
    // Create stream.
    final Stream stream = streamService.create(cr, getCurrentUser().getName());
    stream.setDisabled(true);

    if (!stream.getIndexSet().getConfig().isWritable()) {
        throw new BadRequestException("Assigned index set must be writable!");
    }

    final String id = streamService.save(stream);

    final List<CreateStreamRuleRequest> rules = firstNonNull(cr.rules(), Collections.<CreateStreamRuleRequest>emptyList());
    for (CreateStreamRuleRequest request : rules) {
        StreamRule streamRule = streamRuleService.create(id, request);
        streamRuleService.save(streamRule);
    }

    clusterEventBus.post(StreamsChangedEvent.create(stream.getId()));

    final Map<String, String> result = ImmutableMap.of("stream_id", id);
    final URI streamUri = getUriBuilderToSelf().path(StreamResource.class).path("{streamId}").build(id);

    return Response.created(streamUri).entity(result).build();
}
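Note that the stream is created in a disabled state (setDisabled(true)) and has to be resumed separately; the endpoint replies with HTTP 201 Created, a Location header derived from the new stream's id, and a small JSON body containing stream_id. A hedged illustration of how a caller or test might check that response (streamResource and cr are assumed fixtures, not shown above):

final Response response = streamResource.create(cr);
assertEquals(201, response.getStatus());   // Response.created(...) produces HTTP 201 Created
assertNotNull(response.getLocation());     // Location header points at the new stream resource
assertTrue(((Map<?, ?>) response.getEntity()).containsKey("stream_id"));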