Example use of org.graylog.plugins.views.search.Query in the graylog2-server project by Graylog2:
class OffsetRangeTest, method throwsExceptionIfInvalidSearchTypeIsReferenced.
@Test
public void throwsExceptionIfInvalidSearchTypeIsReferenced() throws Exception {
    // Offset range referencing a search type id that does not exist in the query.
    final OffsetRange offsetRange = constructRange("300", "search_type", "invalidSearchType");

    // Source time range the offset would be derived from.
    final TimeRange sourceRange = mock(TimeRange.class);
    when(sourceRange.getFrom()).thenReturn(DateTime.parse("2019-11-18T10:00:00.000Z"));
    when(sourceRange.getTo()).thenReturn(DateTime.parse("2019-11-21T12:00:00.000Z"));

    // The only search type in the query has a different id than the one referenced above.
    final SearchType searchType = mock(SearchType.class);
    when(searchType.id()).thenReturn("searchTypeId");
    when(searchType.timerange()).thenReturn(Optional.of(DerivedTimeRange.of(sourceRange)));

    final Query query = mock(Query.class);
    when(query.searchTypes()).thenReturn(ImmutableSet.of(searchType));
    when(query.effectiveTimeRange(searchType)).thenReturn(sourceRange);

    assertThatExceptionOfType(RuntimeException.class)
            .isThrownBy(() -> offsetRange.deriveTimeRange(query, searchType))
            .withMessage("Search type searchTypeId has offset timerange referencing invalid search type: invalidSearchType");
}
Example use of org.graylog.plugins.views.search.Query in the graylog2-server project by Graylog2:
class OffsetRangeTest, method returnsCorrectRangeForTimeRangeOfQueryWithOffsetInUnits.
@Test
public void returnsCorrectRangeForTimeRangeOfQueryWithOffsetInUnits() throws Exception {
    // Offset of "3i": shift back by three times the length of the referenced query range.
    final OffsetRange offsetRange = constructRange("3i", "query", "");

    final TimeRange sourceRange = mock(TimeRange.class);
    when(sourceRange.getFrom()).thenReturn(DateTime.parse("2019-11-18T10:00:00.000Z"));
    when(sourceRange.getTo()).thenReturn(DateTime.parse("2019-11-21T12:00:00.000Z"));

    final TimeRange result = offsetRange.deriveTimeRange(queryWithTimeRange(sourceRange), null);

    // The source range spans 3d2h; three intervals move both ends back by 9d6h.
    assertThat(result).isEqualTo(AbsoluteRange.create("2019-11-09T04:00:00.000Z", "2019-11-12T06:00:00.000Z"));
}
Example use of org.graylog.plugins.views.search.Query in the graylog2-server project by Graylog2:
class PivotAggregationSearch, method getAggregationQuery.
/**
 * Returns the query to compute the aggregation.
 *
 * @param parameters     processor parameters
 * @param searchWithinMs processor search-within period in milliseconds; used to build the date range buckets
 * @param executeEveryMs processor execution interval in milliseconds; used to build the date range buckets
 * @return aggregation query
 */
private Query getAggregationQuery(AggregationEventProcessorParameters parameters, long searchWithinMs, long executeEveryMs) {
    final Pivot.Builder pivotBuilder = Pivot.builder()
            .id(PIVOT_ID)
            .rollup(true);

    final ImmutableList<SeriesSpec> series = config.series().stream()
            .map(entry -> entry.function().toSeriesSpec(metricName(entry), entry.field().orElse(null)))
            .collect(ImmutableList.toImmutableList());
    if (!series.isEmpty()) {
        pivotBuilder.series(series);
    }

    // Wrap every aggregation with date range buckets of the searchWithin time range.
    // If the aggregation is configured to be using a sliding window (searchWithin > executeEveryMs)
    // the time ranges will overlap.
    // This allows us to run aggregations over larger time ranges than the searchWithin time.
    // The results will be received in time buckets of the searchWithin time size.
    final DateRangeBucket dateRangeBucket = buildDateRangeBuckets(parameters.timerange(), searchWithinMs, executeEveryMs);

    final List<BucketSpec> groupBy = new ArrayList<>();
    // The first bucket must be the date range!
    groupBy.add(dateRangeBucket);
    // Then add the configured groups (no-op when none are configured, so no guard needed).
    config.groupBy().stream()
            .map(field -> Values.builder().limit(Integer.MAX_VALUE).field(field).build())
            .forEach(groupBy::add);

    // We always have row groups because of the date range buckets.
    pivotBuilder.rowGroups(groupBy);

    final Set<SearchType> searchTypes = Collections.singleton(pivotBuilder.build());
    final Query.Builder queryBuilder = Query.builder()
            .id(QUERY_ID)
            .searchTypes(searchTypes)
            .query(ElasticsearchQueryString.of(config.query()))
            .timerange(parameters.timerange());

    // Restrict the query to the configured streams, if any.
    final Set<String> streams = getStreams(parameters);
    if (!streams.isEmpty()) {
        queryBuilder.filter(filteringForStreamIds(streams));
    }
    return queryBuilder.build();
}
Example use of org.graylog.plugins.views.search.Query in the graylog2-server project by Graylog2:
class PivotAggregationSearch, method getSourceStreamsQuery.
/**
 * Returns the query to compute the source streams for the aggregation.
 *
 * @param parameters processor parameters
 * @return source streams query
 */
private Query getSourceStreamsQuery(AggregationEventProcessorParameters parameters) {
    // Group by the "streams" field and count the messages per stream.
    final Pivot streamsPivot = Pivot.builder()
            .id(STREAMS_PIVOT_ID)
            .rollup(true)
            .rowGroups(ImmutableList.of(Values.builder().limit(Integer.MAX_VALUE).field("streams").build()))
            .series(ImmutableList.of(Count.builder().id(STREAMS_PIVOT_COUNT_ID).build()))
            .build();

    final Query.Builder queryBuilder = Query.builder()
            .id(STREAMS_QUERY_ID)
            .searchTypes(Collections.singleton(streamsPivot))
            .query(ElasticsearchQueryString.of(config.query()))
            .timerange(parameters.timerange());

    // Restrict the query to the configured streams, if any.
    final Set<String> streams = getStreams(parameters);
    if (!streams.isEmpty()) {
        queryBuilder.filter(filteringForStreamIds(streams));
    }
    return queryBuilder.build();
}
Example use of org.graylog.plugins.views.search.Query in the graylog2-server project by Graylog2:
class SearchMetadataResource, method metadataForObject.
@POST
@ApiOperation(value = "Metadata for the posted Search object", notes = "Intended for search objects that aren't yet persisted (e.g. for validation or interactive purposes)")
@NoAuditEvent("Only returning metadata for given search, not changing any data")
public SearchMetadata metadataForObject(@ApiParam @NotNull(message = "Search body is mandatory") SearchDTO searchDTO) {
    // Defensive check in addition to the @NotNull bean validation above.
    if (searchDTO == null) {
        throw new IllegalArgumentException("Search must not be null.");
    }
    final Search search = searchDTO.toSearch();
    // Parse each query of the search and index the resulting metadata by query id.
    final Map<String, QueryMetadata> queryMetadataById = StreamEx.of(search.queries())
            .toMap(Query::id, query -> queryEngine.parse(search, query));
    return SearchMetadata.create(queryMetadataById, Maps.uniqueIndex(search.parameters(), Parameter::name));
}
Aggregations