Use of org.graylog2.indexer.messages.Messages in project graylog2-server by Graylog2.
The class IndicesAdapterES6, method move:
@Override
public void move(String source, String target, Consumer<IndexMoveResult> resultCallback) {
    // TODO: This method should use the Re-index API: https://www.elastic.co/guide/en/elasticsearch/reference/5.3/docs-reindex.html
    final String query = SearchSourceBuilder.searchSource()
            .query(QueryBuilders.matchAllQuery())
            .size(350)
            .sort(SortBuilders.fieldSort(FieldSortBuilder.DOC_FIELD_NAME))
            .toString();
    final Search request = new Search.Builder(query)
            .setParameter(Parameters.SCROLL, "10s")
            .addIndex(source)
            .build();
    final SearchResult searchResult = JestUtils.execute(jestClient, request, () -> "Couldn't process search query response");
    final String scrollId = searchResult.getJsonObject().path("_scroll_id").asText(null);
    if (scrollId == null) {
        throw new ElasticsearchException("Couldn't find scroll ID in search query response");
    }
    while (true) {
        final SearchScroll scrollRequest = new SearchScroll.Builder(scrollId, "1m").build();
        final JestResult scrollResult = JestUtils.execute(jestClient, scrollRequest, () -> "Couldn't process result of scroll query");
        final JsonNode scrollHits = scrollResult.getJsonObject().path("hits").path("hits");
        // No more hits, the scroll is exhausted.
        if (scrollHits.size() == 0) {
            break;
        }
        // Re-index every document of this scroll batch into the target index.
        final Bulk.Builder bulkRequestBuilder = new Bulk.Builder();
        for (JsonNode jsonElement : scrollHits) {
            Optional.ofNullable(jsonElement.path("_source"))
                    .map(sourceJson -> objectMapper.<Map<String, Object>>convertValue(sourceJson, TypeReferences.MAP_STRING_OBJECT))
                    .ifPresent(doc -> {
                        final String id = (String) doc.remove("_id");
                        if (!Strings.isNullOrEmpty(id)) {
                            bulkRequestBuilder.addAction(indexingHelper.prepareIndexRequest(target, doc, id));
                        }
                    });
        }
        final BulkResult bulkResult = JestUtils.execute(jestClient, bulkRequestBuilder.build(), () -> "Couldn't bulk index messages into index " + target);
        final boolean hasFailedItems = !bulkResult.getFailedItems().isEmpty();
        final IndexMoveResult result = IndexMoveResult.create(bulkResult.getItems().size(), bulkResult.getJsonObject().path("took").asLong(), hasFailedItems);
        resultCallback.accept(result);
    }
}
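Note that the method invokes the callback once per scroll batch rather than delivering a single aggregate result. A minimal sketch of a caller consuming those incremental results; the adapter instance, logger, and index names are illustrative, and the IndexMoveResult accessor names are assumed from the create(...) factory call above:

// Hypothetical caller; "indicesAdapter", "LOG", and the index names are illustrative.
// The accessors on IndexMoveResult are assumed from the create(...) arguments above.
indicesAdapter.move("graylog_12", "graylog_13", result -> {
    if (result.hasFailedItems()) {
        LOG.warn("Some documents failed to move into target index (batch of {} took {}ms)",
                result.movedDocuments(), result.tookMs());
    }
});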
Use of org.graylog2.indexer.messages.Messages in project graylog2-server by Graylog2.
The class AggregationEventProcessor, method sourceMessagesForEvent:
@Override
public void sourceMessagesForEvent(Event event, Consumer<List<MessageSummary>> messageConsumer, long limit) throws EventProcessorException {
    if (config.series().isEmpty()) {
        if (limit <= 0) {
            return;
        }
        // Without series, the event points at a single origin message; look it up directly.
        final EventOriginContext.ESEventOriginContext esContext = EventOriginContext.parseESContext(event.getOriginContext())
                .orElseThrow(() -> new EventProcessorException("Failed to parse origin context", false, eventDefinition));
        try {
            final ResultMessage message = messages.get(esContext.messageId(), esContext.indexName());
            messageConsumer.accept(Lists.newArrayList(new MessageSummary(message.getIndex(), message.getMessage())));
        } catch (IOException e) {
            throw new EventProcessorException("Failed to query origin context message", false, eventDefinition, e);
        }
    } else {
        // With series, scroll over all matching messages until the limit is reached.
        final AtomicLong msgCount = new AtomicLong(0L);
        final MoreSearch.ScrollCallback callback = (messages, continueScrolling) -> {
            final List<MessageSummary> summaries = Lists.newArrayList();
            for (final ResultMessage resultMessage : messages) {
                if (msgCount.incrementAndGet() > limit) {
                    continueScrolling.set(false);
                    break;
                }
                final Message msg = resultMessage.getMessage();
                summaries.add(new MessageSummary(resultMessage.getIndex(), msg));
            }
            messageConsumer.accept(summaries);
        };
        ElasticsearchQueryString scrollQueryString = ElasticsearchQueryString.of(config.query());
        scrollQueryString = scrollQueryString.concatenate(groupByQueryString(event));
        LOG.debug("scrollQueryString: {}", scrollQueryString);
        final TimeRange timeRange = AbsoluteRange.create(event.getTimerangeStart(), event.getTimerangeEnd());
        moreSearch.scrollQuery(scrollQueryString.queryString(), config.streams(), config.queryParameters(), timeRange, Math.min(500, Ints.saturatedCast(limit)), callback);
    }
}
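Because results arrive through the consumer (once for the origin-context case, once per scroll batch otherwise), a caller that wants a flat list can simply collect across invocations. A minimal sketch; "processor" and "event" are hypothetical names, and the checked EventProcessorException is left to propagate:

// Hypothetical caller; assumes java.util.ArrayList/List are imported.
final List<MessageSummary> collected = new ArrayList<>();
processor.sourceMessagesForEvent(event, collected::addAll, 100L);
// With the sequential scroll above, "collected" holds at most 100 summaries.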
Use of org.graylog2.indexer.messages.Messages in project graylog2-server by Graylog2.
The class GelfChunkAggregator, method checkForCompletion:
/**
 * Checks whether the presented GELF message chunk completes the incoming raw message and returns it if it does.
 * If the message isn't complete yet, it adds the chunk to the internal buffer and waits for more incoming chunks.
 * Outdated chunks are purged regularly.
 *
 * @param gelfMessage the GELF message chunk
 * @return null or a {@link org.graylog2.plugin.journal.RawMessage raw message} object
 */
@Nullable
private ByteBuf checkForCompletion(GELFMessage gelfMessage) {
    if (!chunks.isEmpty() && log.isDebugEnabled()) {
        log.debug("Dumping GELF chunk map [chunks for {} messages]:\n{}", chunks.size(), humanReadableChunkMap());
    }
    // TODO second parameter
    final GELFMessageChunk chunk = new GELFMessageChunk(gelfMessage, null);
    final int sequenceCount = chunk.getSequenceCount();
    final String messageId = chunk.getId();
    ChunkEntry entry = new ChunkEntry(sequenceCount, chunk.getArrival(), messageId);
    final ChunkEntry existing = chunks.putIfAbsent(messageId, entry);
    if (existing == null) {
        // Add this chunk entry to the eviction set.
        waitingMessages.inc();
        sortedEvictionSet.add(entry);
    } else {
        // The entry is already in the eviction set and chunk map.
        entry = existing;
    }
    final int sequenceNumber = chunk.getSequenceNumber();
    if (!entry.payloadArray.compareAndSet(sequenceNumber, null, chunk)) {
        log.error("Received duplicate chunk {} for message {} from {}", sequenceNumber, messageId, gelfMessage.getSourceAddress());
        duplicateChunks.inc();
        return null;
    }
    final int chunkWatermark = entry.chunkSlotsWritten.incrementAndGet();
    if (chunkWatermark > MAX_CHUNKS) {
        getAndCleanupEntry(messageId);
        throw new IllegalStateException("Maximum number of chunks reached, discarding message");
    }
    if (chunkWatermark == sequenceCount) {
        // The message is complete by chunk count: assemble and return it.
        // It might still be corrupt, but we've seen enough chunks.
        // Remove the entry before operating on it to avoid racing too much with
        // the clean-up job; some racing is inevitable, though.
        entry = getAndCleanupEntry(messageId);
        final byte[][] allChunks = new byte[sequenceCount][];
        for (int i = 0; i < entry.payloadArray.length(); i++) {
            final GELFMessageChunk messageChunk = entry.payloadArray.get(i);
            if (messageChunk == null) {
                log.debug("Couldn't read chunk {} of message {}, skipping this chunk.", i, messageId);
            } else {
                allChunks[i] = messageChunk.getData();
            }
        }
        completeMessages.inc();
        return Unpooled.wrappedBuffer(allChunks);
    }
    // The message isn't complete yet; check whether we should drop the other parts as well.
    if (isOutdated(entry)) {
        // The chunks are outdated (the oldest came in over 5 seconds ago), so clean them all up.
        log.debug("Not all chunks of <{}> arrived within {}ms. Dropping chunks.", messageId, VALIDITY_PERIOD);
        expireEntry(messageId);
    }
    return null;
}
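For context, the chunk fields read above (getId(), getSequenceNumber(), getSequenceCount()) come from the fixed chunked-GELF header defined by the GELF spec. A minimal sketch of that wire format, handy for building test chunks; the helper name is hypothetical:

import java.nio.ByteBuffer;

// Hypothetical test helper illustrating the chunked GELF header layout:
// 2 magic bytes, an 8-byte message ID, a sequence number, a sequence count, then the payload.
static byte[] buildChunk(byte[] messageId, int seqNum, int seqCount, byte[] payload) {
    final ByteBuffer buf = ByteBuffer.allocate(12 + payload.length);
    buf.put((byte) 0x1e).put((byte) 0x0f); // GELF chunk magic bytes
    buf.put(messageId, 0, 8);              // message ID shared by all chunks of one message
    buf.put((byte) seqNum);                // this chunk's position (0-based)
    buf.put((byte) seqCount);              // total number of chunks
    buf.put(payload);                      // this chunk's slice of the message
    return buf.array();
}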
Use of org.graylog2.indexer.messages.Messages in project graylog2-server by Graylog2.
The class AbsoluteSearchResource, method searchAbsoluteChunked:
@GET
@Timed
@ApiOperation(value = "Message search with absolute timerange.",
              notes = "Search for messages using an absolute timerange, specified as from/to " +
                      "with format yyyy-MM-ddTHH:mm:ss.SSSZ (e.g. 2014-01-23T15:34:49.000Z) or yyyy-MM-dd HH:mm:ss.")
@Produces(MoreMediaTypes.TEXT_CSV)
@ApiResponses(value = {
        @ApiResponse(code = 400, message = "Invalid timerange parameters provided.")
})
public ChunkedOutput<ScrollResult.ScrollChunk> searchAbsoluteChunked(
        @ApiParam(name = "query", value = "Query (Lucene syntax)", required = true) @QueryParam("query") @NotEmpty String query,
        @ApiParam(name = "from", value = "Timerange start. See description for date format", required = true) @QueryParam("from") @NotEmpty String from,
        @ApiParam(name = "to", value = "Timerange end. See description for date format", required = true) @QueryParam("to") @NotEmpty String to,
        @ApiParam(name = "limit", value = "Maximum number of messages to return.", required = false) @QueryParam("limit") int limit,
        @ApiParam(name = "offset", value = "Offset", required = false) @QueryParam("offset") int offset,
        @ApiParam(name = "batch_size", value = "Batch size for the backend storage export request.", required = false) @QueryParam("batch_size") @DefaultValue(DEFAULT_SCROLL_BATCH_SIZE) int batchSize,
        @ApiParam(name = "filter", value = "Filter", required = false) @QueryParam("filter") String filter,
        @ApiParam(name = "fields", value = "Comma separated list of fields to return", required = true) @QueryParam("fields") @NotEmpty String fields) {
    checkSearchPermission(filter, RestPermissions.SEARCHES_ABSOLUTE);
    final List<String> fieldList = parseFields(fields);
    final TimeRange timeRange = buildAbsoluteTimeRange(from, to);
    final ScrollResult scroll = searches.scroll(query, timeRange, limit, offset, fieldList, filter, batchSize);
    return buildChunkedOutput(scroll);
}
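Since the endpoint streams CSV, a client consumes it like any chunked HTTP response. A hedged sketch using java.net.http; the host, port, and path are assumptions about a typical Graylog setup, and authentication is elided:

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.stream.Stream;

// Hypothetical client call; URL and parameter values are illustrative.
static void exportCsv() throws Exception {
    final String url = "http://graylog.example.org:9000/api/search/universal/absolute"
            + "?query=" + URLEncoder.encode("source:example.org", StandardCharsets.UTF_8)
            + "&from=" + URLEncoder.encode("2014-01-23T15:34:49.000Z", StandardCharsets.UTF_8)
            + "&to=" + URLEncoder.encode("2014-01-23T15:35:49.000Z", StandardCharsets.UTF_8)
            + "&fields=timestamp,source,message";
    final HttpRequest request = HttpRequest.newBuilder(URI.create(url))
            .header("Accept", "text/csv") // selects this chunked CSV variant
            .build();
    final HttpResponse<Stream<String>> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofLines());
    response.body().forEach(System.out::println); // one CSV row per line
}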
Use of org.graylog2.indexer.messages.Messages in project graylog2-server by Graylog2.
The class KeywordSearchResource, method searchKeywordChunked:
@GET
@Timed
@ApiOperation(value = "Message search with keyword as timerange.",
              notes = "Search for messages in a timerange defined by a keyword like \"yesterday\" or \"2 weeks ago to wednesday\".")
@Produces(MoreMediaTypes.TEXT_CSV)
@ApiResponses(value = {
        @ApiResponse(code = 400, message = "Invalid keyword provided.")
})
public ChunkedOutput<ScrollResult.ScrollChunk> searchKeywordChunked(
        @ApiParam(name = "query", value = "Query (Lucene syntax)", required = true) @QueryParam("query") @NotEmpty String query,
        @ApiParam(name = "keyword", value = "Range keyword", required = true) @QueryParam("keyword") @NotEmpty String keyword,
        @QueryParam("timezone") @NotEmpty String timezone,
        @ApiParam(name = "limit", value = "Maximum number of messages to return.", required = false) @QueryParam("limit") int limit,
        @ApiParam(name = "offset", value = "Offset", required = false) @QueryParam("offset") int offset,
        @ApiParam(name = "batch_size", value = "Batch size for the backend storage export request.", required = false) @QueryParam("batch_size") @DefaultValue(DEFAULT_SCROLL_BATCH_SIZE) int batchSize,
        @ApiParam(name = "filter", value = "Filter", required = false) @QueryParam("filter") String filter,
        @ApiParam(name = "fields", value = "Comma separated list of fields to return", required = true) @QueryParam("fields") @NotEmpty String fields) {
    checkSearchPermission(filter, RestPermissions.SEARCHES_KEYWORD);
    final List<String> fieldList = parseFields(fields);
    final TimeRange timeRange = buildKeywordTimeRange(keyword, timezone);
    final ScrollResult scroll = searches.scroll(query, timeRange, limit, offset, fieldList, filter, batchSize);
    return buildChunkedOutput(scroll);
}
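This variant differs from the absolute one only in its timerange parameters: the query string carries keyword and timezone instead of from/to. An illustrative query-string fragment, with example values only (the keyword "yesterday" comes from the endpoint's own documentation):

// Illustrative query string for the keyword variant; values are examples only.
final String queryString = "?query=" + URLEncoder.encode("source:example.org", StandardCharsets.UTF_8)
        + "&keyword=" + URLEncoder.encode("yesterday", StandardCharsets.UTF_8)
        + "&timezone=" + URLEncoder.encode("Europe/Berlin", StandardCharsets.UTF_8)
        + "&fields=timestamp,source,message";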