Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException in project elasticsearch by elastic.
In class JsonProcessorFactoryTests, method testCreateWithMissingField.
public void testCreateWithMissingField() throws Exception {
    Map<String, Object> config = new HashMap<>();
    String processorTag = randomAsciiOfLength(10);
    ElasticsearchException exception = expectThrows(ElasticsearchParseException.class,
            () -> FACTORY.create(null, processorTag, config));
    assertThat(exception.getMessage(), equalTo("[field] required property is missing"));
}
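For contrast, a minimal happy-path sketch is shown below. It is not taken from the test class; it assumes the same FACTORY constant and ESTestCase helpers as the test above, and assumes JsonProcessor exposes a getField() accessor.

public void testCreateWithFieldPresent() throws Exception {
    Map<String, Object> config = new HashMap<>();
    // Supplying the required [field] property lets the factory succeed.
    config.put("field", "message");
    String processorTag = randomAsciiOfLength(10);
    JsonProcessor processor = FACTORY.create(null, processorTag, config);
    assertThat(processor.getField(), equalTo("message"));
}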
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException in project elasticsearch by elastic.
In class KeyValueProcessorFactoryTests, method testCreateWithMissingValueSplit.
public void testCreateWithMissingValueSplit() {
    KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory();
    Map<String, Object> config = new HashMap<>();
    config.put("field", "field1");
    config.put("field_split", "&");
    String processorTag = randomAsciiOfLength(10);
    ElasticsearchException exception = expectThrows(ElasticsearchParseException.class,
            () -> factory.create(null, processorTag, config));
    assertThat(exception.getMessage(), equalTo("[value_split] required property is missing"));
}
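A valid configuration only needs the missing value_split entry on top of what the test already provides. A hedged sketch, assuming KeyValueProcessor exposes getters for its settings:

public void testCreateWithValueSplitPresent() throws Exception {
    KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory();
    Map<String, Object> config = new HashMap<>();
    config.put("field", "field1");
    config.put("field_split", "&");
    config.put("value_split", "=");   // the property whose absence the test above asserts on
    String processorTag = randomAsciiOfLength(10);
    KeyValueProcessor processor = factory.create(null, processorTag, config);
    assertThat(processor.getField(), equalTo("field1"));
}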
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException in project elasticsearch by elastic.
In class PercolateQueryBuilder, method createMultiDocumentSearcher.
static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, ParsedDocument doc) {
    RAMDirectory ramDirectory = new RAMDirectory();
    try (IndexWriter indexWriter = new IndexWriter(ramDirectory, new IndexWriterConfig(analyzer))) {
        indexWriter.addDocuments(doc.docs());
        indexWriter.commit();
        DirectoryReader directoryReader = DirectoryReader.open(ramDirectory);
        assert directoryReader.leaves().size() == 1 : "Expected single leaf, but got [" + directoryReader.leaves().size() + "]";
        final IndexSearcher slowSearcher = new IndexSearcher(directoryReader) {

            @Override
            public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
                // Wrap every query so that nested (child) Lucene documents are excluded and only
                // the root document of the percolated document can match.
                BooleanQuery.Builder bq = new BooleanQuery.Builder();
                bq.add(query, BooleanClause.Occur.MUST);
                bq.add(Queries.newNestedFilter(), BooleanClause.Occur.MUST_NOT);
                return super.createNormalizedWeight(bq.build(), needsScores);
            }
        };
        slowSearcher.setQueryCache(null);
        return slowSearcher;
    } catch (IOException e) {
        throw new ElasticsearchException("Failed to create index for percolator with nested document ", e);
    }
}
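The anonymous IndexSearcher subclass filters out nested (child) Lucene documents, so candidate percolator queries can only hit the root document of the in-memory index. A usage sketch, purely illustrative (analyzer, parsedDocument and candidateQuery are assumed local variables, not from the source):

IndexSearcher searcher = createMultiDocumentSearcher(analyzer, parsedDocument);
// Nested documents are excluded by the wrapper, so only the root document can match.
TopDocs topDocs = searcher.search(candidateQuery, 10);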
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException in project elasticsearch by elastic.
In class RestDeleteByQueryAction, method buildRequest.
@Override
protected DeleteByQueryRequest buildRequest(RestRequest request) throws IOException {
    if (false == request.hasContent()) {
        throw new ElasticsearchException("_delete_by_query requires a request body");
    }
    /*
     * Passing the search request through DeleteByQueryRequest first allows
     * it to set its own defaults which differ from SearchRequest's
     * defaults. Then the parseInternalRequest can override them.
     */
    DeleteByQueryRequest internal = new DeleteByQueryRequest(new SearchRequest());
    Map<String, Consumer<Object>> consumers = new HashMap<>();
    consumers.put("conflicts", o -> internal.setConflicts((String) o));
    parseInternalRequest(internal, request, consumers);
    return internal;
}
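Only the conflicts parameter gets a dedicated consumer here; everything else is parsed by parseInternalRequest into the wrapped SearchRequest. A hedged sketch of building an equivalent request programmatically (the index name and query are illustrative, and getSearchRequest() is assumed to be inherited from the bulk-by-scroll base request):

DeleteByQueryRequest internal = new DeleteByQueryRequest(new SearchRequest("my-index"));
internal.setConflicts("proceed");   // the same setter the "conflicts" consumer above calls
internal.getSearchRequest().source(new SearchSourceBuilder()
        .query(QueryBuilders.termQuery("user", "kimchy")));   // illustrative query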
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.ElasticsearchException in project elasticsearch by elastic.
In class RemoteScrollableHitSource, method execute.
private <T> void execute(String method, String uri, Map<String, String> params, HttpEntity entity,
        BiFunction<XContentParser, XContentType, T> parser, Consumer<? super T> listener) {
    // Preserve the thread context so headers survive after the call
    java.util.function.Supplier<ThreadContext.StoredContext> contextSupplier = threadPool.getThreadContext().newRestorableContext(true);
    class RetryHelper extends AbstractRunnable {

        private final Iterator<TimeValue> retries = backoffPolicy.iterator();

        @Override
        protected void doRun() throws Exception {
            client.performRequestAsync(method, uri, params, entity, new ResponseListener() {

                @Override
                public void onSuccess(org.elasticsearch.client.Response response) {
                    // Restore the thread context to get the precious headers
                    try (ThreadContext.StoredContext ctx = contextSupplier.get()) {
                        // eliminates compiler warning
                        assert ctx != null;
                        T parsedResponse;
                        try {
                            HttpEntity responseEntity = response.getEntity();
                            InputStream content = responseEntity.getContent();
                            XContentType xContentType = null;
                            if (responseEntity.getContentType() != null) {
                                final String mimeType = ContentType.parse(responseEntity.getContentType().getValue()).getMimeType();
                                xContentType = XContentType.fromMediaType(mimeType);
                            }
                            if (xContentType == null) {
                                try {
                                    throw new ElasticsearchException("Response didn't include Content-Type: " + bodyMessage(response.getEntity()));
                                } catch (IOException e) {
                                    ElasticsearchException ee = new ElasticsearchException("Error extracting body from response");
                                    ee.addSuppressed(e);
                                    throw ee;
                                }
                            }
                            // EMPTY is safe here because we don't call namedObject
                            try (XContentParser xContentParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, content)) {
                                parsedResponse = parser.apply(xContentParser, xContentType);
                            } catch (ParsingException e) {
                                /* Because we're streaming the response we can't get a copy of it here. The best we can do is hint that it
                                 * is totally wrong and we're probably not talking to Elasticsearch. */
                                throw new ElasticsearchException("Error parsing the response, remote is likely not an Elasticsearch instance", e);
                            }
                        } catch (IOException e) {
                            throw new ElasticsearchException("Error deserializing response, remote is likely not an Elasticsearch instance", e);
                        }
                        listener.accept(parsedResponse);
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    try (ThreadContext.StoredContext ctx = contextSupplier.get()) {
                        // eliminates compiler warning
                        assert ctx != null;
                        if (e instanceof ResponseException) {
                            ResponseException re = (ResponseException) e;
                            if (RestStatus.TOO_MANY_REQUESTS.getStatus() == re.getResponse().getStatusLine().getStatusCode()) {
                                if (retries.hasNext()) {
                                    TimeValue delay = retries.next();
                                    logger.trace((Supplier<?>) () -> new ParameterizedMessage("retrying rejected search after [{}]", delay), e);
                                    countSearchRetry.run();
                                    threadPool.schedule(delay, ThreadPool.Names.SAME, RetryHelper.this);
                                    return;
                                }
                            }
                            e = wrapExceptionToPreserveStatus(re.getResponse().getStatusLine().getStatusCode(), re.getResponse().getEntity(), re);
                        } else if (e instanceof ContentTooLongException) {
                            e = new IllegalArgumentException("Remote responded with a chunk that was too large. Use a smaller batch size.", e);
                        }
                        fail.accept(e);
                    }
                }
            });
        }

        @Override
        public void onFailure(Exception t) {
            fail.accept(t);
        }
    }
    new RetryHelper().run();
}
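The retry loop is driven by backoffPolicy: each 429 (TOO_MANY_REQUESTS) response consumes one delay from its iterator, and the request fails once the iterator is exhausted. A minimal sketch of such a policy, assuming org.elasticsearch.action.bulk.BackoffPolicy as used elsewhere in the reindex module (the scheduling call is only indicated in a comment):

// Hedged sketch: an exponential policy like the one backing the retries iterator above.
BackoffPolicy backoffPolicy = BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(500), 8);
Iterator<TimeValue> retries = backoffPolicy.iterator();
while (retries.hasNext()) {
    TimeValue delay = retries.next();   // first 500ms, then increasing delays, at most 8 retries
    // e.g. threadPool.schedule(delay, ThreadPool.Names.SAME, retryTask);
}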