Example usage of org.opensearch.index.reindex.ScrollableHitSource.SearchFailure in the OpenSearch project (opensearch-project).
From the class BulkIndexByScrollResponseTests, method testMergeConstructor:
/**
 * Verifies that the merging constructor of {@code BulkByScrollResponse} combines
 * several responses correctly: the merged {@code took} is the maximum of the parts,
 * failure lists are concatenated, {@code timedOut} is OR-ed, and the caller-supplied
 * cancellation reason wins over the per-response ones.
 */
public void testMergeConstructor() {
    int mergeCount = between(2, 10);
    List<BulkByScrollResponse> responses = new ArrayList<>(mergeCount);
    int expectedTookMillis = between(1000, 10000);
    int indexWithExpectedTook = between(0, mergeCount - 1);
    List<BulkItemResponse.Failure> expectedBulkFailures = new ArrayList<>();
    List<SearchFailure> expectedSearchFailures = new ArrayList<>();
    boolean expectedTimedOut = false;
    String reasonCancelled = rarely() ? randomAlphaOfLength(5) : null;
    for (int i = 0; i < mergeCount; i++) {
        // Exactly one merged response carries the expected took; all others get a value no larger than it.
        TimeValue thisTook = timeValueMillis(i == indexWithExpectedTook ? expectedTookMillis : between(0, expectedTookMillis));
        // The per-response status contents are not the focus here; status merging is covered by other tests.
        String thisReasonCancelled = rarely() ? randomAlphaOfLength(5) : null;
        BulkByScrollTask.Status status = new BulkByScrollTask.Status(
            i, 0, 0, 0, 0, 0, 0, 0, 0, 0, timeValueMillis(0), 0f, thisReasonCancelled, timeValueMillis(0));
        List<BulkItemResponse.Failure> bulkFailures;
        if (frequently()) {
            bulkFailures = emptyList();
        } else {
            bulkFailures = IntStream.range(0, between(1, 3))
                .mapToObj(j -> new BulkItemResponse.Failure("idx", "id", new Exception()))
                .collect(Collectors.toList());
        }
        expectedBulkFailures.addAll(bulkFailures);
        List<SearchFailure> searchFailures;
        if (frequently()) {
            searchFailures = emptyList();
        } else {
            searchFailures = IntStream.range(0, between(1, 3))
                .mapToObj(j -> new SearchFailure(new Exception()))
                .collect(Collectors.toList());
        }
        expectedSearchFailures.addAll(searchFailures);
        boolean thisTimedOut = rarely();
        expectedTimedOut |= thisTimedOut;
        responses.add(new BulkByScrollResponse(thisTook, status, bulkFailures, searchFailures, thisTimedOut));
    }
    BulkByScrollResponse merged = new BulkByScrollResponse(responses, reasonCancelled);
    assertEquals(timeValueMillis(expectedTookMillis), merged.getTook());
    assertEquals(expectedBulkFailures, merged.getBulkFailures());
    assertEquals(expectedSearchFailures, merged.getSearchFailures());
    assertEquals(expectedTimedOut, merged.isTimedOut());
    assertEquals(reasonCancelled, merged.getReasonCancelled());
}
Example usage of org.opensearch.index.reindex.ScrollableHitSource.SearchFailure in the OpenSearch project (opensearch-project).
From the class AsyncBulkByScrollActionTests, method testShardFailuresAbortRequest:
/**
 * Mimics shard search failures usually caused by the data node serving the
 * scroll request going down.
 */
public void testShardFailuresAbortRequest() throws Exception {
    SearchFailure failure = new SearchFailure(new RuntimeException("test"));
    ScrollableHitSource.Response scrollResponse =
        new ScrollableHitSource.Response(false, singletonList(failure), 0, emptyList(), null);
    simulateScrollResponse(new DummyAsyncBulkByScrollAction(), System.nanoTime(), 0, scrollResponse);
    BulkByScrollResponse bulkResponse = listener.get();
    // The shard failure must surface as a search failure, not as a bulk failure,
    // and must not be reported as a timeout or a cancellation.
    assertThat(bulkResponse.getBulkFailures(), empty());
    assertThat(bulkResponse.getSearchFailures(), contains(failure));
    assertFalse(bulkResponse.isTimedOut());
    assertNull(bulkResponse.getReasonCancelled());
    // Even on an aborted request the scroll context must be cleaned up.
    assertThat(client.scrollsCleared, contains(scrollId));
}
Example usage of org.opensearch.index.reindex.ScrollableHitSource.SearchFailure in the OpenSearch project (opensearch-project).
From the class BulkByScrollResponse, method parseFailure:
/**
 * Parses one entry of the "failures" array from an XContent stream. An entry is
 * either a bulk indexing failure (identified by a "cause" object) or a search/scroll
 * failure (identified by a "reason" object); scalar fields such as index, id,
 * status, shard, and node are collected along the way. Unknown objects and all
 * arrays are skipped.
 *
 * @return a {@code Failure} when "cause" was present, otherwise a
 *         {@code SearchFailure} when "reason" was present
 * @throws IOException if the underlying parser fails
 * @throws OpenSearchParseException if neither "cause" nor "reason" is present
 */
private static Object parseFailure(XContentParser parser) throws IOException {
ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser);
Token token;
String index = null;
String id = null;
Integer status = null;
Integer shardId = null;
String nodeId = null;
OpenSearchException bulkExc = null;
OpenSearchException searchExc = null;
// Walk field-name/value pairs until the failure object closes.
while ((token = parser.nextToken()) != Token.END_OBJECT) {
ensureExpectedToken(Token.FIELD_NAME, token, parser);
String name = parser.currentName();
token = parser.nextToken();
if (token == Token.START_ARRAY) {
// No array-valued fields are expected here; ignore them wholesale.
parser.skipChildren();
} else if (token == Token.START_OBJECT) {
switch(name) {
case SearchFailure.REASON_FIELD:
// "reason" marks this entry as a search failure.
searchExc = OpenSearchException.fromXContent(parser);
break;
case Failure.CAUSE_FIELD:
// "cause" marks this entry as a bulk indexing failure.
bulkExc = OpenSearchException.fromXContent(parser);
break;
default:
parser.skipChildren();
}
} else if (token == Token.VALUE_STRING) {
switch(name) {
// This field is the same as SearchFailure.index
case Failure.INDEX_FIELD:
index = parser.text();
break;
case Failure.ID_FIELD:
id = parser.text();
break;
case SearchFailure.NODE_FIELD:
nodeId = parser.text();
break;
default:
// Do nothing
break;
}
} else if (token == Token.VALUE_NUMBER) {
switch(name) {
case Failure.STATUS_FIELD:
status = parser.intValue();
break;
case SearchFailure.SHARD_FIELD:
shardId = parser.intValue();
break;
default:
// Do nothing
break;
}
}
}
// "cause" takes precedence: if both were somehow present, a bulk Failure is returned.
if (bulkExc != null) {
// NOTE(review): if a bulk failure entry omits "status", unboxing the null Integer
// in RestStatus.fromCode(status) would throw NPE — presumably serialized bulk
// failures always include a status field; confirm against the serializer.
return new Failure(index, id, bulkExc, RestStatus.fromCode(status));
} else if (searchExc != null) {
// Search failures may legitimately lack a status; use the ctor without one then.
if (status == null) {
return new SearchFailure(searchExc, index, shardId, nodeId);
} else {
return new SearchFailure(searchExc, index, shardId, nodeId, RestStatus.fromCode(status));
}
} else {
throw new OpenSearchParseException("failed to parse failures array. At least one of {reason,cause} must be present");
}
}
Aggregations