Use of org.opensearch.index.IndexNotFoundException in project anomaly-detection by opensearch-project.
From the class CheckpointReadWorkerTests, method testIndexNotFound.
public void testIndexNotFound() {
    doAnswer(invocation -> {
        MultiGetItemResponse[] items = new MultiGetItemResponse[1];
        items[0] = new MultiGetItemResponse(
            null,
            new MultiGetResponse.Failure(
                CommonName.CHECKPOINT_INDEX_NAME,
                "_doc",
                entity.getModelId(detectorId).get(),
                new IndexNotFoundException(CommonName.CHECKPOINT_INDEX_NAME)
            )
        );
        ActionListener<MultiGetResponse> listener = invocation.getArgument(1);
        listener.onResponse(new MultiGetResponse(items));
        return null;
    }).when(checkpoint).batchRead(any(), any());
    worker.put(request);
    verify(coldstartQueue, times(1)).put(any());
    verify(entityCache, never()).hostIfPossible(any(), any());
}
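For context, the production behavior this test asserts can be summarized by the following sketch. It is not the actual CheckpointReadWorker code; coldstartQueue and entityCache mirror the mocks above, while the request type and the restoreModel helper are hypothetical names used only for illustration.

// Hedged sketch of the pattern under test; not taken from the project source.
void onCheckpointResponse(MultiGetResponse response, EntityFeatureRequest origin) {
    for (MultiGetItemResponse item : response.getResponses()) {
        if (item.isFailed() && item.getFailure().getFailure() instanceof IndexNotFoundException) {
            // No checkpoint index yet: the model has to be cold started from raw data.
            coldstartQueue.put(origin);
        } else if (!item.isFailed() && item.getResponse().isExists()) {
            // A checkpoint exists: try to host the restored model in the entity cache.
            entityCache.hostIfPossible(detector, restoreModel(item.getResponse()));
        }
    }
}

This matches what the verifications check: a missing checkpoint index routes the entity to the cold-start queue, and hostIfPossible is never reached.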
Use of org.opensearch.index.IndexNotFoundException in project anomaly-detection by opensearch-project.
From the class CheckpointDao, method deleteModelCheckpointByDetectorId.
/**
 * Delete checkpoints associated with a detector. Used by multi-entity detectors.
 * @param detectorID Detector Id
 */
public void deleteModelCheckpointByDetectorId(String detectorID) {
    // A bulk delete request is performed for each batch of matching documents. If a
    // search or bulk request is rejected, the requests are retried up to 10 times,
    // with exponential back off. If the maximum retry limit is reached, processing
    // halts and all failed requests are returned in the response. Any delete
    // requests that completed successfully still stick; they are not rolled back.
    DeleteByQueryRequest deleteRequest = new DeleteByQueryRequest(CommonName.CHECKPOINT_INDEX_NAME)
        .setQuery(new MatchQueryBuilder(DETECTOR_ID, detectorID))
        .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN)
        // when the current delete happens, the previous one might not have finished
        .setAbortOnVersionConflict(false)
        // throttle delete requests
        .setRequestsPerSecond(500);
    logger.info("Delete checkpoints of detector {}", detectorID);
    client.execute(DeleteByQueryAction.INSTANCE, deleteRequest, ActionListener.wrap(response -> {
        if (response.isTimedOut() || !response.getBulkFailures().isEmpty() || !response.getSearchFailures().isEmpty()) {
            logFailure(response, detectorID);
        }
        // The response may report 0 deleted docs because:
        // 1) we cannot find matching docs, or
        // 2) OpenSearch returns bad stats: docs are deleted, but the reported
        //    deleted count is 0.
        logger.info("{} " + DOC_GOT_DELETED_LOG_MSG, response.getDeleted());
    }, exception -> {
        if (exception instanceof IndexNotFoundException) {
            logger.info(INDEX_DELETED_LOG_MSG + " {}", detectorID);
        } else {
            // Will eventually be deleted by the daily cron.
            logger.error(NOT_ABLE_TO_DELETE_LOG_MSG, exception);
        }
    }));
}
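A unit test for the IndexNotFoundException branch above could stub the client the same way ADTaskManagerTests does further down. The following is a hypothetical sketch, not a test from the project; checkpointDao and client are assumed fixtures/mocks.

// Hypothetical test sketch: when the checkpoint index is missing, the delete is a no-op
// and only an info message is logged; nothing propagates to the caller.
doAnswer(invocation -> {
    ActionListener<BulkByScrollResponse> listener = invocation.getArgument(2);
    listener.onFailure(new IndexNotFoundException(CommonName.CHECKPOINT_INDEX_NAME));
    return null;
}).when(client).execute(eq(DeleteByQueryAction.INSTANCE), any(DeleteByQueryRequest.class), any());

checkpointDao.deleteModelCheckpointByDetectorId("detector-1");
// No exception is thrown; any other failure would simply be logged and left for the daily cron.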
Use of org.opensearch.index.IndexNotFoundException in project anomaly-detection by opensearch-project.
From the class DailyCron, method run.
@Override
public void run() {
    DeleteByQueryRequest deleteRequest = new DeleteByQueryRequest(CommonName.CHECKPOINT_INDEX_NAME)
        .setQuery(
            QueryBuilders.boolQuery()
                .filter(
                    QueryBuilders.rangeQuery(CheckpointDao.TIMESTAMP)
                        .lte(clock.millis() - checkpointTtl.toMillis())
                        .format(CommonName.EPOCH_MILLIS_FORMAT)
                )
        )
        .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN);
    clientUtil.execute(DeleteByQueryAction.INSTANCE, deleteRequest, ActionListener.wrap(response -> {
        // if 0 docs get deleted, it means our query cannot find any matching doc
        LOG.info("{} " + CHECKPOINT_DELETED_MSG, response.getDeleted());
    }, exception -> {
        if (exception instanceof IndexNotFoundException) {
            LOG.info(CHECKPOINT_NOT_EXIST_MSG);
        } else {
            // Will eventually be deleted in the maintenance window.
            LOG.error(CANNOT_DELETE_OLD_CHECKPOINT_MSG, exception);
        }
    }));
}
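The range filter above deletes checkpoints older than a TTL measured against an injected java.time.Clock, which also makes the cron easy to test with a fixed clock. A small illustration of how the cutoff is computed (the TTL value and timestamps here are just examples):

// Pin "now" so the cutoff is deterministic, as a test for DailyCron could do.
Clock clock = Clock.fixed(Instant.parse("2023-01-10T00:00:00Z"), ZoneOffset.UTC);
Duration checkpointTtl = Duration.ofDays(3);
long cutoffMillis = clock.millis() - checkpointTtl.toMillis();
// Checkpoints whose TIMESTAMP field is <= cutoffMillis (here 2023-01-07T00:00:00Z) match the lte filter.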
Use of org.opensearch.index.IndexNotFoundException in project anomaly-detection by opensearch-project.
From the class EntityProfileRunner, method getJob.
private void getJob(
    String detectorId,
    Entity entityValue,
    Set<EntityProfileName> profilesToCollect,
    AnomalyDetector detector,
    EntityProfileResponse entityProfileResponse,
    ActionListener<EntityProfile> listener
) {
    GetRequest getRequest = new GetRequest(ANOMALY_DETECTOR_JOB_INDEX, detectorId);
    client.get(getRequest, ActionListener.wrap(getResponse -> {
        if (getResponse != null && getResponse.isExists()) {
            try (
                XContentParser parser = XContentType.JSON
                    .xContent()
                    .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, getResponse.getSourceAsString())
            ) {
                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
                AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser);
                int totalResponsesToWait = 0;
                if (profilesToCollect.contains(EntityProfileName.INIT_PROGRESS) || profilesToCollect.contains(EntityProfileName.STATE)) {
                    totalResponsesToWait++;
                }
                if (profilesToCollect.contains(EntityProfileName.ENTITY_INFO)) {
                    totalResponsesToWait++;
                }
                if (profilesToCollect.contains(EntityProfileName.MODELS)) {
                    totalResponsesToWait++;
                }
                MultiResponsesDelegateActionListener<EntityProfile> delegateListener =
                    new MultiResponsesDelegateActionListener<EntityProfile>(
                        listener,
                        totalResponsesToWait,
                        CommonErrorMessages.FAIL_FETCH_ERR_MSG + entityValue + " of detector " + detectorId,
                        false
                    );
                if (profilesToCollect.contains(EntityProfileName.MODELS)) {
                    EntityProfile.Builder builder = new EntityProfile.Builder();
                    if (false == job.isEnabled()) {
                        delegateListener.onResponse(builder.build());
                    } else {
                        delegateListener.onResponse(builder.modelProfile(entityProfileResponse.getModelProfile()).build());
                    }
                }
                if (profilesToCollect.contains(EntityProfileName.INIT_PROGRESS) || profilesToCollect.contains(EntityProfileName.STATE)) {
                    profileStateRelated(
                        entityProfileResponse.getTotalUpdates(),
                        detectorId,
                        entityValue,
                        profilesToCollect,
                        detector,
                        job,
                        delegateListener
                    );
                }
                if (profilesToCollect.contains(EntityProfileName.ENTITY_INFO)) {
                    long enabledTimeMs = job.getEnabledTime().toEpochMilli();
                    SearchRequest lastSampleTimeRequest = createLastSampleTimeRequest(
                        detectorId,
                        enabledTimeMs,
                        entityValue,
                        detector.getResultIndex()
                    );
                    EntityProfile.Builder builder = new EntityProfile.Builder();
                    Optional<Boolean> isActiveOp = entityProfileResponse.isActive();
                    if (isActiveOp.isPresent()) {
                        builder.isActive(isActiveOp.get());
                    }
                    builder.lastActiveTimestampMs(entityProfileResponse.getLastActiveMs());
                    client.search(lastSampleTimeRequest, ActionListener.wrap(searchResponse -> {
                        Optional<Long> latestSampleTimeMs = ParseUtils.getLatestDataTime(searchResponse);
                        if (latestSampleTimeMs.isPresent()) {
                            builder.lastSampleTimestampMs(latestSampleTimeMs.get());
                        }
                        delegateListener.onResponse(builder.build());
                    }, exception -> {
                        // Something went wrong, e.g., the result index has not been created. Return what we have.
                        if (exception instanceof IndexNotFoundException) {
                            // don't print out the stack trace since it is not helpful
                            logger.info("Result index hasn't been created: {}", exception.getMessage());
                        } else {
                            logger.warn("Failed to get last sample time", exception);
                        }
                        delegateListener.onResponse(builder.build());
                    }));
                }
            } catch (Exception e) {
                logger.error(CommonErrorMessages.FAIL_TO_GET_PROFILE_MSG, e);
                listener.onFailure(e);
            }
        } else {
            sendUnknownState(profilesToCollect, entityValue, true, listener);
        }
    }, exception -> {
        if (exception instanceof IndexNotFoundException) {
            logger.info(exception.getMessage());
            sendUnknownState(profilesToCollect, entityValue, true, listener);
        } else {
            logger.error(CommonErrorMessages.FAIL_TO_GET_PROFILE_MSG + detectorId, exception);
            listener.onFailure(exception);
        }
    }));
}
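Both failure handlers above follow the same shape: IndexNotFoundException is expected (the job or result index may simply not exist yet) and gets a quiet fallback, while every other exception is surfaced. A generic sketch of that pattern, not code from the project; the helper name and Supplier-based fallback are assumptions.

// Hypothetical helper: wrap a listener so a missing index yields a fallback value
// instead of an error, while all other failures still reach the caller.
static <T> ActionListener<T> missingIndexAsDefault(ActionListener<T> delegate, Supplier<T> fallback, Logger logger) {
    return ActionListener.wrap(delegate::onResponse, exception -> {
        if (exception instanceof IndexNotFoundException) {
            // Expected when the index has not been created yet; no stack trace needed.
            logger.info("Index not found, using default: {}", exception.getMessage());
            delegate.onResponse(fallback.get());
        } else {
            delegate.onFailure(exception);
        }
    });
}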
Use of org.opensearch.index.IndexNotFoundException in project anomaly-detection by opensearch-project.
From the class ADTaskManagerTests, method testDeleteADTasksWithException.
@SuppressWarnings("unchecked")
public void testDeleteADTasksWithException() {
    doAnswer(invocation -> {
        ActionListener<BulkByScrollResponse> actionListener = invocation.getArgument(2);
        actionListener.onFailure(new IndexNotFoundException(DETECTION_STATE_INDEX));
        return null;
    }).doAnswer(invocation -> {
        ActionListener<BulkByScrollResponse> actionListener = invocation.getArgument(2);
        actionListener.onFailure(new RuntimeException("test"));
        return null;
    }).when(client).execute(any(), any(), any());
    String detectorId = randomAlphaOfLength(5);
    AnomalyDetectorFunction function = mock(AnomalyDetectorFunction.class);
    ActionListener<DeleteResponse> listener = mock(ActionListener.class);

    // First call: IndexNotFoundException is swallowed and the follow-up function still runs.
    adTaskManager.deleteADTasks(detectorId, function, listener);
    verify(function, times(1)).execute();
    verify(listener, never()).onFailure(any());

    // Second call: any other exception is propagated to the listener, and function is not called again.
    adTaskManager.deleteADTasks(detectorId, function, listener);
    verify(function, times(1)).execute();
    verify(listener, times(1)).onFailure(any());
}
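The two verifications correspond to a production-side branch that treats a missing detection state index as "nothing to delete". A hedged sketch of that behavior follows; it is not the actual ADTaskManager source, and the query field name is an assumption.

// Sketch only: delete all AD task docs of a detector, then run the follow-up function.
public void deleteADTasks(String detectorId, AnomalyDetectorFunction function, ActionListener<DeleteResponse> listener) {
    DeleteByQueryRequest request = new DeleteByQueryRequest(DETECTION_STATE_INDEX)
        .setQuery(new TermQueryBuilder("detector_id", detectorId));
    client.execute(DeleteByQueryAction.INSTANCE, request, ActionListener.wrap(r -> function.execute(), exception -> {
        if (exception instanceof IndexNotFoundException) {
            // Index never created, so there is nothing to clean up; proceed as if the delete succeeded.
            function.execute();
        } else {
            listener.onFailure(exception);
        }
    }));
}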