Use of org.opensearch.ad.model.AnomalyDetector in project anomaly-detection by opensearch-project.
From the class CheckpointReadWorker, method onGetDetector.
private ActionListener<Optional<AnomalyDetector>> onGetDetector(
    EntityFeatureRequest origRequest, int index, String detectorId, List<EntityFeatureRequest> toProcess,
    Map<String, MultiGetItemResponse> successfulRequests, Set<String> retryableRequests,
    Optional<Entry<EntityModel, Instant>> checkpoint, Entity entity, String modelId
) {
    return ActionListener.wrap(detectorOptional -> {
        if (false == detectorOptional.isPresent()) {
            LOG.warn(new ParameterizedMessage("AnomalyDetector [{}] is not available.", detectorId));
            processCheckpointIteration(index + 1, toProcess, successfulRequests, retryableRequests);
            return;
        }
        AnomalyDetector detector = detectorOptional.get();
        ModelState<EntityModel> modelState = modelManager
            .processEntityCheckpoint(checkpoint, entity, modelId, detectorId, detector.getShingleSize());
        EntityModel entityModel = modelState.getModel();
        ThresholdingResult result = null;
        if (entityModel.getTrcf().isPresent()) {
            result = modelManager.score(origRequest.getCurrentFeature(), modelId, modelState);
        } else {
            entityModel.addSample(origRequest.getCurrentFeature());
        }
        if (result != null && result.getRcfScore() > 0) {
            AnomalyResult resultToSave = result
                .toAnomalyResult(
                    detector,
                    Instant.ofEpochMilli(origRequest.getDataStartTimeMillis()),
                    Instant.ofEpochMilli(origRequest.getDataStartTimeMillis() + detector.getDetectorIntervalInMilliseconds()),
                    Instant.now(),
                    Instant.now(),
                    ParseUtils.getFeatureData(origRequest.getCurrentFeature(), detector),
                    entity,
                    indexUtil.getSchemaVersion(ADIndex.RESULT),
                    modelId,
                    null,
                    null
                );
            resultWriteQueue
                .put(
                    new ResultWriteRequest(
                        origRequest.getExpirationEpochMs(),
                        detectorId,
                        result.getGrade() > 0 ? RequestPriority.HIGH : RequestPriority.MEDIUM,
                        resultToSave,
                        detector.getResultIndex()
                    )
                );
        }
        // try to load to cache
        boolean loaded = cacheProvider.get().hostIfPossible(detector, modelState);
        if (false == loaded) {
            // not in memory. Maybe cold entities or some other entities
            // have filled the slot while waiting for loading checkpoints.
            checkpointWriteQueue.write(modelState, true, RequestPriority.LOW);
        }
        processCheckpointIteration(index + 1, toProcess, successfulRequests, retryableRequests);
    }, exception -> {
        LOG.error(new ParameterizedMessage("fail to get checkpoint [{}]", modelId, exception));
        nodeStateManager.setException(detectorId, exception);
        processCheckpointIteration(index + 1, toProcess, successfulRequests, retryableRequests);
    });
}
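The method above only builds the listener; somewhere in processCheckpointIteration it still has to be attached to a detector lookup. A minimal sketch of that call site, assuming the same nodeStateManager.getAnomalyDetector accessor that CheckpointWriteWorker.writeAll uses below (the surrounding iteration code is not shown here):

// Hypothetical call site, for illustration only: hand the listener produced by
// onGetDetector to the detector lookup, mirroring the pattern used in writeAll below.
nodeStateManager
    .getAnomalyDetector(
        detectorId,
        onGetDetector(origRequest, index, detectorId, toProcess, successfulRequests, retryableRequests, checkpoint, entity, modelId)
    );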
Use of org.opensearch.ad.model.AnomalyDetector in project anomaly-detection by opensearch-project.
From the class CheckpointWriteWorker, method writeAll.
public void writeAll(List<ModelState<EntityModel>> modelStates, String detectorId, boolean forceWrite, RequestPriority priority) {
    ActionListener<Optional<AnomalyDetector>> onGetForAll = ActionListener.wrap(detectorOptional -> {
        if (false == detectorOptional.isPresent()) {
            LOG.warn(new ParameterizedMessage("AnomalyDetector [{}] is not available.", detectorId));
            return;
        }
        AnomalyDetector detector = detectorOptional.get();
        try {
            List<CheckpointWriteRequest> allRequests = new ArrayList<>();
            for (ModelState<EntityModel> state : modelStates) {
                Instant instant = state.getLastCheckpointTime();
                if (!shouldSave(instant, forceWrite)) {
                    continue;
                }
                Map<String, Object> source = checkpoint.toIndexSource(state);
                String modelId = state.getModelId();
                // the model state is bloated or empty (empty samples and models), skip
                if (source == null || source.isEmpty() || Strings.isEmpty(modelId)) {
                    continue;
                }
                state.setLastCheckpointTime(clock.instant());
                allRequests
                    .add(
                        new CheckpointWriteRequest(
                            System.currentTimeMillis() + detector.getDetectorIntervalInMilliseconds(),
                            detectorId,
                            priority,
                            // If the document exists, update fields in the map
                            new UpdateRequest(indexName, modelId).docAsUpsert(true).doc(source)
                        )
                    );
            }
            putAll(allRequests);
        } catch (Exception e) {
            // Example exception:
            // ConcurrentModificationException when calling toCheckpoint
            // and updating rcf model at the same time. To prevent this,
            // we need to have a deep copy of models or have a lock. Both
            // options are costly.
            // As we are gonna retry serializing either when the entity is
            // evicted out of cache or during the next maintenance period,
            // don't do anything when the exception happens.
            LOG.info(new ParameterizedMessage("Exception while serializing models for [{}]", detectorId), e);
        }
    }, exception -> {
        LOG.error(new ParameterizedMessage("fail to get detector [{}]", detectorId), exception);
    });

    nodeStateManager.getAnomalyDetector(detectorId, onGetForAll);
}
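shouldSave is not shown in this snippet; it decides how often a model state is re-serialized. The helper below is only a plausible reading under stated assumptions, not the project's implementation: it assumes a checkpointInterval Duration field on the worker and the same clock used above.

// Hypothetical throttling helper (the real shouldSave lives elsewhere in CheckpointWriteWorker):
// write when forced, otherwise only when the last checkpoint is older than the configured interval.
private boolean shouldSave(Instant lastCheckpointTime, boolean forceWrite) {
    if (forceWrite) {
        return true;
    }
    // checkpointInterval is an assumed Duration field configured on the worker.
    return lastCheckpointTime.plus(checkpointInterval).isBefore(clock.instant());
}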
Use of org.opensearch.ad.model.AnomalyDetector in project anomaly-detection by opensearch-project.
From the class IndexAnomalyDetectorTransportActionTests, method setUp.
@SuppressWarnings("unchecked")
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    clusterService = mock(ClusterService.class);
    clusterSettings = new ClusterSettings(
        Settings.EMPTY,
        Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES)))
    );
    when(clusterService.getClusterSettings()).thenReturn(clusterSettings);

    // Cluster state that already contains the detector config index
    ClusterName clusterName = new ClusterName("test");
    Settings indexSettings = Settings.builder()
        .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
        .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
        .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
        .build();
    final Settings.Builder existingSettings = Settings.builder().put(indexSettings).put(IndexMetadata.SETTING_INDEX_UUID, "test2UUID");
    IndexMetadata indexMetaData = IndexMetadata.builder(AnomalyDetector.ANOMALY_DETECTORS_INDEX).settings(existingSettings).build();
    final ImmutableOpenMap<String, IndexMetadata> indices = ImmutableOpenMap.<String, IndexMetadata>builder()
        .fPut(AnomalyDetector.ANOMALY_DETECTORS_INDEX, indexMetaData)
        .build();
    ClusterState clusterState = ClusterState.builder(clusterName).metadata(Metadata.builder().indices(indices).build()).build();
    when(clusterService.state()).thenReturn(clusterState);

    adTaskManager = mock(ADTaskManager.class);
    searchFeatureDao = mock(SearchFeatureDao.class);
    action = new IndexAnomalyDetectorTransportAction(
        mock(TransportService.class),
        mock(ActionFilters.class),
        client(),
        clusterService,
        indexSettings(),
        mock(AnomalyDetectionIndices.class),
        xContentRegistry(),
        adTaskManager,
        searchFeatureDao
    );
    task = mock(Task.class);

    AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now());
    GetResponse getDetectorResponse = TestHelpers.createGetResponse(detector, detector.getDetectorId(), AnomalyDetector.ANOMALY_DETECTORS_INDEX);

    // Stub client.get to return the detector document
    doAnswer(invocation -> {
        Object[] args = invocation.getArguments();
        assertTrue(String.format("The size of args is %d. Its content is %s", args.length, Arrays.toString(args)), args.length == 2);
        assertTrue(args[0] instanceof GetRequest);
        assertTrue(args[1] instanceof ActionListener);
        ActionListener<GetResponse> listener = (ActionListener<GetResponse>) args[1];
        listener.onResponse(getDetectorResponse);
        return null;
    }).when(client).get(any(GetRequest.class), any());

    // Stub client.search to return an empty result set
    SearchHits hits = new SearchHits(new SearchHit[] {}, null, Float.NaN);
    SearchResponseSections searchSections = new SearchResponseSections(hits, null, null, false, false, null, 1);
    SearchResponse searchResponse = new SearchResponse(searchSections, null, 1, 1, 0, 30, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
    doAnswer(invocation -> {
        Object[] args = invocation.getArguments();
        assertTrue(String.format("The size of args is %d. Its content is %s", args.length, Arrays.toString(args)), args.length == 2);
        assertTrue(args[0] instanceof SearchRequest);
        assertTrue(args[1] instanceof ActionListener);
        ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) args[1];
        listener.onResponse(searchResponse);
        return null;
    }).when(client).search(any(SearchRequest.class), any());

    request = new IndexAnomalyDetectorRequest("1234", 4567, 7890, WriteRequest.RefreshPolicy.IMMEDIATE, detector, RestRequest.Method.PUT, TimeValue.timeValueSeconds(60), 1000, 10, 5);
    response = new ActionListener<IndexAnomalyDetectorResponse>() {
        @Override
        public void onResponse(IndexAnomalyDetectorResponse indexResponse) {
            // onResponse will not be called as we do not have the AD index
            Assert.assertTrue(false);
        }

        @Override
        public void onFailure(Exception e) {
            Assert.assertTrue(true);
        }
    };
}
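With the stubs above in place, a test method only has to run the action; the anonymous listener created in setUp fails the test if onResponse is ever invoked. A minimal sketch follows (the method name is hypothetical; the real test class defines its own test methods):

// Hypothetical test method exercising the stubbed transport action end to end.
public void testDoExecuteWithoutAdIndex() {
    // Per the comment in setUp, onResponse is not expected because the AD index is not available to the action.
    action.doExecute(task, request, response);
}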
Use of org.opensearch.ad.model.AnomalyDetector in project anomaly-detection by opensearch-project.
From the class PreviewAnomalyDetectorActionTests, method testPreviewResponse.
@Test
public void testPreviewResponse() throws Exception {
    BytesStreamOutput out = new BytesStreamOutput();
    AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now());
    AnomalyResult result = TestHelpers.randomHCADAnomalyDetectResult(0.8d, 0d);
    PreviewAnomalyDetectorResponse response = new PreviewAnomalyDetectorResponse(ImmutableList.of(result), detector);
    response.writeTo(out);
    NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry());
    PreviewAnomalyDetectorResponse newResponse = new PreviewAnomalyDetectorResponse(input);
    Assert.assertNotNull(newResponse.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS));
}
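The final assertion only checks that the deserialized copy still renders to XContent. A slightly stronger check, which would sit inside the test in place of that assertion, is sketched below; it assumes TestHelpers.builder() yields a JSON builder and uses org.opensearch.common.bytes.BytesReference to read the output.

// Sketch: compare the serialized form of the original response and its round-tripped copy.
String original = BytesReference.bytes(response.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)).utf8ToString();
String copy = BytesReference.bytes(newResponse.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)).utf8ToString();
Assert.assertEquals(original, copy);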
Use of org.opensearch.ad.model.AnomalyDetector in project anomaly-detection by opensearch-project.
From the class PreviewAnomalyDetectorActionTests, method testPreviewRequest.
@Test
public void testPreviewRequest() throws Exception {
    BytesStreamOutput out = new BytesStreamOutput();
    AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now());
    PreviewAnomalyDetectorRequest request = new PreviewAnomalyDetectorRequest(detector, "1234", Instant.now().minusSeconds(60), Instant.now());
    request.writeTo(out);
    NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry());
    PreviewAnomalyDetectorRequest newRequest = new PreviewAnomalyDetectorRequest(input);
    Assert.assertEquals(request.getDetectorId(), newRequest.getDetectorId());
    Assert.assertEquals(request.getStartTime(), newRequest.getStartTime());
    Assert.assertEquals(request.getEndTime(), newRequest.getEndTime());
    Assert.assertNotNull(newRequest.getDetector());
    Assert.assertNull(newRequest.validate());
}
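Once a request survives the Writeable round trip, dispatching it is the usual transport-client call. A sketch, assuming the plugin's PreviewAnomalyDetectorAction.INSTANCE constant and a client field available to the caller (both assumptions, not part of the test above):

// Hypothetical dispatch of a preview request through the transport layer.
client.execute(PreviewAnomalyDetectorAction.INSTANCE, request, ActionListener.wrap(previewResponse -> {
    // consume the PreviewAnomalyDetectorResponse, e.g. inspect its anomaly results
}, e -> Assert.fail(e.getMessage())));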