Use of org.opensearch.ad.model.AnomalyResult in project anomaly-detection by opensearch-project.
The class AnomalyDetectorRunner, method parsePreviewResult.
private List<AnomalyResult> parsePreviewResult(AnomalyDetector detector, Features features, List<ThresholdingResult> results, Entity entity) {
    // Each row of unprocessedFeatures holds the feature values for one time range;
    // each column holds one feature. For example, unprocessedFeatures[0][2] is the
    // third feature of the first time range.
    double[][] unprocessedFeatures = features.getUnprocessedFeatures();
    List<Map.Entry<Long, Long>> timeRanges = features.getTimeRanges();
    List<Feature> featureAttributes = detector.getFeatureAttributes().stream().filter(Feature::getEnabled).collect(Collectors.toList());
    List<AnomalyResult> anomalyResults = new ArrayList<>();
    if (timeRanges != null && timeRanges.size() > 0) {
        for (int i = 0; i < timeRanges.size(); i++) {
            Map.Entry<Long, Long> timeRange = timeRanges.get(i);
            List<FeatureData> featureDatas = new ArrayList<>();
            int featureSize = featureAttributes.size();
            for (int j = 0; j < featureSize; j++) {
                double value = unprocessedFeatures[i][j];
                Feature feature = featureAttributes.get(j);
                FeatureData data = new FeatureData(feature.getId(), feature.getName(), value);
                featureDatas.add(data);
            }
            AnomalyResult result;
            if (results != null && results.size() > i) {
                ThresholdingResult thresholdingResult = results.get(i);
                result = thresholdingResult.toAnomalyResult(
                    detector,
                    Instant.ofEpochMilli(timeRange.getKey()),
                    Instant.ofEpochMilli(timeRange.getValue()),
                    null, null, featureDatas, entity,
                    CommonValue.NO_SCHEMA_VERSION,
                    null, null, null
                );
            } else {
                result = new AnomalyResult(
                    detector.getDetectorId(),
                    null, featureDatas,
                    Instant.ofEpochMilli(timeRange.getKey()),
                    Instant.ofEpochMilli(timeRange.getValue()),
                    null, null, null, entity,
                    detector.getUser(),
                    CommonValue.NO_SCHEMA_VERSION,
                    null
                );
            }
            anomalyResults.add(result);
        }
    }
    return anomalyResults;
}
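
A minimal, self-contained sketch of the matrix layout this method consumes; FeaturePoint and the hard-coded values are hypothetical stand-ins for the plugin's FeatureData and real query output:

import java.util.List;
import java.util.Map;

public class PreviewMatrixDemo {

    // Hypothetical stand-in for the plugin's FeatureData (id, name, value).
    record FeaturePoint(String featureId, String featureName, double value) {}

    public static void main(String[] args) {
        // Two time ranges (rows) x two features (columns), like unprocessedFeatures.
        double[][] unprocessed = { { 1.0, 2.0 }, { 3.0, 4.0 } };
        List<Map.Entry<Long, Long>> timeRanges = List.of(Map.entry(0L, 60_000L), Map.entry(60_000L, 120_000L));
        List<String> featureNames = List.of("cpu", "mem");

        for (int i = 0; i < timeRanges.size(); i++) {        // one row per time range
            for (int j = 0; j < featureNames.size(); j++) {  // one column per feature
                FeaturePoint p = new FeaturePoint("f" + j, featureNames.get(j), unprocessed[i][j]);
                System.out.println(timeRanges.get(i) + " -> " + p);
            }
        }
    }
}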
Use of org.opensearch.ad.model.AnomalyResult in project anomaly-detection by opensearch-project.
The class AnomalyDetectorRunner, method executeDetector.
/**
 * Run anomaly detector and return anomaly result.
 *
 * @param detector anomaly detector instance
 * @param startTime detection period start time
 * @param endTime detection period end time
 * @param context stored thread context
 * @param listener handle anomaly result
 * @throws IOException if a user gives wrong query input when defining a detector
 */
public void executeDetector(
    AnomalyDetector detector,
    Instant startTime,
    Instant endTime,
    ThreadContext.StoredContext context,
    ActionListener<List<AnomalyResult>> listener
) throws IOException {
    context.restore();
    List<String> categoryField = detector.getCategoryField();
    if (categoryField != null && !categoryField.isEmpty()) {
        featureManager.getPreviewEntities(detector, startTime.toEpochMilli(), endTime.toEpochMilli(), ActionListener.wrap(entities -> {
            if (entities == null || entities.isEmpty()) {
                // TODO return exception like IllegalArgumentException to explain data is not enough for preview
                // This also requires front-end change to handle error message correspondingly
                // We return empty list for now to avoid breaking front-end
                listener.onResponse(Collections.emptyList());
                return;
            }
            ActionListener<EntityAnomalyResult> entityAnomalyResultListener = ActionListener.wrap(entityAnomalyResult -> {
                listener.onResponse(entityAnomalyResult.getAnomalyResults());
            }, e -> onFailure(e, listener, detector.getDetectorId()));
            MultiResponsesDelegateActionListener<EntityAnomalyResult> multiEntitiesResponseListener =
                new MultiResponsesDelegateActionListener<EntityAnomalyResult>(
                    entityAnomalyResultListener,
                    entities.size(),
                    String.format(Locale.ROOT, "Fail to get preview result for multi entity detector %s", detector.getDetectorId()),
                    true
                );
            for (Entity entity : entities) {
                featureManager.getPreviewFeaturesForEntity(detector, entity, startTime.toEpochMilli(), endTime.toEpochMilli(), ActionListener.wrap(features -> {
                    List<ThresholdingResult> entityResults = modelManager.getPreviewResults(features.getProcessedFeatures(), detector.getShingleSize());
                    List<AnomalyResult> sampledEntityResults = sample(parsePreviewResult(detector, features, entityResults, entity), maxPreviewResults);
                    multiEntitiesResponseListener.onResponse(new EntityAnomalyResult(sampledEntityResults));
                }, e -> multiEntitiesResponseListener.onFailure(e)));
            }
        }, e -> onFailure(e, listener, detector.getDetectorId())));
    } else {
        featureManager.getPreviewFeatures(detector, startTime.toEpochMilli(), endTime.toEpochMilli(), ActionListener.wrap(features -> {
            try {
                List<ThresholdingResult> results = modelManager.getPreviewResults(features.getProcessedFeatures(), detector.getShingleSize());
                listener.onResponse(sample(parsePreviewResult(detector, features, results, null), maxPreviewResults));
            } catch (Exception e) {
                onFailure(e, listener, detector.getDetectorId());
            }
        }, e -> onFailure(e, listener, detector.getDetectorId())));
    }
}
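
The multi-entity branch fans out one preview per entity and relies on MultiResponsesDelegateActionListener to fire the caller's listener only after all entities.size() responses arrive. A rough, self-contained sketch of that fan-in pattern with hypothetical names (the real class additionally tracks failures and uses the error message passed above):

import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;

// Collects exactly `expected` results, then hands the whole batch to `onAll`.
class FanInListener<T> {
    private final AtomicInteger remaining;
    private final Queue<T> collected = new ConcurrentLinkedQueue<>();
    private final Consumer<List<T>> onAll;

    FanInListener(int expected, Consumer<List<T>> onAll) {
        this.remaining = new AtomicInteger(expected);
        this.onAll = onAll;
    }

    void onResponse(T result) {
        collected.add(result);
        if (remaining.decrementAndGet() == 0) { // last response triggers the delegate
            onAll.accept(List.copyOf(collected));
        }
    }
}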
Use of org.opensearch.ad.model.AnomalyResult in project anomaly-detection by opensearch-project.
The class ResultWriteWorker, method enqueueRetryRequestIteration.
private void enqueueRetryRequestIteration(List<IndexRequest> requestToRetry, int index) {
    if (index >= requestToRetry.size()) {
        return;
    }
    DocWriteRequest<?> currentRequest = requestToRetry.get(index);
    Optional<AnomalyResult> resultToRetry = getAnomalyResult(currentRequest);
    if (!resultToRetry.isPresent()) {
        enqueueRetryRequestIteration(requestToRetry, index + 1);
        return;
    }
    AnomalyResult result = resultToRetry.get();
    String detectorId = result.getDetectorId();
    nodeStateManager.getAnomalyDetector(detectorId, onGetDetector(requestToRetry, index, detectorId, result));
}
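
Note how the method walks the list through an async callback chain rather than a loop: each element either advances immediately (when no AnomalyResult can be extracted) or advances from inside the detector-fetch callback. A simplified sketch of that pattern, with a hypothetical fetchDetector standing in for nodeStateManager.getAnomalyDetector:

import java.util.List;
import java.util.Optional;

class RetryLoopSketch {

    // Hypothetical async lookup; runs the continuation once the detector is resolved.
    void fetchDetector(String detectorId, Runnable continuation) {
        continuation.run(); // synchronous stand-in for the real async call
    }

    // Hypothetical extraction; empty means "nothing to retry for this request".
    Optional<String> extractResult(String request) {
        return request.isEmpty() ? Optional.empty() : Optional.of(request);
    }

    void retryIteration(List<String> requests, int index) {
        if (index >= requests.size()) {
            return; // all requests handled
        }
        Optional<String> result = extractResult(requests.get(index));
        if (!result.isPresent()) {
            retryIteration(requests, index + 1); // skip and advance immediately
            return;
        }
        // Advance only after the async fetch completes, mirroring onGetDetector above.
        fetchDetector(result.get(), () -> retryIteration(requests, index + 1));
    }
}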
Use of org.opensearch.ad.model.AnomalyResult in project anomaly-detection by opensearch-project.
The class AnomalyDetectionIndices, method validateCustomResultIndexAndExecute.
public <T> void validateCustomResultIndexAndExecute(String resultIndex, AnomalyDetectorFunction function, ActionListener<T> listener) {
    try {
        if (!isValidResultIndexMapping(resultIndex)) {
            logger.warn("Can't create detector with custom result index {} as its mapping is invalid", resultIndex);
            listener.onFailure(new IllegalArgumentException(CommonErrorMessages.INVALID_RESULT_INDEX_MAPPING + resultIndex));
            return;
        }
        AnomalyResult dummyResult = AnomalyResult.getDummyResult();
        IndexRequest indexRequest = new IndexRequest(resultIndex)
            .id(DUMMY_AD_RESULT_ID)
            .source(dummyResult.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS));
        // The user may have no write permission on the custom result index. We checked with the
        // security plugin team and there is no easy way to verify write permission directly, so we
        // write and then delete a dummy anomaly result to verify.
        client.index(indexRequest, ActionListener.wrap(response -> {
            logger.debug("Successfully wrote dummy AD result to result index {}", resultIndex);
            client.delete(new DeleteRequest(resultIndex).id(DUMMY_AD_RESULT_ID), ActionListener.wrap(deleteResponse -> {
                logger.debug("Successfully deleted dummy AD result from result index {}", resultIndex);
                function.execute();
            }, ex -> {
                logger.error("Failed to delete dummy AD result from result index " + resultIndex, ex);
                listener.onFailure(ex);
            }));
        }, exception -> {
            logger.error("Failed to write dummy AD result to result index " + resultIndex, exception);
            listener.onFailure(exception);
        }));
    } catch (Exception e) {
        logger.error("Failed to create detector with custom result index " + resultIndex, e);
        listener.onFailure(e);
    }
}
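
A hypothetical call site for this probe; `indices`, `detector`, `createDetector`, and `getResultIndex` are illustrative names assumed for the example, not taken from the snippet above:

// Gate detector creation on the custom result index accepting a dummy write.
indices.validateCustomResultIndexAndExecute(
    detector.getResultIndex(),                 // the detector's configured custom result index (assumed accessor)
    () -> createDetector(detector, listener),  // AnomalyDetectorFunction: runs only if the probe succeeds
    listener                                   // receives any mapping or permission failure
);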
Use of org.opensearch.ad.model.AnomalyResult in project anomaly-detection by opensearch-project.
The class AnomalyDetectorRestApiIT, method testSearchTopAnomalyResultsWithCustomResultIndex.
public void testSearchTopAnomalyResultsWithCustomResultIndex() throws IOException {
    String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
    String customResultIndexName = CommonName.CUSTOM_RESULT_INDEX_PREFIX + randomAlphaOfLength(5).toLowerCase(Locale.ROOT);
    Map<String, String> categoryFieldsAndTypes = new HashMap<String, String>() {
        {
            put("keyword-field", "keyword");
            put("ip-field", "ip");
        }
    };
    String testIndexData = "{\"keyword-field\": \"field-1\", \"ip-field\": \"1.2.3.4\", \"timestamp\": 1}";
    TestHelpers.createIndexWithHCADFields(client(), indexName, categoryFieldsAndTypes);
    TestHelpers.ingestDataToIndex(client(), indexName, TestHelpers.toHttpEntity(testIndexData));
    AnomalyDetector detector = createAnomalyDetector(
        TestHelpers.randomAnomalyDetectorUsingCategoryFields(
            randomAlphaOfLength(10),
            TIME_FIELD,
            ImmutableList.of(indexName),
            categoryFieldsAndTypes.keySet().stream().collect(Collectors.toList()),
            customResultIndexName
        ),
        true,
        client()
    );
    Map<String, Object> entityAttrs = new HashMap<String, Object>() {
        {
            put("keyword-field", "field-1");
            put("ip-field", "1.2.3.4");
        }
    };
    AnomalyResult anomalyResult = TestHelpers
        .randomHCADAnomalyDetectResult(detector.getDetectorId(), null, entityAttrs, 0.5, 0.8, null, 5L, 5L);
    TestHelpers.ingestDataToIndex(client(), customResultIndexName, TestHelpers.toHttpEntity(anomalyResult));
    Response response = searchTopAnomalyResults(detector.getDetectorId(), false, "{\"start_time_ms\":0, \"end_time_ms\":10}", client());
    Map<String, Object> responseMap = entityAsMap(response);
    @SuppressWarnings("unchecked")
    List<Map<String, Object>> buckets = (ArrayList<Map<String, Object>>) XContentMapValues.extractValue("buckets", responseMap);
    assertEquals(1, buckets.size());
    @SuppressWarnings("unchecked")
    Map<String, String> bucketKey1 = (Map<String, String>) buckets.get(0).get("key");
    assertEquals("field-1", bucketKey1.get("keyword-field"));
    assertEquals("1.2.3.4", bucketKey1.get("ip-field"));
}
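
For reference, the assertions above expect a response body shaped roughly like the following (illustrative, reconstructed from the asserts; other bucket fields omitted):

{
  "buckets": [
    {
      "key": {
        "keyword-field": "field-1",
        "ip-field": "1.2.3.4"
      }
    }
  ]
}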