Use of org.opensearch.ad.model.AnomalyResult in project anomaly-detection by opensearch-project.
From class MultiEntityProfileRunnerTests, method setUpClientSearch.
@SuppressWarnings("unchecked")
private void setUpClientSearch(InittedEverResultStatus inittedEverResultStatus) {
    doAnswer(invocation -> {
        Object[] args = invocation.getArguments();
        SearchRequest request = (SearchRequest) args[0];
        ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) args[1];
        AnomalyResult result = null;
        if (request.source().query().toString().contains(AnomalyResult.ANOMALY_SCORE_FIELD)) {
            switch (inittedEverResultStatus) {
                case INITTED:
                    result = TestHelpers.randomAnomalyDetectResult(0.87);
                    listener.onResponse(TestHelpers.createSearchResponse(result));
                    break;
                case NOT_INITTED:
                    listener.onResponse(TestHelpers.createEmptySearchResponse());
                    break;
                default:
                    assertTrue("should not reach here", false);
                    break;
            }
        }
        return null;
    }).when(client).search(any(), any());
}
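For orientation, a test built on this stub might look like the sketch below. This is a minimal, hypothetical example, assuming a Mockito-based test class in which runner is the MultiEntityProfileRunner under test and detectorId and profilesToCollect are fixture fields; these names are illustrative and not taken from the snippet above.

public void testProfileWithInittedDetector() throws InterruptedException {
    setUpClientSearch(InittedEverResultStatus.INITTED);
    CountDownLatch latch = new CountDownLatch(1);
    // The stubbed client.search(...) above answers the anomaly-score query
    // with a non-empty response, so profiling should succeed rather than fail.
    runner.profile(detectorId, ActionListener.wrap(profile -> {
        assertNotNull(profile);
        latch.countDown();
    }, e -> {
        fail("Unexpected failure: " + e.getMessage());
        latch.countDown();
    }), profilesToCollect);
    assertTrue(latch.await(10, TimeUnit.SECONDS));
}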
Use of org.opensearch.ad.model.AnomalyResult in project anomaly-detection by opensearch-project.
From class TestHelpers, method randomHCADAnomalyDetectResult.
public static AnomalyResult randomHCADAnomalyDetectResult(
    String detectorId,
    String taskId,
    Map<String, Object> entityAttrs,
    double score,
    double grade,
    String error,
    Long startTimeEpochMillis,
    Long endTimeEpochMillis
) {
    List<DataByFeatureId> relevantAttribution = new ArrayList<>();
    relevantAttribution.add(new DataByFeatureId(randomAlphaOfLength(5), randomDoubleBetween(0, 1.0, true)));
    relevantAttribution.add(new DataByFeatureId(randomAlphaOfLength(5), randomDoubleBetween(0, 1.0, true)));
    List<DataByFeatureId> pastValues = new ArrayList<>();
    pastValues.add(new DataByFeatureId(randomAlphaOfLength(5), randomDouble()));
    pastValues.add(new DataByFeatureId(randomAlphaOfLength(5), randomDouble()));
    List<ExpectedValueList> expectedValuesList = new ArrayList<>();
    List<DataByFeatureId> expectedValues = new ArrayList<>();
    expectedValues.add(new DataByFeatureId(randomAlphaOfLength(5), randomDouble()));
    expectedValues.add(new DataByFeatureId(randomAlphaOfLength(5), randomDouble()));
    expectedValuesList.add(new ExpectedValueList(randomDoubleBetween(0, 1.0, true), expectedValues));
    return new AnomalyResult(
        detectorId == null ? randomAlphaOfLength(5) : detectorId,
        taskId,
        score,
        grade,
        randomDouble(),
        ImmutableList.of(randomFeatureData(), randomFeatureData()),
        startTimeEpochMillis == null ? Instant.now().truncatedTo(ChronoUnit.SECONDS) : Instant.ofEpochMilli(startTimeEpochMillis),
        endTimeEpochMillis == null ? Instant.now().truncatedTo(ChronoUnit.SECONDS) : Instant.ofEpochMilli(endTimeEpochMillis),
        startTimeEpochMillis == null ? Instant.now().truncatedTo(ChronoUnit.SECONDS) : Instant.ofEpochMilli(startTimeEpochMillis),
        endTimeEpochMillis == null ? Instant.now().truncatedTo(ChronoUnit.SECONDS) : Instant.ofEpochMilli(endTimeEpochMillis),
        error,
        entityAttrs == null ? Entity.createSingleAttributeEntity(randomAlphaOfLength(5), randomAlphaOfLength(5)) : Entity.createEntityByReordering(entityAttrs),
        randomUser(),
        CommonValue.NO_SCHEMA_VERSION,
        null,
        Instant.now().truncatedTo(ChronoUnit.SECONDS),
        relevantAttribution,
        pastValues,
        expectedValuesList,
        randomDoubleBetween(1.1, 10.0, true)
    );
}
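As a usage illustration, a test could call the helper as below. The attribute keys and scores are sample values, and getAnomalyScore()/getAnomalyGrade() are assumed to be the standard AnomalyResult accessors.

Map<String, Object> entityAttrs = new HashMap<>();
entityAttrs.put("host", "server_1");
entityAttrs.put("service", "checkout");
// Fixed score and grade, no error, and null timestamps so the helper
// falls back to the current time truncated to seconds.
AnomalyResult result = TestHelpers
    .randomHCADAnomalyDetectResult("detector-1", "task-1", entityAttrs, 0.87, 0.5, null, null, null);
assertEquals(0.87, result.getAnomalyScore(), 0.0001);
assertEquals(0.5, result.getAnomalyGrade(), 0.0001);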
Use of org.opensearch.ad.model.AnomalyResult in project anomaly-detection by opensearch-project.
From class AnomalyResultBulkIndexHandler, method bulkSaveDetectorResult.
private void bulkSaveDetectorResult(String resultIndex, List<AnomalyResult> anomalyResults, ActionListener<BulkResponse> listener) {
    BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
    anomalyResults.forEach(anomalyResult -> {
        try (XContentBuilder builder = jsonBuilder()) {
            IndexRequest indexRequest = new IndexRequest(resultIndex)
                .source(anomalyResult.toXContent(builder, RestHandlerUtils.XCONTENT_WITH_TYPE));
            bulkRequestBuilder.add(indexRequest);
        } catch (Exception e) {
            String error = "Failed to prepare request to bulk index anomaly results";
            LOG.error(error, e);
            throw new AnomalyDetectionException(error);
        }
    });
    client.bulk(bulkRequestBuilder.request(), ActionListener.wrap(r -> {
        if (r.hasFailures()) {
            String failureMessage = r.buildFailureMessage();
            LOG.warn("Failed to bulk index AD results: " + failureMessage);
            listener.onFailure(new AnomalyDetectionException(failureMessage));
        } else {
            listener.onResponse(r);
        }
    }, e -> {
        LOG.error("Bulk indexing AD results failed", e);
        listener.onFailure(e);
    }));
}
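One design note: hasFailures() fails the whole batch even when only a single item failed. A caller that wants partial-success semantics could inspect per-item responses instead. A minimal sketch, assuming the standard BulkItemResponse API; the tallying logic is illustrative, not from the project:

client.bulk(bulkRequestBuilder.request(), ActionListener.wrap(r -> {
    int succeeded = 0;
    List<String> failureMessages = new ArrayList<>();
    // Walk per-item results instead of rejecting the whole batch
    // on the first failure.
    for (BulkItemResponse item : r.getItems()) {
        if (item.isFailed()) {
            failureMessages.add(item.getFailureMessage());
        } else {
            succeeded++;
        }
    }
    LOG.info("Bulk indexed {} anomaly results with {} item failures", succeeded, failureMessages.size());
    listener.onResponse(r);
}, e -> {
    LOG.error("Bulk request itself failed", e);
    listener.onFailure(e);
}));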
Use of org.opensearch.ad.model.AnomalyResult in project anomaly-detection by opensearch-project.
From class ADBatchTaskRunner, method storeAnomalyResultAndRunNextPiece.
private void storeAnomalyResultAndRunNextPiece(
    ADTask adTask,
    long pieceEndTime,
    long dataStartTime,
    long dataEndTime,
    long interval,
    ActionListener<String> internalListener,
    List<AnomalyResult> anomalyResults,
    String resultIndex,
    CheckedRunnable<?> runBefore
) {
    ActionListener<BulkResponse> actionListener = new ThreadedActionListener<>(logger, threadPool, AD_BATCH_TASK_THREAD_POOL_NAME, ActionListener.wrap(r -> {
        try {
            runNextPiece(adTask, pieceEndTime, dataStartTime, dataEndTime, interval, internalListener);
        } catch (Exception e) {
            internalListener.onFailure(e);
        }
    }, e -> {
        logger.error("Failed to bulk index anomaly results", e);
        internalListener.onFailure(e);
    }), false);
    anomalyResultBulkIndexHandler.bulkIndexAnomalyResult(resultIndex, anomalyResults, runBefore == null ? actionListener : ActionListener.runBefore(actionListener, runBefore));
}
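The runBefore wrapper guarantees that the cleanup runnable executes before the wrapped listener is notified, on the success path as well as the failure path. A self-contained illustration of that contract, assuming the OpenSearch core ActionListener API; the class and printed strings are hypothetical:

import org.opensearch.action.ActionListener;

public class RunBeforeDemo {
    public static void main(String[] args) {
        ActionListener<String> inner = ActionListener.wrap(
            r -> System.out.println("onResponse: " + r),
            e -> System.out.println("onFailure: " + e.getMessage())
        );
        // The cleanup hook runs before the wrapped listener is notified,
        // regardless of which branch fires.
        ActionListener<String> wrapped = ActionListener.runBefore(inner, () -> System.out.println("cleanup ran"));
        wrapped.onResponse("ok"); // prints "cleanup ran", then "onResponse: ok"
    }
}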
Use of org.opensearch.ad.model.AnomalyResult in project anomaly-detection by opensearch-project.
From class ADBatchTaskRunner, method detectAnomaly.
private void detectAnomaly(
    ADTask adTask,
    Map<Long, Optional<double[]>> dataPoints,
    long pieceStartTime,
    long pieceEndTime,
    long dataStartTime,
    long dataEndTime,
    long interval,
    Instant executeStartTime,
    ActionListener<String> internalListener
) {
    String taskId = adTask.getTaskId();
    ThresholdedRandomCutForest trcf = adTaskCacheManager.getTRcfModel(taskId);
    Deque<Map.Entry<Long, Optional<double[]>>> shingle = adTaskCacheManager.getShingle(taskId);
    List<AnomalyResult> anomalyResults = new ArrayList<>();
    long intervalEndTime = pieceStartTime;
    for (int i = 0; i < pieceSize && intervalEndTime < dataEndTime; i++) {
        Optional<double[]> dataPoint = dataPoints.getOrDefault(intervalEndTime, Optional.empty());
        intervalEndTime = intervalEndTime + interval;
        SinglePointFeatures feature = featureManager.getShingledFeatureForHistoricalAnalysis(adTask.getDetector(), shingle, dataPoint, intervalEndTime);
        List<FeatureData> featureData = null;
        if (feature.getUnprocessedFeatures().isPresent()) {
            featureData = ParseUtils.getFeatureData(feature.getUnprocessedFeatures().get(), adTask.getDetector());
        }
        if (!feature.getProcessedFeatures().isPresent()) {
            String error = feature.getUnprocessedFeatures().isPresent()
                ? "No full shingle in current detection window"
                : "No data in current detection window";
            AnomalyResult anomalyResult = new AnomalyResult(
                adTask.getDetectorId(),
                adTask.getDetectorLevelTaskId(),
                featureData,
                Instant.ofEpochMilli(intervalEndTime - interval),
                Instant.ofEpochMilli(intervalEndTime),
                executeStartTime,
                Instant.now(),
                error,
                adTask.getEntity(),
                adTask.getDetector().getUser(),
                anomalyDetectionIndices.getSchemaVersion(ADIndex.RESULT),
                adTask.getEntityModelId()
            );
            anomalyResults.add(anomalyResult);
        } else {
            double[] point = feature.getProcessedFeatures().get();
            // 0 is a placeholder for the timestamp. In the future, we will
            // pass the data timestamp here.
            AnomalyDescriptor descriptor = trcf.process(point, 0);
            double score = descriptor.getRCFScore();
            if (!adTaskCacheManager.isThresholdModelTrained(taskId) && score > 0) {
                adTaskCacheManager.setThresholdModelTrained(taskId, true);
            }
            AnomalyResult anomalyResult = AnomalyResult.fromRawTRCFResult(
                adTask.getDetectorId(),
                adTask.getDetector().getDetectorIntervalInMilliseconds(),
                adTask.getDetectorLevelTaskId(),
                score,
                descriptor.getAnomalyGrade(),
                descriptor.getDataConfidence(),
                featureData,
                Instant.ofEpochMilli(intervalEndTime - interval),
                Instant.ofEpochMilli(intervalEndTime),
                executeStartTime,
                Instant.now(),
                null,
                adTask.getEntity(),
                adTask.getDetector().getUser(),
                anomalyDetectionIndices.getSchemaVersion(ADIndex.RESULT),
                adTask.getEntityModelId(),
                modelManager.normalizeAttribution(trcf.getForest(), descriptor.getRelevantAttribution()),
                descriptor.getRelativeIndex(),
                descriptor.getPastValues(),
                descriptor.getExpectedValuesList(),
                descriptor.getLikelihoodOfValues(),
                descriptor.getThreshold()
            );
            anomalyResults.add(anomalyResult);
        }
    }
    String user;
    List<String> roles;
    if (adTask.getUser() == null) {
        // It's possible that a user created the domain with security disabled,
        // then enabled security after upgrading. For backward compatibility,
        // the user is null on old detectors created while security was disabled.
        user = "";
        roles = settings.getAsList("", ImmutableList.of("all_access", "AmazonES_all_access"));
    } else {
        user = adTask.getUser().getName();
        roles = adTask.getUser().getRoles();
    }
    String resultIndex = adTask.getDetector().getResultIndex();
    if (resultIndex == null) {
        // If there is no custom result index, store anomaly results directly.
        storeAnomalyResultAndRunNextPiece(adTask, pieceEndTime, dataStartTime, dataEndTime, interval, internalListener, anomalyResults, resultIndex, null);
        return;
    }
    try (InjectSecurity injectSecurity = new InjectSecurity(adTask.getTaskId(), settings, client.threadPool().getThreadContext())) {
        // Inject the user's roles to verify that the user has permission to
        // write results to the custom result index.
        injectSecurity.inject(user, roles);
        storeAnomalyResultAndRunNextPiece(adTask, pieceEndTime, dataStartTime, dataEndTime, interval, internalListener, anomalyResults, resultIndex, () -> injectSecurity.close());
    } catch (Exception exception) {
        logger.error("Failed to inject user roles", exception);
        internalListener.onFailure(exception);
    }
}
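To make the scoring step concrete, the sketch below feeds a one-dimensional series through a ThresholdedRandomCutForest, mirroring how trcf.process(point, 0) is used above. It assumes the Random Cut Forest library's parkservices builder API; all parameter values and the injected spike are illustrative.

import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;

public class TrcfDemo {
    public static void main(String[] args) {
        // One input dimension with a shingle of 8, so the forest sees
        // 8-dimensional shingled points, as in the batch runner.
        ThresholdedRandomCutForest trcf = ThresholdedRandomCutForest.builder()
            .dimensions(8)
            .shingleSize(8)
            .internalShinglingEnabled(true)
            .build();
        for (int i = 0; i < 200; i++) {
            double value = (i == 150) ? 100.0 : Math.sin(i / 10.0); // inject one spike
            AnomalyDescriptor descriptor = trcf.process(new double[] { value }, i);
            // A positive grade marks the point as anomalous after the
            // threshold model has seen enough data.
            if (descriptor.getAnomalyGrade() > 0) {
                System.out.printf("t=%d score=%.3f grade=%.3f%n", i, descriptor.getRCFScore(), descriptor.getAnomalyGrade());
            }
        }
    }
}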