Use of org.opensearch.ad.model.FeatureData in the opensearch-project/anomaly-detection repository: the writeTo method of the AnomalyResultResponse class.
/**
 * Serializes this response to the given stream.
 * <p>
 * The write order here is the wire contract: it must exactly mirror the read order
 * in the corresponding StreamInput constructor. Optional double arrays are encoded
 * as a boolean presence flag followed by the array when present, because StreamOutput
 * has no writeOptionalDoubleArray; the shared pattern is factored into
 * {@link #writeOptionalDoubleArray(StreamOutput, double[])}.
 *
 * @param out stream to write to
 * @throws IOException if the underlying stream write fails
 */
@Override
public void writeTo(StreamOutput out) throws IOException {
    out.writeDouble(anomalyGrade);
    out.writeDouble(confidence);
    out.writeDouble(anomalyScore);
    // Features are length-prefixed so the reader knows how many to consume.
    out.writeVInt(features.size());
    for (FeatureData feature : features) {
        feature.writeTo(out);
    }
    out.writeOptionalString(error);
    out.writeOptionalLong(rcfTotalUpdates);
    out.writeOptionalLong(detectorIntervalInMinutes);
    out.writeOptionalBoolean(isHCDetector);
    out.writeOptionalInt(relativeIndex);
    writeOptionalDoubleArray(out, relevantAttribution);
    writeOptionalDoubleArray(out, pastValues);
    // expectedValuesList is a 2-D array: presence flag, then outer length, then each row.
    if (expectedValuesList != null) {
        out.writeBoolean(true);
        out.writeVInt(expectedValuesList.length);
        for (double[] expectedValues : expectedValuesList) {
            out.writeDoubleArray(expectedValues);
        }
    } else {
        out.writeBoolean(false);
    }
    writeOptionalDoubleArray(out, likelihoodOfValues);
    out.writeOptionalDouble(threshold);
}

/**
 * Writes a nullable double array as a boolean presence flag followed by the array
 * when non-null. Byte-identical to the previous inlined flag-then-array pattern.
 *
 * @param out    stream to write to
 * @param values array to write; may be null
 * @throws IOException if the underlying stream write fails
 */
private static void writeOptionalDoubleArray(StreamOutput out, double[] values) throws IOException {
    if (values != null) {
        out.writeBoolean(true);
        out.writeDoubleArray(values);
    } else {
        out.writeBoolean(false);
    }
}
Use of org.opensearch.ad.model.FeatureData in the opensearch-project/anomaly-detection repository: the detectAnomaly method of the ADBatchTaskRunner class.
/**
 * Runs anomaly detection over one "piece" of historical data for a batch (historical) task,
 * then stores the produced anomaly results and schedules the next piece.
 * <p>
 * For each interval in the piece, the data point (if any) is shingled via the feature manager
 * and scored with the task's ThresholdedRandomCutForest model. Intervals without a full shingle
 * or without data still produce an AnomalyResult carrying an error message. Results are written
 * either to the default result index or, for a custom result index, under the detector creator's
 * injected security roles.
 *
 * @param adTask           the batch task being executed
 * @param dataPoints       interval start time -> optional feature vector for that interval
 * @param pieceStartTime   start of this piece (epoch millis)
 * @param pieceEndTime     end of this piece (epoch millis)
 * @param dataStartTime    start of the overall detection range
 * @param dataEndTime      end of the overall detection range
 * @param interval         detector interval in millis
 * @param executeStartTime when execution of this piece began
 * @param internalListener listener notified on completion or failure
 */
private void detectAnomaly(ADTask adTask, Map<Long, Optional<double[]>> dataPoints, long pieceStartTime, long pieceEndTime, long dataStartTime, long dataEndTime, long interval, Instant executeStartTime, ActionListener<String> internalListener) {
String taskId = adTask.getTaskId();
// Model and shingle buffer are cached per task so state carries across pieces.
ThresholdedRandomCutForest trcf = adTaskCacheManager.getTRcfModel(taskId);
Deque<Map.Entry<Long, Optional<double[]>>> shingle = adTaskCacheManager.getShingle(taskId);
List<AnomalyResult> anomalyResults = new ArrayList<>();
long intervalEndTime = pieceStartTime;
// Walk at most pieceSize intervals, never past the end of the detection range.
for (int i = 0; i < pieceSize && intervalEndTime < dataEndTime; i++) {
// Missing interval -> empty data point; the feature manager handles the gap.
Optional<double[]> dataPoint = dataPoints.containsKey(intervalEndTime) ? dataPoints.get(intervalEndTime) : Optional.empty();
// Advance first: from here on, intervalEndTime is the END of the current interval.
intervalEndTime = intervalEndTime + interval;
SinglePointFeatures feature = featureManager.getShingledFeatureForHistoricalAnalysis(adTask.getDetector(), shingle, dataPoint, intervalEndTime);
List<FeatureData> featureData = null;
if (feature.getUnprocessedFeatures().isPresent()) {
featureData = ParseUtils.getFeatureData(feature.getUnprocessedFeatures().get(), adTask.getDetector());
}
if (!feature.getProcessedFeatures().isPresent()) {
// No scorable features: distinguish "data present but shingle incomplete" from "no data at all".
String error = feature.getUnprocessedFeatures().isPresent() ? "No full shingle in current detection window" : "No data in current detection window";
// Record an error result for this interval (intervalEndTime - interval is the interval start).
AnomalyResult anomalyResult = new AnomalyResult(adTask.getDetectorId(), adTask.getDetectorLevelTaskId(), featureData, Instant.ofEpochMilli(intervalEndTime - interval), Instant.ofEpochMilli(intervalEndTime), executeStartTime, Instant.now(), error, adTask.getEntity(), adTask.getDetector().getUser(), anomalyDetectionIndices.getSchemaVersion(ADIndex.RESULT), adTask.getEntityModelId());
anomalyResults.add(anomalyResult);
} else {
double[] point = feature.getProcessedFeatures().get();
// 0 is placeholder for timestamp. In the future, we will add
// data time stamp there.
AnomalyDescriptor descriptor = trcf.process(point, 0);
double score = descriptor.getRCFScore();
// First nonzero score marks the threshold model as trained for this task.
if (!adTaskCacheManager.isThresholdModelTrained(taskId) && score > 0) {
adTaskCacheManager.setThresholdModelTrained(taskId, true);
}
// Build the full result from the raw TRCF output, including attribution normalized against the forest.
AnomalyResult anomalyResult = AnomalyResult.fromRawTRCFResult(adTask.getDetectorId(), adTask.getDetector().getDetectorIntervalInMilliseconds(), adTask.getDetectorLevelTaskId(), score, descriptor.getAnomalyGrade(), descriptor.getDataConfidence(), featureData, Instant.ofEpochMilli(intervalEndTime - interval), Instant.ofEpochMilli(intervalEndTime), executeStartTime, Instant.now(), null, adTask.getEntity(), adTask.getDetector().getUser(), anomalyDetectionIndices.getSchemaVersion(ADIndex.RESULT), adTask.getEntityModelId(), modelManager.normalizeAttribution(trcf.getForest(), descriptor.getRelevantAttribution()), descriptor.getRelativeIndex(), descriptor.getPastValues(), descriptor.getExpectedValuesList(), descriptor.getLikelihoodOfValues(), descriptor.getThreshold());
anomalyResults.add(anomalyResult);
}
}
String user;
List<String> roles;
if (adTask.getUser() == null) {
// It's possible that user create domain with security disabled, then enable security
// after upgrading. This is for BWC, for old detectors which created when security
// disabled, the user will be null.
user = "";
// NOTE(review): empty-string settings key looks like a redacted/placeholder constant — verify against upstream source.
roles = settings.getAsList("", ImmutableList.of("all_access", "AmazonES_all_access"));
} else {
user = adTask.getUser().getName();
roles = adTask.getUser().getRoles();
}
String resultIndex = adTask.getDetector().getResultIndex();
if (resultIndex == null) {
// if result index is null, store anomaly result directly
storeAnomalyResultAndRunNextPiece(adTask, pieceEndTime, dataStartTime, dataEndTime, interval, internalListener, anomalyResults, resultIndex, null);
return;
}
// Custom result index: write under the detector creator's identity so index permissions are enforced.
try (InjectSecurity injectSecurity = new InjectSecurity(adTask.getTaskId(), settings, client.threadPool().getThreadContext())) {
// Injecting user role to verify if the user has permissions to write result to result index.
injectSecurity.inject(user, roles);
// Closing the injected context is deferred to the callback so it stays active across the async write.
storeAnomalyResultAndRunNextPiece(adTask, pieceEndTime, dataStartTime, dataEndTime, interval, internalListener, anomalyResults, resultIndex, () -> injectSecurity.close());
} catch (Exception exception) {
logger.error("Failed to inject user roles", exception);
internalListener.onFailure(exception);
}
}
Use of org.opensearch.ad.model.FeatureData in the opensearch-project/anomaly-detection repository: the toXContent method of the AnomalyResultResponse class.
/**
 * Renders this response as XContent (e.g. JSON), emitting the scalar fields,
 * the per-feature data array, and the TRCF diagnostic fields in a fixed order.
 *
 * @param builder builder to write into
 * @param params  rendering parameters passed through to nested feature objects
 * @return the same builder, for chaining
 * @throws IOException if the builder fails to write
 */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    builder
        .startObject()
        .field(ANOMALY_GRADE_JSON_KEY, anomalyGrade)
        .field(CONFIDENCE_JSON_KEY, confidence)
        .field(ANOMALY_SCORE_JSON_KEY, anomalyScore)
        .field(ERROR_JSON_KEY, error)
        .startArray(FEATURES_JSON_KEY);
    // Each feature serializes itself as one element of the array.
    for (FeatureData feature : features) {
        feature.toXContent(builder, params);
    }
    builder
        .endArray()
        .field(RCF_TOTAL_UPDATES_JSON_KEY, rcfTotalUpdates)
        .field(DETECTOR_INTERVAL_IN_MINUTES_JSON_KEY, detectorIntervalInMinutes)
        .field(RELATIVE_INDEX_FIELD_JSON_KEY, relativeIndex)
        .field(RELEVANT_ATTRIBUTION_FIELD_JSON_KEY, relevantAttribution)
        .field(PAST_VALUES_FIELD_JSON_KEY, pastValues)
        .field(EXPECTED_VAL_LIST_FIELD_JSON_KEY, expectedValuesList)
        .field(LIKELIHOOD_FIELD_JSON_KEY, likelihoodOfValues)
        .field(THRESHOLD_FIELD_JSON_KEY, threshold)
        .endObject();
    return builder;
}
Use of org.opensearch.ad.model.FeatureData in the opensearch-project/anomaly-detection repository: the getFeatureData method of the ParseUtils class.
/**
 * Maps raw feature values to {@link FeatureData} objects carrying each enabled
 * feature's id and name from the detector configuration.
 * <p>
 * Assumes {@code currentFeature} is positionally aligned with the detector's
 * enabled feature ids/names, i.e. {@code currentFeature[i]} is the value of the
 * i-th enabled feature and {@code currentFeature.length >= featureIds.size()}.
 *
 * @param currentFeature raw feature values, one per enabled feature, in order
 * @param detector       detector config providing enabled feature ids and names
 * @return a list of feature data with id, name, and value
 */
public static List<FeatureData> getFeatureData(double[] currentFeature, AnomalyDetector detector) {
    List<String> featureIds = detector.getEnabledFeatureIds();
    List<String> featureNames = detector.getEnabledFeatureNames();
    int featureLen = featureIds.size();
    // Presize: exactly one entry per enabled feature, no intermediate resizing.
    List<FeatureData> featureData = new ArrayList<>(featureLen);
    for (int i = 0; i < featureLen; i++) {
        featureData.add(new FeatureData(featureIds.get(i), featureNames.get(i), currentFeature[i]));
    }
    return featureData;
}
Use of org.opensearch.ad.model.FeatureData in the opensearch-project/anomaly-detection repository: the indexAnomalyResultException method of the AnomalyDetectorJobRunner class.
/**
 * Indexes an error-only AnomalyResult for a failed realtime job run, updates the
 * realtime task state, and optionally releases the job's scheduler lock.
 * <p>
 * The result carries no features, entity, or model id — only the error message and
 * the window-delay-adjusted data time range. Indexing failures are logged but never
 * rethrown, so the lock release in the finally block always runs when requested.
 *
 * @param jobParameter       the scheduled detector job
 * @param lockService        scheduler lock service used for release
 * @param lock               lock held for this run; released when releaseLock is true
 * @param detectionStartTime start of the detection window (before window delay)
 * @param executionStartTime start of job execution (before window delay)
 * @param errorMessage       error to record on the result and task
 * @param releaseLock        whether to release the lock in the finally block
 * @param taskState          realtime task state to record (e.g. failed)
 */
private void indexAnomalyResultException(AnomalyDetectorJob jobParameter, LockService lockService, LockModel lock, Instant detectionStartTime, Instant executionStartTime, String errorMessage, boolean releaseLock, String taskState) {
String detectorId = jobParameter.getName();
try {
// Shift both timestamps back by the configured window delay to get the data time range.
IntervalTimeConfiguration windowDelay = (IntervalTimeConfiguration) jobParameter.getWindowDelay();
Instant dataStartTime = detectionStartTime.minus(windowDelay.getInterval(), windowDelay.getUnit());
Instant dataEndTime = executionStartTime.minus(windowDelay.getInterval(), windowDelay.getUnit());
User user = jobParameter.getUser();
AnomalyResult anomalyResult = new AnomalyResult(detectorId, // no task id
null, new ArrayList<FeatureData>(), dataStartTime, dataEndTime, executionStartTime, Instant.now(), errorMessage, // single-stream detectors have no entity
null, user, anomalyDetectionIndices.getSchemaVersion(ADIndex.RESULT), // no model id
null);
String resultIndex = jobParameter.getResultIndex();
// Fall back to the default result index if the configured custom index doesn't exist.
if (resultIndex != null && !anomalyDetectionIndices.doesIndexExist(resultIndex)) {
// Set result index as null, will write exception to default result index.
anomalyResultHandler.index(anomalyResult, detectorId, null);
} else {
anomalyResultHandler.index(anomalyResult, detectorId, resultIndex);
}
updateLatestRealtimeTask(detectorId, taskState, null, null, errorMessage);
} catch (Exception e) {
// Best-effort: never let a result-indexing failure propagate out of the job runner.
log.error("Failed to index anomaly result for " + detectorId, e);
} finally {
if (releaseLock) {
releaseLock(jobParameter, lockService, lock);
}
}
}
Aggregations