Use of org.opensearch.ad.common.exception.ADValidationException in the anomaly-detection project by opensearch-project: class AbstractAnomalyDetectorActionHandler, method onSearchADNameResponse.
// Rejects the request when another detector already uses the given name.
protected void onSearchADNameResponse(SearchResponse response, String detectorId, String name, boolean indexingDryRun) throws IOException {
    if (response.getHits().getTotalHits().value > 0) {
        String errorMsg = String.format(
            Locale.ROOT,
            DUPLICATE_DETECTOR_MSG,
            name,
            Arrays.stream(response.getHits().getHits()).map(hit -> hit.getId()).collect(Collectors.toList())
        );
        logger.warn(errorMsg);
        listener.onFailure(new ADValidationException(errorMsg, DetectorValidationIssueType.NAME, ValidationAspect.DETECTOR));
    } else {
        tryIndexingAnomalyDetector(indexingDryRun);
    }
}
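The failure path above is the common pattern across this listing: build a message, then hand the listener an ADValidationException carrying the issue type and the validation aspect. Below is a minimal standalone sketch of that pattern, assuming the org.opensearch.ad.model package locations for DetectorValidationIssueType and ValidationAspect, the pre-2.x org.opensearch.action.ActionListener package, and an illustrative message in place of the DUPLICATE_DETECTOR_MSG constant; the class and method names are hypothetical.

import java.util.Locale;

import org.opensearch.action.ActionListener;
import org.opensearch.ad.common.exception.ADValidationException;
import org.opensearch.ad.model.DetectorValidationIssueType;
import org.opensearch.ad.model.ValidationAspect;

public final class DuplicateNameCheckSketch {

    private DuplicateNameCheckSketch() {}

    // Fails the supplied listener the same way onSearchADNameResponse does when the name
    // search returns hits. The message text is illustrative, not the project's constant.
    static void failForDuplicateName(ActionListener<?> listener, String name) {
        String errorMsg = String.format(Locale.ROOT, "Detector name [%s] is already in use", name);
        listener.onFailure(new ADValidationException(errorMsg, DetectorValidationIssueType.NAME, ValidationAspect.DETECTOR));
    }
}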
Use of org.opensearch.ad.common.exception.ADValidationException in the anomaly-detection project by opensearch-project: class AbstractAnomalyDetectorActionHandler, method validateTimeField.
protected void validateTimeField(boolean indexingDryRun) {
    String givenTimeField = anomalyDetector.getTimeField();
    GetFieldMappingsRequest getMappingsRequest = new GetFieldMappingsRequest();
    getMappingsRequest.indices(anomalyDetector.getIndices().toArray(new String[0])).fields(givenTimeField);
    getMappingsRequest.indicesOptions(IndicesOptions.strictExpand());

    // comments explaining fieldMappingResponse parsing can be found inside following method:
    // AbstractAnomalyDetectorActionHandler.validateCategoricalField(String, boolean)
    ActionListener<GetFieldMappingsResponse> mappingsListener = ActionListener.wrap(getMappingsResponse -> {
        boolean foundField = false;
        Map<String, Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetadata>>> mappingsByIndex = getMappingsResponse.mappings();
        for (Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetadata>> mappingsByType : mappingsByIndex.values()) {
            for (Map<String, GetFieldMappingsResponse.FieldMappingMetadata> mappingsByField : mappingsByType.values()) {
                for (Map.Entry<String, GetFieldMappingsResponse.FieldMappingMetadata> field2Metadata : mappingsByField.entrySet()) {
                    GetFieldMappingsResponse.FieldMappingMetadata fieldMetadata = field2Metadata.getValue();
                    if (fieldMetadata != null) {
                        // sourceAsMap returns sth like {host2={type=keyword}} with host2 being a nested field
                        Map<String, Object> fieldMap = fieldMetadata.sourceAsMap();
                        if (fieldMap != null) {
                            for (Object type : fieldMap.values()) {
                                if (type instanceof Map) {
                                    foundField = true;
                                    Map<String, Object> metadataMap = (Map<String, Object>) type;
                                    String typeName = (String) metadataMap.get(CommonName.TYPE);
                                    if (!typeName.equals(CommonName.DATE_TYPE)) {
                                        listener.onFailure(
                                            new ADValidationException(
                                                String.format(Locale.ROOT, CommonErrorMessages.INVALID_TIMESTAMP, givenTimeField),
                                                DetectorValidationIssueType.TIMEFIELD_FIELD,
                                                ValidationAspect.DETECTOR
                                            )
                                        );
                                        return;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        if (!foundField) {
            listener.onFailure(
                new ADValidationException(
                    String.format(Locale.ROOT, CommonErrorMessages.NON_EXISTENT_TIMESTAMP, givenTimeField),
                    DetectorValidationIssueType.TIMEFIELD_FIELD,
                    ValidationAspect.DETECTOR
                )
            );
            return;
        }
        prepareAnomalyDetectorIndexing(indexingDryRun);
    }, error -> {
        String message = String.format(Locale.ROOT, "Fail to get the index mapping of %s", anomalyDetector.getIndices());
        logger.error(message, error);
        listener.onFailure(new IllegalArgumentException(message));
    });
    client.execute(GetFieldMappingsAction.INSTANCE, getMappingsRequest, mappingsListener);
}
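The nested loops above only need the type entry of each FieldMappingMetadata. The helper below isolates that extraction step as a sketch; it assumes the sourceAsMap shape shown in the inline comment ({host2={type=keyword}}) and uses the literal keys "type" and "date" where the handler uses the CommonName constants. The class and method names are hypothetical.

import java.util.Map;

import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;

final class TimeFieldTypeSketch {

    private TimeFieldTypeSketch() {}

    // Returns true if the field is mapped as "date", false if it is mapped to another type,
    // and null if no usable type entry is present in the mapping metadata.
    static Boolean isDateField(GetFieldMappingsResponse.FieldMappingMetadata fieldMetadata) {
        if (fieldMetadata == null) {
            return null;
        }
        Map<String, Object> fieldMap = fieldMetadata.sourceAsMap();
        if (fieldMap == null) {
            return null;
        }
        for (Object value : fieldMap.values()) {
            if (value instanceof Map) {
                Object typeName = ((Map<?, ?>) value).get("type"); // CommonName.TYPE in the handler
                return "date".equals(typeName);                    // CommonName.DATE_TYPE in the handler
            }
        }
        return null;
    }
}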
Use of org.opensearch.ad.common.exception.ADValidationException in the anomaly-detection project by opensearch-project: class ModelValidationActionHandler, method processDataFilterResults.
private void processDataFilterResults(SearchResponse response, long latestTime) {
    Histogram aggregate = checkBucketResultErrors(response);
    if (aggregate == null) {
        return;
    }
    double fullBucketRate = processBucketAggregationResults(aggregate);
    if (fullBucketRate < CONFIG_BUCKET_MINIMUM_SUCCESS_RATE) {
        listener.onFailure(
            new ADValidationException(CommonErrorMessages.FILTER_QUERY_TOO_SPARSE, DetectorValidationIssueType.FILTER_QUERY, ValidationAspect.MODEL)
        );
        // The branches below run only when the data is dense enough with the filter query applied.
        // For an HCAD detector, category fields are added to the bucket aggregation to check whether
        // they are the root cause of the issue; if not, the feature queries are checked for sparsity.
    } else if (anomalyDetector.isMultientityDetector()) {
        getTopEntityForCategoryField(latestTime);
    } else {
        try {
            checkFeatureQueryDelegate(latestTime);
        } catch (Exception ex) {
            logger.error(ex);
            listener.onFailure(ex);
        }
    }
}
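processBucketAggregationResults is not part of this listing; a plausible reading of fullBucketRate is the fraction of histogram buckets that actually contain documents, which is then compared against the minimum success rate. Below is a sketch of that computation under that assumption; the class and method names are hypothetical, and the real method may compute the rate differently, for example against an expected bucket count.

import org.opensearch.search.aggregations.bucket.histogram.Histogram;

final class BucketRateSketch {

    private BucketRateSketch() {}

    // Fraction of returned histogram buckets that contain at least one document.
    // Assumed stand-in for processBucketAggregationResults, whose body is not shown here.
    static double fullBucketRate(Histogram aggregate) {
        int total = aggregate.getBuckets().size();
        if (total == 0) {
            return 0.0;
        }
        long nonEmpty = aggregate.getBuckets().stream().filter(bucket -> bucket.getDocCount() > 0).count();
        return (double) nonEmpty / total;
    }
}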
Use of org.opensearch.ad.common.exception.ADValidationException in the anomaly-detection project by opensearch-project: class ModelValidationActionHandler, method processRawDataResults.
private void processRawDataResults(SearchResponse response, long latestTime) {
    Histogram aggregate = checkBucketResultErrors(response);
    if (aggregate == null) {
        return;
    }
    double fullBucketRate = processBucketAggregationResults(aggregate);
    if (fullBucketRate < INTERVAL_BUCKET_MINIMUM_SUCCESS_RATE) {
        listener.onFailure(
            new ADValidationException(CommonErrorMessages.RAW_DATA_TOO_SPARSE, DetectorValidationIssueType.INDICES, ValidationAspect.MODEL)
        );
    } else {
        checkDataFilterSparsity(latestTime);
    }
}
Use of org.opensearch.ad.common.exception.ADValidationException in the anomaly-detection project by opensearch-project: class ModelValidationActionHandler, method checkFeatureQueryDelegate.
// Runs one sparsity check per configured feature; the per-feature results are fanned in
// through multiFeatureQueriesResponseListener before the window delay recommendation runs.
private void checkFeatureQueryDelegate(long latestTime) throws IOException {
    ActionListener<MergeableList<double[]>> validateFeatureQueriesListener = ActionListener.wrap(response -> {
        windowDelayRecommendation(latestTime);
    }, exception -> {
        listener.onFailure(
            new ADValidationException(exception.getMessage(), DetectorValidationIssueType.FEATURE_ATTRIBUTES, ValidationAspect.MODEL)
        );
    });
    MultiResponsesDelegateActionListener<MergeableList<double[]>> multiFeatureQueriesResponseListener =
        new MultiResponsesDelegateActionListener<>(
            validateFeatureQueriesListener,
            anomalyDetector.getFeatureAttributes().size(),
            CommonErrorMessages.FEATURE_QUERY_TOO_SPARSE,
            false
        );

    for (Feature feature : anomalyDetector.getFeatureAttributes()) {
        AggregationBuilder aggregation = getBucketAggregation(latestTime, (IntervalTimeConfiguration) anomalyDetector.getDetectionInterval());
        BoolQueryBuilder query = QueryBuilders.boolQuery().filter(anomalyDetector.getFilterQuery());
        List<String> featureFields = ParseUtils.getFieldNamesForFeature(feature, xContentRegistry);
        for (String featureField : featureFields) {
            query.filter(QueryBuilders.existsQuery(featureField));
        }
        SearchSourceBuilder searchSourceBuilder = getSearchSourceBuilder(query, aggregation);
        SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder);
        client.search(searchRequest, ActionListener.wrap(response -> {
            Histogram aggregate = checkBucketResultErrors(response);
            if (aggregate == null) {
                return;
            }
            double fullBucketRate = processBucketAggregationResults(aggregate);
            if (fullBucketRate < CONFIG_BUCKET_MINIMUM_SUCCESS_RATE) {
                multiFeatureQueriesResponseListener.onFailure(
                    new ADValidationException(
                        CommonErrorMessages.FEATURE_QUERY_TOO_SPARSE,
                        DetectorValidationIssueType.FEATURE_ATTRIBUTES,
                        ValidationAspect.MODEL
                    )
                );
            } else {
                multiFeatureQueriesResponseListener.onResponse(
                    new MergeableList<>(new ArrayList<>(Collections.singletonList(new double[] { fullBucketRate })))
                );
            }
        }, e -> {
            logger.error(e);
            multiFeatureQueriesResponseListener.onFailure(
                new OpenSearchStatusException(CommonErrorMessages.FEATURE_QUERY_TOO_SPARSE, RestStatus.BAD_REQUEST, e)
            );
        }));
    }
}
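MultiResponsesDelegateActionListener is a project class whose implementation is not shown here; the method above uses it to fan a single delegate listener out over all feature queries. The sketch below illustrates only that fan-in pattern (collect one response per feature, fail on the first error, complete the delegate once every feature has reported). It is a simplified illustration, not the project's implementation, omits the merge and error-aggregation behavior the real class provides, and assumes the pre-2.x org.opensearch.action.ActionListener package; the class name is hypothetical.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import org.opensearch.action.ActionListener;

// Simplified fan-in listener: completes the delegate once all expected responses have
// arrived, or fails it as soon as any single response fails. Illustration only.
final class FanInListenerSketch<T> implements ActionListener<T> {

    private final ActionListener<List<T>> delegate;
    private final int expected;
    private final List<T> collected = new ArrayList<>();
    private final AtomicInteger received = new AtomicInteger(0);
    private final AtomicBoolean failed = new AtomicBoolean(false);

    FanInListenerSketch(ActionListener<List<T>> delegate, int expected) {
        this.delegate = delegate;
        this.expected = expected;
    }

    @Override
    public void onResponse(T response) {
        synchronized (collected) {
            collected.add(response);
        }
        if (received.incrementAndGet() == expected && !failed.get()) {
            delegate.onResponse(collected);
        }
    }

    @Override
    public void onFailure(Exception e) {
        if (failed.compareAndSet(false, true)) {
            delegate.onFailure(e);
        }
    }
}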