
Example 6 with Entity

Use of org.opensearch.ad.model.Entity in project anomaly-detection by opensearch-project.

Class SearchFeatureDao, method getColdStartSamplesForPeriods:

public void getColdStartSamplesForPeriods(AnomalyDetector detector, List<Entry<Long, Long>> ranges, Entity entity, boolean includesEmptyBucket, ActionListener<List<Optional<double[]>>> listener) throws IOException {
    SearchRequest request = createColdStartFeatureSearchRequest(detector, ranges, entity);
    client.search(request, ActionListener.wrap(response -> {
        Aggregations aggs = response.getAggregations();
        if (aggs == null) {
            listener.onResponse(Collections.emptyList());
            return;
        }
        long docCountThreshold = includesEmptyBucket ? -1 : 0;
        // Extract buckets and order by from_as_string. Currently by default it is ascending. Better not to assume it.
        // Example responses from date range bucket aggregation:
        // "aggregations":{"date_range":{"buckets":[{"key":"1598865166000-1598865226000","from":1.598865166E12,"
        // from_as_string":"1598865166000","to":1.598865226E12,"to_as_string":"1598865226000","doc_count":3,
        // "deny_max":{"value":154.0}},{"key":"1598869006000-1598869066000","from":1.598869006E12,
        // "from_as_string":"1598869006000","to":1.598869066E12,"to_as_string":"1598869066000","doc_count":3,
        // "deny_max":{"value":141.0}},
        // We don't want to use default 0 for sum/count aggregation as it might cause false positives during scoring.
        // Terms aggregation only returns non-zero count values. If we used a lot of zeros during cold start,
        // we would trigger alarms very easily.
        listener
            .onResponse(
                aggs
                    .asList()
                    .stream()
                    .filter(InternalDateRange.class::isInstance)
                    .flatMap(agg -> ((InternalDateRange) agg).getBuckets().stream())
                    .filter(bucket -> bucket.getFrom() != null && bucket.getFrom() instanceof ZonedDateTime)
                    .filter(bucket -> bucket.getDocCount() > docCountThreshold)
                    .sorted(Comparator.comparing((Bucket bucket) -> (ZonedDateTime) bucket.getFrom()))
                    .map(bucket -> parseBucket(bucket, detector.getEnabledFeatureIds()))
                    .collect(Collectors.toList())
            );
    }, listener::onFailure));
}
Also used : Arrays(java.util.Arrays) Max(org.opensearch.search.aggregations.metrics.Max) Aggregation(org.opensearch.search.aggregations.Aggregation) ZonedDateTime(java.time.ZonedDateTime) AnomalyDetectionException(org.opensearch.ad.common.exception.AnomalyDetectionException) AggregationBuilder(org.opensearch.search.aggregations.AggregationBuilder) FieldSortBuilder(org.opensearch.search.sort.FieldSortBuilder) Locale(java.util.Locale) Map(java.util.Map) ParseUtils.batchFeatureQuery(org.opensearch.ad.util.ParseUtils.batchFeatureQuery) ActionListener(org.opensearch.action.ActionListener) Interpolator(org.opensearch.ad.dataprocessor.Interpolator) Client(org.opensearch.client.Client) Settings(org.opensearch.common.settings.Settings) MultiBucketsAggregation(org.opensearch.search.aggregations.bucket.MultiBucketsAggregation) Terms(org.opensearch.search.aggregations.bucket.terms.Terms) Collectors(java.util.stream.Collectors) List(java.util.List) Logger(org.apache.logging.log4j.Logger) PREVIEW_TIMEOUT_IN_MILLIS(org.opensearch.ad.settings.AnomalyDetectorSettings.PREVIEW_TIMEOUT_IN_MILLIS) SearchSourceBuilder(org.opensearch.search.builder.SearchSourceBuilder) Entry(java.util.Map.Entry) DATE_HISTOGRAM(org.opensearch.ad.constant.CommonName.DATE_HISTOGRAM) Optional(java.util.Optional) Bucket(org.opensearch.search.aggregations.bucket.range.InternalDateRange.Bucket) TermsValuesSourceBuilder(org.opensearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder) BoolQueryBuilder(org.opensearch.index.query.BoolQueryBuilder) InternalDateRange(org.opensearch.search.aggregations.bucket.range.InternalDateRange) MatrixUtils.createRealMatrix(org.apache.commons.math3.linear.MatrixUtils.createRealMatrix) HashMap(java.util.HashMap) Aggregations(org.opensearch.search.aggregations.Aggregations) ArrayList(java.util.ArrayList) SortOrder(org.opensearch.search.sort.SortOrder) PAGE_SIZE(org.opensearch.ad.settings.AnomalyDetectorSettings.PAGE_SIZE) AnomalyDetector(org.opensearch.ad.model.AnomalyDetector) SearchRequest(org.opensearch.action.search.SearchRequest) SearchResponse(org.opensearch.action.search.SearchResponse) SimpleEntry(java.util.AbstractMap.SimpleEntry) MAX_ENTITIES_FOR_PREVIEW(org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_ENTITIES_FOR_PREVIEW) QueryBuilders(org.opensearch.index.query.QueryBuilders) CommonName(org.opensearch.ad.constant.CommonName) ClientUtil(org.opensearch.ad.util.ClientUtil) RangeQueryBuilder(org.opensearch.index.query.RangeQueryBuilder) InternalComposite(org.opensearch.search.aggregations.bucket.composite.InternalComposite) TermQueryBuilder(org.opensearch.index.query.TermQueryBuilder) IOException(java.io.IOException) PipelineAggregatorBuilders(org.opensearch.search.aggregations.PipelineAggregatorBuilders) Min(org.opensearch.search.aggregations.metrics.Min) CompositeAggregation(org.opensearch.search.aggregations.bucket.composite.CompositeAggregation) AggregationBuilders(org.opensearch.search.aggregations.AggregationBuilders) Entity(org.opensearch.ad.model.Entity) NamedXContentRegistry(org.opensearch.common.xcontent.NamedXContentRegistry) ClusterService(org.opensearch.cluster.service.ClusterService) Clock(java.time.Clock) ArrayDeque(java.util.ArrayDeque) Comparator(java.util.Comparator) IntervalTimeConfiguration(org.opensearch.ad.model.IntervalTimeConfiguration) Collections(java.util.Collections) LogManager(org.apache.logging.log4j.LogManager) ParseUtils(org.opensearch.ad.util.ParseUtils) SearchRequest(org.opensearch.action.search.SearchRequest) 
ZonedDateTime(java.time.ZonedDateTime) Bucket(org.opensearch.search.aggregations.bucket.range.InternalDateRange.Bucket) Aggregations(org.opensearch.search.aggregations.Aggregations) InternalDateRange(org.opensearch.search.aggregations.bucket.range.InternalDateRange)
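
The comments in getColdStartSamplesForPeriods above quote a sample response from a date_range bucket aggregation, one bucket per cold-start sample range. As a rough, hypothetical sketch only (createColdStartFeatureSearchRequest itself is not shown in this example, and the "deny_max"/"deny" feature names are assumptions borrowed from the quoted response), a request of that shape could be assembled like this:

import java.util.List;
import java.util.Map.Entry;

import org.opensearch.action.search.SearchRequest;
import org.opensearch.ad.model.AnomalyDetector;
import org.opensearch.ad.model.Entity;
import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.TermQueryBuilder;
import org.opensearch.search.aggregations.AggregationBuilders;
import org.opensearch.search.aggregations.bucket.range.DateRangeAggregationBuilder;
import org.opensearch.search.builder.SearchSourceBuilder;

// Hypothetical sketch, not the plugin's actual createColdStartFeatureSearchRequest.
static SearchRequest sketchColdStartRequest(AnomalyDetector detector, List<Entry<Long, Long>> ranges, Entity entity) {
    // Restrict documents to the detector's filter query and the entity's attribute values.
    BoolQueryBuilder filter = QueryBuilders.boolQuery().filter(detector.getFilterQuery());
    for (TermQueryBuilder term : entity.getTermQueryBuilders()) {
        filter.filter(term);
    }
    // One date_range bucket per requested sample interval, with epoch-millis keys
    // like the "from_as_string"/"to_as_string" values in the quoted response.
    DateRangeAggregationBuilder dateRange = AggregationBuilders
        .dateRange("date_range")
        .field(detector.getTimeField())
        .format("epoch_millis");
    for (Entry<Long, Long> range : ranges) {
        dateRange.addRange(range.getKey(), range.getValue());
    }
    // Assumed feature sub-aggregation; the real request derives feature aggregations from the detector config.
    dateRange.subAggregation(AggregationBuilders.max("deny_max").field("deny"));
    SearchSourceBuilder source = new SearchSourceBuilder().query(filter).aggregation(dateRange).size(0);
    return new SearchRequest().indices(detector.getIndices().toArray(new String[0])).source(source);
}

Because each bucket carries its own from/to key, the calling code can sort buckets by the parsed from timestamp instead of assuming the response order, which is exactly what the stream in the method above does.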

Example 7 with Entity

Use of org.opensearch.ad.model.Entity in project anomaly-detection by opensearch-project.

Class SearchFeatureDao, method getHighestCountEntities:

/**
 * Get the list of entities with the highest document counts, in descending order, within the specified time range
 * @param detector detector config
 * @param startTime start time of the time range
 * @param endTime end time of the time range
 * @param maxEntitiesSize maximum number of top entities to return
 * @param minimumDocCount minimum doc count required for a top entity
 * @param pageSize page size when querying a multi-category HC detector's top entities
 * @param listener listener used to return the entities
 */
public void getHighestCountEntities(AnomalyDetector detector, long startTime, long endTime, int maxEntitiesSize, int minimumDocCount, int pageSize, ActionListener<List<Entity>> listener) {
    if (!detector.isMultientityDetector()) {
        listener.onResponse(null);
        return;
    }
    RangeQueryBuilder rangeQuery = new RangeQueryBuilder(detector.getTimeField()).from(startTime).to(endTime).format("epoch_millis").includeLower(true).includeUpper(false);
    BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery().filter(rangeQuery).filter(detector.getFilterQuery());
    AggregationBuilder bucketAggs = null;
    if (detector.getCategoryField().size() == 1) {
        bucketAggs = AggregationBuilders.terms(AGG_NAME_TOP).size(maxEntitiesSize).field(detector.getCategoryField().get(0));
    } else {
        /*
             * We don't have an efficient solution for a terms aggregation over multiple fields.
             * Terms aggregation does not support collecting terms from multiple fields in the same document.
             * We have to work around the limitation by using a script to retrieve terms from multiple fields.
             * The workaround disables the global ordinals optimization and thus causes a marked slowdown:
             * scripting puts pressure on memory and has to iterate through all of the documents
             * at least once to create runtime fields.
             *
             * We evaluated composite and terms aggregation using a generated data set with one
             * million entities. Each entity has two documents. Composite aggregation finishes
             * in around 40 seconds. Terms aggregation performs differently on different clusters.
             * On a 3 data node cluster, terms aggregation does not finish running within 2 hours
             * on a 5 primary shard index. On a 15 data node cluster, terms aggregation needs 217 seconds
             * on a 15 primary shard index. On a 30 data node cluster, terms aggregation needs 47 seconds
             * on a 30 primary shard index.
             *
             * Here we work around the problem using a composite aggregation. Composite aggregation cannot
             * give top entities without collecting all aggregated results. Paginated results are returned
             * in the natural order of composite keys. This is fine for the Preview API, which needs the
             * top entities to make sure there is enough data for training and for showing results. We
             * can paginate entities and filter out entities that do not have enough docs (e.g., 256 docs).
             * As long as we have collected the desired number of entities (e.g., 5 entities), we can stop
             * pagination.
             *
             * Example composite query:
             * {
             *       "size": 0,
             *       "query": {
             *          "bool": {
             *               "filter": [{
             *                   "range": {
             *                       "@timestamp": {
             *                           "from": 1626118340000,
             *                           "to": 1626294912000,
             *                           "include_lower": true,
             *                           "include_upper": false,
             *                           "format": "epoch_millis",
             *                           "boost": 1.0
             *                       }
             *                   }
             *               }, {
             *                   "match_all": {
             *                       "boost": 1.0
             *                   }
             *               }],
             *               "adjust_pure_negative": true,
             *               "boost": 1.0
             *           }
             *       },
             *       "track_total_hits": -1,
             *       "aggregations": {
             *           "top_agg": {
             *               "composite": {
             *                   "size": 1,
             *                   "sources": [{
             *                       "service": {
             *                           "terms": {
             *                               "field": "service",
             *                               "missing_bucket": false,
             *                               "order": "asc"
             *                           }
             *                       }
             *                   }, {
             *                       "host": {
             *                           "terms": {
             *                               "field": "host",
             *                               "missing_bucket": false,
             *                               "order": "asc"
             *                           }
             *                       }
             *                   }]
             *               },
             *               "aggregations": {
             *                   "bucketSort": {
             *                       "bucket_sort": {
             *                           "sort": [{
             *                               "_count": {
             *                                   "order": "desc"
             *                               }
             *                           }],
             *                           "from": 0,
             *                           "size": 5,
             *                           "gap_policy": "SKIP"
             *                       }
             *                   }
             *               }
             *           }
             *       }
             *   }
             *
             */
        bucketAggs = AggregationBuilders
            .composite(
                AGG_NAME_TOP,
                detector.getCategoryField().stream().map(f -> new TermsValuesSourceBuilder(f).field(f)).collect(Collectors.toList())
            )
            .size(pageSize)
            .subAggregation(
                PipelineAggregatorBuilders
                    .bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("_count").order(SortOrder.DESC)))
                    .size(maxEntitiesSize)
            );
    }
    SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(boolQueryBuilder).aggregation(bucketAggs).trackTotalHits(false).size(0);
    SearchRequest searchRequest = new SearchRequest().indices(detector.getIndices().toArray(new String[0])).source(searchSourceBuilder);
    client
        .search(
            searchRequest,
            new TopEntitiesListener(
                listener,
                detector,
                searchSourceBuilder,
                // TODO: tune timeout for historical analysis based on performance test result
                clock.millis() + previewTimeoutInMilliseconds,
                maxEntitiesSize,
                minimumDocCount
            )
        );
}
Also used : Arrays(java.util.Arrays) Max(org.opensearch.search.aggregations.metrics.Max) Aggregation(org.opensearch.search.aggregations.Aggregation) ZonedDateTime(java.time.ZonedDateTime) AnomalyDetectionException(org.opensearch.ad.common.exception.AnomalyDetectionException) AggregationBuilder(org.opensearch.search.aggregations.AggregationBuilder) FieldSortBuilder(org.opensearch.search.sort.FieldSortBuilder) Locale(java.util.Locale) Map(java.util.Map) ParseUtils.batchFeatureQuery(org.opensearch.ad.util.ParseUtils.batchFeatureQuery) ActionListener(org.opensearch.action.ActionListener) Interpolator(org.opensearch.ad.dataprocessor.Interpolator) Client(org.opensearch.client.Client) Settings(org.opensearch.common.settings.Settings) MultiBucketsAggregation(org.opensearch.search.aggregations.bucket.MultiBucketsAggregation) Terms(org.opensearch.search.aggregations.bucket.terms.Terms) Collectors(java.util.stream.Collectors) List(java.util.List) Logger(org.apache.logging.log4j.Logger) PREVIEW_TIMEOUT_IN_MILLIS(org.opensearch.ad.settings.AnomalyDetectorSettings.PREVIEW_TIMEOUT_IN_MILLIS) SearchSourceBuilder(org.opensearch.search.builder.SearchSourceBuilder) Entry(java.util.Map.Entry) DATE_HISTOGRAM(org.opensearch.ad.constant.CommonName.DATE_HISTOGRAM) Optional(java.util.Optional) Bucket(org.opensearch.search.aggregations.bucket.range.InternalDateRange.Bucket) TermsValuesSourceBuilder(org.opensearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder) BoolQueryBuilder(org.opensearch.index.query.BoolQueryBuilder) InternalDateRange(org.opensearch.search.aggregations.bucket.range.InternalDateRange) MatrixUtils.createRealMatrix(org.apache.commons.math3.linear.MatrixUtils.createRealMatrix) HashMap(java.util.HashMap) Aggregations(org.opensearch.search.aggregations.Aggregations) ArrayList(java.util.ArrayList) SortOrder(org.opensearch.search.sort.SortOrder) PAGE_SIZE(org.opensearch.ad.settings.AnomalyDetectorSettings.PAGE_SIZE) AnomalyDetector(org.opensearch.ad.model.AnomalyDetector) SearchRequest(org.opensearch.action.search.SearchRequest) SearchResponse(org.opensearch.action.search.SearchResponse) SimpleEntry(java.util.AbstractMap.SimpleEntry) MAX_ENTITIES_FOR_PREVIEW(org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_ENTITIES_FOR_PREVIEW) QueryBuilders(org.opensearch.index.query.QueryBuilders) CommonName(org.opensearch.ad.constant.CommonName) ClientUtil(org.opensearch.ad.util.ClientUtil) RangeQueryBuilder(org.opensearch.index.query.RangeQueryBuilder) InternalComposite(org.opensearch.search.aggregations.bucket.composite.InternalComposite) TermQueryBuilder(org.opensearch.index.query.TermQueryBuilder) IOException(java.io.IOException) PipelineAggregatorBuilders(org.opensearch.search.aggregations.PipelineAggregatorBuilders) Min(org.opensearch.search.aggregations.metrics.Min) CompositeAggregation(org.opensearch.search.aggregations.bucket.composite.CompositeAggregation) AggregationBuilders(org.opensearch.search.aggregations.AggregationBuilders) Entity(org.opensearch.ad.model.Entity) NamedXContentRegistry(org.opensearch.common.xcontent.NamedXContentRegistry) ClusterService(org.opensearch.cluster.service.ClusterService) Clock(java.time.Clock) ArrayDeque(java.util.ArrayDeque) Comparator(java.util.Comparator) IntervalTimeConfiguration(org.opensearch.ad.model.IntervalTimeConfiguration) Collections(java.util.Collections) LogManager(org.apache.logging.log4j.LogManager) ParseUtils(org.opensearch.ad.util.ParseUtils) SearchRequest(org.opensearch.action.search.SearchRequest) 
TermsValuesSourceBuilder(org.opensearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder) AggregationBuilder(org.opensearch.search.aggregations.AggregationBuilder) BoolQueryBuilder(org.opensearch.index.query.BoolQueryBuilder) FieldSortBuilder(org.opensearch.search.sort.FieldSortBuilder) RangeQueryBuilder(org.opensearch.index.query.RangeQueryBuilder) SearchSourceBuilder(org.opensearch.search.builder.SearchSourceBuilder)
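
The block comment above argues that paging through composite buckets is acceptable for the Preview API: collect buckets page by page, drop entities that lack enough documents, and stop once enough entities are gathered, there are no more pages, or the timeout expires. A minimal, hypothetical sketch of that loop (the actual TopEntitiesListener is not shown in this example; collectPage, toEntity, and searchNextPage are illustrative names):

import java.time.Clock;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.opensearch.action.ActionListener;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.ad.model.Entity;
import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation;
import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;

// "top_agg" matches the aggregation name in the example composite query above.
static final String AGG_NAME_TOP = "top_agg";

// Hypothetical pagination sketch, not the plugin's actual TopEntitiesListener.
void collectPage(
    SearchResponse response,
    CompositeAggregationBuilder compositeAgg,   // the composite builder created in getHighestCountEntities
    List<Entity> topEntities,                   // accumulated across pages
    int maxEntitiesSize,
    int minimumDocCount,
    long expirationEpochMs,
    Clock clock,
    ActionListener<List<Entity>> listener
) {
    if (response.getAggregations() == null) {
        listener.onResponse(topEntities);
        return;
    }
    CompositeAggregation composite = response.getAggregations().get(AGG_NAME_TOP);
    for (CompositeAggregation.Bucket bucket : composite.getBuckets()) {
        // Filter out entities that do not have enough docs, as described in the comment above.
        if (bucket.getDocCount() >= minimumDocCount) {
            topEntities.add(toEntity(bucket.getKey())); // toEntity: assumed composite-key-to-Entity conversion
        }
    }
    Map<String, Object> afterKey = composite.afterKey();
    if (topEntities.size() >= maxEntitiesSize || afterKey == null || clock.millis() > expirationEpochMs) {
        // Enough entities, no further pages, or the preview timeout expired: stop paginating.
        listener.onResponse(topEntities.stream().limit(maxEntitiesSize).collect(Collectors.toList()));
        return;
    }
    // Continue after the last composite key and issue the next page of the same search.
    compositeAgg.aggregateAfter(afterKey);
    searchNextPage(listener); // assumed helper that re-runs client.search with the updated source
}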

Example 8 with Entity

Use of org.opensearch.ad.model.Entity in project anomaly-detection by opensearch-project.

Class EntityProfileRunner, method validateEntity:

/**
 * Verify whether the input entity exists, to guard against typos.
 *
 * If a user deletes the entity after the job starts, we will not be able to
 * find this entity in the index. In this case, we will not return a profile
 * for this entity even if it is running on some data node. The entity's model
 * will be deleted, either to make room for another entity or by maintenance due to long inactivity.
 *
 * @param entity Entity accessor
 * @param categoryFields category fields defined for a detector
 * @param detectorId Detector Id
 * @param profilesToCollect Profile to collect from the input
 * @param detector Detector config accessor
 * @param listener Callback to send responses.
 */
private void validateEntity(Entity entity, List<String> categoryFields, String detectorId, Set<EntityProfileName> profilesToCollect, AnomalyDetector detector, ActionListener<EntityProfile> listener) {
    Map<String, String> attributes = entity.getAttributes();
    if (attributes == null || attributes.size() != categoryFields.size()) {
        listener.onFailure(new IllegalArgumentException(EMPTY_ENTITY_ATTRIBUTES));
        return;
    }
    for (String field : categoryFields) {
        if (false == attributes.containsKey(field)) {
            listener.onFailure(new IllegalArgumentException("Cannot find " + field));
            return;
        }
    }
    BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().filter(detector.getFilterQuery());
    for (TermQueryBuilder term : entity.getTermQueryBuilders()) {
        internalFilterQuery.filter(term);
    }
    SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery).size(1);
    SearchRequest searchRequest = new SearchRequest(detector.getIndices().toArray(new String[0]), searchSourceBuilder).preference(Preference.LOCAL.toString());
    client.search(searchRequest, ActionListener.wrap(searchResponse -> {
        try {
            if (searchResponse.getHits().getHits().length == 0) {
                listener.onFailure(new IllegalArgumentException(NO_ENTITY));
                return;
            }
            prepareEntityProfile(listener, detectorId, entity, profilesToCollect, detector, categoryFields.get(0));
        } catch (Exception e) {
            listener.onFailure(new IllegalArgumentException(NO_ENTITY));
            return;
        }
    }, e -> listener.onFailure(new IllegalArgumentException(NO_ENTITY))));
}
Also used : EntityProfileRequest(org.opensearch.ad.transport.EntityProfileRequest) EntityProfileName(org.opensearch.ad.model.EntityProfileName) XContentParser(org.opensearch.common.xcontent.XContentParser) ANOMALY_DETECTOR_JOB_INDEX(org.opensearch.ad.model.AnomalyDetectorJob.ANOMALY_DETECTOR_JOB_INDEX) EntityProfileResponse(org.opensearch.ad.transport.EntityProfileResponse) Map(java.util.Map) AnomalyDetector(org.opensearch.ad.model.AnomalyDetector) SearchRequest(org.opensearch.action.search.SearchRequest) ActionListener(org.opensearch.action.ActionListener) EntityProfile(org.opensearch.ad.model.EntityProfile) NestedQueryBuilder(org.opensearch.index.query.NestedQueryBuilder) QueryBuilders(org.opensearch.index.query.QueryBuilders) Client(org.opensearch.client.Client) CommonName(org.opensearch.ad.constant.CommonName) NumericSetting(org.opensearch.ad.settings.NumericSetting) Preference(org.opensearch.cluster.routing.Preference) InitProgressProfile(org.opensearch.ad.model.InitProgressProfile) IndexNotFoundException(org.opensearch.index.IndexNotFoundException) ANOMALY_DETECTORS_INDEX(org.opensearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX) GetRequest(org.opensearch.action.get.GetRequest) LoggingDeprecationHandler(org.opensearch.common.xcontent.LoggingDeprecationHandler) Set(java.util.Set) TermQueryBuilder(org.opensearch.index.query.TermQueryBuilder) XContentParserUtils.ensureExpectedToken(org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken) ScoreMode(org.apache.lucene.search.join.ScoreMode) MultiResponsesDelegateActionListener(org.opensearch.ad.util.MultiResponsesDelegateActionListener) AggregationBuilders(org.opensearch.search.aggregations.AggregationBuilders) List(java.util.List) CommonErrorMessages(org.opensearch.ad.constant.CommonErrorMessages) Logger(org.apache.logging.log4j.Logger) AnomalyResult(org.opensearch.ad.model.AnomalyResult) Entity(org.opensearch.ad.model.Entity) EntityState(org.opensearch.ad.model.EntityState) SearchSourceBuilder(org.opensearch.search.builder.SearchSourceBuilder) AnomalyDetectorJob(org.opensearch.ad.model.AnomalyDetectorJob) NamedXContentRegistry(org.opensearch.common.xcontent.NamedXContentRegistry) Optional(java.util.Optional) XContentType(org.opensearch.common.xcontent.XContentType) IntervalTimeConfiguration(org.opensearch.ad.model.IntervalTimeConfiguration) LogManager(org.apache.logging.log4j.LogManager) EntityProfileAction(org.opensearch.ad.transport.EntityProfileAction) ParseUtils(org.opensearch.ad.util.ParseUtils) BoolQueryBuilder(org.opensearch.index.query.BoolQueryBuilder) SearchRequest(org.opensearch.action.search.SearchRequest) BoolQueryBuilder(org.opensearch.index.query.BoolQueryBuilder) TermQueryBuilder(org.opensearch.index.query.TermQueryBuilder) IndexNotFoundException(org.opensearch.index.IndexNotFoundException) SearchSourceBuilder(org.opensearch.search.builder.SearchSourceBuilder)
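
As a small usage illustration (the category field "host" and value "server_1" are made up), an Entity built for a single category field expands into one term query per attribute, which is what the loop over getTermQueryBuilders() above adds to the detector's filter query:

import org.opensearch.ad.model.Entity;
import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.TermQueryBuilder;

// Illustrative only: a single-attribute entity for category field "host".
Entity entity = Entity.createSingleAttributeEntity("host", "server_1");
BoolQueryBuilder filter = QueryBuilders.boolQuery();
for (TermQueryBuilder term : entity.getTermQueryBuilders()) {
    // Expected to be equivalent to {"term": {"host": {"value": "server_1"}}}.
    filter.filter(term);
}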

Example 9 with Entity

Use of org.opensearch.ad.model.Entity in project anomaly-detection by opensearch-project.

Class EntityProfileRunner, method prepareEntityProfile:

private void prepareEntityProfile(ActionListener<EntityProfile> listener, String detectorId, Entity entityValue, Set<EntityProfileName> profilesToCollect, AnomalyDetector detector, String categoryField) {
    EntityProfileRequest request = new EntityProfileRequest(detectorId, entityValue, profilesToCollect);
    client
        .execute(
            EntityProfileAction.INSTANCE,
            request,
            ActionListener.wrap(r -> getJob(detectorId, entityValue, profilesToCollect, detector, r, listener), listener::onFailure)
        );
}
Also used : EntityProfileRequest(org.opensearch.ad.transport.EntityProfileRequest) EntityProfileName(org.opensearch.ad.model.EntityProfileName) XContentParser(org.opensearch.common.xcontent.XContentParser) ANOMALY_DETECTOR_JOB_INDEX(org.opensearch.ad.model.AnomalyDetectorJob.ANOMALY_DETECTOR_JOB_INDEX) EntityProfileResponse(org.opensearch.ad.transport.EntityProfileResponse) Map(java.util.Map) AnomalyDetector(org.opensearch.ad.model.AnomalyDetector) SearchRequest(org.opensearch.action.search.SearchRequest) ActionListener(org.opensearch.action.ActionListener) EntityProfile(org.opensearch.ad.model.EntityProfile) NestedQueryBuilder(org.opensearch.index.query.NestedQueryBuilder) QueryBuilders(org.opensearch.index.query.QueryBuilders) Client(org.opensearch.client.Client) CommonName(org.opensearch.ad.constant.CommonName) NumericSetting(org.opensearch.ad.settings.NumericSetting) Preference(org.opensearch.cluster.routing.Preference) InitProgressProfile(org.opensearch.ad.model.InitProgressProfile) IndexNotFoundException(org.opensearch.index.IndexNotFoundException) ANOMALY_DETECTORS_INDEX(org.opensearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX) GetRequest(org.opensearch.action.get.GetRequest) LoggingDeprecationHandler(org.opensearch.common.xcontent.LoggingDeprecationHandler) Set(java.util.Set) TermQueryBuilder(org.opensearch.index.query.TermQueryBuilder) XContentParserUtils.ensureExpectedToken(org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken) ScoreMode(org.apache.lucene.search.join.ScoreMode) MultiResponsesDelegateActionListener(org.opensearch.ad.util.MultiResponsesDelegateActionListener) AggregationBuilders(org.opensearch.search.aggregations.AggregationBuilders) List(java.util.List) CommonErrorMessages(org.opensearch.ad.constant.CommonErrorMessages) Logger(org.apache.logging.log4j.Logger) AnomalyResult(org.opensearch.ad.model.AnomalyResult) Entity(org.opensearch.ad.model.Entity) EntityState(org.opensearch.ad.model.EntityState) SearchSourceBuilder(org.opensearch.search.builder.SearchSourceBuilder) AnomalyDetectorJob(org.opensearch.ad.model.AnomalyDetectorJob) NamedXContentRegistry(org.opensearch.common.xcontent.NamedXContentRegistry) Optional(java.util.Optional) XContentType(org.opensearch.common.xcontent.XContentType) IntervalTimeConfiguration(org.opensearch.ad.model.IntervalTimeConfiguration) LogManager(org.apache.logging.log4j.LogManager) EntityProfileAction(org.opensearch.ad.transport.EntityProfileAction) ParseUtils(org.opensearch.ad.util.ParseUtils) BoolQueryBuilder(org.opensearch.index.query.BoolQueryBuilder) EntityProfileRequest(org.opensearch.ad.transport.EntityProfileRequest)

Example 10 with Entity

Use of org.opensearch.ad.model.Entity in project anomaly-detection by opensearch-project.

Class ADTaskManagerTests, method testGetLocalADTaskProfilesByDetectorId:

public void testGetLocalADTaskProfilesByDetectorId() {
    doReturn(node1).when(clusterService).localNode();
    when(adTaskCacheManager.isHCTaskRunning(anyString())).thenReturn(true);
    when(adTaskCacheManager.isHCTaskCoordinatingNode(anyString())).thenReturn(true);
    List<String> tasksOfDetector = ImmutableList.of(randomAlphaOfLength(5));
    when(adTaskCacheManager.getTasksOfDetector(anyString())).thenReturn(tasksOfDetector);
    Deque<Map.Entry<Long, Optional<double[]>>> shingle = new LinkedBlockingDeque<>();
    when(adTaskCacheManager.getShingle(anyString())).thenReturn(shingle);
    ThresholdedRandomCutForest trcf = mock(ThresholdedRandomCutForest.class);
    when(adTaskCacheManager.getTRcfModel(anyString())).thenReturn(trcf);
    RandomCutForest rcf = mock(RandomCutForest.class);
    when(trcf.getForest()).thenReturn(rcf);
    when(rcf.getTotalUpdates()).thenReturn(randomLongBetween(100, 1000));
    when(adTaskCacheManager.isThresholdModelTrained(anyString())).thenReturn(true);
    when(adTaskCacheManager.getThresholdModelTrainingDataSize(anyString())).thenReturn(randomIntBetween(100, 1000));
    when(adTaskCacheManager.getModelSize(anyString())).thenReturn(randomLongBetween(100, 1000));
    Entity entity = createSingleAttributeEntity(randomAlphaOfLength(5), randomAlphaOfLength(5));
    when(adTaskCacheManager.getEntity(anyString())).thenReturn(entity);
    String detectorId = randomAlphaOfLength(5);
    ExecutorService executeService = mock(ExecutorService.class);
    when(threadPool.executor(anyString())).thenReturn(executeService);
    doAnswer(invocation -> {
        Runnable runnable = invocation.getArgument(0);
        runnable.run();
        return null;
    }).when(executeService).execute(any());
    ADTaskProfile taskProfile = adTaskManager.getLocalADTaskProfilesByDetectorId(detectorId);
    assertEquals(1, taskProfile.getEntityTaskProfiles().size());
    verify(adTaskCacheManager, times(1)).cleanExpiredHCBatchTaskRunStates();
}
Also used : Entity.createSingleAttributeEntity(org.opensearch.ad.model.Entity.createSingleAttributeEntity) Entity(org.opensearch.ad.model.Entity) LinkedBlockingDeque(java.util.concurrent.LinkedBlockingDeque) ADTaskProfile(org.opensearch.ad.model.ADTaskProfile) RandomCutForest(com.amazon.randomcutforest.RandomCutForest) ThresholdedRandomCutForest(com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest) ExecutorService(java.util.concurrent.ExecutorService) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) ThresholdedRandomCutForest(com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest)

Aggregations

Entity (org.opensearch.ad.model.Entity): 66
ActionListener (org.opensearch.action.ActionListener): 37
ArrayList (java.util.ArrayList): 36
List (java.util.List): 35
AnomalyDetector (org.opensearch.ad.model.AnomalyDetector): 34
Client (org.opensearch.client.Client): 31
Optional (java.util.Optional): 30
SearchRequest (org.opensearch.action.search.SearchRequest): 30
HashMap (java.util.HashMap): 29
Map (java.util.Map): 28
IOException (java.io.IOException): 26
LogManager (org.apache.logging.log4j.LogManager): 26
Logger (org.apache.logging.log4j.Logger): 26
SearchResponse (org.opensearch.action.search.SearchResponse): 25
Settings (org.opensearch.common.settings.Settings): 23
Set (java.util.Set): 22
GetRequest (org.opensearch.action.get.GetRequest): 22
ANOMALY_DETECTORS_INDEX (org.opensearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX): 22
AnomalyDetectorJob (org.opensearch.ad.model.AnomalyDetectorJob): 22
AnomalyDetectionException (org.opensearch.ad.common.exception.AnomalyDetectionException): 21