Use of org.opensearch.ad.model.Entity in project anomaly-detection by opensearch-project.
Class NoPowermockSearchFeatureDaoTests, method testGetHighestCountEntitiesExhaustedPages.
@SuppressWarnings("unchecked")
public void testGetHighestCountEntitiesExhaustedPages() throws InterruptedException {
SearchResponse response1 = createPageResponse(attrs1);
CompositeAggregation emptyComposite = mock(CompositeAggregation.class);
when(emptyComposite.getName()).thenReturn(SearchFeatureDao.AGG_NAME_TOP);
when(emptyComposite.afterKey()).thenReturn(null);
// empty bucket
when(emptyComposite.getBuckets()).thenAnswer((Answer<List<CompositeAggregation.Bucket>>) invocation -> {
return new ArrayList<CompositeAggregation.Bucket>();
});
Aggregations emptyAggs = new Aggregations(Collections.singletonList(emptyComposite));
SearchResponseSections emptySections = new SearchResponseSections(SearchHits.empty(), emptyAggs, null, false, null, null, 1);
SearchResponse emptyResponse = new SearchResponse(emptySections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, Clusters.EMPTY);
CountDownLatch inProgress = new CountDownLatch(2);
doAnswer(invocation -> {
ActionListener<SearchResponse> listener = invocation.getArgument(1);
inProgress.countDown();
if (inProgress.getCount() == 1) {
listener.onResponse(response1);
} else {
listener.onResponse(emptyResponse);
}
return null;
}).when(client).search(any(), any());
ActionListener<List<Entity>> listener = mock(ActionListener.class);
searchFeatureDao = new SearchFeatureDao(client, xContentRegistry(), interpolator, clientUtil, settings, clusterService, AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE, clock, 2, 1, 60_000L);
searchFeatureDao.getHighestCountEntities(detector, 10L, 20L, listener);
ArgumentCaptor<List<Entity>> captor = ArgumentCaptor.forClass(List.class);
verify(listener).onResponse(captor.capture());
List<Entity> result = captor.getValue();
assertEquals(1, result.size());
assertEquals(Entity.createEntityByReordering(attrs1), result.get(0));
// both counts are used in client.search
assertTrue(inProgress.await(10000L, TimeUnit.MILLISECONDS));
}
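The test leans on a createPageResponse helper defined elsewhere in NoPowermockSearchFeatureDaoTests. For readers without the full class, here is a minimal sketch of what such a helper might look like, assembled from the same Mockito pattern the test uses for the empty page; the doc count of 3L and the reuse of the attribute map as afterKey are illustrative assumptions, not the project's exact helper.

// Hypothetical sketch of createPageResponse: one composite-agg page with a
// single bucket keyed by the given attributes and a non-null afterKey, so
// the pagination loop will request a next page.
private SearchResponse createPageResponse(Map<String, Object> attributes) {
    CompositeAggregation composite = mock(CompositeAggregation.class);
    when(composite.getName()).thenReturn(SearchFeatureDao.AGG_NAME_TOP);
    // a non-null afterKey signals that another page may follow
    when(composite.afterKey()).thenReturn(attributes);
    CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class);
    when(bucket.getKey()).thenReturn(attributes);
    when(bucket.getDocCount()).thenReturn(3L); // arbitrary illustrative count
    when(composite.getBuckets())
        .thenAnswer((Answer<List<CompositeAggregation.Bucket>>) invocation -> Collections.singletonList(bucket));
    Aggregations aggs = new Aggregations(Collections.singletonList(composite));
    SearchResponseSections sections = new SearchResponseSections(SearchHits.empty(), aggs, null, false, null, null, 1);
    return new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, Clusters.EMPTY);
}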
Use of org.opensearch.ad.model.Entity in project anomaly-detection by opensearch-project.
Class NoPowermockSearchFeatureDaoTests, method testGetHighestCountEntitiesUsingTermsAgg.
@SuppressWarnings("unchecked")
@Test
public void testGetHighestCountEntitiesUsingTermsAgg() {
SearchHits hits = new SearchHits(new SearchHit[] {}, null, Float.NaN);
String entity1Name = "value1";
long entity1Count = 3;
StringTerms.Bucket entity1Bucket = new StringTerms.Bucket(new BytesRef(entity1Name.getBytes(StandardCharsets.UTF_8), 0, entity1Name.getBytes(StandardCharsets.UTF_8).length), entity1Count, null, false, 0L, DocValueFormat.RAW);
String entity2Name = "value2";
long entity2Count = 1;
StringTerms.Bucket entity2Bucket = new StringTerms.Bucket(new BytesRef(entity2Name.getBytes(StandardCharsets.UTF_8), 0, entity2Name.getBytes(StandardCharsets.UTF_8).length), entity2Count, null, false, 0, DocValueFormat.RAW);
List<StringTerms.Bucket> stringBuckets = ImmutableList.of(entity1Bucket, entity2Bucket);
StringTerms termsAgg = new StringTerms(// "term_agg",
SearchFeatureDao.AGG_NAME_TOP, InternalOrder.key(false), BucketOrder.count(false), 1, 0, Collections.emptyMap(), DocValueFormat.RAW, 1, false, 0, stringBuckets, 0);
InternalAggregations internalAggregations = InternalAggregations.from(Collections.singletonList(termsAgg));
SearchResponseSections searchSections = new SearchResponseSections(hits, internalAggregations, null, false, false, null, 1);
SearchResponse searchResponse = new SearchResponse(searchSections, null, 1, 1, 0, 30, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
doAnswer(invocation -> {
SearchRequest request = invocation.getArgument(0);
assertEquals(1, request.indices().length);
assertTrue(detector.getIndices().contains(request.indices()[0]));
AggregatorFactories.Builder aggs = request.source().aggregations();
assertEquals(1, aggs.count());
Collection<AggregationBuilder> factory = aggs.getAggregatorFactories();
assertTrue(!factory.isEmpty());
assertThat(factory.iterator().next(), instanceOf(TermsAggregationBuilder.class));
ActionListener<SearchResponse> listener = invocation.getArgument(1);
listener.onResponse(searchResponse);
return null;
}).when(client).search(any(SearchRequest.class), any(ActionListener.class));
String categoryField = "fieldName";
when(detector.getCategoryField()).thenReturn(Collections.singletonList(categoryField));
ActionListener<List<Entity>> listener = mock(ActionListener.class);
searchFeatureDao.getHighestCountEntities(detector, 10L, 20L, listener);
ArgumentCaptor<List<Entity>> captor = ArgumentCaptor.forClass(List.class);
verify(listener).onResponse(captor.capture());
List<Entity> result = captor.getValue();
assertEquals(2, result.size());
assertEquals(Entity.createSingleAttributeEntity(categoryField, entity1Name), result.get(0));
assertEquals(Entity.createSingleAttributeEntity(categoryField, entity2Name), result.get(1));
}
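Constructing a StringTerms response involves noticeable boilerplate, so similar tests often extract it into a helper. Below is a sketch under the same imports as the test above; the helper name buildTermsResponse and its parameters are our invention, and a LinkedHashMap should be passed if bucket order matters.

// Hypothetical helper wrapping the StringTerms boilerplate above;
// termCounts maps each term to its doc count.
private SearchResponse buildTermsResponse(String aggName, Map<String, Long> termCounts) {
    List<StringTerms.Bucket> buckets = new ArrayList<>();
    for (Map.Entry<String, Long> entry : termCounts.entrySet()) {
        byte[] termBytes = entry.getKey().getBytes(StandardCharsets.UTF_8);
        buckets.add(new StringTerms.Bucket(new BytesRef(termBytes, 0, termBytes.length), entry.getValue(), null, false, 0L, DocValueFormat.RAW));
    }
    StringTerms terms = new StringTerms(aggName, InternalOrder.key(false), BucketOrder.count(false), 1, 0, Collections.emptyMap(), DocValueFormat.RAW, 1, false, 0, buckets, 0);
    InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(terms));
    SearchResponseSections sections = new SearchResponseSections(new SearchHits(new SearchHit[] {}, null, Float.NaN), aggs, null, false, false, null, 1);
    return new SearchResponse(sections, null, 1, 1, 0, 30, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
}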
Use of org.opensearch.ad.model.Entity in project anomaly-detection by opensearch-project.
Class NoPowermockSearchFeatureDaoTests, method testGetHighestCountEntitiesNotEnoughTime.
@SuppressWarnings("unchecked")
public void testGetHighestCountEntitiesNotEnoughTime() throws InterruptedException {
SearchResponse response1 = createPageResponse(attrs1);
SearchResponse response2 = createPageResponse(attrs2);
CountDownLatch inProgress = new CountDownLatch(2);
doAnswer(invocation -> {
ActionListener<SearchResponse> listener = invocation.getArgument(1);
inProgress.countDown();
if (inProgress.getCount() == 1) {
listener.onResponse(response1);
} else {
listener.onResponse(response2);
}
return null;
}).when(client).search(any(), any());
ActionListener<List<Entity>> listener = mock(ActionListener.class);
long timeoutMillis = 60_000L;
searchFeatureDao = new SearchFeatureDao(client, xContentRegistry(), interpolator, clientUtil, settings, clusterService, AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE, clock, 2, 1, timeoutMillis);
CountDownLatch clockInvoked = new CountDownLatch(2);
when(clock.millis()).thenAnswer(new Answer<Long>() {
@Override
public Long answer(InvocationOnMock invocation) throws Throwable {
clockInvoked.countDown();
if (clockInvoked.getCount() == 1) {
return 1L;
} else {
return 2L + timeoutMillis;
}
}
});
searchFeatureDao.getHighestCountEntities(detector, 10L, 20L, listener);
ArgumentCaptor<List<Entity>> captor = ArgumentCaptor.forClass(List.class);
verify(listener).onResponse(captor.capture());
List<Entity> result = captor.getValue();
assertEquals(1, result.size());
assertEquals(Entity.createEntityByReordering(attrs1), result.get(0));
// exited early due to timeout
assertEquals(1, inProgress.getCount());
// first called to create expired time; second called to check if time has expired
assertTrue(clockInvoked.await(10000L, TimeUnit.MILLISECONDS));
}
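The control flow being exercised here is a pagination loop that fixes an expiry time up front and checks the clock before requesting each further page. A simplified, self-contained sketch of that pattern follows; the class and member names are ours, not SearchFeatureDao's actual internals.

import java.time.Clock;

// Illustration of the expiry pattern the test exercises.
class ExpiringPager {
    private final Clock clock;
    private final long expirationEpochMs;

    ExpiringPager(Clock clock, long timeoutMillis) {
        this.clock = clock;
        // first clock.millis() call: fix the expiry time when paging starts
        this.expirationEpochMs = clock.millis() + timeoutMillis;
    }

    boolean expired() {
        // subsequent clock.millis() calls: checked before each new page request
        return clock.millis() > expirationEpochMs;
    }
}

With the mocked clock above, the constructor sees 1L and the first expired() check sees 2L + timeoutMillis, so paging stops after the first page, which matches the latch count of 1 asserted by the test.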
Use of org.opensearch.ad.model.Entity in project anomaly-detection by opensearch-project.
Class NoPowermockSearchFeatureDaoTests, method testGetHighestCountEntitiesUsingPagination.
@SuppressWarnings("unchecked")
public void testGetHighestCountEntitiesUsingPagination() {
SearchResponse response1 = createPageResponse(attrs1);
CountDownLatch inProgress = new CountDownLatch(1);
doAnswer(invocation -> {
ActionListener<SearchResponse> listener = invocation.getArgument(1);
inProgress.countDown();
listener.onResponse(response1);
return null;
}).when(client).search(any(), any());
ActionListener<List<Entity>> listener = mock(ActionListener.class);
searchFeatureDao.getHighestCountEntities(detector, 10L, 20L, listener);
ArgumentCaptor<List<Entity>> captor = ArgumentCaptor.forClass(List.class);
verify(listener).onResponse(captor.capture());
List<Entity> result = captor.getValue();
assertEquals(1, result.size());
assertEquals(Entity.createEntityByReordering(attrs1), result.get(0));
}
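If you want to pin down the page count explicitly rather than inferring it from the latch, Mockito's verify can count the search invocations. A short optional addition to the test above:

// assert exactly one search round-trip took place
verify(client, times(1)).search(any(), any());
assertEquals(0, inProgress.getCount());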
Use of org.opensearch.ad.model.Entity in project anomaly-detection by opensearch-project.
Class PriorityCacheTests, method replaceInOtherCacheSetUp.
private void replaceInOtherCacheSetUp() {
    Entity entity5 = Entity.createSingleAttributeEntity("attributeName1", "attributeVal5");
    Entity entity6 = Entity.createSingleAttributeEntity("attributeName1", "attributeVal6");
    ModelState<EntityModel> modelState5 = new ModelState<>(
        new EntityModel(entity5, new ArrayDeque<>(), null),
        entity5.getModelId(detectorId2).get(),
        detectorId2,
        ModelType.ENTITY.getName(),
        clock,
        0
    );
    ModelState<EntityModel> modelState6 = new ModelState<>(
        new EntityModel(entity6, new ArrayDeque<>(), null),
        entity6.getModelId(detectorId2).get(),
        detectorId2,
        ModelType.ENTITY.getName(),
        clock,
        0
    );
    for (int i = 0; i < 3; i++) {
        // bypass the doorkeeper while leaving room for a lower-frequency entity in testSelectToCold
        cacheProvider.get(entity5.getModelId(detectorId2).get(), detector2);
        cacheProvider.get(entity6.getModelId(detectorId2).get(), detector2);
    }
    for (int i = 0; i < 10; i++) {
        // boost entity2's frequency so entity1 cannot replace it
        cacheProvider.get(entity2.getModelId(detectorId).get(), detector);
    }
    // put modelState5 in the dedicated cache and modelState6 in the shared cache
    when(memoryTracker.canAllocate(anyLong())).thenReturn(true);
    cacheProvider.hostIfPossible(detector2, modelState5);
    cacheProvider.hostIfPossible(detector2, modelState6);
    // fill up the dedicated cache
    cacheProvider.hostIfPossible(detector, modelState2);
    // disallow use of the shared cache afterwards
    when(memoryTracker.canAllocate(anyLong())).thenReturn(false);
}
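A test consuming this setup would typically introduce a new, higher-frequency entity for detector and check whether it displaces the shared-cache model (modelState6) rather than the dedicated-cache one (modelState5). The following is a hedged sketch of such a follow-up; entity7, modelState7, and the loop count are our illustration, not the project's actual test code.

replaceInOtherCacheSetUp();
// hypothetical new entity whose frequency we raise before hosting
Entity entity7 = Entity.createSingleAttributeEntity("attributeName1", "attributeVal7");
ModelState<EntityModel> modelState7 = new ModelState<>(
    new EntityModel(entity7, new ArrayDeque<>(), null),
    entity7.getModelId(detectorId).get(),
    detectorId,
    ModelType.ENTITY.getName(),
    clock,
    0
);
for (int i = 0; i < 4; i++) {
    // raise entity7's priority above the entities hosted for detector2
    cacheProvider.get(entity7.getModelId(detectorId).get(), detector);
}
// with canAllocate stubbed to false, hosting entity7 forces a replacement decision
cacheProvider.hostIfPossible(detector, modelState7);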