Example usage of org.opensearch.ad.util.ClientUtil in the anomaly-detection project (opensearch-project): class AbstractIndexHandlerTest, method setUp.
@Override
public void setUp() throws Exception {
    super.setUp();
    // openMocks replaces MockitoAnnotations.initMocks, which is deprecated
    // since Mockito 3.4. The returned AutoCloseable is intentionally not
    // retained here; the mocks live for the duration of the test instance.
    MockitoAnnotations.openMocks(this);
    // Start with writes to the AD result index unblocked so individual tests
    // opt in to the blocked state explicitly.
    setWriteBlockAdResultIndex(false);
    context = TestHelpers.createThreadPool();
    clientUtil = new ClientUtil(settings, client, throttler, context);
    indexUtil = new IndexUtils(client, clientUtil, clusterService, indexNameResolver);
}
Example usage of org.opensearch.ad.util.ClientUtil in the anomaly-detection project (opensearch-project): class ADStatsNodesTransportActionTests, method setUp.
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    Client client = client();
    Clock clock = mock(Clock.class);
    Throttler throttler = new Throttler(clock);
    ThreadPool threadPool = mock(ThreadPool.class);
    IndexNameExpressionResolver indexNameResolver = mock(IndexNameExpressionResolver.class);
    // Real IndexUtils wired against the test cluster's clusterService() helper.
    IndexUtils indexUtils = new IndexUtils(
        client,
        new ClientUtil(Settings.EMPTY, client, throttler, threadPool),
        clusterService(),
        indexNameResolver
    );
    ModelManager modelManager = mock(ModelManager.class);
    CacheProvider cacheProvider = mock(CacheProvider.class);
    EntityCache cache = mock(EntityCache.class);
    when(cacheProvider.get()).thenReturn(cache);

    clusterStatName1 = "clusterStat1";
    clusterStatName2 = "clusterStat2";
    nodeStatName1 = "nodeStat1";
    nodeStatName2 = "nodeStat2";

    Settings settings = Settings.builder().put(MAX_MODEL_SIZE_PER_NODE.getKey(), 10).build();
    // Renamed from "clusterService": the original local shadowed the
    // clusterService() test-helper method used elsewhere in this setUp.
    ClusterService mockClusterService = mock(ClusterService.class);
    ClusterSettings clusterSettings = new ClusterSettings(
        Settings.EMPTY,
        Collections.unmodifiableSet(new HashSet<>(Arrays.asList(MAX_MODEL_SIZE_PER_NODE)))
    );
    when(mockClusterService.getClusterSettings()).thenReturn(clusterSettings);

    // Plain HashMap + put calls instead of double-brace initialization: the
    // anonymous HashMap subclass captured a reference to the enclosing test
    // instance and generated an extra class for no benefit.
    statsMap = new HashMap<>();
    statsMap.put(nodeStatName1, new ADStat<>(false, new CounterSupplier()));
    statsMap.put(nodeStatName2, new ADStat<>(false, new ModelsOnNodeSupplier(modelManager, cacheProvider, settings, mockClusterService)));
    statsMap.put(clusterStatName1, new ADStat<>(true, new IndexStatusSupplier(indexUtils, "index1")));
    statsMap.put(clusterStatName2, new ADStat<>(true, new IndexStatusSupplier(indexUtils, "index2")));
    statsMap.put(InternalStatNames.JVM_HEAP_USAGE.getName(), new ADStat<>(true, new SettableSupplier()));
    adStats = new ADStats(statsMap);

    // JVM heap-usage stat is fed from a mocked JvmService chain.
    JvmService jvmService = mock(JvmService.class);
    JvmStats jvmStats = mock(JvmStats.class);
    JvmStats.Mem mem = mock(JvmStats.Mem.class);
    when(jvmService.stats()).thenReturn(jvmStats);
    when(jvmStats.getMem()).thenReturn(mem);
    when(mem.getHeapUsedPercent()).thenReturn(randomShort());

    adTaskManager = mock(ADTaskManager.class);
    action = new ADStatsNodesTransportAction(client().threadPool(), clusterService(), mock(TransportService.class), mock(ActionFilters.class), adStats, jvmService, adTaskManager);
}
Example usage of org.opensearch.ad.util.ClientUtil in the anomaly-detection project (opensearch-project): class NoPowermockSearchFeatureDaoTests, method testGetHighestCountEntitiesExhaustedPages.
@SuppressWarnings("unchecked")
public void testGetHighestCountEntitiesExhaustedPages() throws InterruptedException {
// First page: a composite-agg response built from the attrs1 entity attributes.
SearchResponse response1 = createPageResponse(attrs1);
// Second page: a composite aggregation with no buckets and a null afterKey —
// presumably this is what signals SearchFeatureDao to stop paginating; confirm
// against SearchFeatureDao's composite-agg handling.
CompositeAggregation emptyComposite = mock(CompositeAggregation.class);
when(emptyComposite.getName()).thenReturn(SearchFeatureDao.AGG_NAME_TOP);
when(emptyComposite.afterKey()).thenReturn(null);
// empty bucket
when(emptyComposite.getBuckets()).thenAnswer((Answer<List<CompositeAggregation.Bucket>>) invocation -> {
return new ArrayList<CompositeAggregation.Bucket>();
});
Aggregations emptyAggs = new Aggregations(Collections.singletonList(emptyComposite));
SearchResponseSections emptySections = new SearchResponseSections(SearchHits.empty(), emptyAggs, null, false, null, null, 1);
SearchResponse emptyResponse = new SearchResponse(emptySections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, Clusters.EMPTY);
// Latch doubles as both a call counter (for the stub) and a completion check
// (for the final await): exactly two search calls are expected.
CountDownLatch inProgress = new CountDownLatch(2);
doAnswer(invocation -> {
ActionListener<SearchResponse> listener = invocation.getArgument(1);
// countDown() happens BEFORE the count check, so the first invocation sees
// count == 1 (returns the populated page) and the second sees count == 0
// (returns the empty page that ends pagination).
inProgress.countDown();
if (inProgress.getCount() == 1) {
listener.onResponse(response1);
} else {
listener.onResponse(emptyResponse);
}
return null;
}).when(client).search(any(), any());
ActionListener<List<Entity>> listener = mock(ActionListener.class);
// Fresh dao with pageSize 2 / maxEntities 1 (trailing args appear to be paging
// limits and a timeout — TODO confirm against the SearchFeatureDao constructor)
// so the exhausted-pages path is exercised.
searchFeatureDao = new SearchFeatureDao(client, xContentRegistry(), interpolator, clientUtil, settings, clusterService, AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE, clock, 2, 1, 60_000L);
searchFeatureDao.getHighestCountEntities(detector, 10L, 20L, listener);
ArgumentCaptor<List<Entity>> captor = ArgumentCaptor.forClass(List.class);
verify(listener).onResponse(captor.capture());
List<Entity> result = captor.getValue();
// Only the first (non-empty) page contributed an entity.
assertEquals(1, result.size());
assertEquals(Entity.createEntityByReordering(attrs1), result.get(0));
// both counts are used in client.search
assertTrue(inProgress.await(10000L, TimeUnit.MILLISECONDS));
}
Example usage of org.opensearch.ad.util.ClientUtil in the anomaly-detection project (opensearch-project): class NoPowermockSearchFeatureDaoTests, method setUp.
@Override
public void setUp() throws Exception {
    super.setUp();
    serviceField = "service";
    hostField = "host";

    // Two-category (multi-entity) detector over service/host.
    detector = mock(AnomalyDetector.class);
    when(detector.isMultientityDetector()).thenReturn(true);
    when(detector.getCategoryField()).thenReturn(Arrays.asList(new String[] { serviceField, hostField }));
    detectorId = "123";
    when(detector.getDetectorId()).thenReturn(detectorId);
    when(detector.getTimeField()).thenReturn("testTimeField");
    when(detector.getIndices()).thenReturn(Arrays.asList("testIndices"));
    IntervalTimeConfiguration detectionInterval = new IntervalTimeConfiguration(1, ChronoUnit.MINUTES);
    when(detector.getDetectionInterval()).thenReturn(detectionInterval);
    when(detector.getFilterQuery()).thenReturn(QueryBuilders.matchAllQuery());

    client = mock(Client.class);
    interpolator = new LinearUniformInterpolator(new SingleFeatureLinearUniformInterpolator());
    clientUtil = mock(ClientUtil.class);
    settings = Settings.EMPTY;
    ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AnomalyDetectorSettings.MAX_ENTITIES_FOR_PREVIEW, AnomalyDetectorSettings.PAGE_SIZE))));
    clusterService = mock(ClusterService.class);
    when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
    clock = mock(Clock.class);

    searchFeatureDao = new SearchFeatureDao(client, // Important. Without this, ParseUtils cannot parse anything
        xContentRegistry(), interpolator, clientUtil, settings, clusterService, AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE, clock, 1, 1, 60_000L);

    String app0 = "app_0";
    String server1 = "server_1";
    attrs1 = new HashMap<>();
    attrs1.put(serviceField, app0);
    attrs1.put(hostField, server1);

    String server2 = "server_2";
    // Bug fix: this second map previously reassigned attrs1, silently discarding
    // the server_1 entries built above (and leaving server1 unused). It is the
    // second distinct entity and belongs in attrs2.
    attrs2 = new HashMap<>();
    attrs2.put(serviceField, app0);
    attrs2.put(hostField, server2);
}
Example usage of org.opensearch.ad.util.ClientUtil in the anomaly-detection project (opensearch-project): class CheckpointDaoTests, method test_batch_write_no_init.
@SuppressWarnings("unchecked")
public void test_batch_write_no_init() throws InterruptedException {
    // The checkpoint index already exists, so batchWrite should skip index creation.
    when(indexUtil.doesCheckpointIndexExist()).thenReturn(true);

    // Stub the bulk execution to answer immediately with a fully successful
    // two-item bulk response.
    doAnswer(invocation -> {
        ActionListener<BulkResponse> bulkListener = invocation.getArgument(2);
        bulkListener.onResponse(createBulkResponse(2, 0, null));
        return null;
    }).when(clientUtil).execute(eq(BulkAction.INSTANCE), any(BulkRequest.class), any(ActionListener.class));

    CountDownLatch writeFinished = new CountDownLatch(1);
    checkpointDao.batchWrite(new BulkRequest(), ActionListener.wrap(response -> writeFinished.countDown(), exception -> {
        // The failure callback must never fire for a successful bulk response.
        assertTrue(false);
    }));

    // The stub responds synchronously, so the latch should already be at zero;
    // the generous timeout only guards against a hang.
    assertTrue(writeFinished.await(100, TimeUnit.SECONDS));
    verify(clientUtil, times(1)).execute(any(), any(), any());
}
Aggregations