Use of org.opensearch.client.Client in project k-NN by opensearch-project.
Class TrainingJobRouterTransportActionTests, method testMultiNode_withoutCapacity.
public void testMultiNode_withoutCapacity() {
    // Mock data nodes in the cluster through mocking the cluster service
    List<String> nodeIds = ImmutableList.of("node-1", "node-2", "node-3");
    ImmutableOpenMap<String, DiscoveryNode> discoveryNodesMap = generateDiscoveryNodes(nodeIds);
    ClusterService clusterService = generateMockedClusterService(discoveryNodesMap);

    // Create a response to be returned with job route decision info
    List<TrainingJobRouteDecisionInfoNodeResponse> responseList = new ArrayList<>();
    // First node has no capacity
    responseList.add(new TrainingJobRouteDecisionInfoNodeResponse(discoveryNodesMap.get(nodeIds.get(0)), 1));
    // Second node has no capacity
    responseList.add(new TrainingJobRouteDecisionInfoNodeResponse(discoveryNodesMap.get(nodeIds.get(1)), 1));
    // Third node has no capacity
    responseList.add(new TrainingJobRouteDecisionInfoNodeResponse(discoveryNodesMap.get(nodeIds.get(2)), 1));

    TrainingJobRouteDecisionInfoResponse infoResponse = new TrainingJobRouteDecisionInfoResponse(
        ClusterName.DEFAULT,
        responseList,
        Collections.emptyList()
    );

    TransportService transportService = mock(TransportService.class);
    Client client = mock(Client.class);

    // Setup the action
    TrainingJobRouterTransportAction transportAction = new TrainingJobRouterTransportAction(
        transportService,
        new ActionFilters(Collections.emptySet()),
        clusterService,
        client
    );

    // Select the node; with every node already running a training job, no node is eligible
    DiscoveryNode selectedNode = transportAction.selectNode(null, infoResponse);
    assertNull(selectedNode);
}
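selectNode is expected to return null here because every node reports a running training job. The following self-contained sketch mirrors that selection rule with plain Java collections; the per-node limit of one job and the first-fit selection order are illustrative assumptions, not the plugin's actual implementation.

import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical, simplified analogue of the capacity check the test exercises.
public final class CapacitySelectionSketch {

    // Assumption for illustration only; the real limit lives in the k-NN plugin.
    static final int MAX_TRAINING_JOBS_PER_NODE = 1;

    static String selectNode(Map<String, Integer> runningJobsByNode) {
        for (Map.Entry<String, Integer> entry : runningJobsByNode.entrySet()) {
            if (entry.getValue() < MAX_TRAINING_JOBS_PER_NODE) {
                return entry.getKey(); // first node with spare capacity
            }
        }
        return null; // no capacity anywhere, mirroring assertNull(selectedNode)
    }

    public static void main(String[] args) {
        Map<String, Integer> jobs = new LinkedHashMap<>();
        jobs.put("node-1", 1);
        jobs.put("node-2", 1);
        jobs.put("node-3", 1);
        System.out.println(selectNode(jobs)); // prints "null"
    }
}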
Use of org.opensearch.client.Client in project k-NN by opensearch-project.
Class TrainingJobRouterTransportActionTests, method testTrainingIndexSize.
@SuppressWarnings("unchecked")
public void testTrainingIndexSize() {
String trainingIndexName = "training-index";
int dimension = 133;
int vectorCount = 1000000;
// 519,531.25 KB ~= 520 MB
int expectedSize = dimension * vectorCount * Float.BYTES / BYTES_PER_KILOBYTES + 1;
// Setup the request
TrainingModelRequest trainingModelRequest = new TrainingModelRequest(null, KNNMethodContext.getDefault(), dimension, trainingIndexName, "training-field", null, "description");
// Mock client to return the right number of docs
TotalHits totalHits = new TotalHits(vectorCount, TotalHits.Relation.EQUAL_TO);
SearchHits searchHits = new SearchHits(new SearchHit[2], totalHits, 1.0f);
SearchResponse searchResponse = mock(SearchResponse.class);
when(searchResponse.getHits()).thenReturn(searchHits);
Client client = mock(Client.class);
doAnswer(invocationOnMock -> {
((ActionListener<SearchResponse>) invocationOnMock.getArguments()[1]).onResponse(searchResponse);
return null;
}).when(client).search(any(), any());
// Setup the action
ClusterService clusterService = mock(ClusterService.class);
TransportService transportService = mock(TransportService.class);
TrainingJobRouterTransportAction transportAction = new TrainingJobRouterTransportAction(transportService, new ActionFilters(Collections.emptySet()), clusterService, client);
ActionListener<Integer> listener = ActionListener.wrap(size -> assertEquals(expectedSize, size.intValue()), e -> fail(e.getMessage()));
transportAction.getTrainingIndexSizeInKB(trainingModelRequest, listener);
}
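A minimal standalone check of that arithmetic, assuming BYTES_PER_KILOBYTES is 1024 (consistent with the 519,531.25 KB figure in the comment above; the constant itself is defined in the k-NN plugin and not shown here):

// Standalone check of the size arithmetic used by the test above.
public final class TrainingIndexSizeSketch {
    public static void main(String[] args) {
        int dimension = 133;
        int vectorCount = 1_000_000;
        int bytesPerKilobyte = 1024; // assumption, stands in for BYTES_PER_KILOBYTES
        long bytes = (long) dimension * vectorCount * Float.BYTES; // 532,000,000 bytes
        long sizeInKb = bytes / bytesPerKilobyte + 1; // matches the test's formula: 519,531 + 1
        System.out.println(sizeInKb); // 519532, the value expectedSize holds in the test
    }
}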
Use of org.opensearch.client.Client in project anomaly-detection by opensearch-project.
Class NoPowermockSearchFeatureDaoTests, method testGetHighestCountEntitiesExhaustedPages.
@SuppressWarnings("unchecked")
public void testGetHighestCountEntitiesExhaustedPages() throws InterruptedException {
SearchResponse response1 = createPageResponse(attrs1);
CompositeAggregation emptyComposite = mock(CompositeAggregation.class);
when(emptyComposite.getName()).thenReturn(SearchFeatureDao.AGG_NAME_TOP);
when(emptyComposite.afterKey()).thenReturn(null);
// empty bucket
when(emptyComposite.getBuckets()).thenAnswer((Answer<List<CompositeAggregation.Bucket>>) invocation -> {
return new ArrayList<CompositeAggregation.Bucket>();
});
Aggregations emptyAggs = new Aggregations(Collections.singletonList(emptyComposite));
SearchResponseSections emptySections = new SearchResponseSections(SearchHits.empty(), emptyAggs, null, false, null, null, 1);
SearchResponse emptyResponse = new SearchResponse(emptySections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, Clusters.EMPTY);
CountDownLatch inProgress = new CountDownLatch(2);
doAnswer(invocation -> {
ActionListener<SearchResponse> listener = invocation.getArgument(1);
inProgress.countDown();
if (inProgress.getCount() == 1) {
listener.onResponse(response1);
} else {
listener.onResponse(emptyResponse);
}
return null;
}).when(client).search(any(), any());
ActionListener<List<Entity>> listener = mock(ActionListener.class);
searchFeatureDao = new SearchFeatureDao(client, xContentRegistry(), interpolator, clientUtil, settings, clusterService, AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE, clock, 2, 1, 60_000L);
searchFeatureDao.getHighestCountEntities(detector, 10L, 20L, listener);
ArgumentCaptor<List<Entity>> captor = ArgumentCaptor.forClass(List.class);
verify(listener).onResponse(captor.capture());
List<Entity> result = captor.getValue();
assertEquals(1, result.size());
assertEquals(Entity.createEntityByReordering(attrs1), result.get(0));
// both counts are used in client.search
assertTrue(inProgress.await(10000L, TimeUnit.MILLISECONDS));
}
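The stubbed client returns one populated page followed by a response whose composite aggregation has no buckets and a null afterKey, which is what ends the paging. The sketch below models that termination rule with a made-up Page type; it is not the SearchFeatureDao implementation, only the loop shape the test drives.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

// Hypothetical model of composite-aggregation paging: stop on an empty page.
public final class PagingSketch {

    // Stand-in for one composite-aggregation page; not an OpenSearch class.
    static final class Page {
        final List<String> buckets;
        final Object afterKey;

        Page(List<String> buckets, Object afterKey) {
            this.buckets = buckets;
            this.afterKey = afterKey;
        }
    }

    static List<String> drain(Iterator<Page> pages) {
        List<String> collected = new ArrayList<>();
        while (pages.hasNext()) {
            Page page = pages.next();
            if (page.buckets.isEmpty() || page.afterKey == null) {
                break; // mirrors the empty second response in the test
            }
            collected.addAll(page.buckets);
        }
        return collected;
    }

    public static void main(String[] args) {
        List<Page> responses = List.of(
            new Page(List.of("app_0,server_1"), new Object()), // first page: one bucket
            new Page(List.of(), null) // exhausted: no buckets, null afterKey
        );
        System.out.println(drain(responses.iterator())); // [app_0,server_1]
    }
}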
Use of org.opensearch.client.Client in project anomaly-detection by opensearch-project.
Class NoPowermockSearchFeatureDaoTests, method setUp.
@Override
public void setUp() throws Exception {
    super.setUp();
    serviceField = "service";
    hostField = "host";

    detector = mock(AnomalyDetector.class);
    when(detector.isMultientityDetector()).thenReturn(true);
    when(detector.getCategoryField()).thenReturn(Arrays.asList(new String[] { serviceField, hostField }));
    detectorId = "123";
    when(detector.getDetectorId()).thenReturn(detectorId);
    when(detector.getTimeField()).thenReturn("testTimeField");
    when(detector.getIndices()).thenReturn(Arrays.asList("testIndices"));
    IntervalTimeConfiguration detectionInterval = new IntervalTimeConfiguration(1, ChronoUnit.MINUTES);
    when(detector.getDetectionInterval()).thenReturn(detectionInterval);
    when(detector.getFilterQuery()).thenReturn(QueryBuilders.matchAllQuery());

    client = mock(Client.class);
    interpolator = new LinearUniformInterpolator(new SingleFeatureLinearUniformInterpolator());
    clientUtil = mock(ClientUtil.class);
    settings = Settings.EMPTY;

    ClusterSettings clusterSettings = new ClusterSettings(
        Settings.EMPTY,
        Collections.unmodifiableSet(
            new HashSet<>(Arrays.asList(AnomalyDetectorSettings.MAX_ENTITIES_FOR_PREVIEW, AnomalyDetectorSettings.PAGE_SIZE))
        )
    );
    clusterService = mock(ClusterService.class);
    when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
    clock = mock(Clock.class);

    searchFeatureDao = new SearchFeatureDao(
        client,
        // Important. Without this, ParseUtils cannot parse anything
        xContentRegistry(),
        interpolator,
        clientUtil,
        settings,
        clusterService,
        AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE,
        clock,
        1,
        1,
        60_000L
    );
String app0 = "app_0";
String server1 = "server_1";
attrs1 = new HashMap<>();
attrs1.put(serviceField, app0);
attrs1.put(hostField, server1);
String server2 = "server_2";
attrs1 = new HashMap<>();
attrs1.put(serviceField, app0);
attrs1.put(hostField, server2);
}
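The ClusterSettings built above must contain every setting the class under test registers an update consumer for; ClusterSettings rejects addSettingsUpdateConsumer calls for unregistered settings, which is presumably why MAX_ENTITIES_FOR_PREVIEW and PAGE_SIZE are listed. A minimal sketch of that wiring, with a hypothetical setting standing in for the AnomalyDetectorSettings constants:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;

// Sketch of the ClusterSettings wiring above, with a made-up dynamic setting.
public final class ClusterSettingsWiringSketch {

    // Hypothetical setting; not part of the anomaly-detection plugin.
    static final Setting<Integer> PAGE_SIZE_SKETCH =
        Setting.intSetting("sketch.page_size", 1000, Setting.Property.NodeScope, Setting.Property.Dynamic);

    static ClusterService mockedClusterService() {
        Set<Setting<?>> registered = new HashSet<>(Collections.singletonList(PAGE_SIZE_SKETCH));
        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, registered);

        ClusterService clusterService = mock(ClusterService.class);
        when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
        return clusterService;
    }
}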
Use of org.opensearch.client.Client in project anomaly-detection by opensearch-project.
Class RolloverTests, method setUp.
@Override
public void setUp() throws Exception {
    super.setUp();
    Client client = mock(Client.class);
    indicesClient = mock(IndicesAdminClient.class);
    AdminClient adminClient = mock(AdminClient.class);
    clusterService = mock(ClusterService.class);
    ClusterSettings clusterSettings = new ClusterSettings(
        Settings.EMPTY,
        Collections.unmodifiableSet(
            new HashSet<>(
                Arrays.asList(
                    AnomalyDetectorSettings.AD_RESULT_HISTORY_MAX_DOCS_PER_SHARD,
                    AnomalyDetectorSettings.AD_RESULT_HISTORY_ROLLOVER_PERIOD,
                    AnomalyDetectorSettings.AD_RESULT_HISTORY_RETENTION_PERIOD,
                    AnomalyDetectorSettings.MAX_PRIMARY_SHARDS
                )
            )
        )
    );
    clusterName = new ClusterName("test");
    when(clusterService.getClusterSettings()).thenReturn(clusterSettings);

    ThreadPool threadPool = mock(ThreadPool.class);
    Settings settings = Settings.EMPTY;
    when(client.admin()).thenReturn(adminClient);
    when(adminClient.indices()).thenReturn(indicesClient);

    DiscoveryNodeFilterer nodeFilter = mock(DiscoveryNodeFilterer.class);
    numberOfNodes = 2;
    when(nodeFilter.getNumberOfEligibleDataNodes()).thenReturn(numberOfNodes);

    adIndices = new AnomalyDetectionIndices(
        client,
        clusterService,
        threadPool,
        settings,
        nodeFilter,
        AnomalyDetectorSettings.MAX_UPDATE_RETRY_TIMES
    );

    clusterAdminClient = mock(ClusterAdminClient.class);
    when(adminClient.cluster()).thenReturn(clusterAdminClient);

    doAnswer(invocation -> {
        ClusterStateRequest clusterStateRequest = invocation.getArgument(0);
        assertEquals(AnomalyDetectionIndices.ALL_AD_RESULTS_INDEX_PATTERN, clusterStateRequest.indices()[0]);
        @SuppressWarnings("unchecked")
        ActionListener<ClusterStateResponse> listener = (ActionListener<ClusterStateResponse>) invocation.getArgument(1);
        listener.onResponse(new ClusterStateResponse(clusterName, clusterState, true));
        return null;
    }).when(clusterAdminClient).state(any(), any());

    defaultMaxDocs = AnomalyDetectorSettings.AD_RESULT_HISTORY_MAX_DOCS_PER_SHARD.getDefault(Settings.EMPTY);
}
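The doAnswer block above is the usual Mockito idiom for completing an asynchronous client call on the calling thread. Here is the same pattern in self-contained form, against a made-up AsyncNumberService interface rather than a real OpenSearch client API:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import org.opensearch.action.ActionListener;

// Self-contained illustration of stubbing a callback-style method with doAnswer.
// AsyncNumberService is hypothetical; only the stubbing pattern matches the tests above.
public final class AsyncStubSketch {

    interface AsyncNumberService {
        void fetch(String key, ActionListener<Integer> listener);
    }

    public static void main(String[] args) {
        AsyncNumberService service = mock(AsyncNumberService.class);
        doAnswer(invocation -> {
            ActionListener<Integer> listener = invocation.getArgument(1);
            listener.onResponse(42); // complete the callback synchronously
            return null;             // the stubbed method is void, so the Answer returns null
        }).when(service).fetch(any(), any());

        service.fetch("ignored", ActionListener.wrap(
            value -> System.out.println("got " + value), // prints "got 42"
            e -> { throw new AssertionError(e); }
        ));
    }
}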