Use of org.opensearch.ad.util.DiscoveryNodeFilterer in project anomaly-detection by opensearch-project.
Class RolloverTests, method setUp:
@Override
public void setUp() throws Exception {
    super.setUp();
    Client client = mock(Client.class);
    indicesClient = mock(IndicesAdminClient.class);
    AdminClient adminClient = mock(AdminClient.class);
    clusterService = mock(ClusterService.class);
    ClusterSettings clusterSettings = new ClusterSettings(
        Settings.EMPTY,
        Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
            AnomalyDetectorSettings.AD_RESULT_HISTORY_MAX_DOCS_PER_SHARD,
            AnomalyDetectorSettings.AD_RESULT_HISTORY_ROLLOVER_PERIOD,
            AnomalyDetectorSettings.AD_RESULT_HISTORY_RETENTION_PERIOD,
            AnomalyDetectorSettings.MAX_PRIMARY_SHARDS
        )))
    );
    clusterName = new ClusterName("test");
    when(clusterService.getClusterSettings()).thenReturn(clusterSettings);

    ThreadPool threadPool = mock(ThreadPool.class);
    Settings settings = Settings.EMPTY;
    when(client.admin()).thenReturn(adminClient);
    when(adminClient.indices()).thenReturn(indicesClient);

    // Stub the node filter so the rollover logic sees a fixed number of eligible data nodes.
    DiscoveryNodeFilterer nodeFilter = mock(DiscoveryNodeFilterer.class);
    numberOfNodes = 2;
    when(nodeFilter.getNumberOfEligibleDataNodes()).thenReturn(numberOfNodes);

    adIndices = new AnomalyDetectionIndices(
        client, clusterService, threadPool, settings, nodeFilter, AnomalyDetectorSettings.MAX_UPDATE_RETRY_TIMES
    );

    clusterAdminClient = mock(ClusterAdminClient.class);
    when(adminClient.cluster()).thenReturn(clusterAdminClient);

    // Answer cluster-state requests with the prepared state and check the requested index pattern.
    doAnswer(invocation -> {
        ClusterStateRequest clusterStateRequest = invocation.getArgument(0);
        assertEquals(AnomalyDetectionIndices.ALL_AD_RESULTS_INDEX_PATTERN, clusterStateRequest.indices()[0]);
        @SuppressWarnings("unchecked")
        ActionListener<ClusterStateResponse> listener = (ActionListener<ClusterStateResponse>) invocation.getArgument(1);
        listener.onResponse(new ClusterStateResponse(clusterName, clusterState, true));
        return null;
    }).when(clusterAdminClient).state(any(), any());

    defaultMaxDocs = AnomalyDetectorSettings.AD_RESULT_HISTORY_MAX_DOCS_PER_SHARD.getDefault(Settings.EMPTY);
}
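In this setup DiscoveryNodeFilterer is a plain Mockito mock: the rollover code only asks it for the number of eligible data nodes, so no real ClusterService is wired up. Distilled to its core, the pattern looks like the sketch below (assuming the component under test calls nothing else on the filterer; mock() and when() are the usual Mockito static imports):

    // Minimal stubbing sketch: fake only the call the code under test makes.
    DiscoveryNodeFilterer nodeFilter = mock(DiscoveryNodeFilterer.class);
    when(nodeFilter.getNumberOfEligibleDataNodes()).thenReturn(2);
    // Hand nodeFilter to whatever component needs a node count, e.g. AnomalyDetectionIndices above.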
Use of org.opensearch.ad.util.DiscoveryNodeFilterer in project anomaly-detection by opensearch-project.
Class HashRingTests, method setUp:
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    localNodeId = "localNode";
    localNode = createNode(localNodeId, "127.0.0.1", 9200, emptyMap());
    newNodeId = "newNode";
    newNode = createNode(newNodeId, "127.0.0.2", 9201, emptyMap());
    // The warm node is tagged with the warm box type so the filterer can treat it as ineligible.
    warmNodeId = "warmNode";
    warmNode = createNode(warmNodeId, "127.0.0.3", 9202, ImmutableMap.of(CommonName.BOX_TYPE_KEY, CommonName.WARM_BOX_TYPE));

    settings = Settings.builder().put(COOLDOWN_MINUTES.getKey(), TimeValue.timeValueSeconds(5)).build();
    ClusterSettings clusterSettings = clusterSetting(settings, COOLDOWN_MINUTES);
    clusterService = spy(new ClusterService(settings, clusterSettings, null));
    // Spy on a real filterer so HashRing exercises the actual node-eligibility logic.
    nodeFilter = spy(new DiscoveryNodeFilterer(clusterService));

    client = mock(Client.class);
    dataMigrator = mock(ADDataMigrator.class);
    clock = mock(Clock.class);
    when(clock.millis()).thenReturn(700000L);
    delta = mock(DiscoveryNodes.Delta.class);

    adminClient = mock(AdminClient.class);
    when(client.admin()).thenReturn(adminClient);
    clusterAdminClient = mock(ClusterAdminClient.class);
    when(adminClient.cluster()).thenReturn(clusterAdminClient);

    String modelId = "123_model_threshold";
    modelManager = mock(ModelManager.class);
    doAnswer(invocation -> {
        Set<String> res = new HashSet<>();
        res.add(modelId);
        return res;
    }).when(modelManager).getAllModelIds();

    hashRing = spy(new HashRing(nodeFilter, clock, settings, client, clusterService, dataMigrator, modelManager));
}
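Unlike the previous example, this setup spies on a real DiscoveryNodeFilterer backed by a (spied) ClusterService, so HashRing runs against the actual eligibility logic while the test can still stub or verify individual calls. The mocked Clock together with the 5-second COOLDOWN_MINUTES value also lets tests step past the rebuild cooldown explicitly; a hedged sketch of that idea (the HashRing call that actually triggers a rebuild is not shown above):

    // Illustrative only: advance the mocked clock beyond the 5s cooldown configured in setUp
    // before asking the hash ring to rebuild again.
    when(clock.millis()).thenReturn(700000L + 6_000L);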
Use of org.opensearch.ad.util.DiscoveryNodeFilterer in project anomaly-detection by opensearch-project.
Class HourlyCronTests, method templateHourlyCron:
@SuppressWarnings("unchecked")
public void templateHourlyCron(HourlyCronTestExecutionMode mode) {
    super.setUpLog4jForJUnit(HourlyCron.class);

    ClusterService clusterService = mock(ClusterService.class);
    ClusterState state = ClusterCreation.state(1);
    when(clusterService.state()).thenReturn(state);

    HashMap<String, String> ignoredAttributes = new HashMap<String, String>();
    ignoredAttributes.put(CommonName.BOX_TYPE_KEY, CommonName.WARM_BOX_TYPE);
    DiscoveryNodeFilterer nodeFilter = new DiscoveryNodeFilterer(clusterService);

    Client client = mock(Client.class);
    // Answer CronAction requests according to the requested execution mode.
    doAnswer(invocation -> {
        Object[] args = invocation.getArguments();
        assertTrue(String.format("The size of args is %d. Its content is %s", args.length, Arrays.toString(args)), args.length == 3);
        assertTrue(args[2] instanceof ActionListener);
        ActionListener<CronResponse> listener = (ActionListener<CronResponse>) args[2];
        if (mode == HourlyCronTestExecutionMode.NODE_FAIL) {
            // One node succeeds while another reports a failure.
            listener.onResponse(
                new CronResponse(
                    new ClusterName("test"),
                    Collections.singletonList(new CronNodeResponse(state.nodes().getLocalNode())),
                    Collections.singletonList(new FailedNodeException("foo0", "blah", new OpenSearchException("bar")))
                )
            );
        } else if (mode == HourlyCronTestExecutionMode.ALL_FAIL) {
            listener.onFailure(new OpenSearchException("bar"));
        } else {
            // Round-trip the node response and the full response through serialization
            // before delivering them, to exercise the stream constructors as well.
            CronNodeResponse nodeResponse = new CronNodeResponse(state.nodes().getLocalNode());
            BytesStreamOutput nodeResponseOut = new BytesStreamOutput();
            nodeResponseOut.setVersion(Version.CURRENT);
            nodeResponse.writeTo(nodeResponseOut);
            StreamInput siNode = nodeResponseOut.bytes().streamInput();
            CronNodeResponse nodeResponseRead = new CronNodeResponse(siNode);

            CronResponse response = new CronResponse(new ClusterName("test"), Collections.singletonList(nodeResponseRead), Collections.EMPTY_LIST);
            BytesStreamOutput out = new BytesStreamOutput();
            out.setVersion(Version.CURRENT);
            response.writeTo(out);
            StreamInput si = out.bytes().streamInput();
            CronResponse responseRead = new CronResponse(si);
            listener.onResponse(responseRead);
        }
        return null;
    }).when(client).execute(eq(CronAction.INSTANCE), any(), any());

    HourlyCron cron = new HourlyCron(client, nodeFilter);
    cron.run();

    Logger LOG = LogManager.getLogger(HourlyCron.class);
    LOG.info(testAppender.messages);

    // Each execution mode should leave its own marker in the logs.
    if (mode == HourlyCronTestExecutionMode.NODE_FAIL) {
        assertTrue(testAppender.containsMessage(HourlyCron.NODE_EXCEPTION_LOG_MSG));
    } else if (mode == HourlyCronTestExecutionMode.ALL_FAIL) {
        assertTrue(testAppender.containsMessage(HourlyCron.EXCEPTION_LOG_MSG));
    } else {
        assertTrue(testAppender.containsMessage(HourlyCron.SUCCEEDS_LOG_MSG));
    }

    super.tearDownLog4jForJUnit();
}
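Concrete test methods typically just delegate to this template, one per execution mode. A sketch of what such callers could look like (only NODE_FAIL and ALL_FAIL appear above; the name of the normal-path mode is assumed):

    public void testNormal() {
        templateHourlyCron(HourlyCronTestExecutionMode.NORMAL); // mode name assumed
    }

    public void testNodeFail() {
        templateHourlyCron(HourlyCronTestExecutionMode.NODE_FAIL);
    }

    public void testAllFail() {
        templateHourlyCron(HourlyCronTestExecutionMode.ALL_FAIL);
    }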
Use of org.opensearch.ad.util.DiscoveryNodeFilterer in project anomaly-detection by opensearch-project.
Class DeleteTests, method StopDetectorResponseTemplate:
@SuppressWarnings("unchecked")
public void StopDetectorResponseTemplate(DetectorExecutionMode mode) throws Exception {
    // Stub the DeleteModelAction call: either fail or deliver the prepared response,
    // depending on the requested execution mode.
    doAnswer(invocation -> {
        Object[] args = invocation.getArguments();
        assertTrue(String.format("The size of args is %d. Its content is %s", args.length, Arrays.toString(args)), args.length >= 3);
        assertTrue(args[2] instanceof ActionListener);
        ActionListener<DeleteModelResponse> listener = (ActionListener<DeleteModelResponse>) args[2];
        assertTrue(listener != null);
        if (mode == DetectorExecutionMode.DELETE_MODEL_FAILURE) {
            listener.onFailure(new OpenSearchException(""));
        } else {
            listener.onResponse(response);
        }
        return null;
    }).when(client).execute(eq(DeleteModelAction.INSTANCE), any(), any());

    BulkByScrollResponse deleteByQueryResponse = mock(BulkByScrollResponse.class);
    when(deleteByQueryResponse.getDeleted()).thenReturn(10L);

    String detectorID = "123";
    // The transport action only needs a node filter instance, so a bare mock is sufficient here.
    DiscoveryNodeFilterer nodeFilter = mock(DiscoveryNodeFilterer.class);
    StopDetectorTransportAction action = new StopDetectorTransportAction(transportService, nodeFilter, actionFilters, client);

    StopDetectorRequest request = new StopDetectorRequest().adID(detectorID);
    PlainActionFuture<StopDetectorResponse> listener = new PlainActionFuture<>();
    action.doExecute(task, request, listener);

    StopDetectorResponse response = listener.actionGet();
    assertTrue(!response.success());
}
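As with the hourly-cron template, individual tests would invoke this method with the mode they want to exercise. A sketch of such callers (DELETE_MODEL_FAILURE comes from the code above; any other mode name is assumed):

    public void testDeleteModelFailure() throws Exception {
        StopDetectorResponseTemplate(DetectorExecutionMode.DELETE_MODEL_FAILURE);
    }

    public void testDeleteModelSuccess() throws Exception {
        StopDetectorResponseTemplate(DetectorExecutionMode.NORMAL); // mode name assumed
    }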
Use of org.opensearch.ad.util.DiscoveryNodeFilterer in project anomaly-detection by opensearch-project.
Class AnomalyDetectionIndicesTests, method setup:
@Before
public void setup() {
    settings = Settings.builder()
        .put("plugins.anomaly_detection.ad_result_history_rollover_period", TimeValue.timeValueHours(12))
        .put("plugins.anomaly_detection.ad_result_history_max_age", TimeValue.timeValueHours(24))
        .put("plugins.anomaly_detection.ad_result_history_max_docs", 10000L)
        .put("plugins.anomaly_detection.request_timeout", TimeValue.timeValueSeconds(10))
        .build();
    nodeFilter = new DiscoveryNodeFilterer(clusterService());
    indices = new AnomalyDetectionIndices(client(), clusterService(), client().threadPool(), settings, nodeFilter, AnomalyDetectorSettings.MAX_UPDATE_RETRY_TIMES);
}
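Here the filterer is built against the integration-test cluster's real ClusterService, so nothing is stubbed. Assuming the framework-started nodes are all hot data nodes (no warm box-type attribute), a test could sanity-check the filterer directly, for example:

    // Illustrative check: with no warm box-type nodes in the test cluster,
    // every data node should be considered eligible.
    assertEquals(
        clusterService().state().nodes().getDataNodes().size(),
        nodeFilter.getNumberOfEligibleDataNodes()
    );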