Use of org.opensearch.cluster.service.ClusterService in project anomaly-detection by opensearch-project.
The class PriorityCacheTests, method setUp.
@Override
@Before
public void setUp() throws Exception {
super.setUp();
checkpoint = mock(CheckpointDao.class);
modelManager = mock(ModelManager.class);
clusterService = mock(ClusterService.class);
// Back the mocked ClusterService with a real ClusterSettings instance that has the dynamic settings the cache reads registered.
ClusterSettings settings = new ClusterSettings(
    Settings.EMPTY,
    Collections.unmodifiableSet(
        new HashSet<>(Arrays.asList(AnomalyDetectorSettings.DEDICATED_CACHE_SIZE, AnomalyDetectorSettings.MODEL_MAX_SIZE_PERCENTAGE))
    )
);
when(clusterService.getClusterSettings()).thenReturn(settings);
dedicatedCacheSize = 1;
threadPool = mock(ThreadPool.class);
setUpADThreadPool(threadPool);
EntityCache cache = new PriorityCache(
    checkpoint,
    dedicatedCacheSize,
    AnomalyDetectorSettings.CHECKPOINT_TTL,
    AnomalyDetectorSettings.MAX_INACTIVE_ENTITIES,
    memoryTracker,
    AnomalyDetectorSettings.NUM_TREES,
    clock,
    clusterService,
    AnomalyDetectorSettings.HOURLY_MAINTENANCE,
    threadPool,
    checkpointWriteQueue,
    AnomalyDetectorSettings.MAINTENANCE_FREQ_CONSTANT
);
cacheProvider = new CacheProvider(cache).get();
when(memoryTracker.estimateTRCFModelSize(anyInt(), anyInt(), anyDouble(), anyInt(), anyBoolean())).thenReturn(memoryPerEntity);
when(memoryTracker.canAllocateReserved(anyLong())).thenReturn(true);
detector2 = mock(AnomalyDetector.class);
detectorId2 = "456";
when(detector2.getDetectorId()).thenReturn(detectorId2);
when(detector2.getDetectionIntervalDuration()).thenReturn(detectorDuration);
when(detector2.getDetectorIntervalInSeconds()).thenReturn(detectorDuration.getSeconds());
point = new double[] { 0.1 };
}
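The test backs the mocked ClusterService with a real ClusterSettings instance because the cache under test registers listeners for its dynamic settings. A minimal sketch of that pattern, assuming PriorityCache wires an update consumer for DEDICATED_CACHE_SIZE (the consumer body is illustrative, not taken from the snippet):
// addSettingsUpdateConsumer throws IllegalArgumentException if the setting was not
// registered in the ClusterSettings returned by the mocked ClusterService above.
clusterService
    .getClusterSettings()
    .addSettingsUpdateConsumer(AnomalyDetectorSettings.DEDICATED_CACHE_SIZE, newSize -> dedicatedCacheSize = newSize);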
Use of org.opensearch.cluster.service.ClusterService in project anomaly-detection by opensearch-project.
The class HourlyCronTests, method templateHourlyCron.
@SuppressWarnings("unchecked")
public void templateHourlyCron(HourlyCronTestExecutionMode mode) {
super.setUpLog4jForJUnit(HourlyCron.class);
ClusterService clusterService = mock(ClusterService.class);
ClusterState state = ClusterCreation.state(1);
when(clusterService.state()).thenReturn(state);
HashMap<String, String> ignoredAttributes = new HashMap<String, String>();
ignoredAttributes.put(CommonName.BOX_TYPE_KEY, CommonName.WARM_BOX_TYPE);
DiscoveryNodeFilterer nodeFilter = new DiscoveryNodeFilterer(clusterService);
Client client = mock(Client.class);
doAnswer(invocation -> {
Object[] args = invocation.getArguments();
assertTrue(String.format("The size of args is %d. Its content is %s", args.length, Arrays.toString(args)), args.length == 3);
assertTrue(args[2] instanceof ActionListener);
ActionListener<CronResponse> listener = (ActionListener<CronResponse>) args[2];
if (mode == HourlyCronTestExecutionMode.NODE_FAIL) {
listener.onResponse(
    new CronResponse(
        new ClusterName("test"),
        Collections.singletonList(new CronNodeResponse(state.nodes().getLocalNode())),
        Collections.singletonList(new FailedNodeException("foo0", "blah", new OpenSearchException("bar")))
    )
);
} else if (mode == HourlyCronTestExecutionMode.ALL_FAIL) {
listener.onFailure(new OpenSearchException("bar"));
} else {
CronNodeResponse nodeResponse = new CronNodeResponse(state.nodes().getLocalNode());
BytesStreamOutput nodeResponseOut = new BytesStreamOutput();
nodeResponseOut.setVersion(Version.CURRENT);
nodeResponse.writeTo(nodeResponseOut);
StreamInput siNode = nodeResponseOut.bytes().streamInput();
CronNodeResponse nodeResponseRead = new CronNodeResponse(siNode);
CronResponse response = new CronResponse(new ClusterName("test"), Collections.singletonList(nodeResponseRead), Collections.EMPTY_LIST);
BytesStreamOutput out = new BytesStreamOutput();
out.setVersion(Version.CURRENT);
response.writeTo(out);
StreamInput si = out.bytes().streamInput();
CronResponse responseRead = new CronResponse(si);
listener.onResponse(responseRead);
}
return null;
}).when(client).execute(eq(CronAction.INSTANCE), any(), any());
HourlyCron cron = new HourlyCron(client, nodeFilter);
cron.run();
Logger LOG = LogManager.getLogger(HourlyCron.class);
LOG.info(testAppender.messages);
if (mode == HourlyCronTestExecutionMode.NODE_FAIL) {
assertTrue(testAppender.containsMessage(HourlyCron.NODE_EXCEPTION_LOG_MSG));
} else if (mode == HourlyCronTestExecutionMode.ALL_FAIL) {
assertTrue(testAppender.containsMessage(HourlyCron.EXCEPTION_LOG_MSG));
} else {
assertTrue(testAppender.containsMessage(HourlyCron.SUCCEEDS_LOG_MSG));
}
super.tearDownLog4jForJUnit();
}
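The template is driven by the execution mode; the concrete test methods in the class are expected to be thin wrappers such as the following (the method names are illustrative, and the enum constant for the success path is not shown in the snippet above):
public void testNodeFail() {
    templateHourlyCron(HourlyCronTestExecutionMode.NODE_FAIL);
}

public void testAllFail() {
    templateHourlyCron(HourlyCronTestExecutionMode.ALL_FAIL);
}
// A third wrapper would exercise the success branch with the remaining enum constant.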
Use of org.opensearch.cluster.service.ClusterService in project anomaly-detection by opensearch-project.
The class MemoryTrackerTests, method setUp.
@Override
public void setUp() throws Exception {
super.setUp();
inputFeatures = 1;
rcfSampleSize = 256;
numberOfTrees = 30;
rcfTimeDecay = 0.2;
numMinSamples = 128;
shingleSize = 8;
dimension = inputFeatures * shingleSize;
jvmService = mock(JvmService.class);
JvmInfo info = mock(JvmInfo.class);
mem = mock(Mem.class);
// 800 MB is the limit
largeHeapSize = 800_000_000;
smallHeapSize = 1_000_000;
when(jvmService.info()).thenReturn(info);
when(info.getMem()).thenReturn(mem);
modelMaxSizePercentage = 0.1;
modelDesiredSizePercentage = 0.0002;
clusterService = mock(ClusterService.class);
modelMaxPercen = 0.1f;
Settings settings = Settings.builder().put(AnomalyDetectorSettings.MODEL_MAX_SIZE_PERCENTAGE.getKey(), modelMaxPercen).build();
ClusterSettings clusterSettings = new ClusterSettings(
    settings,
    Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AnomalyDetectorSettings.MODEL_MAX_SIZE_PERCENTAGE)))
);
when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
expectedRCFModelSize = 382784;
detectorId = "123";
trcf = ThresholdedRandomCutForest
    .builder()
    .dimensions(dimension)
    .sampleSize(rcfSampleSize)
    .numberOfTrees(numberOfTrees)
    .timeDecay(rcfTimeDecay)
    .outputAfter(numMinSamples)
    .initialAcceptFraction(numMinSamples * 1.0d / rcfSampleSize)
    .parallelExecutionEnabled(false)
    .compact(true)
    .precision(Precision.FLOAT_32)
    .boundingBoxCacheFraction(AnomalyDetectorSettings.REAL_TIME_BOUNDING_BOX_CACHE_RATIO)
    .shingleSize(shingleSize)
    .internalShinglingEnabled(true)
    .build();
detector = mock(AnomalyDetector.class);
when(detector.getEnabledFeatureIds()).thenReturn(Collections.singletonList("a"));
when(detector.getShingleSize()).thenReturn(1);
circuitBreaker = mock(ADCircuitBreakerService.class);
when(circuitBreaker.isOpen()).thenReturn(false);
}
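The setup above stops short of constructing the tracker itself. A minimal follow-up sketch, assuming a MemoryTracker constructor of the form (jvmService, modelMaxSizePercentage, modelDesiredSizePercentage, clusterService, circuitBreaker) and that heap size is read through Mem.getHeapMax(); both are assumptions, not taken from the snippet:
// Illustrative wiring of the mocks prepared in setUp; constructor shape is assumed.
when(mem.getHeapMax()).thenReturn(new ByteSizeValue(largeHeapSize));
MemoryTracker tracker = new MemoryTracker(jvmService, modelMaxSizePercentage, modelDesiredSizePercentage, clusterService, circuitBreaker);
// With an 800 MB heap and a 10% model budget, a ~380 KB model should fit comfortably.
assertTrue(tracker.canAllocateReserved(expectedRCFModelSize));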
Use of org.opensearch.cluster.service.ClusterService in project anomaly-detection by opensearch-project.
The class ParseUtils, method getDetector.
/**
* If filterByBackendRole is true, get the detector, check that the user has permission to access it,
* and then execute the function; otherwise, get the detector and execute the function.
* @param requestUser user from request
* @param detectorId detector id
* @param listener action listener
* @param function consumer function
* @param client client
* @param clusterService cluster service
* @param xContentRegistry XContent registry
* @param filterByBackendRole filter by backend role or not
*/
public static void getDetector(
    User requestUser,
    String detectorId,
    ActionListener listener,
    Consumer<AnomalyDetector> function,
    Client client,
    ClusterService clusterService,
    NamedXContentRegistry xContentRegistry,
    boolean filterByBackendRole
) {
if (clusterService.state().metadata().indices().containsKey(AnomalyDetector.ANOMALY_DETECTORS_INDEX)) {
GetRequest request = new GetRequest(AnomalyDetector.ANOMALY_DETECTORS_INDEX).id(detectorId);
client.get(request, ActionListener.wrap(
    response -> onGetAdResponse(response, requestUser, detectorId, listener, function, xContentRegistry, filterByBackendRole),
    exception -> {
logger.error("Failed to get anomaly detector: " + detectorId, exception);
listener.onFailure(exception);
}));
} else {
listener.onFailure(new IndexNotFoundException(AnomalyDetector.ANOMALY_DETECTORS_INDEX));
}
}
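A hedged call-site sketch for the helper above; the variable names and the consumer body are hypothetical, only the parameter order follows the signature:
// Hypothetical caller: fetch the detector and, once access checks pass, run the downstream step.
ParseUtils.getDetector(
    requestUser,                          // user taken from the transport request
    detectorId,
    listener,                             // notified on failure
    detector -> executeRequest(detector), // hypothetical downstream consumer
    client,
    clusterService,
    xContentRegistry,
    filterByBackendRole
);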
Use of org.opensearch.cluster.service.ClusterService in project anomaly-detection by opensearch-project.
The class IndexAnomalyDetectorActionHandlerTests, method setUp.
@SuppressWarnings("unchecked")
@Override
@Before
public void setUp() throws Exception {
super.setUp();
settings = Settings.EMPTY;
clusterService = mock(ClusterService.class);
clientMock = spy(new NodeClient(settings, threadPool));
transportService = mock(TransportService.class);
channel = mock(ActionListener.class);
anomalyDetectionIndices = mock(AnomalyDetectionIndices.class);
when(anomalyDetectionIndices.doesAnomalyDetectorIndexExist()).thenReturn(true);
detectorId = "123";
seqNo = 0L;
primaryTerm = 0L;
WriteRequest.RefreshPolicy refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE;
String field = "a";
detector = TestHelpers.randomAnomalyDetectorUsingCategoryFields(detectorId, Arrays.asList(field));
requestTimeout = new TimeValue(1000L);
maxSingleEntityAnomalyDetectors = 1000;
maxMultiEntityAnomalyDetectors = 10;
maxAnomalyFeatures = 5;
method = RestRequest.Method.POST;
adTaskManager = mock(ADTaskManager.class);
searchFeatureDao = mock(SearchFeatureDao.class);
handler = new IndexAnomalyDetectorActionHandler(
    clusterService,
    clientMock,
    transportService,
    channel,
    anomalyDetectionIndices,
    detectorId,
    seqNo,
    primaryTerm,
    refreshPolicy,
    detector,
    requestTimeout,
    maxSingleEntityAnomalyDetectors,
    maxMultiEntityAnomalyDetectors,
    maxAnomalyFeatures,
    method,
    xContentRegistry(),
    null,
    adTaskManager,
    searchFeatureDao
);
}
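With the handler constructed, individual tests would drive it and verify interactions on the mocked listener. A minimal sketch, assuming the handler exposes a start() entry point; the method name and the absence of assertions are simplifications, not taken from the snippet:
public void testStartHandler() throws Exception {
    handler.start();
    // Results and validation failures arrive asynchronously on "channel",
    // the mocked ActionListener supplied to the handler; real tests verify
    // onResponse/onFailure interactions here.
}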