Use of org.opensearch.ad.ml.ModelManager in project anomaly-detection by opensearch-project.
From the class ThresholdResultTests, method testExecutionException:
@SuppressWarnings("unchecked")
public void testExecutionException() {
    TransportService transportService = new TransportService(
        Settings.EMPTY, mock(Transport.class), null,
        TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()
    );
    ModelManager manager = mock(ModelManager.class);
    ThresholdResultTransportAction action = new ThresholdResultTransportAction(mock(ActionFilters.class), transportService, manager);
    doThrow(NullPointerException.class)
        .when(manager)
        .getThresholdingResult(any(String.class), any(String.class), anyDouble(), any(ActionListener.class));
    final PlainActionFuture<ThresholdResultResponse> future = new PlainActionFuture<>();
    ThresholdResultRequest request = new ThresholdResultRequest("123", "123-threshold", 2);
    action.doExecute(mock(Task.class), request, future);
    expectThrows(NullPointerException.class, () -> future.actionGet());
}
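The test above only exercises the failure path. A complementary sketch, with a hypothetical test name and assuming the same getThresholdingResult(String, String, double, ActionListener) signature and the three-argument ThresholdingResult constructor used elsewhere in this project, would stub a successful result by invoking the listener through Mockito's doAnswer:

@SuppressWarnings("unchecked")
public void testNormalResponse() {
    TransportService transportService = new TransportService(
        Settings.EMPTY, mock(Transport.class), null,
        TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()
    );
    ModelManager manager = mock(ModelManager.class);
    // Invoke the fourth argument (the ActionListener) with a canned result instead of throwing.
    doAnswer(invocation -> {
        ActionListener<ThresholdingResult> listener = (ActionListener<ThresholdingResult>) invocation.getArguments()[3];
        listener.onResponse(new ThresholdingResult(0, 1.0d, 0.2));
        return null;
    }).when(manager).getThresholdingResult(any(String.class), any(String.class), anyDouble(), any(ActionListener.class));
    ThresholdResultTransportAction action = new ThresholdResultTransportAction(mock(ActionFilters.class), transportService, manager);
    final PlainActionFuture<ThresholdResultResponse> future = new PlainActionFuture<>();
    action.doExecute(mock(Task.class), new ThresholdResultRequest("123", "123-threshold", 2), future);
    // actionGet() now completes normally; inspect the response fields as needed.
    future.actionGet();
}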
Use of org.opensearch.ad.ml.ModelManager in project anomaly-detection by opensearch-project.
From the class ADStatsNodesTransportActionTests, method setUp:
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    Client client = client();
    Clock clock = mock(Clock.class);
    Throttler throttler = new Throttler(clock);
    ThreadPool threadPool = mock(ThreadPool.class);
    IndexNameExpressionResolver indexNameResolver = mock(IndexNameExpressionResolver.class);
    IndexUtils indexUtils = new IndexUtils(
        client, new ClientUtil(Settings.EMPTY, client, throttler, threadPool), clusterService(), indexNameResolver
    );
    ModelManager modelManager = mock(ModelManager.class);
    CacheProvider cacheProvider = mock(CacheProvider.class);
    EntityCache cache = mock(EntityCache.class);
    when(cacheProvider.get()).thenReturn(cache);

    clusterStatName1 = "clusterStat1";
    clusterStatName2 = "clusterStat2";
    nodeStatName1 = "nodeStat1";
    nodeStatName2 = "nodeStat2";

    Settings settings = Settings.builder().put(MAX_MODEL_SIZE_PER_NODE.getKey(), 10).build();
    ClusterService clusterService = mock(ClusterService.class);
    ClusterSettings clusterSettings = new ClusterSettings(
        Settings.EMPTY, Collections.unmodifiableSet(new HashSet<>(Arrays.asList(MAX_MODEL_SIZE_PER_NODE)))
    );
    when(clusterService.getClusterSettings()).thenReturn(clusterSettings);

    statsMap = new HashMap<String, ADStat<?>>() {
        {
            put(nodeStatName1, new ADStat<>(false, new CounterSupplier()));
            put(nodeStatName2, new ADStat<>(false, new ModelsOnNodeSupplier(modelManager, cacheProvider, settings, clusterService)));
            put(clusterStatName1, new ADStat<>(true, new IndexStatusSupplier(indexUtils, "index1")));
            put(clusterStatName2, new ADStat<>(true, new IndexStatusSupplier(indexUtils, "index2")));
            put(InternalStatNames.JVM_HEAP_USAGE.getName(), new ADStat<>(true, new SettableSupplier()));
        }
    };
    adStats = new ADStats(statsMap);

    JvmService jvmService = mock(JvmService.class);
    JvmStats jvmStats = mock(JvmStats.class);
    JvmStats.Mem mem = mock(JvmStats.Mem.class);
    when(jvmService.stats()).thenReturn(jvmStats);
    when(jvmStats.getMem()).thenReturn(mem);
    when(mem.getHeapUsedPercent()).thenReturn(randomShort());

    adTaskManager = mock(ADTaskManager.class);
    action = new ADStatsNodesTransportAction(
        client().threadPool(), clusterService(), mock(TransportService.class), mock(ActionFilters.class),
        adStats, jvmService, adTaskManager
    );
}
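The key wiring here is the real ClusterSettings built around MAX_MODEL_SIZE_PER_NODE, which lets the ModelsOnNodeSupplier read the per-node model limit from the otherwise-mocked ClusterService. A minimal sketch of that wiring in isolation follows; the test name is hypothetical, the value 10 mirrors the builder call above, and it assumes MAX_MODEL_SIZE_PER_NODE is an integer Setting:

public void testMaxModelSizeSettingIsReadable() {
    Settings settings = Settings.builder().put(MAX_MODEL_SIZE_PER_NODE.getKey(), 10).build();
    ClusterSettings clusterSettings = new ClusterSettings(
        Settings.EMPTY, Collections.unmodifiableSet(new HashSet<>(Arrays.asList(MAX_MODEL_SIZE_PER_NODE)))
    );
    ClusterService clusterService = mock(ClusterService.class);
    when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
    // The supplier resolves the limit through the Setting API; 10 matches the builder call above.
    assertEquals(10, (int) MAX_MODEL_SIZE_PER_NODE.get(settings));
}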
Use of org.opensearch.ad.ml.ModelManager in project anomaly-detection by opensearch-project.
From the class HashRingTests, method setUp:
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    localNodeId = "localNode";
    localNode = createNode(localNodeId, "127.0.0.1", 9200, emptyMap());
    newNodeId = "newNode";
    newNode = createNode(newNodeId, "127.0.0.2", 9201, emptyMap());
    warmNodeId = "warmNode";
    warmNode = createNode(warmNodeId, "127.0.0.3", 9202, ImmutableMap.of(CommonName.BOX_TYPE_KEY, CommonName.WARM_BOX_TYPE));

    settings = Settings.builder().put(COOLDOWN_MINUTES.getKey(), TimeValue.timeValueSeconds(5)).build();
    ClusterSettings clusterSettings = clusterSetting(settings, COOLDOWN_MINUTES);
    clusterService = spy(new ClusterService(settings, clusterSettings, null));
    nodeFilter = spy(new DiscoveryNodeFilterer(clusterService));

    client = mock(Client.class);
    dataMigrator = mock(ADDataMigrator.class);
    clock = mock(Clock.class);
    when(clock.millis()).thenReturn(700000L);
    delta = mock(DiscoveryNodes.Delta.class);

    adminClient = mock(AdminClient.class);
    when(client.admin()).thenReturn(adminClient);
    clusterAdminClient = mock(ClusterAdminClient.class);
    when(adminClient.cluster()).thenReturn(clusterAdminClient);

    String modelId = "123_model_threshold";
    modelManager = mock(ModelManager.class);
    doAnswer(invocation -> {
        Set<String> res = new HashSet<>();
        res.add(modelId);
        return res;
    }).when(modelManager).getAllModelIds();

    hashRing = spy(new HashRing(nodeFilter, clock, settings, client, clusterService, dataMigrator, modelManager));
}
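The doAnswer block above only builds and returns a set, so an equivalent and more compact stub can use when/thenReturn. A minimal sketch with the same model id (use a mutable HashSet instead if the caller modifies the returned set):

// Equivalent stub: getAllModelIds() needs no side effects captured, so thenReturn is enough.
when(modelManager.getAllModelIds()).thenReturn(Collections.singleton("123_model_threshold"));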
Use of org.opensearch.ad.ml.ModelManager in project anomaly-detection by opensearch-project.
From the class PriorityCacheTests, method setUp:
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    checkpoint = mock(CheckpointDao.class);
    modelManager = mock(ModelManager.class);

    clusterService = mock(ClusterService.class);
    ClusterSettings settings = new ClusterSettings(
        Settings.EMPTY,
        Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
            AnomalyDetectorSettings.DEDICATED_CACHE_SIZE, AnomalyDetectorSettings.MODEL_MAX_SIZE_PERCENTAGE
        )))
    );
    when(clusterService.getClusterSettings()).thenReturn(settings);

    dedicatedCacheSize = 1;
    threadPool = mock(ThreadPool.class);
    setUpADThreadPool(threadPool);

    EntityCache cache = new PriorityCache(
        checkpoint, dedicatedCacheSize, AnomalyDetectorSettings.CHECKPOINT_TTL,
        AnomalyDetectorSettings.MAX_INACTIVE_ENTITIES, memoryTracker, AnomalyDetectorSettings.NUM_TREES,
        clock, clusterService, AnomalyDetectorSettings.HOURLY_MAINTENANCE,
        threadPool, checkpointWriteQueue, AnomalyDetectorSettings.MAINTENANCE_FREQ_CONSTANT
    );
    cacheProvider = new CacheProvider(cache).get();

    when(memoryTracker.estimateTRCFModelSize(anyInt(), anyInt(), anyDouble(), anyInt(), anyBoolean())).thenReturn(memoryPerEntity);
    when(memoryTracker.canAllocateReserved(anyLong())).thenReturn(true);

    detector2 = mock(AnomalyDetector.class);
    detectorId2 = "456";
    when(detector2.getDetectorId()).thenReturn(detectorId2);
    when(detector2.getDetectionIntervalDuration()).thenReturn(detectorDuration);
    when(detector2.getDetectorIntervalInSeconds()).thenReturn(detectorDuration.getSeconds());

    point = new double[] { 0.1 };
}
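Because the PriorityCache under test is real and only its collaborators are mocked, the memoryTracker stubs above decide whether entities can be hosted. A hypothetical variant of the same stubbing, using the exact calls shown in the setup, flips the reserved-allocation answer to drive the rejection and eviction paths instead:

// Hypothetical variant: refuse reserved allocation so hosting attempts must reject or evict.
when(memoryTracker.canAllocateReserved(anyLong())).thenReturn(false);
when(memoryTracker.estimateTRCFModelSize(anyInt(), anyInt(), anyDouble(), anyInt(), anyBoolean())).thenReturn(memoryPerEntity);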
Use of org.opensearch.ad.ml.ModelManager in project anomaly-detection by opensearch-project.
From the class CheckpointReadWorkerTests, method setUp:
@Override
public void setUp() throws Exception {
    super.setUp();
    clusterService = mock(ClusterService.class);
    clusterSettings = new ClusterSettings(
        Settings.EMPTY,
        Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
            AnomalyDetectorSettings.CHECKPOINT_READ_QUEUE_MAX_HEAP_PERCENT,
            AnomalyDetectorSettings.CHECKPOINT_READ_QUEUE_CONCURRENCY,
            AnomalyDetectorSettings.CHECKPOINT_READ_QUEUE_BATCH_SIZE
        )))
    );
    when(clusterService.getClusterSettings()).thenReturn(clusterSettings);

    state = MLUtil.randomModelState(new RandomModelStateConfig.Builder().fullModel(true).build());

    checkpoint = mock(CheckpointDao.class);
    Map.Entry<EntityModel, Instant> entry = new SimpleImmutableEntry<EntityModel, Instant>(state.getModel(), Instant.now());
    when(checkpoint.processGetResponse(any(), anyString())).thenReturn(Optional.of(entry));

    checkpointWriteQueue = mock(CheckpointWriteWorker.class);

    modelManager = mock(ModelManager.class);
    when(modelManager.processEntityCheckpoint(any(), any(), anyString(), anyString(), anyInt())).thenReturn(state);
    when(modelManager.score(any(), anyString(), any())).thenReturn(new ThresholdingResult(0, 1, 0.7));

    coldstartQueue = mock(EntityColdStartWorker.class);
    resultWriteQueue = mock(ResultWriteWorker.class);
    anomalyDetectionIndices = mock(AnomalyDetectionIndices.class);

    cacheProvider = mock(CacheProvider.class);
    entityCache = mock(EntityCache.class);
    when(cacheProvider.get()).thenReturn(entityCache);
    when(entityCache.hostIfPossible(any(), any())).thenReturn(true);

    // Integer.MAX_VALUE makes a huge heap
    worker = new CheckpointReadWorker(
        Integer.MAX_VALUE,
        AnomalyDetectorSettings.ENTITY_FEATURE_REQUEST_SIZE_IN_BYTES,
        AnomalyDetectorSettings.CHECKPOINT_READ_QUEUE_MAX_HEAP_PERCENT,
        clusterService, new Random(42), mock(ADCircuitBreakerService.class), threadPool, Settings.EMPTY,
        AnomalyDetectorSettings.MAX_QUEUED_TASKS_RATIO, clock,
        AnomalyDetectorSettings.MEDIUM_SEGMENT_PRUNE_RATIO, AnomalyDetectorSettings.LOW_SEGMENT_PRUNE_RATIO,
        AnomalyDetectorSettings.MAINTENANCE_FREQ_CONSTANT, AnomalyDetectorSettings.QUEUE_MAINTENANCE,
        modelManager, checkpoint, coldstartQueue, resultWriteQueue, nodeStateManager, anomalyDetectionIndices,
        cacheProvider, AnomalyDetectorSettings.HOURLY_MAINTENANCE, checkpointWriteQueue
    );

    request = new EntityFeatureRequest(Integer.MAX_VALUE, detectorId, RequestPriority.MEDIUM, entity, new double[] { 0 }, 0);
    request2 = new EntityFeatureRequest(Integer.MAX_VALUE, detectorId, RequestPriority.MEDIUM, entity2, new double[] { 0 }, 0);
    request3 = new EntityFeatureRequest(Integer.MAX_VALUE, detectorId, RequestPriority.MEDIUM, entity3, new double[] { 0 }, 0);
}
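The stubs above describe the happy path: the checkpoint exists, the model scores, and the cache accepts the entity. Hypothetical variants of the same stubs, using only the mocks and signatures already shown, let individual tests exercise the fallback branches:

// Missing checkpoint: the worker would typically fall back to cold start (coldstartQueue above).
when(checkpoint.processGetResponse(any(), anyString())).thenReturn(Optional.empty());
// Cache full: hostIfPossible refuses the model, exercising the non-hosting branch.
when(entityCache.hostIfPossible(any(), any())).thenReturn(false);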