Use of org.opensearch.ad.cluster.HashRing in project anomaly-detection by opensearch-project.
From the class ADTaskManagerTests, method testCleanADResultOfDeletedDetectorWithException.
public void testCleanADResultOfDeletedDetectorWithException() {
String detectorId = randomAlphaOfLength(5);
when(adTaskCacheManager.pollDeletedDetector()).thenReturn(detectorId);
doAnswer(invocation -> {
ActionListener<BulkByScrollResponse> listener = invocation.getArgument(2);
listener.onFailure(new RuntimeException("test"));
return null;
}).doAnswer(invocation -> {
ActionListener<BulkByScrollResponse> listener = invocation.getArgument(2);
BulkByScrollResponse deleteByQueryResponse = mock(BulkByScrollResponse.class);
listener.onResponse(deleteByQueryResponse);
return null;
}).when(client).execute(any(), any(), any());
settings = Settings
    .builder()
    .put(MAX_OLD_AD_TASK_DOCS_PER_DETECTOR.getKey(), 2)
    .put(BATCH_TASK_PIECE_INTERVAL_SECONDS.getKey(), 1)
    .put(REQUEST_TIMEOUT.getKey(), TimeValue.timeValueSeconds(10))
    .put(DELETE_AD_RESULT_WHEN_DELETE_DETECTOR.getKey(), true)
    .build();
clusterSettings = clusterSetting(
    settings,
    MAX_OLD_AD_TASK_DOCS_PER_DETECTOR,
    BATCH_TASK_PIECE_INTERVAL_SECONDS,
    REQUEST_TIMEOUT,
    DELETE_AD_RESULT_WHEN_DELETE_DETECTOR,
    MAX_BATCH_TASK_PER_NODE,
    MAX_RUNNING_ENTITIES_PER_DETECTOR_FOR_HISTORICAL_ANALYSIS
);
clusterService = spy(new ClusterService(settings, clusterSettings, null));
ADTaskManager adTaskManager = spy(new ADTaskManager(settings, clusterService, client, TestHelpers.xContentRegistry(), detectionIndices, nodeFilter, hashRing, adTaskCacheManager, threadPool));
adTaskManager.cleanADResultOfDeletedDetector();
verify(client, times(1)).execute(eq(DeleteByQueryAction.INSTANCE), any(), any());
verify(adTaskCacheManager, times(1)).addDeletedDetector(eq(detectorId));
adTaskManager.cleanADResultOfDeletedDetector();
verify(client, times(2)).execute(eq(DeleteByQueryAction.INSTANCE), any(), any());
verify(adTaskCacheManager, times(1)).addDeletedDetector(eq(detectorId));
}
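The chained doAnswer stubs above drive the two calls to client.execute: the first listener receives onFailure, so the detector id is added back to the cache, while the second receives onResponse, so it is not re-added. Below is a minimal, self-contained sketch of that failure-then-success stubbing pattern with Mockito; the AsyncService interface and its callback parameters are hypothetical stand-ins for the OpenSearch client, not part of the plugin.

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import java.util.function.Consumer;

public class ChainedAnswerSketch {
    // Hypothetical async API with the same (request, success callback, failure callback)
    // shape as client.execute(action, request, listener) in the test above.
    interface AsyncService {
        void execute(String request, Consumer<String> onResponse, Consumer<Exception> onFailure);
    }

    public static void main(String[] args) {
        AsyncService service = mock(AsyncService.class);
        doAnswer(invocation -> {
            // First call: simulate the delete-by-query failure.
            Consumer<Exception> onFailure = invocation.getArgument(2);
            onFailure.accept(new RuntimeException("test"));
            return null;
        }).doAnswer(invocation -> {
            // Subsequent calls: simulate a successful response.
            Consumer<String> onResponse = invocation.getArgument(1);
            onResponse.accept("deleted");
            return null;
        }).when(service).execute(any(), any(), any());

        service.execute("req-1", r -> {}, e -> System.out.println("first call failed: " + e.getMessage()));
        service.execute("req-2", r -> System.out.println("second call succeeded: " + r), e -> {});
    }
}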
Use of org.opensearch.ad.cluster.HashRing in project anomaly-detection by opensearch-project.
From the class RCFPollingTests, method setUp.
@Override
@Before
public void setUp() throws Exception {
super.setUp();
clusterService = mock(ClusterService.class);
hashRing = mock(HashRing.class);
transportAddress1 = new TransportAddress(new InetSocketAddress(InetAddress.getByName("1.2.3.4"), 9300));
manager = mock(ModelManager.class);
transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
future = new PlainActionFuture<>();
request = new RCFPollingRequest(detectorId);
model0Id = SingleStreamModelIdMapper.getRcfModelId(detectorId, 0);
doAnswer(invocation -> {
Object[] args = invocation.getArguments();
@SuppressWarnings("unchecked") ActionListener<Long> listener = (ActionListener<Long>) args[2];
listener.onResponse(totalUpdates);
return null;
}).when(manager).getTotalUpdates(any(String.class), any(String.class), any());
normalTransportInterceptor = new TransportInterceptor() {
@Override
public AsyncSender interceptSender(AsyncSender sender) {
return new AsyncSender() {
@Override
public <T extends TransportResponse> void sendRequest(Transport.Connection connection, String action, TransportRequest request, TransportRequestOptions options, TransportResponseHandler<T> handler) {
if (RCFPollingAction.NAME.equals(action)) {
sender.sendRequest(connection, action, request, options, rcfRollingHandler(handler));
} else {
sender.sendRequest(connection, action, request, options, handler);
}
}
};
}
};
failureTransportInterceptor = new TransportInterceptor() {
@Override
public AsyncSender interceptSender(AsyncSender sender) {
return new AsyncSender() {
@Override
public <T extends TransportResponse> void sendRequest(Transport.Connection connection, String action, TransportRequest request, TransportRequestOptions options, TransportResponseHandler<T> handler) {
if (RCFPollingAction.NAME.equals(action)) {
sender.sendRequest(connection, action, request, options, rcfFailureRollingHandler(handler));
} else {
sender.sendRequest(connection, action, request, options, handler);
}
}
};
}
};
}
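Both interceptors above follow the same decorator pattern: only requests for RCFPollingAction.NAME get their response handler wrapped (rcfRollingHandler on the success path, rcfFailureRollingHandler on the failure path), while every other action passes through untouched. The following framework-free sketch shows the same handler-swapping idea; the Sender interface, the "poll/action" string, and the decorating lambda are illustrative stand-ins, not OpenSearch APIs.

import java.util.function.BiConsumer;
import java.util.function.UnaryOperator;

public class InterceptorSketch {
    // Hypothetical stand-in for TransportInterceptor.AsyncSender: the handler receives
    // either a response or an exception.
    interface Sender {
        void send(String action, String request, BiConsumer<String, Exception> handler);
    }

    // Decorate the response handler only for the targeted action, mirroring how
    // rcfRollingHandler(handler) wraps the original handler above.
    static Sender intercept(Sender delegate, String targetAction,
                            UnaryOperator<BiConsumer<String, Exception>> decorate) {
        return (action, request, handler) -> {
            if (targetAction.equals(action)) {
                delegate.send(action, request, decorate.apply(handler));
            } else {
                delegate.send(action, request, handler);
            }
        };
    }

    public static void main(String[] args) {
        Sender real = (action, request, handler) -> handler.accept("reply to " + request, null);
        // "poll/action" is a placeholder for RCFPollingAction.NAME.
        Sender intercepted = intercept(real, "poll/action",
            original -> (response, error) -> original.accept("decorated: " + response, error));
        intercepted.send("poll/action", "rcf-poll", (r, e) -> System.out.println(r));
        intercepted.send("other/action", "noop", (r, e) -> System.out.println(r));
    }
}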
Use of org.opensearch.ad.cluster.HashRing in project anomaly-detection by opensearch-project.
From the class AnomalyDetectorPlugin, method createComponents.
@Override
public Collection<Object> createComponents(
    Client client,
    ClusterService clusterService,
    ThreadPool threadPool,
    ResourceWatcherService resourceWatcherService,
    ScriptService scriptService,
    NamedXContentRegistry xContentRegistry,
    Environment environment,
    NodeEnvironment nodeEnvironment,
    NamedWriteableRegistry namedWriteableRegistry,
    IndexNameExpressionResolver indexNameExpressionResolver,
    Supplier<RepositoriesService> repositoriesServiceSupplier
) {
EnabledSetting.getInstance().init(clusterService);
NumericSetting.getInstance().init(clusterService);
this.client = client;
this.threadPool = threadPool;
Settings settings = environment.settings();
Throttler throttler = new Throttler(getClock());
this.clientUtil = new ClientUtil(settings, client, throttler, threadPool);
this.indexUtils = new IndexUtils(client, clientUtil, clusterService, indexNameExpressionResolver);
this.nodeFilter = new DiscoveryNodeFilterer(clusterService);
this.anomalyDetectionIndices = new AnomalyDetectionIndices(client, clusterService, threadPool, settings, nodeFilter, AnomalyDetectorSettings.MAX_UPDATE_RETRY_TIMES);
this.clusterService = clusterService;
SingleFeatureLinearUniformInterpolator singleFeatureLinearUniformInterpolator = new IntegerSensitiveSingleFeatureLinearUniformInterpolator();
Interpolator interpolator = new LinearUniformInterpolator(singleFeatureLinearUniformInterpolator);
SearchFeatureDao searchFeatureDao = new SearchFeatureDao(client, xContentRegistry, interpolator, clientUtil, settings, clusterService, AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE);
JvmService jvmService = new JvmService(environment.settings());
RandomCutForestMapper mapper = new RandomCutForestMapper();
mapper.setSaveExecutorContextEnabled(true);
mapper.setSaveTreeStateEnabled(true);
mapper.setPartialTreeStateEnabled(true);
V1JsonToV2StateConverter converter = new V1JsonToV2StateConverter();
double modelMaxSizePercent = AnomalyDetectorSettings.MODEL_MAX_SIZE_PERCENTAGE.get(settings);
ADCircuitBreakerService adCircuitBreakerService = new ADCircuitBreakerService(jvmService).init();
MemoryTracker memoryTracker = new MemoryTracker(jvmService, modelMaxSizePercent, AnomalyDetectorSettings.DESIRED_MODEL_SIZE_PERCENTAGE, clusterService, adCircuitBreakerService);
NodeStateManager stateManager = new NodeStateManager(client, xContentRegistry, settings, clientUtil, getClock(), AnomalyDetectorSettings.HOURLY_MAINTENANCE, clusterService);
FeatureManager featureManager = new FeatureManager(
    searchFeatureDao, interpolator, getClock(), AnomalyDetectorSettings.MAX_TRAIN_SAMPLE,
    AnomalyDetectorSettings.MAX_SAMPLE_STRIDE, AnomalyDetectorSettings.TRAIN_SAMPLE_TIME_RANGE_IN_HOURS,
    AnomalyDetectorSettings.MIN_TRAIN_SAMPLES, AnomalyDetectorSettings.MAX_SHINGLE_PROPORTION_MISSING,
    AnomalyDetectorSettings.MAX_IMPUTATION_NEIGHBOR_DISTANCE, AnomalyDetectorSettings.PREVIEW_SAMPLE_RATE,
    AnomalyDetectorSettings.MAX_PREVIEW_SAMPLES, AnomalyDetectorSettings.HOURLY_MAINTENANCE, threadPool,
    AD_THREAD_POOL_NAME
);
long heapSizeBytes = JvmInfo.jvmInfo().getMem().getHeapMax().getBytes();
serializeRCFBufferPool = AccessController.doPrivileged(new PrivilegedAction<GenericObjectPool<LinkedBuffer>>() {
@Override
public GenericObjectPool<LinkedBuffer> run() {
return new GenericObjectPool<>(new BasePooledObjectFactory<LinkedBuffer>() {
@Override
public LinkedBuffer create() throws Exception {
return LinkedBuffer.allocate(AnomalyDetectorSettings.SERIALIZATION_BUFFER_BYTES);
}
@Override
public PooledObject<LinkedBuffer> wrap(LinkedBuffer obj) {
return new DefaultPooledObject<>(obj);
}
});
}
});
serializeRCFBufferPool.setMaxTotal(AnomalyDetectorSettings.MAX_TOTAL_RCF_SERIALIZATION_BUFFERS);
serializeRCFBufferPool.setMaxIdle(AnomalyDetectorSettings.MAX_TOTAL_RCF_SERIALIZATION_BUFFERS);
serializeRCFBufferPool.setMinIdle(0);
serializeRCFBufferPool.setBlockWhenExhausted(false);
serializeRCFBufferPool.setTimeBetweenEvictionRuns(AnomalyDetectorSettings.HOURLY_MAINTENANCE);
CheckpointDao checkpoint = new CheckpointDao(
    client, clientUtil, CommonName.CHECKPOINT_INDEX_NAME, gson, mapper, converter,
    new ThresholdedRandomCutForestMapper(),
    AccessController.doPrivileged(
        (PrivilegedAction<Schema<ThresholdedRandomCutForestState>>) () -> RuntimeSchema.getSchema(ThresholdedRandomCutForestState.class)
    ),
    HybridThresholdingModel.class, anomalyDetectionIndices, AnomalyDetectorSettings.MAX_CHECKPOINT_BYTES,
    serializeRCFBufferPool, AnomalyDetectorSettings.SERIALIZATION_BUFFER_BYTES,
    1 - AnomalyDetectorSettings.THRESHOLD_MIN_PVALUE
);
Random random = new Random(42);
CheckpointWriteWorker checkpointWriteQueue = new CheckpointWriteWorker(
    heapSizeBytes, AnomalyDetectorSettings.CHECKPOINT_WRITE_QUEUE_SIZE_IN_BYTES,
    AnomalyDetectorSettings.CHECKPOINT_WRITE_QUEUE_MAX_HEAP_PERCENT, clusterService, random, adCircuitBreakerService,
    threadPool, settings, AnomalyDetectorSettings.MAX_QUEUED_TASKS_RATIO, getClock(),
    AnomalyDetectorSettings.MEDIUM_SEGMENT_PRUNE_RATIO, AnomalyDetectorSettings.LOW_SEGMENT_PRUNE_RATIO,
    AnomalyDetectorSettings.MAINTENANCE_FREQ_CONSTANT, AnomalyDetectorSettings.QUEUE_MAINTENANCE, checkpoint,
    CommonName.CHECKPOINT_INDEX_NAME, AnomalyDetectorSettings.HOURLY_MAINTENANCE, stateManager,
    AnomalyDetectorSettings.HOURLY_MAINTENANCE
);
EntityCache cache = new PriorityCache(
    checkpoint, AnomalyDetectorSettings.DEDICATED_CACHE_SIZE.get(settings), AnomalyDetectorSettings.CHECKPOINT_TTL,
    AnomalyDetectorSettings.MAX_INACTIVE_ENTITIES, memoryTracker, AnomalyDetectorSettings.NUM_TREES, getClock(),
    clusterService, AnomalyDetectorSettings.HOURLY_MAINTENANCE, threadPool, checkpointWriteQueue,
    AnomalyDetectorSettings.MAINTENANCE_FREQ_CONSTANT
);
CacheProvider cacheProvider = new CacheProvider(cache);
EntityColdStarter entityColdStarter = new EntityColdStarter(
    getClock(), threadPool, stateManager, AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE,
    AnomalyDetectorSettings.NUM_TREES, AnomalyDetectorSettings.TIME_DECAY, AnomalyDetectorSettings.NUM_MIN_SAMPLES,
    AnomalyDetectorSettings.MAX_SAMPLE_STRIDE, AnomalyDetectorSettings.MAX_TRAIN_SAMPLE, interpolator,
    searchFeatureDao, AnomalyDetectorSettings.THRESHOLD_MIN_PVALUE, featureManager, settings,
    AnomalyDetectorSettings.HOURLY_MAINTENANCE, checkpointWriteQueue, AnomalyDetectorSettings.MAX_COLD_START_ROUNDS
);
EntityColdStartWorker coldstartQueue = new EntityColdStartWorker(
    heapSizeBytes, AnomalyDetectorSettings.ENTITY_REQUEST_SIZE_IN_BYTES,
    AnomalyDetectorSettings.ENTITY_COLD_START_QUEUE_MAX_HEAP_PERCENT, clusterService, random, adCircuitBreakerService,
    threadPool, settings, AnomalyDetectorSettings.MAX_QUEUED_TASKS_RATIO, getClock(),
    AnomalyDetectorSettings.MEDIUM_SEGMENT_PRUNE_RATIO, AnomalyDetectorSettings.LOW_SEGMENT_PRUNE_RATIO,
    AnomalyDetectorSettings.MAINTENANCE_FREQ_CONSTANT, AnomalyDetectorSettings.QUEUE_MAINTENANCE, entityColdStarter,
    AnomalyDetectorSettings.HOURLY_MAINTENANCE, stateManager
);
ModelManager modelManager = new ModelManager(
    checkpoint, getClock(), AnomalyDetectorSettings.NUM_TREES, AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE,
    AnomalyDetectorSettings.TIME_DECAY, AnomalyDetectorSettings.NUM_MIN_SAMPLES,
    AnomalyDetectorSettings.THRESHOLD_MIN_PVALUE, AnomalyDetectorSettings.MIN_PREVIEW_SIZE,
    AnomalyDetectorSettings.HOURLY_MAINTENANCE, AnomalyDetectorSettings.HOURLY_MAINTENANCE, entityColdStarter,
    featureManager, memoryTracker
);
MultiEntityResultHandler multiEntityResultHandler = new MultiEntityResultHandler(client, settings, threadPool, anomalyDetectionIndices, this.clientUtil, this.indexUtils, clusterService);
ResultWriteWorker resultWriteQueue = new ResultWriteWorker(
    heapSizeBytes, AnomalyDetectorSettings.RESULT_WRITE_QUEUE_SIZE_IN_BYTES,
    AnomalyDetectorSettings.RESULT_WRITE_QUEUE_MAX_HEAP_PERCENT, clusterService, random, adCircuitBreakerService,
    threadPool, settings, AnomalyDetectorSettings.MAX_QUEUED_TASKS_RATIO, getClock(),
    AnomalyDetectorSettings.MEDIUM_SEGMENT_PRUNE_RATIO, AnomalyDetectorSettings.LOW_SEGMENT_PRUNE_RATIO,
    AnomalyDetectorSettings.MAINTENANCE_FREQ_CONSTANT, AnomalyDetectorSettings.QUEUE_MAINTENANCE,
    multiEntityResultHandler, xContentRegistry, stateManager, AnomalyDetectorSettings.HOURLY_MAINTENANCE
);
CheckpointReadWorker checkpointReadQueue = new CheckpointReadWorker(
    heapSizeBytes, AnomalyDetectorSettings.ENTITY_FEATURE_REQUEST_SIZE_IN_BYTES,
    AnomalyDetectorSettings.CHECKPOINT_READ_QUEUE_MAX_HEAP_PERCENT, clusterService, random, adCircuitBreakerService,
    threadPool, settings, AnomalyDetectorSettings.MAX_QUEUED_TASKS_RATIO, getClock(),
    AnomalyDetectorSettings.MEDIUM_SEGMENT_PRUNE_RATIO, AnomalyDetectorSettings.LOW_SEGMENT_PRUNE_RATIO,
    AnomalyDetectorSettings.MAINTENANCE_FREQ_CONSTANT, AnomalyDetectorSettings.QUEUE_MAINTENANCE, modelManager,
    checkpoint, coldstartQueue, resultWriteQueue, stateManager, anomalyDetectionIndices, cacheProvider,
    AnomalyDetectorSettings.HOURLY_MAINTENANCE, checkpointWriteQueue
);
ColdEntityWorker coldEntityQueue = new ColdEntityWorker(
    heapSizeBytes, AnomalyDetectorSettings.ENTITY_FEATURE_REQUEST_SIZE_IN_BYTES,
    AnomalyDetectorSettings.COLD_ENTITY_QUEUE_MAX_HEAP_PERCENT, clusterService, random, adCircuitBreakerService,
    threadPool, settings, AnomalyDetectorSettings.MAX_QUEUED_TASKS_RATIO, getClock(),
    AnomalyDetectorSettings.MEDIUM_SEGMENT_PRUNE_RATIO, AnomalyDetectorSettings.LOW_SEGMENT_PRUNE_RATIO,
    AnomalyDetectorSettings.MAINTENANCE_FREQ_CONSTANT, checkpointReadQueue, AnomalyDetectorSettings.HOURLY_MAINTENANCE,
    stateManager
);
ADDataMigrator dataMigrator = new ADDataMigrator(client, clusterService, xContentRegistry, anomalyDetectionIndices);
HashRing hashRing = new HashRing(nodeFilter, getClock(), settings, client, clusterService, dataMigrator, modelManager);
anomalyDetectorRunner = new AnomalyDetectorRunner(modelManager, featureManager, AnomalyDetectorSettings.MAX_PREVIEW_RESULTS);
Map<String, ADStat<?>> stats = ImmutableMap
    .<String, ADStat<?>>builder()
    .put(StatNames.AD_EXECUTE_REQUEST_COUNT.getName(), new ADStat<>(false, new CounterSupplier()))
    .put(StatNames.AD_EXECUTE_FAIL_COUNT.getName(), new ADStat<>(false, new CounterSupplier()))
    .put(StatNames.AD_HC_EXECUTE_REQUEST_COUNT.getName(), new ADStat<>(false, new CounterSupplier()))
    .put(StatNames.AD_HC_EXECUTE_FAIL_COUNT.getName(), new ADStat<>(false, new CounterSupplier()))
    .put(StatNames.MODEL_INFORMATION.getName(), new ADStat<>(false, new ModelsOnNodeSupplier(modelManager, cacheProvider, settings, clusterService)))
    .put(StatNames.ANOMALY_DETECTORS_INDEX_STATUS.getName(), new ADStat<>(true, new IndexStatusSupplier(indexUtils, AnomalyDetector.ANOMALY_DETECTORS_INDEX)))
    .put(StatNames.ANOMALY_RESULTS_INDEX_STATUS.getName(), new ADStat<>(true, new IndexStatusSupplier(indexUtils, CommonName.ANOMALY_RESULT_INDEX_ALIAS)))
    .put(StatNames.MODELS_CHECKPOINT_INDEX_STATUS.getName(), new ADStat<>(true, new IndexStatusSupplier(indexUtils, CommonName.CHECKPOINT_INDEX_NAME)))
    .put(StatNames.ANOMALY_DETECTION_JOB_INDEX_STATUS.getName(), new ADStat<>(true, new IndexStatusSupplier(indexUtils, AnomalyDetectorJob.ANOMALY_DETECTOR_JOB_INDEX)))
    .put(StatNames.ANOMALY_DETECTION_STATE_STATUS.getName(), new ADStat<>(true, new IndexStatusSupplier(indexUtils, CommonName.DETECTION_STATE_INDEX)))
    .put(StatNames.DETECTOR_COUNT.getName(), new ADStat<>(true, new SettableSupplier()))
    .put(StatNames.SINGLE_ENTITY_DETECTOR_COUNT.getName(), new ADStat<>(true, new SettableSupplier()))
    .put(StatNames.MULTI_ENTITY_DETECTOR_COUNT.getName(), new ADStat<>(true, new SettableSupplier()))
    .put(StatNames.AD_EXECUTING_BATCH_TASK_COUNT.getName(), new ADStat<>(false, new CounterSupplier()))
    .put(StatNames.AD_CANCELED_BATCH_TASK_COUNT.getName(), new ADStat<>(false, new CounterSupplier()))
    .put(StatNames.AD_TOTAL_BATCH_TASK_EXECUTION_COUNT.getName(), new ADStat<>(false, new CounterSupplier()))
    .put(StatNames.AD_BATCH_TASK_FAILURE_COUNT.getName(), new ADStat<>(false, new CounterSupplier()))
    .put(StatNames.MODEL_COUNT.getName(), new ADStat<>(false, new ModelsOnNodeCountSupplier(modelManager, cacheProvider)))
    .build();
adStats = new ADStats(stats);
adTaskCacheManager = new ADTaskCacheManager(settings, clusterService, memoryTracker);
adTaskManager = new ADTaskManager(settings, clusterService, client, xContentRegistry, anomalyDetectionIndices, nodeFilter, hashRing, adTaskCacheManager, threadPool);
AnomalyResultBulkIndexHandler anomalyResultBulkIndexHandler = new AnomalyResultBulkIndexHandler(client, settings, threadPool, this.clientUtil, this.indexUtils, clusterService, anomalyDetectionIndices);
adBatchTaskRunner = new ADBatchTaskRunner(
    settings, threadPool, clusterService, client, adCircuitBreakerService, featureManager, adTaskManager,
    anomalyDetectionIndices, adStats, anomalyResultBulkIndexHandler, adTaskCacheManager, searchFeatureDao,
    hashRing, modelManager
);
ADSearchHandler adSearchHandler = new ADSearchHandler(settings, clusterService, client);
// transport action handler constructors
return ImmutableList
    .of(
        anomalyDetectionIndices, anomalyDetectorRunner, searchFeatureDao, singleFeatureLinearUniformInterpolator,
        interpolator, gson, jvmService, hashRing, featureManager, modelManager, stateManager,
        new ADClusterEventListener(clusterService, hashRing), adCircuitBreakerService, adStats,
        new MasterEventListener(clusterService, threadPool, client, getClock(), clientUtil, nodeFilter), nodeFilter,
        multiEntityResultHandler, checkpoint, cacheProvider, adTaskManager, adBatchTaskRunner, adSearchHandler,
        coldstartQueue, resultWriteQueue, checkpointReadQueue, checkpointWriteQueue, coldEntityQueue,
        entityColdStarter, adTaskCacheManager
    );
}
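createComponents builds a single HashRing from the node filter, clock, settings, client, cluster service, data migrator, and model manager, then hands the same instance to ADTaskManager, ADBatchTaskRunner, ADClusterEventListener, and the returned component list, presumably so transport actions such as AnomalyResultTransportAction (constructed with a HashRing in the tests below) can receive it. The following is a hedged sketch of the kind of decision a ring consumer makes; the lookup method name mirrors the stubs in the test classes below, while the local-fallback behavior and the shouldRunLocally helper are assumptions, not the plugin's actual call sites.

import java.util.Optional;

import org.opensearch.ad.cluster.HashRing;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.service.ClusterService;

public final class HashRingUsageSketch {
    // Returns true when the ring says this node owns the given RCF model id; when no owner
    // is known yet we fall back to local execution here, which may differ from production.
    static boolean shouldRunLocally(HashRing hashRing, ClusterService clusterService, String rcfModelId) {
        Optional<DiscoveryNode> owner = hashRing.getOwningNodeWithSameLocalAdVersionForRealtimeAD(rcfModelId);
        return owner
            .map(node -> node.getId().equals(clusterService.localNode().getId()))
            .orElse(true);
    }
}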
Use of org.opensearch.ad.cluster.HashRing in project anomaly-detection by opensearch-project.
From the class AnomalyResultTests, method setUp.
@SuppressWarnings("unchecked")
@Override
@Before
public void setUp() throws Exception {
super.setUp();
super.setUpLog4jForJUnit(AnomalyResultTransportAction.class);
setupTestNodes(AnomalyDetectorSettings.MAX_ENTITIES_PER_QUERY, AnomalyDetectorSettings.PAGE_SIZE);
transportService = testNodes[0].transportService;
clusterService = testNodes[0].clusterService;
settings = clusterService.getSettings();
stateManager = mock(NodeStateManager.class);
when(stateManager.isMuted(any(String.class), any(String.class))).thenReturn(false);
when(stateManager.markColdStartRunning(anyString())).thenReturn(() -> {
});
detector = mock(AnomalyDetector.class);
featureId = "xyz";
// we have one feature
when(detector.getEnabledFeatureIds()).thenReturn(Collections.singletonList(featureId));
featureName = "abc";
when(detector.getEnabledFeatureNames()).thenReturn(Collections.singletonList(featureName));
List<String> userIndex = new ArrayList<>();
userIndex.add("test*");
when(detector.getIndices()).thenReturn(userIndex);
adID = "123";
when(detector.getDetectorId()).thenReturn(adID);
when(detector.getCategoryField()).thenReturn(null);
doAnswer(invocation -> {
ActionListener<Optional<AnomalyDetector>> listener = invocation.getArgument(1);
listener.onResponse(Optional.of(detector));
return null;
}).when(stateManager).getAnomalyDetector(any(String.class), any(ActionListener.class));
when(detector.getDetectorIntervalInMinutes()).thenReturn(1L);
hashRing = mock(HashRing.class);
Optional<DiscoveryNode> localNode = Optional.of(clusterService.state().nodes().getLocalNode());
when(hashRing.getOwningNodeWithSameLocalAdVersionForRealtimeAD(any(String.class))).thenReturn(localNode);
doReturn(localNode).when(hashRing).getNodeByAddress(any());
featureQuery = mock(FeatureManager.class);
doAnswer(invocation -> {
ActionListener<SinglePointFeatures> listener = invocation.getArgument(3);
listener.onResponse(new SinglePointFeatures(Optional.of(new double[] { 0.0d }), Optional.of(new double[] { 0 })));
return null;
}).when(featureQuery).getCurrentFeatures(any(AnomalyDetector.class), anyLong(), anyLong(), any(ActionListener.class));
double rcfScore = 0.2;
confidence = 0.91;
anomalyGrade = 0.5;
normalModelManager = mock(ModelManager.class);
long totalUpdates = 1440;
int relativeIndex = 0;
double[] currentTimeAttribution = new double[] { 0.5, 0.5 };
double[] pastValues = new double[] { 123, 456 };
double[][] expectedValuesList = new double[][] { new double[] { 789, 12 } };
double[] likelihood = new double[] { 1 };
double threshold = 1.1d;
doAnswer(invocation -> {
ActionListener<ThresholdingResult> listener = invocation.getArgument(3);
listener.onResponse(new ThresholdingResult(anomalyGrade, confidence, rcfScore, totalUpdates, relativeIndex, currentTimeAttribution, pastValues, expectedValuesList, likelihood, threshold, 30));
return null;
}).when(normalModelManager).getTRcfResult(any(String.class), any(String.class), any(double[].class), any(ActionListener.class));
doAnswer(invocation -> {
ActionListener<ThresholdingResult> listener = invocation.getArgument(3);
listener.onResponse(new ThresholdingResult(0, 1.0d, rcfScore));
return null;
}).when(normalModelManager).getThresholdingResult(any(String.class), any(String.class), anyDouble(), any(ActionListener.class));
// "123-threshold";
thresholdModelID = SingleStreamModelIdMapper.getThresholdModelId(adID);
// when(normalModelPartitioner.getThresholdModelId(any(String.class))).thenReturn(thresholdModelID);
adCircuitBreakerService = mock(ADCircuitBreakerService.class);
when(adCircuitBreakerService.isOpen()).thenReturn(false);
ThreadPool threadPool = mock(ThreadPool.class);
client = mock(Client.class);
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
when(client.threadPool()).thenReturn(threadPool);
when(client.threadPool().getThreadContext()).thenReturn(threadContext);
doAnswer(invocation -> {
Object[] args = invocation.getArguments();
assertTrue(String.format("The size of args is %d. Its content is %s", args.length, Arrays.toString(args)), args.length >= 2);
IndexRequest request = null;
ActionListener<IndexResponse> listener = null;
if (args[0] instanceof IndexRequest) {
request = (IndexRequest) args[0];
}
if (args[1] instanceof ActionListener) {
listener = (ActionListener<IndexResponse>) args[1];
}
assertTrue(request != null && listener != null);
ShardId shardId = new ShardId(new Index(CommonName.ANOMALY_RESULT_INDEX_ALIAS, randomAlphaOfLength(10)), 0);
listener.onResponse(new IndexResponse(shardId, randomAlphaOfLength(10), request.id(), 1, 1, 1, true));
return null;
}).when(client).index(any(), any());
indexNameResolver = new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY));
Map<String, ADStat<?>> statsMap = new HashMap<String, ADStat<?>>() {
{
put(StatNames.AD_EXECUTE_REQUEST_COUNT.getName(), new ADStat<>(false, new CounterSupplier()));
put(StatNames.AD_EXECUTE_FAIL_COUNT.getName(), new ADStat<>(false, new CounterSupplier()));
put(StatNames.AD_HC_EXECUTE_REQUEST_COUNT.getName(), new ADStat<>(false, new CounterSupplier()));
put(StatNames.AD_HC_EXECUTE_FAIL_COUNT.getName(), new ADStat<>(false, new CounterSupplier()));
}
};
adStats = new ADStats(statsMap);
doAnswer(invocation -> {
Object[] args = invocation.getArguments();
GetRequest request = (GetRequest) args[0];
ActionListener<GetResponse> listener = (ActionListener<GetResponse>) args[1];
if (request.index().equals(CommonName.DETECTION_STATE_INDEX)) {
DetectorInternalState.Builder result = new DetectorInternalState.Builder().lastUpdateTime(Instant.now());
listener.onResponse(TestHelpers.createGetResponse(result.build(), detector.getDetectorId(), CommonName.DETECTION_STATE_INDEX));
}
return null;
}).when(client).get(any(), any());
adTaskManager = mock(ADTaskManager.class);
doAnswer(invocation -> {
ActionListener<Boolean> listener = invocation.getArgument(3);
listener.onResponse(true);
return null;
}).when(adTaskManager).initRealtimeTaskCacheAndCleanupStaleCache(anyString(), any(AnomalyDetector.class), any(TransportService.class), any(ActionListener.class));
}
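The two hash-ring stubs in this setUp (getOwningNodeWithSameLocalAdVersionForRealtimeAD and getNodeByAddress) both resolve to the local node, so the transport action exercises the in-process RCF and thresholding paths instead of forwarding requests to another node. If several tests need that wiring, it could be pulled into a small helper; the sketch below reuses only the calls already shown above, and the HashRingTestStubs class name is hypothetical.

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.when;

import java.util.Optional;

import org.opensearch.ad.cluster.HashRing;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.service.ClusterService;

final class HashRingTestStubs {
    // Route every ownership lookup to the local node so model calls stay in-process.
    static void stubToLocalNode(HashRing hashRing, ClusterService clusterService) {
        Optional<DiscoveryNode> localNode = Optional.of(clusterService.state().nodes().getLocalNode());
        when(hashRing.getOwningNodeWithSameLocalAdVersionForRealtimeAD(any(String.class))).thenReturn(localNode);
        doReturn(localNode).when(hashRing).getNodeByAddress(any());
    }
}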
Use of org.opensearch.ad.cluster.HashRing in project anomaly-detection by opensearch-project.
From the class MultiEntityResultTests, method setUp.
@SuppressWarnings({ "serial", "unchecked" })
@Override
@Before
public void setUp() throws Exception {
super.setUp();
now = Instant.now();
clock = mock(Clock.class);
when(clock.instant()).thenReturn(now);
detectorId = "123";
String categoryField = "a";
detector = TestHelpers.randomAnomalyDetectorUsingCategoryFields(detectorId, Collections.singletonList(categoryField));
stateManager = mock(NodeStateManager.class);
// make sure parameters are not null, otherwise this mock won't get invoked
doAnswer(invocation -> {
ActionListener<Optional<AnomalyDetector>> listener = invocation.getArgument(1);
listener.onResponse(Optional.of(detector));
return null;
}).when(stateManager).getAnomalyDetector(anyString(), any(ActionListener.class));
settings = Settings.builder().put(AnomalyDetectorSettings.COOLDOWN_MINUTES.getKey(), TimeValue.timeValueMinutes(5)).build();
// make sure the end time is large enough compared to Clock.systemUTC().millis() so that PageIterator.hasNext() passes
request = new AnomalyResultRequest(detectorId, 100, Clock.systemUTC().millis() + 100_000);
transportService = mock(TransportService.class);
client = mock(Client.class);
ThreadContext threadContext = new ThreadContext(settings);
mockThreadPool = mock(ThreadPool.class);
setUpADThreadPool(mockThreadPool);
when(client.threadPool()).thenReturn(mockThreadPool);
when(mockThreadPool.getThreadContext()).thenReturn(threadContext);
featureQuery = mock(FeatureManager.class);
normalModelManager = mock(ModelManager.class);
hashRing = mock(HashRing.class);
Set<Setting<?>> anomalyResultSetting = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
anomalyResultSetting.add(MAX_ENTITIES_PER_QUERY);
anomalyResultSetting.add(PAGE_SIZE);
anomalyResultSetting.add(MAX_RETRY_FOR_UNRESPONSIVE_NODE);
anomalyResultSetting.add(BACKOFF_MINUTES);
ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, anomalyResultSetting);
DiscoveryNode discoveryNode = new DiscoveryNode("node1", OpenSearchTestCase.buildNewFakeTransportAddress(), Collections.emptyMap(), DiscoveryNodeRole.BUILT_IN_ROLES, Version.CURRENT);
clusterService = ClusterServiceUtils.createClusterService(threadPool, discoveryNode, clusterSettings);
indexNameResolver = new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY));
adCircuitBreakerService = mock(ADCircuitBreakerService.class);
when(adCircuitBreakerService.isOpen()).thenReturn(false);
Map<String, ADStat<?>> statsMap = new HashMap<String, ADStat<?>>() {
{
put(StatNames.AD_EXECUTE_REQUEST_COUNT.getName(), new ADStat<>(false, new CounterSupplier()));
put(StatNames.AD_EXECUTE_FAIL_COUNT.getName(), new ADStat<>(false, new CounterSupplier()));
put(StatNames.AD_HC_EXECUTE_REQUEST_COUNT.getName(), new ADStat<>(false, new CounterSupplier()));
put(StatNames.AD_HC_EXECUTE_FAIL_COUNT.getName(), new ADStat<>(false, new CounterSupplier()));
}
};
adStats = new ADStats(statsMap);
adTaskManager = mock(ADTaskManager.class);
doAnswer(invocation -> {
ActionListener<Boolean> listener = invocation.getArgument(3);
listener.onResponse(true);
return null;
}).when(adTaskManager).initRealtimeTaskCacheAndCleanupStaleCache(anyString(), any(AnomalyDetector.class), any(TransportService.class), any(ActionListener.class));
action = new AnomalyResultTransportAction(
    new ActionFilters(Collections.emptySet()), transportService, settings, client, stateManager, featureQuery,
    normalModelManager, hashRing, clusterService, indexNameResolver, adCircuitBreakerService, adStats,
    mockThreadPool, xContentRegistry(), adTaskManager
);
provider = mock(CacheProvider.class);
entityCache = mock(EntityCache.class);
when(provider.get()).thenReturn(entityCache);
when(entityCache.get(any(), any())).thenReturn(MLUtil.randomModelState(new RandomModelStateConfig.Builder().fullModel(true).build()));
when(entityCache.selectUpdateCandidate(any(), any(), any())).thenReturn(Pair.of(new ArrayList<Entity>(), new ArrayList<Entity>()));
indexUtil = mock(AnomalyDetectionIndices.class);
resultWriteQueue = mock(ResultWriteWorker.class);
checkpointReadQueue = mock(CheckpointReadWorker.class);
coldEntityQueue = mock(ColdEntityWorker.class);
attrs1 = new HashMap<>();
attrs1.put(serviceField, app0);
attrs1.put(hostField, server1);
attrs2 = new HashMap<>();
attrs2.put(serviceField, app0);
attrs2.put(hostField, server2);
attrs3 = new HashMap<>();
attrs3.put(serviceField, app0);
attrs3.put(hostField, server3);
}
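Unlike AnomalyResultTests, which calls setupTestNodes, this setUp builds the ClusterService by hand: only the dynamic settings the action reads (MAX_ENTITIES_PER_QUERY, PAGE_SIZE, MAX_RETRY_FOR_UNRESPONSIVE_NODE, BACKOFF_MINUTES) are registered on top of the built-in cluster settings. A sketch of that wiring extracted into a helper follows; it is built only from calls already used above, the TestClusterServiceFactory name is hypothetical, and the import paths assume the OpenSearch 1.x layout this plugin builds against.

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import org.opensearch.Version;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.node.DiscoveryNodeRole;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
import org.opensearch.test.ClusterServiceUtils;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.threadpool.ThreadPool;

final class TestClusterServiceFactory {
    // Build a single-node ClusterService whose ClusterSettings registers the built-ins
    // plus whatever dynamic settings the test needs to read or update.
    static ClusterService create(ThreadPool threadPool, Setting<?>... extraSettings) {
        Set<Setting<?>> registered = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
        registered.addAll(Arrays.asList(extraSettings));
        DiscoveryNode node = new DiscoveryNode(
            "node1",
            OpenSearchTestCase.buildNewFakeTransportAddress(),
            Collections.emptyMap(),
            DiscoveryNodeRole.BUILT_IN_ROLES,
            Version.CURRENT
        );
        return ClusterServiceUtils.createClusterService(threadPool, node, new ClusterSettings(Settings.EMPTY, registered));
    }
}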