Use of org.opensearch.ad.common.exception.AnomalyDetectionException in project anomaly-detection by opensearch-project.
The class EntityColdStarterTests, method testColdStartException.
public void testColdStartException() throws InterruptedException {
    Queue<double[]> samples = MLUtil.createQueueSamples(1);
    EntityModel model = new EntityModel(entity, samples, null);
    modelState = new ModelState<>(model, modelId, detectorId, ModelType.ENTITY.getName(), clock, priority);

    // Make the mocked SearchFeatureDao fail with an AnomalyDetectionException
    // when the cold starter asks for the entity's earliest data time.
    doAnswer(invocation -> {
        ActionListener<Optional<Long>> listener = invocation.getArgument(2);
        listener.onFailure(new AnomalyDetectionException(detectorId, ""));
        return null;
    }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any());

    entityColdStarter.trainModel(entity, detectorId, modelState, listener);

    // The failure should be recorded as the detector's last error.
    assertTrue(stateManager.getLastDetectionError(detectorId) != null);
    checkSemaphoreRelease();
}
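The constructors and the countedInStats call exercised across the examples on this page suggest the following shape for AnomalyDetectionException. This is a minimal sketch inferred from those call sites, not the project's actual source.

// Minimal sketch of AnomalyDetectionException, inferred from the call sites
// on this page (message-only constructor, detector-id constructor, cause
// constructor, and the countedInStats(...) fluent setter). Field names and
// method bodies are assumptions, not the project's implementation.
public class AnomalyDetectionException extends RuntimeException {

    private String anomalyDetectorId;
    // whether this failure should be counted in failure stats
    private boolean countedInStats = true;

    public AnomalyDetectionException(String message) {
        super(message);
    }

    public AnomalyDetectionException(String anomalyDetectorId, String message) {
        super(message);
        this.anomalyDetectorId = anomalyDetectorId;
    }

    public AnomalyDetectionException(String anomalyDetectorId, String message, Throwable cause) {
        super(message, cause);
        this.anomalyDetectorId = anomalyDetectorId;
    }

    public String getAnomalyDetectorId() {
        return anomalyDetectorId;
    }

    // fluent setter, used as: new AnomalyDetectionException(msg).countedInStats(false)
    public AnomalyDetectionException countedInStats(boolean countedInStats) {
        this.countedInStats = countedInStats;
        return this;
    }

    public boolean isCountedInStats() {
        return countedInStats;
    }
}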
Use of org.opensearch.ad.common.exception.AnomalyDetectionException in project anomaly-detection by opensearch-project.
The class ADTaskManager, method parseEntityFromString.
/**
 * Parse an entity string value into an {@link Entity} instance.
 *
 * @param entityValue entity value
 * @param adTask AD task
 * @return Entity instance
 */
public Entity parseEntityFromString(String entityValue, ADTask adTask) {
    AnomalyDetector detector = adTask.getDetector();
    if (detector.isMultiCategoryDetector()) {
        // For multi-category detectors the entity is serialized as JSON,
        // so parse it back into an Entity.
        try {
            XContentParser parser = XContentType.JSON
                .xContent()
                .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, entityValue);
            ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser);
            return Entity.parse(parser);
        } catch (IOException e) {
            String error = "Failed to parse string into entity";
            logger.debug(error, e);
            throw new AnomalyDetectionException(error);
        }
    } else if (detector.isMultientityDetector()) {
        // Single category field: the raw string is the category value itself.
        return Entity.createSingleAttributeEntity(detector.getCategoryField().get(0), entityValue);
    }
    throw new IllegalArgumentException("Failed to parse entity for single-flow detector");
}
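As a usage sketch of the single-category branch, assuming a detector whose one category field is "host"; adTaskManager, adTask, and the field name are hypothetical stand-ins:

// Hypothetical usage: the detector has a single category field "host",
// so the raw string becomes a single-attribute Entity.
Entity entity = adTaskManager.parseEntityFromString("server_1", adTask);
// ...which is equivalent to:
Entity same = Entity.createSingleAttributeEntity("host", "server_1");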
Use of org.opensearch.ad.common.exception.AnomalyDetectionException in project anomaly-detection by opensearch-project.
The class ADBatchTaskRunner, method getDateRangeOfSourceData.
private void getDateRangeOfSourceData(ADTask adTask, BiConsumer<Long, Long> consumer, ActionListener<String> internalListener) {
    String taskId = adTask.getTaskId();
    // Query only min/max of the time field; size(0) skips document hits.
    SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder()
        .aggregation(AggregationBuilders.min(AGG_NAME_MIN_TIME).field(adTask.getDetector().getTimeField()))
        .aggregation(AggregationBuilders.max(AGG_NAME_MAX_TIME).field(adTask.getDetector().getTimeField()))
        .size(0);
    if (adTask.getEntity() != null && adTask.getEntity().getAttributes().size() > 0) {
        // For an entity task, restrict the date range to that entity's documents.
        BoolQueryBuilder query = new BoolQueryBuilder();
        adTask
            .getEntity()
            .getAttributes()
            .entrySet()
            .forEach(attribute -> query.filter(new TermQueryBuilder(attribute.getKey(), attribute.getValue())));
        searchSourceBuilder.query(query);
    }

    SearchRequest request = new SearchRequest()
        .indices(adTask.getDetector().getIndices().toArray(new String[0]))
        .source(searchSourceBuilder);
    client.search(request, ActionListener.wrap(r -> {
        InternalMin minAgg = r.getAggregations().get(AGG_NAME_MIN_TIME);
        InternalMax maxAgg = r.getAggregations().get(AGG_NAME_MAX_TIME);
        double minValue = minAgg.getValue();
        double maxValue = maxAgg.getValue();
        // If the time field does not exist or has no values, the min aggregation
        // returns positive infinity.
        if (minValue == Double.POSITIVE_INFINITY) {
            internalListener.onFailure(new ResourceNotFoundException(adTask.getDetectorId(), "There is no data in the time field"));
            return;
        }
        long interval = ((IntervalTimeConfiguration) adTask.getDetector().getDetectionInterval()).toDuration().toMillis();
        DetectionDateRange detectionDateRange = adTask.getDetectionDateRange();
        long dataStartTime = detectionDateRange.getStartTime().toEpochMilli();
        long dataEndTime = detectionDateRange.getEndTime().toEpochMilli();
        long minDate = (long) minValue;
        long maxDate = (long) maxValue;
        if (minDate >= dataEndTime || maxDate <= dataStartTime) {
            internalListener.onFailure(new ResourceNotFoundException(adTask.getDetectorId(), "There is no data in the detection date range"));
            return;
        }
        // Clamp the configured date range to the range actually covered by data.
        if (minDate > dataStartTime) {
            dataStartTime = minDate;
        }
        if (maxDate < dataEndTime) {
            dataEndTime = maxDate;
        }
        // Normalize start/end time to interval boundaries so they are consistent
        // with the feature data aggregation results.
        dataStartTime = dataStartTime - dataStartTime % interval;
        dataEndTime = dataEndTime - dataEndTime % interval;
        logger.debug("adjusted date range: start: {}, end: {}, taskId: {}", dataStartTime, dataEndTime, taskId);
        if ((dataEndTime - dataStartTime) < NUM_MIN_SAMPLES * interval) {
            internalListener.onFailure(new AnomalyDetectionException("There is not enough data to train model").countedInStats(false));
            return;
        }
        consumer.accept(dataStartTime, dataEndTime);
    }, e -> internalListener.onFailure(e)));
}
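The normalization step floors each timestamp down to an interval boundary. A quick worked sketch with hypothetical values:

// Worked example of the flooring arithmetic above, using hypothetical values:
// a 10-minute interval and timestamps that fall mid-interval.
long interval = 600_000L;                 // 10 minutes in milliseconds
long dataStartTime = 1_700_000_123_456L;
long dataEndTime = 1_700_003_999_999L;

dataStartTime = dataStartTime - dataStartTime % interval; // 1_699_999_800_000
dataEndTime = dataEndTime - dataEndTime % interval;       // 1_700_003_400_000

// Both values are now exact multiples of the interval, matching the bucket
// boundaries the feature aggregation produces.
assert dataStartTime % interval == 0 && dataEndTime % interval == 0;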
Use of org.opensearch.ad.common.exception.AnomalyDetectionException in project anomaly-detection by opensearch-project.
The class RCFPollingTransportAction, method doExecute.
@Override
protected void doExecute(Task task, RCFPollingRequest request, ActionListener<RCFPollingResponse> listener) {
    String adID = request.getAdID();
    String rcfModelID = SingleStreamModelIdMapper.getRcfModelId(adID, 0);
    Optional<DiscoveryNode> rcfNode = hashRing.getOwningNodeWithSameLocalAdVersionForRealtimeAD(rcfModelID);
    if (!rcfNode.isPresent()) {
        listener.onFailure(new AnomalyDetectionException(adID, NO_NODE_FOUND_MSG));
        return;
    }
    String rcfNodeId = rcfNode.get().getId();
    DiscoveryNode localNode = clusterService.localNode();
    if (localNode.getId().equals(rcfNodeId)) {
        // This node owns the model; answer locally.
        modelManager
            .getTotalUpdates(
                rcfModelID,
                adID,
                ActionListener.wrap(
                    totalUpdates -> listener.onResponse(new RCFPollingResponse(totalUpdates)),
                    e -> listener.onFailure(new AnomalyDetectionException(adID, FAIL_TO_GET_RCF_UPDATE_MSG, e))
                )
            );
    } else if (request.remoteAddress() == null) {
        // Redirect only if the request originates from the local host.
        // A request coming from a remote machine has already been redirected once,
        // and one redirection should be enough: we don't want a potential infinite
        // loop due to any bug, so we give up instead.
        LOG.info("Sending RCF polling request to {} for model {}", rcfNodeId, rcfModelID);
        try {
            transportService
                .sendRequest(rcfNode.get(), RCFPollingAction.NAME, request, option, new TransportResponseHandler<RCFPollingResponse>() {
                    @Override
                    public RCFPollingResponse read(StreamInput in) throws IOException {
                        return new RCFPollingResponse(in);
                    }

                    @Override
                    public void handleResponse(RCFPollingResponse response) {
                        listener.onResponse(response);
                    }

                    @Override
                    public void handleException(TransportException exp) {
                        listener.onFailure(exp);
                    }

                    @Override
                    public String executor() {
                        return ThreadPool.Names.SAME;
                    }
                });
        } catch (Exception e) {
            // String.format uses %s placeholders, not the {} of the logger API.
            LOG.error(String.format(Locale.ROOT, "Fail to poll RCF models for %s", adID), e);
            listener.onFailure(new AnomalyDetectionException(adID, FAIL_TO_GET_RCF_UPDATE_MSG, e));
        }
    } else {
        LOG.error("Fail to poll rcf for model {} due to an unexpected bug.", rcfModelID);
        listener.onFailure(new AnomalyDetectionException(adID, NO_NODE_FOUND_MSG));
    }
}
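The remoteAddress() check above implements a one-hop redirect guard. A minimal, self-contained sketch of the same pattern; Request and the three handler methods are hypothetical stand-ins, not the project's transport types:

// Sketch of the one-hop redirect guard: remoteAddress is null only for
// requests created on this node, so a forwarded request is never forwarded again.
class RedirectGuardSketch {

    static class Request {
        // null only when the request was created on this node
        String remoteAddress;
    }

    void route(Request request, String owningNodeId, String localNodeId) {
        if (localNodeId.equals(owningNodeId)) {
            handleLocally(request);            // this node owns the model
        } else if (request.remoteAddress == null) {
            forwardTo(owningNodeId, request);  // first and only hop
        } else {
            failFast(request);                 // already redirected once: give up
        }
    }

    void handleLocally(Request r) { /* answer from the local model */ }

    void forwardTo(String nodeId, Request r) { /* transport call to owner */ }

    void failFast(Request r) { /* report no owning node found */ }
}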
Use of org.opensearch.ad.common.exception.AnomalyDetectionException in project anomaly-detection by opensearch-project.
The class AnomalyResultTransportAction, method coldStart.
private void coldStart(AnomalyDetector detector) {
    String detectorId = detector.getDetectorId();

    // If the last cold start has not finished, don't trigger another one.
    if (stateManager.isColdStartRunning(detectorId)) {
        return;
    }

    final Releasable coldStartFinishingCallback = stateManager.markColdStartRunning(detectorId);

    ActionListener<Optional<double[][]>> listener = ActionListener.wrap(trainingData -> {
        if (trainingData.isPresent()) {
            double[][] dataPoints = trainingData.get();

            ActionListener<Void> trainModelListener = ActionListener.wrap(res -> {
                LOG.info("Succeeded in training {}", detectorId);
            }, exception -> {
                if (exception instanceof AnomalyDetectionException) {
                    // e.g., a partitioned model exceeds the memory limit
                    stateManager.setException(detectorId, exception);
                } else if (exception instanceof IllegalArgumentException) {
                    // IllegalArgumentException due to invalid training data
                    stateManager.setException(detectorId, new EndRunException(detectorId, "Invalid training data", exception, false));
                } else if (exception instanceof OpenSearchTimeoutException) {
                    stateManager.setException(detectorId, new InternalFailure(detectorId, "Time out while indexing cold start checkpoint", exception));
                } else {
                    stateManager.setException(detectorId, new EndRunException(detectorId, "Error while training model", exception, false));
                }
            });

            modelManager
                .trainModel(
                    detector,
                    dataPoints,
                    new ThreadedActionListener<>(LOG, threadPool, AnomalyDetectorPlugin.AD_THREAD_POOL_NAME, trainModelListener, false)
                );
        } else {
            stateManager.setException(detectorId, new EndRunException(detectorId, "Cannot get training data", false));
        }
    }, exception -> {
        if (exception instanceof OpenSearchTimeoutException) {
            stateManager.setException(detectorId, new InternalFailure(detectorId, "Time out while getting training data", exception));
        } else if (exception instanceof AnomalyDetectionException) {
            // e.g., an invalid search query
            stateManager.setException(detectorId, exception);
        } else {
            stateManager.setException(detectorId, new EndRunException(detectorId, "Error while cold start", exception, false));
        }
    });

    // Release the cold-start-running flag whether training succeeds or fails.
    final ActionListener<Optional<double[][]>> listenerWithReleaseCallback = ActionListener
        .runAfter(listener, coldStartFinishingCallback::close);

    threadPool
        .executor(AnomalyDetectorPlugin.AD_THREAD_POOL_NAME)
        .execute(
            () -> featureManager
                .getColdStartData(
                    detector,
                    new ThreadedActionListener<>(LOG, threadPool, AnomalyDetectorPlugin.AD_THREAD_POOL_NAME, listenerWithReleaseCallback, false)
                )
        );
}
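The runAfter wrapper is what guarantees the cold-start-running flag is released on every path. A minimal sketch of that pattern; the listener body here is hypothetical, while ActionListener.runAfter and Releasable are real OpenSearch types:

// Sketch of the release-on-completion pattern used above. runAfter executes
// the given callback after either onResponse or onFailure fires, so the
// Releasable is closed on both the success and failure paths.
Releasable flag = stateManager.markColdStartRunning(detectorId);
ActionListener<Void> inner = ActionListener.wrap(
    r -> LOG.info("cold start done"),
    e -> LOG.error("cold start failed", e)
);
ActionListener<Void> guarded = ActionListener.runAfter(inner, flag::close);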