Use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by LinkedIn: class DetectionJobScheduler, method runAdhocAnomalyFunction.
/**
 * Point of entry for rest endpoints calling adhoc anomaly functions
 * TODO: Not updating detection status in case of adhoc currently, reconsider
 * @param functionId id of the anomaly function to run
 * @param startTime start of the monitoring window, in epoch milliseconds
 * @param endTime end of the monitoring window, in epoch milliseconds
 * @return job execution id, or null if the function id is unknown or the detection run criteria are not met
 */
public Long runAdhocAnomalyFunction(Long functionId, Long startTime, Long endTime) {
  Long jobExecutionId = null;
  AnomalyFunctionDTO anomalyFunction = DAO_REGISTRY.getAnomalyFunctionDAO().findById(functionId);
  // findById returns null for unknown ids; without this guard the getCollection() call below NPEs
  if (anomalyFunction == null) {
    LOG.error("Function: {} not found, skipping adhoc anomaly detection", functionId);
    return null;
  }
  String dataset = anomalyFunction.getCollection();
  DatasetConfigDTO datasetConfig = null;
  try {
    datasetConfig = CACHE_REGISTRY.getDatasetConfigCache().get(dataset);
  } catch (ExecutionException e) {
    // best-effort: checkIfDetectionRunCriteriaMet is expected to handle a null datasetConfig
    LOG.error("Function: {} Dataset: {} Exception in fetching dataset config", functionId, dataset, e);
  }
  boolean pass = checkIfDetectionRunCriteriaMet(startTime, endTime, datasetConfig, anomalyFunction);
  if (pass) {
    jobExecutionId = runAnomalyFunctionOnRanges(anomalyFunction, Lists.newArrayList(startTime), Lists.newArrayList(endTime));
  } else {
    LOG.warn("Function: {} Dataset: {} Data incomplete for monitoring window {} ({}) to {} ({}), skipping anomaly detection", functionId, dataset, startTime, new DateTime(startTime), endTime, new DateTime(endTime));
    // TODO: Send email to owners/dev team
  }
  return jobExecutionId;
}
Use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by LinkedIn: class DetectionJobResource, method toggleRequiresCompletenessCheck.
/**
 * Sets the requiresCompletenessCheck flag on the anomaly function spec with the given id
 * and persists the change.
 * @param id id of the anomaly function spec to update
 * @param state new value for the requiresCompletenessCheck flag
 * @throws NullArgumentException if no function spec exists for the given id
 */
private void toggleRequiresCompletenessCheck(Long id, boolean state) {
  AnomalyFunctionDTO spec = anomalyFunctionSpecDAO.findById(id);
  if (spec == null) {
    throw new NullArgumentException("Function spec not found");
  }
  spec.setRequiresCompletenessCheck(state);
  anomalyFunctionSpecDAO.update(spec);
}
Use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by LinkedIn: class DetectionJobResource, method tuneAlertFilter.
/**
 * Tunes the alert filter of an anomaly function against its merged anomalies
 * in the given time range, and persists the tuned filter if it improves on the original.
 * @param id anomaly function id
 * @param startTime start time of anomalies to tune alert filter
 * @param endTime end time of anomalies to tune alert filter
 * @param autoTuneType the type of auto tune to invoke (default is "AUTOTUNE")
 * @return HTTP response of request: string of alert filter
 */
@POST
@Path("/autotune/filter/{functionId}")
public Response tuneAlertFilter(@PathParam("functionId") long id, @QueryParam("startTime") long startTime, @QueryParam("endTime") long endTime, @QueryParam("autoTuneType") String autoTuneType) {
  // get anomalies by function id, start time and end time
  AnomalyFunctionDTO anomalyFunctionSpec = DAO_REGISTRY.getAnomalyFunctionDAO().findById(id);
  // findById returns null for unknown ids; without this guard getAlertFilter() below NPEs
  if (anomalyFunctionSpec == null) {
    LOG.warn("Anomaly function {} not found, cannot tune alert filter", id);
    return Response.status(Response.Status.BAD_REQUEST).entity("Anomaly function not found: " + id).build();
  }
  AnomalyFunctionManager anomalyFunctionDAO = DAO_REGISTRY.getAnomalyFunctionDAO();
  MergedAnomalyResultManager anomalyMergedResultDAO = DAO_REGISTRY.getMergedAnomalyResultDAO();
  List<MergedAnomalyResultDTO> anomalyResultDTOS = anomalyMergedResultDAO.findByStartTimeInRangeAndFunctionId(startTime, endTime, id);
  // create alert filter and evaluator
  AlertFilter alertFilter = alertFilterFactory.fromSpec(anomalyFunctionSpec.getAlertFilter());
  AlertFilterEvaluationUtil evaluator = new AlertFilterEvaluationUtil(alertFilter);
  // create alert filter auto tune
  AlertFilterAutoTune alertFilterAutotune = alertFilterAutotuneFactory.fromSpec(autoTuneType);
  LOG.info("initiated alertFilterAutoTune of Type {}", alertFilterAutotune.getClass().toString());
  try {
    //evaluate current alert filter (calculate current precision and recall)
    evaluator.updatePrecisionAndRecall(anomalyResultDTOS);
    LOG.info("AlertFilter of Type {}, has been evaluated with precision: {}, recall: {}", alertFilter.getClass().toString(), evaluator.getPrecision(), evaluator.getRecall());
    // get tuned alert filter
    Map<String, String> tunedAlertFilter = alertFilterAutotune.tuneAlertFilter(anomalyResultDTOS, evaluator.getPrecision(), evaluator.getRecall());
    LOG.info("tuned AlertFilter");
    // persist the tuned filter only when auto tune improved on the original model;
    // otherwise do nothing and return alert filter
    if (alertFilterAutotune.isUpdated()) {
      anomalyFunctionSpec.setAlertFilter(tunedAlertFilter);
      anomalyFunctionDAO.update(anomalyFunctionSpec);
      LOG.info("Model has been updated");
    } else {
      LOG.info("Model hasn't been updated because tuned model cannot beat original model");
    }
  } catch (Exception e) {
    // pass the exception as the last argument so SLF4J records the full stack trace,
    // not just the message (the original dropped the cause)
    LOG.warn("AutoTune throws exception due to: {}", e.getMessage(), e);
  }
  return Response.ok(alertFilterAutotune.isUpdated()).build();
}
Use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by LinkedIn: class TaskGenerator, method createDetectionTasks.
/**
 * Creates the detection task(s) for a detection job over the given monitoring windows.
 * @param detectionJobContext context carrying the anomaly function spec and job execution id
 * @param monitoringWindowStartTimes start times of the monitoring windows, one per window
 * @param monitoringWindowEndTimes end times of the monitoring windows, one per window
 * @return list containing a single detection task for the job
 * @throws Exception declared for interface compatibility
 */
public List<DetectionTaskInfo> createDetectionTasks(DetectionJobContext detectionJobContext, List<DateTime> monitoringWindowStartTimes, List<DateTime> monitoringWindowEndTimes) throws Exception {
  List<DetectionTaskInfo> tasks = new ArrayList<>();
  AnomalyFunctionDTO anomalyFunctionSpec = detectionJobContext.getAnomalyFunctionSpec();
  long jobExecutionId = detectionJobContext.getJobExecutionId();
  // generate tasks; the two original branches were identical except for the last
  // constructor argument, so collapse them: blank explore dimensions map to null
  String exploreDimensionsString = anomalyFunctionSpec.getExploreDimensions();
  String exploreDimensions = StringUtils.isBlank(exploreDimensionsString) ? null : exploreDimensionsString;
  tasks.add(new DetectionTaskInfo(jobExecutionId, monitoringWindowStartTimes, monitoringWindowEndTimes, anomalyFunctionSpec, exploreDimensions));
  return tasks;
}
Use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by LinkedIn: class OnboardResource, method deleteExistingAnomalies.
/**
 * Delete raw or merged anomalies whose start time is located in the given time ranges, except
 * the following two cases:
 *
 * 1. If a raw anomaly belongs to a merged anomaly whose start time is not located in the given
 * time ranges, then the raw anomaly will not be deleted.
 *
 * 2. If a raw anomaly belongs to a merged anomaly whose start time is located in the given
 * time ranges, then it is deleted regardless its start time.
 *
 * If monitoringWindowStartTime is not given, then start time is set to 0.
 * If monitoringWindowEndTime is not given, then end time is set to Long.MAX_VALUE.
 * @param id id of the anomaly function whose anomalies are deleted
 * @param monitoringWindowStartTime The start time of the monitoring window (in milli-second)
 * @param monitoringWindowEndTime The end time of the monitoring window (in milli-second)
 * @return map of deletion counts keyed by "mergedAnomaliesDeleted" and "rawAnomaliesDeleted",
 *         or null if the anomaly function is not found
 */
@POST
@Path("function/{id}/deleteExistingAnomalies")
public Map<String, Integer> deleteExistingAnomalies(@PathParam("id") String id, @QueryParam("start") long monitoringWindowStartTime, @QueryParam("end") long monitoringWindowEndTime) {
  // parseLong avoids the needless boxing of Long.valueOf for a primitive target
  long functionId = Long.parseLong(id);
  AnomalyFunctionDTO anomalyFunction = anomalyFunctionDAO.findById(functionId);
  if (anomalyFunction == null) {
    LOG.info("Anomaly functionId {} is not found", functionId);
    return null;
  }
  HashMap<String, Integer> returnInfo = new HashMap<>();
  // Find merged anomaly result and delete them first
  LOG.info("Deleting merged anomaly results in the time range: {} -- {}", new DateTime(monitoringWindowStartTime), new DateTime(monitoringWindowEndTime));
  LOG.info("Beginning cleanup merged anomaly results of functionId {} collection {} metric {}", functionId, anomalyFunction.getCollection(), anomalyFunction.getMetric());
  int mergedAnomaliesDeleted = 0;
  List<MergedAnomalyResultDTO> mergedResults = mergedAnomalyResultDAO.findByStartTimeInRangeAndFunctionId(monitoringWindowStartTime, monitoringWindowEndTime, functionId);
  if (CollectionUtils.isNotEmpty(mergedResults)) {
    mergedAnomaliesDeleted = deleteMergedResults(mergedResults);
  }
  returnInfo.put("mergedAnomaliesDeleted", mergedAnomaliesDeleted);
  LOG.info("{} merged anomaly results have been deleted", mergedAnomaliesDeleted);
  // Find raw anomaly results and delete them
  LOG.info("Deleting raw anomaly results in the time range: {} -- {}", new DateTime(monitoringWindowStartTime), new DateTime(monitoringWindowEndTime));
  // fixed copy-paste bug: this log line previously said "merged" in the raw-anomaly section
  LOG.info("Beginning cleanup raw anomaly results of functionId {} collection {} metric {}", functionId, anomalyFunction.getCollection(), anomalyFunction.getMetric());
  int rawAnomaliesDeleted = 0;
  List<RawAnomalyResultDTO> rawResults = rawAnomalyResultDAO.findAllByTimeAndFunctionId(monitoringWindowStartTime, monitoringWindowEndTime, functionId);
  if (CollectionUtils.isNotEmpty(rawResults)) {
    rawAnomaliesDeleted = deleteRawResults(rawResults);
  }
  returnInfo.put("rawAnomaliesDeleted", rawAnomaliesDeleted);
  LOG.info("{} raw anomaly results have been deleted", rawAnomaliesDeleted);
  return returnInfo;
}
Aggregations