use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by linkedin.
the class DetectionJobResource method evaluateAlertFilterByFunctionId.
/**
 * The endpoint to evaluate an alert filter
 * @param id function ID
 * @param startTime start time of the merged anomalies
 * @param endTime end time of the merged anomalies
 * @return feedback summary, precision, and recall as a JSON object
 * @throws Exception when the data has no positive label or the model has no positive prediction
*/
@POST
@Path("/eval/filter/{functionId}")
public Response evaluateAlertFilterByFunctionId(@PathParam("functionId") long id,
    @QueryParam("startTime") long startTime, @QueryParam("endTime") long endTime) {
  // get anomalies by function id, start time, and end time
  AnomalyFunctionDTO anomalyFunctionSpec = DAO_REGISTRY.getAnomalyFunctionDAO().findById(id);
  MergedAnomalyResultManager anomalyMergedResultDAO = DAO_REGISTRY.getMergedAnomalyResultDAO();
  List<MergedAnomalyResultDTO> anomalyResultDTOS =
      anomalyMergedResultDAO.findByStartTimeInRangeAndFunctionId(startTime, endTime, id);
  // create the alert filter and evaluator
  AlertFilter alertFilter = alertFilterFactory.fromSpec(anomalyFunctionSpec.getAlertFilter());
  AlertFilterEvaluationUtil evaluator = new AlertFilterEvaluationUtil(alertFilter);
  try {
    // evaluate the current alert filter (calculate current precision and recall)
    evaluator.updatePrecisionAndRecall(anomalyResultDTOS);
    LOG.info("AlertFilter of type {} has been evaluated with precision: {}, recall: {}",
        alertFilter.getClass().toString(), evaluator.getPrecision(), evaluator.getRecall());
  } catch (Exception e) {
    LOG.warn("Updating precision and recall failed because: {}", e.getMessage());
  }
  // get the anomaly summary from the merged anomaly results
  evaluator.updateFeedbackSummary(anomalyResultDTOS);
  return Response.ok(evaluator.toProperties().toString()).build();
}
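For context, here is a minimal, hedged sketch of calling this endpoint with the standard JAX-RS 2.0 client API; the base URI, resource prefix, function id, and timestamps are illustrative assumptions, not taken from the source.
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;

public class EvaluateAlertFilterClient {
  public static void main(String[] args) {
    // All endpoint coordinates below are assumptions; adjust to your deployment.
    Client client = ClientBuilder.newClient();
    String summary = client.target("http://localhost:8080/detection-job") // assumed base URI
        .path("eval/filter/42")                  // hypothetical function id 42
        .queryParam("startTime", 1464912000000L) // epoch millis of the window start
        .queryParam("endTime", 1465516800000L)   // epoch millis of the window end
        .request()
        .post(Entity.text(""), String.class);    // @POST endpoint with no request body
    System.out.println(summary);                 // feedback summary, precision, and recall
    client.close();
  }
}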
use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by linkedin.
the class DetectionJobResource method generateAnomaliesInRange.
/**
 * Breaks down the given range into consecutive monitoring windows as per the function definition,
 * and regenerates anomalies for each window separately.
 *
 * Because anomaly result regeneration is a heavy job, the work is moved from the dashboard to the worker.
 * @param id anomaly function id
 * @param startTimeIso the start time of the monitoring window (in ISO format), e.g. 2016-05-23T00:00:00Z
 * @param endTimeIso the end time of the monitoring window (in ISO format)
 * @param isForceBackfill false to resume the backfill from where it last left off; true to force regeneration
 * @return HTTP response of this request
 * @throws Exception
*/
@POST
@Path("/{id}/generateAnomaliesInRange")
public Response generateAnomaliesInRange(@PathParam("id") String id, @QueryParam("start") String startTimeIso,
    @QueryParam("end") String endTimeIso, @QueryParam("force") @DefaultValue("false") String isForceBackfill)
    throws Exception {
  long functionId = Long.valueOf(id);
  boolean forceBackfill = Boolean.valueOf(isForceBackfill);
  AnomalyFunctionDTO anomalyFunction = anomalyFunctionDAO.findById(functionId);
  if (anomalyFunction == null) {
    return Response.noContent().build();
  }
  // Check if the timestamps are available
  if (startTimeIso == null || startTimeIso.isEmpty()) {
    throw new IllegalArgumentException(String.format("[functionId %s] Monitoring start time is not found", id));
  }
  if (endTimeIso == null || endTimeIso.isEmpty()) {
    throw new IllegalArgumentException(String.format("[functionId %s] Monitoring end time is not found", id));
  }
  DateTime startTime = ISODateTimeFormat.dateTimeParser().parseDateTime(startTimeIso);
  DateTime endTime = ISODateTimeFormat.dateTimeParser().parseDateTime(endTimeIso);
  if (startTime.isAfter(endTime)) {
    throw new IllegalArgumentException(
        String.format("[functionId %s] Monitoring start time is after monitoring end time", id));
  }
  if (endTime.isAfterNow()) {
    throw new IllegalArgumentException(
        String.format("[functionId %s] Monitoring end time %s should not be in the future", id, endTime.toString()));
  }
  // Check if the merged anomaly results have been cleaned up before regeneration
  List<MergedAnomalyResultDTO> mergedResults =
      mergedAnomalyResultDAO.findByStartTimeInRangeAndFunctionId(startTime.getMillis(), endTime.getMillis(), functionId);
  if (CollectionUtils.isNotEmpty(mergedResults) && !forceBackfill) {
    throw new IllegalArgumentException(
        String.format("[functionId %s] Merged anomaly results should be cleaned up before regeneration", id));
  }
  // Check if the raw anomaly results have been cleaned up before regeneration
  List<RawAnomalyResultDTO> rawResults =
      rawAnomalyResultDAO.findAllByTimeAndFunctionId(startTime.getMillis(), endTime.getMillis(), functionId);
  if (CollectionUtils.isNotEmpty(rawResults) && !forceBackfill) {
    throw new IllegalArgumentException(
        String.format("[functionId %s] Raw anomaly results should be cleaned up before regeneration", id));
  }
  // Check if the anomaly function is active
  if (!anomalyFunction.getIsActive()) {
    throw new IllegalArgumentException(String.format("Skipping deactivated function %s", id));
  }
  // Run the backfill asynchronously so the HTTP request returns immediately
  DateTime innerStartTime = startTime;
  DateTime innerEndTime = endTime;
  new Thread(new Runnable() {
    @Override
    public void run() {
      detectionJobScheduler.runBackfill(functionId, innerStartTime, innerEndTime, forceBackfill);
    }
  }).start();
  return Response.ok().build();
}
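The actual decomposition into windows happens inside detectionJobScheduler.runBackfill, which is not shown here. As a hedged illustration of what "consecutive monitoring windows as per the function definition" could mean, the sketch below walks the range in steps derived from the function's windowSize/windowUnit fields; the splitting logic itself is an assumption.
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.joda.time.DateTime;

// Hedged sketch, not the scheduler's actual code: split [start, end) into
// consecutive monitoring windows whose length comes from the function definition.
private static List<long[]> toMonitoringWindows(AnomalyFunctionDTO function, DateTime start, DateTime end) {
  long windowMillis = TimeUnit.MILLISECONDS.convert(function.getWindowSize(), function.getWindowUnit());
  List<long[]> windows = new ArrayList<>();
  for (long cursor = start.getMillis(); cursor < end.getMillis(); cursor += windowMillis) {
    // each {windowStart, windowEnd} pair would be regenerated as one detection job
    windows.add(new long[] { cursor, Math.min(cursor + windowMillis, end.getMillis()) });
  }
  return windows;
}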
use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by linkedin.
the class DetectionJobResource method toggleActive.
private void toggleActive(Long id, boolean state) {
  AnomalyFunctionDTO anomalyFunctionSpec = anomalyFunctionSpecDAO.findById(id);
  if (anomalyFunctionSpec == null) {
    throw new NullArgumentException("Function spec not found");
  }
  anomalyFunctionSpec.setIsActive(state);
  anomalyFunctionSpecDAO.update(anomalyFunctionSpec);
}
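Since toggleActive is private, callers typically reach it through small enable/disable endpoints on the same resource. A hedged sketch of such wrappers follows; the paths and method names are assumptions, not confirmed from the source.
// Hypothetical endpoints wrapping toggleActive (paths are assumptions).
@POST
@Path("/{id}/enable")
public Response enableFunction(@PathParam("id") Long id) {
  toggleActive(id, true);
  return Response.ok().build();
}

@POST
@Path("/{id}/disable")
public Response disableFunction(@PathParam("id") Long id) {
  toggleActive(id, false);
  return Response.ok().build();
}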
use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by linkedin.
the class DetectionJobRunner method run.
/**
 * Creates an anomaly detection job and its tasks, based on the information in the context
 * @param detectionJobContext the context carrying the function id, job name, and monitoring window start/end times
 * @return the id of the created job execution
*/
public Long run(DetectionJobContext detectionJobContext) {
  long functionId = detectionJobContext.getAnomalyFunctionId();
  AnomalyFunctionDTO anomalyFunction = DAO_REGISTRY.getAnomalyFunctionDAO().findById(functionId);
  List<DateTime> monitoringWindowStartTimes = new ArrayList<>();
  List<DateTime> monitoringWindowEndTimes = new ArrayList<>();
  List<Long> startTimes = detectionJobContext.getStartTimes();
  List<Long> endTimes = detectionJobContext.getEndTimes();
  for (Long startTime : startTimes) {
    DateTime monitoringWindowStartTime = new DateTime(startTime);
    monitoringWindowStartTime = alignTimestampsToDataTimezone(monitoringWindowStartTime, anomalyFunction.getCollection());
    monitoringWindowStartTimes.add(monitoringWindowStartTime);
  }
  for (Long endTime : endTimes) {
    DateTime monitoringWindowEndTime = new DateTime(endTime);
    monitoringWindowEndTime = alignTimestampsToDataTimezone(monitoringWindowEndTime, anomalyFunction.getCollection());
    monitoringWindowEndTimes.add(monitoringWindowEndTime);
  }
  // write to anomaly_jobs
  Long jobExecutionId = createJob(detectionJobContext.getJobName(), monitoringWindowStartTimes.get(0), monitoringWindowEndTimes.get(0));
  detectionJobContext.setJobExecutionId(jobExecutionId);
  // write to anomaly_tasks
  createTasks(detectionJobContext, monitoringWindowStartTimes, monitoringWindowEndTimes);
  return jobExecutionId;
}
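alignTimestampsToDataTimezone is not shown in this excerpt. A plausible, hedged reading is that it re-expresses each instant in the dataset's timezone so that window boundaries line up with the data; the DAO lookup and getter names below are assumptions.
// Hedged sketch of what alignTimestampsToDataTimezone might do.
// DatasetConfigDTO, findByDataset, and getTimezone are assumed names.
private DateTime alignTimestampsToDataTimezone(DateTime inputDateTime, String collection) {
  DatasetConfigDTO datasetConfig = DAO_REGISTRY.getDatasetConfigDAO().findByDataset(collection);
  DateTimeZone dataTimeZone = DateTimeZone.forID(datasetConfig.getTimezone());
  return inputDateTime.withZone(dataTimeZone); // same instant, expressed in the data's zone
}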
use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by linkedin.
the class AlertFilterHelper method applyFiltrationRule.
/**
 * Each function has a filtration rule which lets the alert module decide whether an anomaly
 * should be included in the alert email. This method applies the respective filtration rule
 * to a list of anomalies.
 *
 * @param results the merged anomaly results to filter
 * @param alertFilterFactory the factory that instantiates each function's alert filter
 * @return the anomalies that qualify for alerting
*/
public static List<MergedAnomalyResultDTO> applyFiltrationRule(List<MergedAnomalyResultDTO> results,
    AlertFilterFactory alertFilterFactory) {
  if (results.isEmpty()) {
    return results;
  }
  // Function ID to alert filter
  Map<Long, AlertFilter> functionAlertFilter = new HashMap<>();
  List<MergedAnomalyResultDTO> qualifiedAnomalies = new ArrayList<>();
  for (MergedAnomalyResultDTO result : results) {
    // Lazily initialize the alert filter for anomalies of the same anomaly function
    AnomalyFunctionDTO anomalyFunctionSpec = result.getFunction();
    long functionId = anomalyFunctionSpec.getId();
    AlertFilter alertFilter = functionAlertFilter.get(functionId);
    if (alertFilter == null) {
      // Get the filtration rule from the anomaly function configuration
      alertFilter = alertFilterFactory.fromSpec(anomalyFunctionSpec.getAlertFilter());
      functionAlertFilter.put(functionId, alertFilter);
      LOG.info("Using filter {} for anomaly function {} (dataset: {}, topic metric: {})", alertFilter, functionId,
          anomalyFunctionSpec.getCollection(), anomalyFunctionSpec.getTopicMetric());
    }
    if (alertFilter.isQualified(result)) {
      qualifiedAnomalies.add(result);
    }
  }
  LOG.info("Found [{}] anomalies qualified to alert after applying filtration rule on [{}] anomalies",
      qualifiedAnomalies.size(), results.size());
  return qualifiedAnomalies;
}
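A hedged usage sketch of this helper from an alerting job: fetch the merged anomalies for a window with the DAO call seen earlier, then keep only the qualified ones. Variable names are illustrative.
// Illustrative usage: filter anomalies before composing an alert email.
List<MergedAnomalyResultDTO> candidates =
    mergedAnomalyResultDAO.findByStartTimeInRangeAndFunctionId(windowStart, windowEnd, functionId);
List<MergedAnomalyResultDTO> toAlert = AlertFilterHelper.applyFiltrationRule(candidates, alertFilterFactory);
LOG.info("Alerting on {} of {} anomalies", toAlert.size(), candidates.size());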