Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn.
Class AnomalyResource, method getExternalDashboardUrlForMergedAnomaly.
@GET
@Path("/external-dashboard-url/{mergedAnomalyId}")
public String getExternalDashboardUrlForMergedAnomaly(@NotNull @PathParam("mergedAnomalyId") Long mergedAnomalyId) throws Exception {
  MergedAnomalyResultDTO mergedAnomalyResultDTO = mergedAnomalyResultDAO.findById(mergedAnomalyId);
  // Guard against unknown ids: findById returns null rather than throwing.
  if (mergedAnomalyResultDTO == null) {
    throw new IllegalArgumentException("Anomaly not found with id " + mergedAnomalyId);
  }
  String metric = mergedAnomalyResultDTO.getMetric();
  String dataset = mergedAnomalyResultDTO.getCollection();
  Long startTime = mergedAnomalyResultDTO.getStartTime();
  Long endTime = mergedAnomalyResultDTO.getEndTime();
  MetricConfigDTO metricConfigDTO = metricConfigDAO.findByMetricAndDataset(metric, dataset);
  // No metric config or no external links configured: return an empty JSON object
  // instead of NPE-ing on getExtSourceLinkInfo()/entrySet().
  if (metricConfigDTO == null || metricConfigDTO.getExtSourceLinkInfo() == null) {
    return new JSONObject().toString();
  }
  // Substitution context: the anomaly window bounds are injected into each URL template.
  Map<String, String> context = new HashMap<>();
  context.put(MetricConfigBean.URL_TEMPLATE_START_TIME, String.valueOf(startTime));
  context.put(MetricConfigBean.URL_TEMPLATE_END_TIME, String.valueOf(endTime));
  StrSubstitutor strSubstitutor = new StrSubstitutor(context);
  Map<String, String> urlTemplates = metricConfigDTO.getExtSourceLinkInfo();
  // Replace each template with its resolved URL in place. Entry.setValue is the
  // idiomatic (and safe) way to update values while iterating, instead of map.put.
  for (Entry<String, String> entry : urlTemplates.entrySet()) {
    entry.setValue(strSubstitutor.replace(entry.getValue()));
  }
  return new JSONObject(urlTemplates).toString();
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn.
Class AnomalyResource, method getAnomalyMergedResultTimeSeries.
/**
 * Returns the time series for the given anomaly.
 *
 * If viewWindowStartTime and/or viewWindowEndTime is not given, then a window is padded automatically. The padded
 * windows is half of the anomaly window size. For instance, if the anomaly lasts for 4 hours, then the pad window
 * size is 2 hours. The max padding size is 1 day.
 *
 * @param anomalyResultId the id of the given anomaly
 * @param aggTimeGranularity the aggregation granularity used to bucket the returned series
 * @param viewWindowStartTime start time of the time series, inclusive
 * @param viewWindowEndTime end time of the time series, inclusive
 * @return the time series of the given anomaly
 * @throws Exception when it fails to retrieve collection, i.e., dataset, information
 */
@GET
@Path("/anomaly-merged-result/timeseries/{anomaly_merged_result_id}")
public AnomalyTimelinesView getAnomalyMergedResultTimeSeries(@NotNull @PathParam("anomaly_merged_result_id") long anomalyResultId, @NotNull @QueryParam("aggTimeGranularity") String aggTimeGranularity, @QueryParam("start") long viewWindowStartTime, @QueryParam("end") long viewWindowEndTime) throws Exception {
  // Raw (child) anomalies are not needed to render the time series view.
  boolean loadRawAnomalies = false;
  MergedAnomalyResultDTO anomalyResult = anomalyMergedResultDAO.findById(anomalyResultId, loadRawAnomalies);
  DimensionMap dimensions = anomalyResult.getDimensions();
  AnomalyFunctionDTO anomalyFunctionSpec = anomalyResult.getFunction();
  BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);

  // By default, the padding window size is half of the anomaly window, capped at one day.
  if (viewWindowStartTime == 0 || viewWindowEndTime == 0) {
    long anomalyWindowStartTime = anomalyResult.getStartTime();
    long anomalyWindowEndTime = anomalyResult.getEndTime();
    long bucketMillis = TimeUnit.MILLISECONDS.convert(anomalyFunctionSpec.getBucketSize(), anomalyFunctionSpec.getBucketUnit());
    long bucketCount = (anomalyWindowEndTime - anomalyWindowStartTime) / bucketMillis;
    // At least one bucket of padding even for sub-bucket anomalies.
    long paddingMillis = Math.max(1, (bucketCount / 2)) * bucketMillis;
    if (paddingMillis > TimeUnit.DAYS.toMillis(1)) {
      paddingMillis = TimeUnit.DAYS.toMillis(1);
    }
    if (viewWindowStartTime == 0) {
      viewWindowStartTime = anomalyWindowStartTime - paddingMillis;
    }
    if (viewWindowEndTime == 0) {
      viewWindowEndTime = anomalyWindowEndTime + paddingMillis;
    }
  }

  TimeGranularity timeGranularity = Utils.getAggregationTimeGranularity(aggTimeGranularity, anomalyFunctionSpec.getCollection());
  long bucketMillis = timeGranularity.toMillis();
  // ThirdEye backend is end time exclusive, so one more bucket is appended to make end time inclusive for frontend.
  viewWindowEndTime += bucketMillis;
  long maxDataTime = collectionMaxDataTimeCache.get(anomalyResult.getCollection());
  if (viewWindowEndTime > maxDataTime) {
    viewWindowEndTime = (anomalyResult.getEndTime() > maxDataTime) ? anomalyResult.getEndTime() : maxDataTime;
  }

  AnomalyDetectionInputContext adInputContext = TimeBasedAnomalyMerger.fetchDataByDimension(viewWindowStartTime, viewWindowEndTime, dimensions, anomalyFunction, anomalyMergedResultDAO, overrideConfigDAO, false);
  MetricTimeSeries metricTimeSeries = adInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensions);
  if (metricTimeSeries == null) {
    // No data for the requested dimension combination; return an empty view.
    return new AnomalyTimelinesView();
  }

  // Transform time series with scaling factor
  List<ScalingFactor> scalingFactors = adInputContext.getScalingFactors();
  if (CollectionUtils.isNotEmpty(scalingFactors)) {
    Properties properties = anomalyFunction.getProperties();
    MetricTransfer.rescaleMetric(metricTimeSeries, viewWindowStartTime, scalingFactors, anomalyFunctionSpec.getTopicMetric(), properties);
  }

  List<MergedAnomalyResultDTO> knownAnomalies = adInputContext.getKnownMergedAnomalies().get(dimensions);
  // Known anomalies are ignored (the null parameter) because 1. we can reduce users' waiting time and 2. presentation
  // data does not need to be as accurate as the one used for detecting anomalies
  AnomalyTimelinesView anomalyTimelinesView = anomalyFunction.getTimeSeriesView(metricTimeSeries, bucketMillis, anomalyFunctionSpec.getTopicMetric(), viewWindowStartTime, viewWindowEndTime, knownAnomalies);

  // Generate summary for frontend
  List<TimeBucket> timeBuckets = anomalyTimelinesView.getTimeBuckets();
  if (timeBuckets.size() > 0) {
    TimeBucket firstBucket = timeBuckets.get(0);
    anomalyTimelinesView.addSummary("currentStart", Long.toString(firstBucket.getCurrentStart()));
    anomalyTimelinesView.addSummary("baselineStart", Long.toString(firstBucket.getBaselineStart()));
    TimeBucket lastBucket = timeBuckets.get(timeBuckets.size() - 1);
    // BUG FIX: "currentEnd" was previously populated with lastBucket.getCurrentStart(),
    // reporting the last bucket's start instead of its end.
    anomalyTimelinesView.addSummary("currentEnd", Long.toString(lastBucket.getCurrentEnd()));
    anomalyTimelinesView.addSummary("baselineEnd", Long.toString(lastBucket.getBaselineEnd()));
  }
  return anomalyTimelinesView;
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn.
Class EmailResource, method generateAndSendAlertForDatasets.
// TODO : add end points for AlertConfig
@GET
@Path("generate/datasets/{startTime}/{endTime}")
public Response generateAndSendAlertForDatasets(@PathParam("startTime") Long startTime, @PathParam("endTime") Long endTime, @QueryParam("datasets") String datasets, @QueryParam("from") String fromAddr, @QueryParam("to") String toAddr, @QueryParam("subject") String subject, @QueryParam("includeSentAnomaliesOnly") boolean includeSentAnomaliesOnly, @QueryParam("teHost") String teHost, @QueryParam("smtpHost") String smtpHost, @QueryParam("smtpPort") int smtpPort) {
  // Validate the request up front; each failed precondition aborts with a 500.
  if (Strings.isNullOrEmpty(datasets)) {
    throw new WebApplicationException("datasets null or empty : " + datasets);
  }
  String[] datasetNames = datasets.split(",");
  if (datasetNames.length == 0) {
    throw new WebApplicationException("Datasets empty : " + datasets);
  }
  if (Strings.isNullOrEmpty(toAddr)) {
    throw new WebApplicationException("Empty : list of recipients" + toAddr);
  }
  if (Strings.isNullOrEmpty(teHost)) {
    throw new WebApplicationException("Invalid TE host" + teHost);
  }
  if (Strings.isNullOrEmpty(smtpHost)) {
    throw new WebApplicationException("invalid smtp host" + smtpHost);
  }

  // Fetch all anomalies recorded for the requested datasets inside [startTime, endTime].
  AnomalyReportGenerator reportGenerator = AnomalyReportGenerator.getInstance();
  List<MergedAnomalyResultDTO> anomalies = reportGenerator.getAnomaliesForDatasets(Arrays.asList(datasetNames), startTime, endTime);

  // Assemble a one-off configuration from the request parameters plus server defaults.
  SmtpConfiguration smtpConfig = new SmtpConfiguration();
  smtpConfig.setSmtpHost(smtpHost);
  smtpConfig.setSmtpPort(smtpPort);
  ThirdEyeAnomalyConfiguration reportConfig = new ThirdEyeAnomalyConfiguration();
  reportConfig.setSmtpConfiguration(smtpConfig);
  reportConfig.setDashboardHost(teHost);
  reportConfig.setPhantomJsPath(thirdeyeConfiguration.getPhantomJsPath());
  reportConfig.setRootDir(thirdeyeConfiguration.getRootDir());

  // Fall back to a default subject line when the caller did not supply one.
  String emailSubject;
  if (Strings.isNullOrEmpty(subject)) {
    emailSubject = "Thirdeye Anomaly Report";
  } else {
    emailSubject = subject;
  }
  reportGenerator.buildReport(startTime, endTime, anomalies, emailSubject, reportConfig, includeSentAnomaliesOnly, toAddr, fromAddr, "Thirdeye Anomaly Report", true);
  return Response.ok().build();
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn.
Class AnomaliesResource, method getAnomalyDataCompareResults.
@GET
@Path("/{anomalyId}")
public AnomalyDataCompare.Response getAnomalyDataCompareResults(@PathParam("anomalyId") Long anomalyId) {
  MergedAnomalyResultDTO anomaly = mergedAnomalyResultDAO.findById(anomalyId);
  if (anomaly == null) {
    LOG.error("Anomaly not found with id " + anomalyId);
    throw new IllegalArgumentException("Anomaly not found with id " + anomalyId);
  }

  // The response window is the anomaly's own [start, end] interval.
  AnomalyDataCompare.Response response = new AnomalyDataCompare.Response();
  response.setCurrentStart(anomaly.getStartTime());
  response.setCurrenEnd(anomaly.getEndTime());
  try {
    DatasetConfigDTO dataset = datasetConfigDAO.findByDataset(anomaly.getCollection());
    TimeGranularity granularity = new TimeGranularity(dataset.getTimeDuration(), dataset.getTimeUnit());

    // Lets compute currentTimeRange
    Pair<Long, Long> currentWindow = new Pair<>(anomaly.getStartTime(), anomaly.getEndTime());
    MetricTimeSeries currentSeries = TimeSeriesUtil.getTimeSeriesByDimension(anomaly.getFunction(), Arrays.asList(currentWindow), anomaly.getDimensions(), granularity, false);
    double currentVal = getTotalFromTimeSeries(currentSeries, dataset.isAdditive());
    response.setCurrentVal(currentVal);

    // For every compare mode (wow, wo2w, ...) shift the window back and compare totals.
    for (AlertConfigBean.COMPARE_MODE compareMode : AlertConfigBean.COMPARE_MODE.values()) {
      long baselineOffset = EmailHelper.getBaselineOffset(compareMode);
      Pair<Long, Long> baselineWindow = new Pair<>(anomaly.getStartTime() - baselineOffset, anomaly.getEndTime() - baselineOffset);
      MetricTimeSeries baselineSeries = TimeSeriesUtil.getTimeSeriesByDimension(anomaly.getFunction(), Arrays.asList(baselineWindow), anomaly.getDimensions(), granularity, false);
      double baselineVal = getTotalFromTimeSeries(baselineSeries, dataset.isAdditive());

      AnomalyDataCompare.CompareResult comparison = new AnomalyDataCompare.CompareResult();
      comparison.setBaselineValue(baselineVal);
      comparison.setCompareMode(compareMode);
      comparison.setChange(calculateChange(currentVal, baselineVal));
      response.getCompareResults().add(comparison);
    }
  } catch (Exception e) {
    LOG.error("Error fetching the timeseries data from pinot", e);
    throw new RuntimeException(e);
  }
  return response;
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn.
Class AnomaliesResource, method getAnomaliesByAnomalyIds.
/**
 * Find anomalies by anomaly ids
 * @param startTime start of the search window (millis)
 * @param endTime end of the search window (millis)
 * @param pageNumber page of results to return
 * @param anomalyIdsString comma separated list of anomaly ids; must be non-empty
 * @param functionName optional anomaly function name filter
 * @return a pageable wrapper around the matching merged anomalies
 * @throws Exception if the wrapper construction fails
 */
@GET
@Path("search/anomalyIds/{startTime}/{endTime}/{pageNumber}")
public AnomaliesWrapper getAnomaliesByAnomalyIds(@PathParam("startTime") Long startTime, @PathParam("endTime") Long endTime, @PathParam("pageNumber") int pageNumber, @QueryParam("anomalyIds") String anomalyIdsString, @QueryParam("functionName") String functionName) throws Exception {
  // Guard against a missing query param: split() on null would NPE with no useful message.
  if (anomalyIdsString == null || anomalyIdsString.trim().isEmpty()) {
    throw new IllegalArgumentException("anomalyIds must be a non-empty comma separated list of ids");
  }
  String[] anomalyIds = anomalyIdsString.split(COMMA_SEPARATOR);
  List<MergedAnomalyResultDTO> mergedAnomalies = new ArrayList<>();
  for (String id : anomalyIds) {
    // Tolerate whitespace and stray separators (e.g. "1, 2," or trailing commas),
    // which would previously make Long.valueOf throw NumberFormatException.
    String trimmedId = id.trim();
    if (trimmedId.isEmpty()) {
      continue;
    }
    Long anomalyId = Long.valueOf(trimmedId);
    // Unknown ids are silently skipped rather than failing the whole request.
    MergedAnomalyResultDTO anomaly = mergedAnomalyResultDAO.findById(anomalyId);
    if (anomaly != null) {
      mergedAnomalies.add(anomaly);
    }
  }
  AnomaliesWrapper anomaliesWrapper = constructAnomaliesWrapperFromMergedAnomalies(mergedAnomalies, pageNumber);
  return anomaliesWrapper;
}
Aggregations