Usage of com.linkedin.thirdeye.dashboard.resources.OnboardResource in the project pinot by LinkedIn.
From the class ThirdEyeDashboardApplication, method run.
@Override
public void run(ThirdEyeDashboardConfiguration config, Environment env) throws Exception {
  // Wire up the persistence layer before anything else touches the DAOs.
  super.initDAOs();

  // Cache warm-up is best-effort: a failure here is logged and the dashboard
  // still starts, serving uncached data until the caches recover.
  try {
    ThirdEyeCacheRegistry.initializeCaches(config);
  } catch (Exception cacheInitError) {
    LOG.error("Exception while loading caches", cacheInitError);
  }

  // Factories shared by every resource that evaluates anomaly functions / alert filters.
  AnomalyFunctionFactory functionFactory = new AnomalyFunctionFactory(config.getFunctionConfigPath());
  AlertFilterFactory filterFactory = new AlertFilterFactory(config.getAlertFilterConfigPath());

  // Register all REST resources with the Jersey environment.
  env.jersey().register(new AnomalyFunctionResource(config.getFunctionConfigPath()));
  env.jersey().register(new DashboardResource());
  env.jersey().register(new CacheResource());
  env.jersey().register(new AnomalyResource(functionFactory, filterFactory));
  env.jersey().register(new EmailResource(config));
  env.jersey().register(new EntityManagerResource());
  env.jersey().register(new IngraphMetricConfigResource());
  env.jersey().register(new MetricConfigResource());
  env.jersey().register(new DatasetConfigResource());
  env.jersey().register(new IngraphDashboardConfigResource());
  env.jersey().register(new JobResource());
  env.jersey().register(new AdminResource());
  env.jersey().register(new SummaryResource());
  env.jersey().register(new ThirdEyeResource());
  env.jersey().register(new OverrideConfigResource());
  env.jersey().register(new DataResource(functionFactory, filterFactory));
  env.jersey().register(new AnomaliesResource(functionFactory, filterFactory));
  env.jersey().register(new TimeSeriesResource());
  env.jersey().register(new OnboardResource());
  env.jersey().register(new EventResource(config.getInformedApiUrl()));
}
Usage of com.linkedin.thirdeye.dashboard.resources.OnboardResource in the project pinot by LinkedIn.
From the class CleanupAndRegenerateAnomaliesTool, method deleteExistingAnomalies.
/**
 * Deletes raw or merged anomalies whose start time is located in the given time ranges, except
 * the following two cases:
 *
 * 1. If a raw anomaly belongs to a merged anomaly whose start time is not located in the given
 * time ranges, then the raw anomaly will not be deleted.
 *
 * 2. If a raw anomaly belongs to a merged anomaly whose start time is located in the given
 * time ranges, then it is deleted regardless of its start time.
 *
 * If monitoringWindowStartTime is not given, then start time is set to 0.
 * If monitoringWindowEndTime is not given, then end time is set to Long.MAX_VALUE.
 */
private void deleteExistingAnomalies() {
  // Default to the widest possible window; narrow it only when a bound was supplied.
  long startTime = 0;
  long endTime = Long.MAX_VALUE;
  if (StringUtils.isNotBlank(monitoringWindowStartTime)) {
    startTime = ISODateTimeFormat.dateTimeParser().parseDateTime(monitoringWindowStartTime).getMillis();
  }
  if (StringUtils.isNotBlank(monitoringWindowEndTime)) {
    endTime = ISODateTimeFormat.dateTimeParser().parseDateTime(monitoringWindowEndTime).getMillis();
  }
  LOG.info("Deleting anomalies in the time range: {} -- {}", new DateTime(startTime), new DateTime(endTime));

  // The resource only wraps the DAOs, so it is loop-invariant: construct it once
  // instead of once per function id (original code re-created it on every iteration).
  OnboardResource onboardResource = new OnboardResource(anomalyFunctionDAO, mergedResultDAO, rawResultDAO);

  for (Long functionId : functionIds) {
    AnomalyFunctionDTO anomalyFunction = anomalyFunctionDAO.findById(functionId);
    if (anomalyFunction == null) {
      // Skip unknown ids rather than aborting the whole cleanup run.
      LOG.info("Requested functionId {} doesn't exist", functionId);
      continue;
    }
    LOG.info("Beginning cleanup of functionId {} collection {} metric {}", functionId, anomalyFunction.getCollection(), anomalyFunction.getMetric());
    // Clean up merged and raw anomalies of this function id within [startTime, endTime].
    onboardResource.deleteExistingAnomalies(Long.toString(functionId), startTime, endTime);
  }
}
Aggregations