Use of com.linkedin.thirdeye.detector.email.filter.AlertFilterFactory in project pinot by linkedin.
The class AlertTaskRunnerV2, method execute.
@Override
public List<TaskResult> execute(TaskInfo taskInfo, TaskContext taskContext) throws Exception {
  List<TaskResult> taskResult = new ArrayList<>();
  AlertTaskInfo alertTaskInfo = (AlertTaskInfo) taskInfo;
  alertConfig = alertTaskInfo.getAlertConfigDTO();
  thirdeyeConfig = taskContext.getThirdEyeAnomalyConfiguration();
  alertFilterFactory = new AlertFilterFactory(thirdeyeConfig.getAlertFilterConfigPath());
  try {
    LOG.info("Begin executing task {}", taskInfo);
    runTask();
  } catch (Exception t) {
    LOG.error("Task failed with exception:", t);
    sendFailureEmail(t);
    // Let task driver mark this task failed
    throw t;
  }
  return taskResult;
}
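Both construction styles for AlertFilterFactory that appear on this page can be sketched in isolation: a file-system config path (as in the snippet above and in ThirdEyeAnomalyApplication) and a classpath InputStream (as in the end-to-end test further down). The class name, the file path, and the resource name below are illustrative placeholders, not values from the ThirdEye codebase; the only calls assumed are the two AlertFilterFactory constructors shown on this page.

import java.io.InputStream;

import com.linkedin.thirdeye.detector.email.filter.AlertFilterFactory;

public class AlertFilterFactoryConstructionSketch {

  public static void main(String[] args) throws Exception {
    // Production-style wiring (AlertTaskRunnerV2, ThirdEyeAnomalyApplication):
    // the factory is built from the alert filter config path carried by the ThirdEye configuration.
    String alertFilterConfigPath = "/opt/thirdeye/alertfilter.properties"; // hypothetical path
    AlertFilterFactory factoryFromPath = new AlertFilterFactory(alertFilterConfigPath);

    // Test-style wiring (AnomalyApplicationEndToEndTest): the same factory is built
    // from a classpath resource stream instead of a file path.
    try (InputStream alertFilterStream =
        AlertFilterFactoryConstructionSketch.class.getResourceAsStream("/sample-alertfilter.properties")) { // hypothetical resource
      AlertFilterFactory factoryFromStream = new AlertFilterFactory(alertFilterStream);
      // Either instance is then handed to collaborators such as TaskDriver or DetectionJobResource,
      // exactly as the snippets on this page show.
    }
  }
}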
Use of com.linkedin.thirdeye.detector.email.filter.AlertFilterFactory in project pinot by linkedin.
The class ThirdEyeAnomalyApplication, method run.
@Override
public void run(final ThirdEyeAnomalyConfiguration config, final Environment environment) throws Exception {
  LOG.info("Starting ThirdeyeAnomalyApplication : Scheduler {} Worker {}", config.isScheduler(), config.isWorker());
  super.initDAOs();
  ThirdEyeCacheRegistry.initializeCaches(config);
  environment.lifecycle().manage(new Managed() {

    @Override
    public void start() throws Exception {
      if (config.isWorker()) {
        anomalyFunctionFactory = new AnomalyFunctionFactory(config.getFunctionConfigPath());
        alertFilterFactory = new AlertFilterFactory(config.getAlertFilterConfigPath());
        taskDriver = new TaskDriver(config, anomalyFunctionFactory, alertFilterFactory);
        taskDriver.start();
      }
      if (config.isScheduler()) {
        detectionJobScheduler = new DetectionJobScheduler();
        alertFilterFactory = new AlertFilterFactory(config.getAlertFilterConfigPath());
        alertFilterAutotuneFactory = new AlertFilterAutotuneFactory(config.getFilterAutotuneConfigPath());
        detectionJobScheduler.start();
        environment.jersey().register(new DetectionJobResource(detectionJobScheduler, alertFilterFactory, alertFilterAutotuneFactory));
        environment.jersey().register(new AnomalyFunctionResource(config.getFunctionConfigPath()));
      }
      if (config.isMonitor()) {
        monitorJobScheduler = new MonitorJobScheduler(config.getMonitorConfiguration());
        monitorJobScheduler.start();
      }
      if (config.isAlert()) {
        alertJobScheduler = new AlertJobScheduler();
        alertJobScheduler.start();
        // start alert scheduler v2
        alertJobSchedulerV2 = new AlertJobSchedulerV2();
        alertJobSchedulerV2.start();
        environment.jersey().register(new AlertJobResource(alertJobScheduler, emailConfigurationDAO));
      }
      if (config.isMerger()) {
        // anomalyFunctionFactory may already have been initialized if this machine is also a worker
        if (anomalyFunctionFactory == null) {
          anomalyFunctionFactory = new AnomalyFunctionFactory(config.getFunctionConfigPath());
        }
        ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
        anomalyMergeExecutor = new AnomalyMergeExecutor(executorService, anomalyFunctionFactory);
        anomalyMergeExecutor.start();
      }
      if (config.isAutoload()) {
        autoLoadPinotMetricsService = new AutoLoadPinotMetricsService(config);
        autoLoadPinotMetricsService.start();
      }
      if (config.isDataCompleteness()) {
        dataCompletenessScheduler = new DataCompletenessScheduler();
        dataCompletenessScheduler.start();
      }
    }

    @Override
    public void stop() throws Exception {
      if (config.isWorker()) {
        taskDriver.stop();
      }
      if (config.isScheduler()) {
        detectionJobScheduler.shutdown();
      }
      if (config.isMonitor()) {
        monitorJobScheduler.stop();
      }
      if (config.isAlert()) {
        alertJobScheduler.shutdown();
        alertJobSchedulerV2.shutdown();
      }
      if (config.isMerger()) {
        anomalyMergeExecutor.stop();
      }
      if (config.isAutoload()) {
        autoLoadPinotMetricsService.shutdown();
      }
      if (config.isDataCompleteness()) {
        dataCompletenessScheduler.shutdown();
      }
    }
  });
}
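The start/stop wiring above relies on Dropwizard's Managed lifecycle hook. The minimal sketch below shows only that pattern; SimpleScheduler is a hypothetical stand-in for components like DetectionJobScheduler or AlertJobScheduler and is not a ThirdEye class.

import io.dropwizard.lifecycle.Managed;
import io.dropwizard.setup.Environment;

// Hypothetical component standing in for DetectionJobScheduler, AlertJobScheduler, etc.
class SimpleScheduler {
  void start() { /* schedule recurring jobs */ }
  void shutdown() { /* cancel scheduled jobs */ }
}

class SchedulerLifecycle {

  static void register(Environment environment, final SimpleScheduler scheduler) {
    // Dropwizard invokes start() once the server comes up and stop() during shutdown,
    // which is how ThirdEyeAnomalyApplication starts and stops its schedulers and task driver.
    environment.lifecycle().manage(new Managed() {
      @Override
      public void start() throws Exception {
        scheduler.start();
      }

      @Override
      public void stop() throws Exception {
        scheduler.shutdown();
      }
    });
  }
}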
Use of com.linkedin.thirdeye.detector.email.filter.AlertFilterFactory in project pinot by linkedin.
The class ThirdEyeDashboardApplication, method run.
@Override
public void run(ThirdEyeDashboardConfiguration config, Environment env) throws Exception {
  super.initDAOs();
  try {
    ThirdEyeCacheRegistry.initializeCaches(config);
  } catch (Exception e) {
    LOG.error("Exception while loading caches", e);
  }
  AnomalyFunctionFactory anomalyFunctionFactory = new AnomalyFunctionFactory(config.getFunctionConfigPath());
  AlertFilterFactory alertFilterFactory = new AlertFilterFactory(config.getAlertFilterConfigPath());
  env.jersey().register(new AnomalyFunctionResource(config.getFunctionConfigPath()));
  env.jersey().register(new DashboardResource());
  env.jersey().register(new CacheResource());
  env.jersey().register(new AnomalyResource(anomalyFunctionFactory, alertFilterFactory));
  env.jersey().register(new EmailResource(config));
  env.jersey().register(new EntityManagerResource());
  env.jersey().register(new IngraphMetricConfigResource());
  env.jersey().register(new MetricConfigResource());
  env.jersey().register(new DatasetConfigResource());
  env.jersey().register(new IngraphDashboardConfigResource());
  env.jersey().register(new JobResource());
  env.jersey().register(new AdminResource());
  env.jersey().register(new SummaryResource());
  env.jersey().register(new ThirdEyeResource());
  env.jersey().register(new OverrideConfigResource());
  env.jersey().register(new DataResource(anomalyFunctionFactory, alertFilterFactory));
  env.jersey().register(new AnomaliesResource(anomalyFunctionFactory, alertFilterFactory));
  env.jersey().register(new TimeSeriesResource());
  env.jersey().register(new OnboardResource());
  env.jersey().register(new EventResource(config.getInformedApiUrl()));
}
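Each env.jersey().register(...) call above hands Jersey a fully constructed JAX-RS resource, which is how the shared AlertFilterFactory instance reaches the REST layer. The resource below is a hypothetical illustration of that constructor-injection pattern, not one of the real ThirdEye resources.

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import com.linkedin.thirdeye.detector.email.filter.AlertFilterFactory;

// Hypothetical resource showing the pattern used by AnomalyResource, DataResource and AnomaliesResource:
// the factory is built once in run() and passed into the resource constructor.
@Path("/example-alert-filter")
@Produces(MediaType.APPLICATION_JSON)
public class ExampleAlertFilterResource {

  private final AlertFilterFactory alertFilterFactory;

  public ExampleAlertFilterResource(AlertFilterFactory alertFilterFactory) {
    this.alertFilterFactory = alertFilterFactory;
  }

  @GET
  public String ping() {
    // A real resource would use alertFilterFactory to build filters for the anomalies it serves.
    return "ok";
  }
}

Registering it would follow the same shape as the calls above, e.g. env.jersey().register(new ExampleAlertFilterResource(alertFilterFactory));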
Use of com.linkedin.thirdeye.detector.email.filter.AlertFilterFactory in project pinot by linkedin.
The class AnomalyApplicationEndToEndTest, method setup.
private void setup() throws Exception {
  // Mock query cache
  ThirdEyeClient mockThirdeyeClient = Mockito.mock(ThirdEyeClient.class);
  Mockito.when(mockThirdeyeClient.execute(Matchers.any(ThirdEyeRequest.class))).thenAnswer(new Answer<ThirdEyeResponse>() {

    @Override
    public ThirdEyeResponse answer(InvocationOnMock invocation) throws Throwable {
      Object[] args = invocation.getArguments();
      ThirdEyeRequest request = (ThirdEyeRequest) args[0];
      ThirdEyeResponse response = getMockResponse(request);
      return response;
    }
  });
  QueryCache mockQueryCache = new QueryCache(mockThirdeyeClient, Executors.newFixedThreadPool(10));
  cacheRegistry.registerQueryCache(mockQueryCache);
  MetricConfigDTO metricConfig = getTestMetricConfig(collection, metric, 1L);
  // create metric config in cache
  LoadingCache<MetricDataset, MetricConfigDTO> mockMetricConfigCache = Mockito.mock(LoadingCache.class);
  Mockito.when(mockMetricConfigCache.get(new MetricDataset(metric, collection))).thenReturn(metricConfig);
  cacheRegistry.registerMetricConfigCache(mockMetricConfigCache);
  // create dataset config in cache
  LoadingCache<String, DatasetConfigDTO> mockDatasetConfigCache = Mockito.mock(LoadingCache.class);
  Mockito.when(mockDatasetConfigCache.get(collection)).thenReturn(getTestDatasetConfig(collection));
  cacheRegistry.registerDatasetConfigCache(mockDatasetConfigCache);
  ResultSet mockResultSet = Mockito.mock(ResultSet.class);
  Mockito.when(mockResultSet.getRowCount()).thenReturn(0);
  ResultSetGroup mockResultSetGroup = Mockito.mock(ResultSetGroup.class);
  Mockito.when(mockResultSetGroup.getResultSet(0)).thenReturn(mockResultSet);
  LoadingCache<PinotQuery, ResultSetGroup> mockResultSetGroupCache = Mockito.mock(LoadingCache.class);
  Mockito.when(mockResultSetGroupCache.get(Matchers.any(PinotQuery.class))).thenAnswer(new Answer<ResultSetGroup>() {

    @Override
    public ResultSetGroup answer(InvocationOnMock invocation) throws Throwable {
      return mockResultSetGroup;
    }
  });
  cacheRegistry.registerResultSetGroupCache(mockResultSetGroupCache);
  // Application config
  thirdeyeAnomalyConfig = new ThirdEyeAnomalyConfiguration();
  thirdeyeAnomalyConfig.setId(id);
  thirdeyeAnomalyConfig.setDashboardHost(dashboardHost);
  MonitorConfiguration monitorConfiguration = new MonitorConfiguration();
  monitorConfiguration.setMonitorFrequency(new TimeGranularity(30, TimeUnit.SECONDS));
  thirdeyeAnomalyConfig.setMonitorConfiguration(monitorConfiguration);
  thirdeyeAnomalyConfig.setRootDir(System.getProperty("dw.rootDir", "NOT_SET(dw.rootDir)"));
  // create test anomaly function
  functionId = anomalyFunctionDAO.save(getTestFunctionSpec(metric, collection));
  // create test email configuration
  emailConfigurationDAO.save(getTestEmailConfiguration(metric, collection));
  // create test alert configuration
  alertConfigDAO.save(getTestAlertConfiguration("test alert v2"));
  // create test dataset config
  datasetConfigDAO.save(getTestDatasetConfig(collection));
  // setup function factory for worker and merger
  InputStream factoryStream = AnomalyApplicationEndToEndTest.class.getResourceAsStream(functionPropertiesFile);
  anomalyFunctionFactory = new AnomalyFunctionFactory(factoryStream);
  // setup alertfilter factory for worker
  InputStream alertFilterStream = AnomalyApplicationEndToEndTest.class.getResourceAsStream(alertFilterPropertiesFile);
  alertFilterFactory = new AlertFilterFactory(alertFilterStream);
}
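The test above leans on Mockito's Answer interface to compute stubbed return values from the incoming arguments (for the ThirdEyeClient mock) and to return canned objects for any key (for the result-set cache). A stripped-down sketch of that stubbing pattern, using a plain Guava LoadingCache<String, String> rather than the ThirdEye caches, is shown below; only standard Mockito and Guava APIs are assumed.

import com.google.common.cache.LoadingCache;

import org.mockito.Matchers;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

public class AnswerStubbingSketch {

  @SuppressWarnings("unchecked")
  public static void main(String[] args) throws Exception {
    // Mock a cache and stub get() so the returned value is derived from the requested key,
    // mirroring how the test derives a ThirdEyeResponse from the incoming ThirdEyeRequest.
    LoadingCache<String, String> mockCache = Mockito.mock(LoadingCache.class);
    Mockito.when(mockCache.get(Matchers.anyString())).thenAnswer(new Answer<String>() {
      @Override
      public String answer(InvocationOnMock invocation) throws Throwable {
        String key = (String) invocation.getArguments()[0];
        return "value-for-" + key;
      }
    });

    System.out.println(mockCache.get("collection")); // prints "value-for-collection"
  }
}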