Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn.
The class MergedAnomalyResultManagerImpl, method batchConvertMergedAnomalyBean2DTO:
/**
 * Converts a list of merged-anomaly beans to DTOs in parallel.
 *
 * Each conversion (which may perform several DAO lookups) is submitted to the
 * executor service; results are then drained with a 60-second per-future timeout.
 * Beans whose conversion fails or times out are logged and skipped, so the
 * returned list may be shorter than the input list.
 *
 * @param mergedAnomalyResultBeanList beans to convert
 * @param loadRawAnomalies whether to also load each anomaly's raw-anomaly children
 * @return the successfully converted DTOs, in submission order
 */
protected List<MergedAnomalyResultDTO> batchConvertMergedAnomalyBean2DTO(List<MergedAnomalyResultBean> mergedAnomalyResultBeanList, boolean loadRawAnomalies) {
  List<Future<MergedAnomalyResultDTO>> mergedAnomalyResultDTOFutureList = new ArrayList<>(mergedAnomalyResultBeanList.size());
  for (MergedAnomalyResultBean mergedAnomalyResultBean : mergedAnomalyResultBeanList) {
    Future<MergedAnomalyResultDTO> future = executorService.submit(() -> convertMergedAnomalyBean2DTO(mergedAnomalyResultBean, loadRawAnomalies));
    mergedAnomalyResultDTOFutureList.add(future);
  }
  List<MergedAnomalyResultDTO> mergedAnomalyResultDTOList = new ArrayList<>(mergedAnomalyResultBeanList.size());
  // Typed future instead of a raw Future, so no unchecked cast is needed on get().
  for (Future<MergedAnomalyResultDTO> future : mergedAnomalyResultDTOFutureList) {
    try {
      mergedAnomalyResultDTOList.add(future.get(60, TimeUnit.SECONDS));
    } catch (InterruptedException e) {
      // Restore the interrupt flag so callers up the stack can observe the interruption.
      Thread.currentThread().interrupt();
      LOG.warn("Interrupted while converting MergedAnomalyResultDTO from bean", e);
    } catch (TimeoutException | ExecutionException e) {
      // Pass the throwable itself so the full stack trace is logged, not just toString().
      LOG.warn("Failed to convert MergedAnomalyResultDTO from bean", e);
    }
  }
  return mergedAnomalyResultDTOList;
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn.
The class MergedAnomalyResultManagerImpl, method convertMergedAnomalyBean2DTO:
/**
 * Converts one merged-anomaly bean to a DTO.
 *
 * The flat bean fields are mapped onto the DTO first, then related entities are
 * hydrated by id: the owning anomaly function, the attached feedback (if any),
 * and - only when requested - the child raw anomalies.
 *
 * @param mergedAnomalyResultBean source bean
 * @param loadRawAnomalies whether to fetch and attach the raw-anomaly children
 * @return the hydrated DTO
 */
protected MergedAnomalyResultDTO convertMergedAnomalyBean2DTO(MergedAnomalyResultBean mergedAnomalyResultBean, boolean loadRawAnomalies) {
  MergedAnomalyResultDTO dto = MODEL_MAPPER.map(mergedAnomalyResultBean, MergedAnomalyResultDTO.class);
  // Hydrate the owning anomaly function, if one is referenced.
  Long functionId = mergedAnomalyResultBean.getFunctionId();
  if (functionId != null) {
    AnomalyFunctionBean functionBean = genericPojoDao.get(functionId, AnomalyFunctionBean.class);
    dto.setFunction(MODEL_MAPPER.map(functionBean, AnomalyFunctionDTO.class));
  }
  // Hydrate the feedback record, if one is attached.
  Long feedbackId = mergedAnomalyResultBean.getAnomalyFeedbackId();
  if (feedbackId != null) {
    AnomalyFeedbackBean feedbackBean = genericPojoDao.get(feedbackId, AnomalyFeedbackBean.class);
    dto.setFeedback(MODEL_MAPPER.map(feedbackBean, AnomalyFeedbackDTO.class));
  }
  // Raw anomalies are loaded only on request - they can be numerous.
  List<Long> rawAnomalyIds = mergedAnomalyResultBean.getRawAnomalyIdList();
  if (loadRawAnomalies && rawAnomalyIds != null && !rawAnomalyIds.isEmpty()) {
    List<RawAnomalyResultBean> rawBeans = genericPojoDao.get(rawAnomalyIds, RawAnomalyResultBean.class);
    List<RawAnomalyResultDTO> rawDtos = new ArrayList<>(rawBeans.size());
    for (RawAnomalyResultBean rawBean : rawBeans) {
      rawDtos.add(createRawAnomalyDTOFromBean(rawBean));
    }
    dto.setAnomalyResults(rawDtos);
  }
  return dto;
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn.
The class AnomalyApplicationEndToEndTest, method testThirdeyeAnomalyApplication:
/**
 * End-to-end smoke test: runs the data-completeness, detection, alert, monitor,
 * and worker components against the test DAOs, polling state between stages
 * with Thread.sleep (see the TODO below about timing flakiness).
 *
 * Exact counts are checked with assertEquals rather than assertTrue(x == n)
 * so that a failure reports the actual vs expected values.
 */
@Test(enabled = true)
public void testThirdeyeAnomalyApplication() throws Exception {
  Assert.assertNotNull(daoRegistry.getJobDAO());
  // setup caches and config
  setup();
  Assert.assertNotNull(daoRegistry.getJobDAO());
  // startDataCompletenessChecker
  startDataCompletenessScheduler();
  Thread.sleep(10000);
  int jobSizeDataCompleteness = jobDAO.findAll().size();
  int taskSizeDataCompleteness = taskDAO.findAll().size();
  Assert.assertEquals(jobSizeDataCompleteness, 1);
  Assert.assertEquals(taskSizeDataCompleteness, 2);
  JobDTO jobDTO = jobDAO.findAll().get(0);
  Assert.assertTrue(jobDTO.getJobName().startsWith(TaskType.DATA_COMPLETENESS.toString()));
  List<TaskDTO> taskDTOs = taskDAO.findAll();
  for (TaskDTO taskDTO : taskDTOs) {
    Assert.assertEquals(taskDTO.getTaskType(), TaskType.DATA_COMPLETENESS);
    Assert.assertEquals(taskDTO.getStatus(), TaskStatus.WAITING);
    DataCompletenessTaskInfo taskInfo = (DataCompletenessTaskInfo) TaskInfoFactory.getTaskInfoFromTaskType(taskDTO.getTaskType(), taskDTO.getTaskInfo());
    Assert.assertTrue((taskInfo.getDataCompletenessType() == DataCompletenessType.CHECKER) || (taskInfo.getDataCompletenessType() == DataCompletenessType.CLEANUP));
  }
  // start detection scheduler
  startDetectionScheduler();
  // start alert scheduler
  startAlertScheduler();
  // check for number of entries in tasks and jobs
  Thread.sleep(10000);
  int jobSize1 = jobDAO.findAll().size();
  int taskSize1 = taskDAO.findAll().size();
  Assert.assertTrue(jobSize1 > 0);
  Assert.assertTrue(taskSize1 > 0);
  // after another interval the schedulers must have produced more jobs/tasks
  Thread.sleep(10000);
  int jobSize2 = jobDAO.findAll().size();
  int taskSize2 = taskDAO.findAll().size();
  Assert.assertTrue(jobSize2 > jobSize1);
  Assert.assertTrue(taskSize2 > taskSize1);
  tasks = taskDAO.findAll();
  // check for task type
  int detectionCount = 0;
  int alertCount = 0;
  for (TaskDTO task : tasks) {
    if (task.getTaskType().equals(TaskType.ANOMALY_DETECTION)) {
      detectionCount++;
    } else if (task.getTaskType().equals(TaskType.ALERT)) {
      alertCount++;
    }
  }
  Assert.assertTrue(detectionCount > 0);
  Assert.assertTrue(alertCount > 0);
  // check for task status: no worker is running yet, so everything is WAITING
  tasks = taskDAO.findAll();
  for (TaskDTO task : tasks) {
    Assert.assertEquals(task.getStatus(), TaskStatus.WAITING);
  }
  // start monitor
  startMonitor();
  // check for monitor tasks
  Thread.sleep(5000);
  tasks = taskDAO.findAll();
  int monitorCount = 0;
  for (TaskDTO task : tasks) {
    if (task.getTaskType().equals(TaskType.MONITOR)) {
      monitorCount++;
    }
  }
  Assert.assertEquals(monitorCount, 2);
  // check for job status
  jobs = jobDAO.findAll();
  for (JobDTO job : jobs) {
    Assert.assertEquals(job.getStatus(), JobStatus.SCHEDULED);
  }
  // start task drivers
  startWorker();
  // check for change in task status to COMPLETED
  Thread.sleep(30000);
  tasks = taskDAO.findAll();
  int completedCount = 0;
  for (TaskDTO task : tasks) {
    if (task.getStatus().equals(TaskStatus.COMPLETED)) {
      completedCount++;
    }
  }
  Assert.assertTrue(completedCount > 0);
  // Raw anomalies of the same function and dimensions should have been merged by the worker, so we
  // check if any raw anomalies present, whose existence means the worker fails the synchronous merge.
  List<RawAnomalyResultDTO> rawAnomalies = rawAnomalyResultDAO.findUnmergedByFunctionId(functionId);
  Assert.assertEquals(rawAnomalies.size(), 0);
  // check merged anomalies
  List<MergedAnomalyResultDTO> mergedAnomalies = mergedAnomalyResultDAO.findByFunctionId(functionId);
  Assert.assertTrue(mergedAnomalies.size() > 0);
  // THE FOLLOWING TEST FAILS OCCASIONALLY DUE TO MACHINE COMPUTATION POWER
  // TODO: Move test away from Thread.sleep
  // check for job status COMPLETED
  // jobs = jobDAO.findAll();
  // int completedJobCount = 0;
  // for (JobDTO job : jobs) {
  // int attempt = 0;
  // while (attempt < 3 && !job.getStatus().equals(JobStatus.COMPLETED)) {
  // LOG.info("Checking job status with attempt : {}", attempt + 1);
  // Thread.sleep(5_000);
  // attempt++;
  // }
  // if (job.getStatus().equals(JobStatus.COMPLETED)) {
  // completedJobCount ++;
  // }
  // }
  // Assert.assertTrue(completedJobCount > 0);
  // stop schedulers
  cleanup();
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn.
The class TestMergedAnomalyResultManager, method testMergedResultCRUD:
// Full CRUD round-trip: persist a function and a raw anomaly, merge the raw
// anomaly into a merged result, persist it, and read it back by id and by
// collection/metric/dimensions/time. The statement order matters: each save
// produces the id the next step depends on. Side effects: stores the merged
// result and the raw-anomaly id in the fields `mergedResult` and
// `anomalyResultId`, which the dependent test testFeedback reads.
@Test
public void testMergedResultCRUD() {
// The anomaly function must be persisted first so the raw result can reference it.
anomalyFunctionDAO.save(function);
Assert.assertNotNull(function.getId());
// create anomaly result
RawAnomalyResultDTO result = getAnomalyResult();
result.setFunction(function);
rawAnomalyResultDAO.save(result);
RawAnomalyResultDTO resultRet = rawAnomalyResultDAO.findById(result.getId());
Assert.assertEquals(resultRet.getFunction(), function);
anomalyResultId = result.getId();
// Let's create merged result
List<RawAnomalyResultDTO> rawResults = new ArrayList<>();
rawResults.add(result);
AnomalyMergeConfig mergeConfig = new AnomalyMergeConfig();
List<MergedAnomalyResultDTO> mergedResults = AnomalyTimeBasedSummarizer.mergeAnomalies(rawResults, mergeConfig.getMaxMergeDurationLength(), mergeConfig.getSequentialAllowedGap());
// A single raw anomaly merges into one result spanning the same time window.
Assert.assertEquals(mergedResults.get(0).getStartTime(), result.getStartTime());
Assert.assertEquals(mergedResults.get(0).getEndTime(), result.getEndTime());
Assert.assertEquals(mergedResults.get(0).getAnomalyResults().get(0), result);
// Let's persist the merged result
// Dimensions are copied from the raw result before saving; the summarizer does not set them.
mergedResults.get(0).setDimensions(result.getDimensions());
mergedAnomalyResultDAO.save(mergedResults.get(0));
mergedResult = mergedResults.get(0);
Assert.assertNotNull(mergedResult.getId());
// verify the merged result
MergedAnomalyResultDTO mergedResultById = mergedAnomalyResultDAO.findById(mergedResult.getId());
Assert.assertEquals(mergedResultById.getAnomalyResults(), rawResults);
Assert.assertEquals(mergedResultById.getAnomalyResults().get(0).getId(), anomalyResultId);
// Query window [0, now) covers the saved result; `true` presumably loads raw
// anomalies - TODO confirm against the DAO signature.
List<MergedAnomalyResultDTO> mergedResultsByMetricDimensionsTime = mergedAnomalyResultDAO.findByCollectionMetricDimensionsTime(mergedResult.getCollection(), mergedResult.getMetric(), mergedResult.getDimensions().toString(), 0, System.currentTimeMillis(), true);
Assert.assertEquals(mergedResultsByMetricDimensionsTime.get(0), mergedResult);
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn.
The class TestMergedAnomalyResultManager, method testFeedback:
// Verifies that anomaly feedback set on a merged result round-trips through
// the explicit updateAnomalyFeedback call. Depends on testMergedResultCRUD,
// which persisted `mergedResult` and recorded `anomalyResultId`.
@Test(dependsOnMethods = { "testMergedResultCRUD" })
public void testFeedback() {
// Load the merged anomaly persisted by the CRUD test.
MergedAnomalyResultDTO persistedResult = mergedAnomalyResultDAO.findById(mergedResult.getId());
// Attach a fresh feedback entry.
AnomalyFeedbackDTO anomalyFeedback = new AnomalyFeedbackDTO();
anomalyFeedback.setFeedbackType(AnomalyFeedbackType.ANOMALY);
anomalyFeedback.setStatus(FeedbackStatus.NEW);
anomalyFeedback.setComment("this is a good find");
persistedResult.setFeedback(anomalyFeedback);
// Feedback is only persisted via this explicit update call, not a plain save.
mergedAnomalyResultDAO.updateAnomalyFeedback(persistedResult);
// Re-read and verify the feedback (and the raw-anomaly link) round-trip.
MergedAnomalyResultDTO reloadedResult = mergedAnomalyResultDAO.findById(mergedResult.getId());
Assert.assertEquals(reloadedResult.getAnomalyResults().get(0).getId(), anomalyResultId);
Assert.assertEquals(reloadedResult.getFeedback().getFeedbackType(), AnomalyFeedbackType.ANOMALY);
}
Aggregations