Example usage of com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO in the pinot project by LinkedIn.
From the class AutoloadPinotMetricsServiceTest, method testRefreshDataset:
@Test(dependsOnMethods = { "testAddNewDataset" })
public void testRefreshDataset() throws Exception {
  // Grow the schema by one dimension and re-add the dataset: the existing
  // config must be refreshed in place (still exactly one config), and its
  // dimension list must match the schema's.
  schema.addField(new DimensionFieldSpec("newDimension", DataType.STRING, true));
  testAutoLoadPinotMetricsService.addPinotDataset(dataset, schema, datasetConfig);
  Assert.assertEquals(datasetConfigDAO.findAll().size(), 1);

  DatasetConfigDTO refreshedConfig = datasetConfigDAO.findByDataset(dataset);
  Assert.assertEquals(refreshedConfig.getDataset(), dataset);
  Assert.assertEquals(Sets.newHashSet(refreshedConfig.getDimensions()),
      Sets.newHashSet(schema.getDimensionNames()));

  // Grow the schema by one metric and refresh again: metric configs must
  // track the schema one-to-one.
  schema.addField(new MetricFieldSpec("newMetric", DataType.LONG));
  testAutoLoadPinotMetricsService.addPinotDataset(dataset, schema, refreshedConfig);
  Assert.assertEquals(datasetConfigDAO.findAll().size(), 1);

  List<MetricConfigDTO> metricConfigs = metricConfigDAO.findByDataset(dataset);
  List<String> schemaMetricNames = schema.getMetricNames();
  Assert.assertEquals(metricConfigs.size(), schemaMetricNames.size());

  List<Long> metricIds = new ArrayList<>();
  for (MetricConfigDTO config : metricConfigs) {
    Assert.assertTrue(schemaMetricNames.contains(config.getName()));
    metricIds.add(config.getId());
  }

  // The auto-generated default dashboard must reference exactly these metrics.
  DashboardConfigDTO dashboardConfig =
      dashboardConfigDAO.findByName(DashboardConfigBean.DEFAULT_DASHBOARD_PREFIX + dataset);
  Assert.assertEquals(dashboardConfig.getMetricIds(), metricIds);
}
Example usage of com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO in the pinot project by LinkedIn.
From the class ThirdEyeUtils, method getTimeSpecFromDataset:
/**
 * Resolves the {@link TimeSpec} for the given dataset by looking up its
 * config in the shared dataset-config cache.
 *
 * Best effort: a cache lookup failure is logged and {@code null} is returned
 * rather than propagated.
 */
public static TimeSpec getTimeSpecFromDataset(String dataset) {
  try {
    DatasetConfigDTO config = CACHE_REGISTRY.getDatasetConfigCache().get(dataset);
    return getTimeSpecFromDatasetConfig(config);
  } catch (ExecutionException e) {
    LOG.error("Exception when fetching datasetconfig from cache", e);
    return null;
  }
}
Example usage of com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO in the pinot project by LinkedIn.
From the class TestDetectionJobSchedulerUtils, method testGetNewEntriesForDetectionSchedulerMinuteLevel:
@Test
public void testGetNewEntriesForDetectionSchedulerMinuteLevel() throws Exception {
  // Dataset bucketed at 5-MINUTES granularity; the anomaly function runs every
  // 15 minutes, so new detection entries are expected on 15-minute boundaries
  // (current time 03:36 rounds down to 03:30).
  DatasetConfigDTO datasetConfig = new DatasetConfigDTO();
  datasetConfig.setTimeColumn("Date");
  datasetConfig.setTimeUnit(TimeUnit.MINUTES);
  datasetConfig.setTimeDuration(5);
  DateTimeZone dateTimeZone = DateTimeZone.UTC;

  AnomalyFunctionDTO anomalyFunction = new AnomalyFunctionDTO();
  anomalyFunction.setFrequency(new TimeGranularity(15, TimeUnit.MINUTES));

  DateTimeFormatter dateTimeFormatter =
      DetectionJobSchedulerUtils.getDateTimeFormatterForDataset(datasetConfig, dateTimeZone);
  String currentDateTimeString = "201702140336";
  String currentDateTimeStringRounded = "201702140330";
  DateTime currentDateTime = minuteDateTimeFormatter.parseDateTime(currentDateTimeString);
  DateTime currentDateTimeRounded = dateTimeFormatter.parseDateTime(currentDateTimeStringRounded);
  DetectionStatusDTO lastEntryForFunction = null;

  // No last entry: exactly one new entry at the rounded current time.
  Map<String, Long> newEntries = DetectionJobSchedulerUtils.getNewEntries(
      currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
  Assert.assertEquals(newEntries.size(), 1);
  // Long.valueOf instead of the deprecated new Long(...) boxed constructor.
  Assert.assertEquals(newEntries.get(currentDateTimeStringRounded),
      Long.valueOf(currentDateTimeRounded.getMillis()));

  // Last entry equals the rounded current time: nothing new to schedule.
  lastEntryForFunction = new DetectionStatusDTO();
  lastEntryForFunction.setDateToCheckInSDF(currentDateTimeStringRounded);
  lastEntryForFunction.setDateToCheckInMS(currentDateTimeRounded.getMillis());
  newEntries = DetectionJobSchedulerUtils.getNewEntries(
      currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
  Assert.assertEquals(newEntries.size(), 0);

  // Last entry 15 minutes (one frequency period) behind: one new entry.
  String lastEntryDateTimeString = "201702140315";
  DateTime lastEntryDateTime = dateTimeFormatter.parseDateTime(lastEntryDateTimeString);
  lastEntryForFunction = new DetectionStatusDTO();
  lastEntryForFunction.setDateToCheckInSDF(lastEntryDateTimeString);
  lastEntryForFunction.setDateToCheckInMS(lastEntryDateTime.getMillis());
  newEntries = DetectionJobSchedulerUtils.getNewEntries(
      currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
  Assert.assertEquals(newEntries.size(), 1);
  Assert.assertEquals(newEntries.get(currentDateTimeStringRounded),
      Long.valueOf(currentDateTimeRounded.getMillis()));

  // Last entry 45 minutes (three periods) behind: three new entries, one per
  // missed 15-minute boundary up to and including the rounded current time.
  lastEntryDateTimeString = "201702140245";
  lastEntryDateTime = dateTimeFormatter.parseDateTime(lastEntryDateTimeString);
  lastEntryForFunction = new DetectionStatusDTO();
  lastEntryForFunction.setDateToCheckInSDF(lastEntryDateTimeString);
  lastEntryForFunction.setDateToCheckInMS(lastEntryDateTime.getMillis());
  newEntries = DetectionJobSchedulerUtils.getNewEntries(
      currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
  Assert.assertEquals(newEntries.size(), 3);
  Assert.assertNotNull(newEntries.get("201702140300"));
  Assert.assertNotNull(newEntries.get("201702140315"));
  Assert.assertNotNull(newEntries.get("201702140330"));
  Assert.assertEquals(newEntries.get(currentDateTimeStringRounded),
      Long.valueOf(currentDateTimeRounded.getMillis()));
}
Example usage of com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO in the pinot project by LinkedIn.
From the class AnomaliesResource, method constructAnomaliesWrapperFromMergedAnomalies:
/**
 * Constructs an AnomaliesWrapper holding one page of anomaly details built
 * from the given merged anomalies.
 *
 * @param mergedAnomalies anomalies to page through and describe
 * @param pageNumber requested 1-based page; clamped into the valid range
 * @return wrapper with total count, page size, and the details for the page
 * @throws ExecutionException
 */
private AnomaliesWrapper constructAnomaliesWrapperFromMergedAnomalies(List<MergedAnomalyResultDTO> mergedAnomalies, int pageNumber) throws ExecutionException {
  AnomaliesWrapper anomaliesWrapper = new AnomaliesWrapper();
  anomaliesWrapper.setTotalAnomalies(mergedAnomalies.size());
  LOG.info("Total anomalies: {}", mergedAnomalies.size());

  // TODO: get page number and page size from client
  // Clamp the requested page into [1, maxPageNumber] and derive the slice.
  int pageSize = DEFAULT_PAGE_SIZE;
  int maxPageNumber = (mergedAnomalies.size() - 1) / pageSize + 1;
  pageNumber = Math.max(1, Math.min(pageNumber, maxPageNumber));
  int fromIndex = (pageNumber - 1) * pageSize;
  int toIndex = Math.min(pageNumber * pageSize, mergedAnomalies.size());

  // Show most recent anomalies first, i.e., the anomaly whose end time is most recent then largest id shown at top
  Collections.sort(mergedAnomalies, new MergedAnomalyEndTimeComparator().reversed());

  List<MergedAnomalyResultDTO> pageAnomalies = mergedAnomalies.subList(fromIndex, toIndex);
  anomaliesWrapper.setNumAnomaliesOnPage(pageAnomalies.size());
  LOG.info("Page number: {} Page size: {} Num anomalies on page: {}", pageNumber, pageSize, pageAnomalies.size());

  // Fan the per-anomaly detail construction out to the shared thread pool.
  List<Future<AnomalyDetails>> detailFutures = new ArrayList<>();
  for (MergedAnomalyResultDTO mergedAnomaly : pageAnomalies) {
    Callable<AnomalyDetails> task = () -> {
      String dataset = mergedAnomaly.getCollection();
      DatasetConfigDTO datasetConfig = CACHE_REGISTRY.getDatasetConfigCache().get(dataset);
      DateTimeFormatter timeSeriesDateFormatter = DateTimeFormat.forPattern(TIME_SERIES_DATE_FORMAT).withZone(Utils.getDataTimeZone(dataset));
      DateTimeFormatter startEndDateFormatterDays = DateTimeFormat.forPattern(START_END_DATE_FORMAT_DAYS).withZone(Utils.getDataTimeZone(dataset));
      DateTimeFormatter startEndDateFormatterHours = DateTimeFormat.forPattern(START_END_DATE_FORMAT_HOURS).withZone(Utils.getDataTimeZone(dataset));
      return getAnomalyDetails(mergedAnomaly, datasetConfig, timeSeriesDateFormatter, startEndDateFormatterHours, startEndDateFormatterDays, getExternalURL(mergedAnomaly));
    };
    detailFutures.add(threadPool.submit(task));
  }

  // Gather results; a failed or slow (>120s) task is logged and skipped so
  // one bad anomaly does not sink the whole page.
  List<AnomalyDetails> anomalyDetailsList = new ArrayList<>();
  for (Future<AnomalyDetails> detailFuture : detailFutures) {
    try {
      AnomalyDetails details = detailFuture.get(120, TimeUnit.SECONDS);
      if (details != null) {
        anomalyDetailsList.add(details);
      }
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
      LOG.error("Exception in getting AnomalyDetails", e);
    }
  }
  anomaliesWrapper.setAnomalyDetailsList(anomalyDetailsList);
  return anomaliesWrapper;
}
Example usage of com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO in the pinot project by LinkedIn.
From the class DataResource, method getDataAggregationGranularity:
@GET
@Path("agg/granularity/metric/{metricId}")
public List<String> getDataAggregationGranularity(@PathParam("metricId") Long metricId) {
  // Returns the aggregation granularities available for the metric's dataset.
  // "DAYS" is always offered; finer options depend on the dataset's native
  // time bucketing.
  List<String> granularities = new ArrayList<>();
  granularities.add("DAYS");

  MetricConfigDTO metricConfig = metricConfigDAO.findById(metricId);
  DatasetConfigDTO datasetConfig = datasetConfigDAO.findByDataset(metricConfig.getDataset());
  int dataAggSize = datasetConfig.getTimeDuration();
  String dataGranularity = datasetConfig.getTimeUnit().name();

  // Inverted the original empty "do nothing" branch: anything finer than a
  // daily dataset also supports HOURS.
  if (!dataGranularity.equals("DAYS")) {
    granularities.add("HOURS");
    if (dataGranularity.equals("MINUTES")) {
      // Expose the native minute bucket, qualified by its size when > 1
      // (e.g. "5_MINUTES" for 5-minute data).
      granularities.add(dataAggSize == 1 ? "MINUTES" : dataAggSize + "_MINUTES");
    }
  }
  return granularities;
}
Aggregations