Usage of com.linkedin.thirdeye.api.TimeGranularity in project pinot (by LinkedIn): class AnomalyResource, method createAnomalyFunction.
// Add anomaly function
@POST
@Path("/anomaly-function/create")
public Response createAnomalyFunction(@NotNull @QueryParam("dataset") String dataset,
    @NotNull @QueryParam("functionName") String functionName,
    @NotNull @QueryParam("metric") String metric,
    @NotNull @QueryParam("metricFunction") String metric_function,
    @QueryParam("type") String type,
    @NotNull @QueryParam("windowSize") String windowSize,
    @NotNull @QueryParam("windowUnit") String windowUnit,
    @QueryParam("windowDelay") String windowDelay,
    @QueryParam("cron") String cron,
    @QueryParam("windowDelayUnit") String windowDelayUnit,
    @QueryParam("exploreDimension") String exploreDimensions,
    @QueryParam("filters") String filters,
    @NotNull @QueryParam("properties") String properties,
    @QueryParam("isActive") boolean isActive) throws Exception {
  // Fail fast on missing mandatory parameters.
  if (StringUtils.isEmpty(dataset) || StringUtils.isEmpty(functionName) || StringUtils.isEmpty(metric)
      || StringUtils.isEmpty(windowSize) || StringUtils.isEmpty(windowUnit) || StringUtils.isEmpty(properties)) {
    // NOTE: message previously read ", properties" + properties with no separating space — fixed.
    throw new UnsupportedOperationException("Received null for one of the mandatory params: "
        + "dataset " + dataset + ", functionName " + functionName + ", metric " + metric
        + ", windowSize " + windowSize + ", windowUnit " + windowUnit + ", properties " + properties);
  }
  DatasetConfigDTO datasetConfig = DAO_REGISTRY.getDatasetConfigDAO().findByDataset(dataset);
  TimeSpec timespec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
  TimeGranularity dataGranularity = timespec.getDataGranularity();

  AnomalyFunctionDTO anomalyFunctionSpec = new AnomalyFunctionDTO();
  anomalyFunctionSpec.setActive(isActive);
  anomalyFunctionSpec.setMetricFunction(MetricAggFunction.valueOf(metric_function));
  anomalyFunctionSpec.setCollection(dataset);
  anomalyFunctionSpec.setFunctionName(functionName);
  anomalyFunctionSpec.setTopicMetric(metric);
  anomalyFunctionSpec.setMetrics(Arrays.asList(metric));
  if (StringUtils.isEmpty(type)) {
    type = DEFAULT_FUNCTION_TYPE;
  }
  anomalyFunctionSpec.setType(type);
  anomalyFunctionSpec.setWindowSize(Integer.valueOf(windowSize));
  anomalyFunctionSpec.setWindowUnit(TimeUnit.valueOf(windowUnit));

  // Setting window delay time / unit. BUGFIX: the windowDelay / windowDelayUnit query params
  // were previously accepted but ignored; now they take precedence over the defaults.
  TimeUnit dataGranularityUnit = dataGranularity.getUnit();
  int windowDelayTime;
  if (StringUtils.isNotEmpty(windowDelay)) {
    windowDelayTime = Integer.parseInt(windowDelay);
  } else if (dataGranularityUnit.equals(TimeUnit.MINUTES) || dataGranularityUnit.equals(TimeUnit.HOURS)) {
    // default window delay time = 4 hours for minute/hour level data
    windowDelayTime = 4;
  } else {
    // default window delay time = 10 hours otherwise
    windowDelayTime = 10;
  }
  TimeUnit windowDelayTimeUnit;
  if (StringUtils.isNotEmpty(windowDelayUnit)) {
    windowDelayTimeUnit = TimeUnit.valueOf(windowDelayUnit);
  } else {
    windowDelayTimeUnit = TimeUnit.HOURS;
  }
  anomalyFunctionSpec.setWindowDelayUnit(windowDelayTimeUnit);
  anomalyFunctionSpec.setWindowDelay(windowDelayTime);

  // bucket size and unit are defaulted to the collection granularity
  anomalyFunctionSpec.setBucketSize(dataGranularity.getSize());
  anomalyFunctionSpec.setBucketUnit(dataGranularity.getUnit());

  if (StringUtils.isNotEmpty(exploreDimensions)) {
    anomalyFunctionSpec.setExploreDimensions(getDimensions(dataset, exploreDimensions));
  }
  if (!StringUtils.isBlank(filters)) {
    // Filters arrive URL-encoded; decode then normalize to a sorted canonical form.
    filters = URLDecoder.decode(filters, UTF8);
    String filterString = ThirdEyeUtils.getSortedFiltersFromJson(filters);
    anomalyFunctionSpec.setFilters(filterString);
  }
  anomalyFunctionSpec.setProperties(properties);

  if (StringUtils.isEmpty(cron)) {
    cron = DEFAULT_CRON;
  } else {
    // validate cron
    if (!CronExpression.isValidExpression(cron)) {
      throw new IllegalArgumentException("Invalid cron expression for cron : " + cron);
    }
  }
  anomalyFunctionSpec.setCron(cron);

  Long id = anomalyFunctionDAO.save(anomalyFunctionSpec);
  return Response.ok(id).build();
}
Usage of com.linkedin.thirdeye.api.TimeGranularity in project pinot (by LinkedIn): class DashboardResource, method getMaxTime.
@GET
@Path(value = "/data/info")
@Produces(MediaType.APPLICATION_JSON)
public String getMaxTime(@QueryParam("dataset") String collection) {
  // Returns a small JSON object describing the dataset: its max data time, its
  // granularity unit, and (optionally) the metrics rendered with inverted colors.
  String payload = null;
  try {
    long maxDataTime = collectionMaxDataTimeCache.get(collection);
    DatasetConfigDTO datasetConfig = CACHE_REGISTRY_INSTANCE.getDatasetConfigCache().get(collection);
    TimeSpec timespec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
    TimeGranularity dataGranularity = timespec.getDataGranularity();

    HashMap<String, String> info = new HashMap<>();
    info.put("maxTime", "" + maxDataTime);
    info.put("dataGranularity", dataGranularity.getUnit().toString());

    // Collect the names of all active metrics flagged as inverse for this dataset.
    List<String> inverseMetricNames = new ArrayList<>();
    for (MetricConfigDTO metricConfig : metricConfigDAO.findActiveByDataset(collection)) {
      if (metricConfig.isInverseMetric()) {
        inverseMetricNames.add(metricConfig.getName());
      }
    }
    if (CollectionUtils.isNotEmpty(inverseMetricNames)) {
      info.put("invertColorMetrics", Joiner.on(",").join(inverseMetricNames));
    }

    payload = OBJECT_MAPPER.writeValueAsString(info);
  } catch (Exception e) {
    // Best effort: log and fall through to a null body rather than failing the request.
    LOG.error("Error while fetching info for collection: " + collection, e);
  }
  return payload;
}
Usage of com.linkedin.thirdeye.api.TimeGranularity in project pinot (by LinkedIn): class TestTimeRangeUtils, method provideComputeTimeRanges.
@DataProvider(name = "computeTimeRanges")
public Object[][] provideComputeTimeRanges() {
  // Each row: { granularity, start, end, expected list of [start, end) ranges }.
  DateTime end = DateTime.now();
  DateTime start = end.minusDays(1);

  // A null granularity and a 1-day granularity both yield the single full-day range.
  List<Range<DateTime>> wholeDay = Collections.singletonList(Range.closedOpen(start, end));

  // A 6-hour granularity splits the day into four consecutive [h, h+6) buckets.
  List<Range<DateTime>> sixHourBuckets = new ArrayList<>();
  for (int hour = 0; hour < 24; hour += 6) {
    sixHourBuckets.add(Range.closedOpen(start.plusHours(hour), start.plusHours(hour + 6)));
  }

  List<Object[]> rows = new ArrayList<>();
  rows.add(new Object[] { null, start, end, wholeDay });
  rows.add(new Object[] { new TimeGranularity(1, TimeUnit.DAYS), start, end, wholeDay });
  rows.add(new Object[] { new TimeGranularity(6, TimeUnit.HOURS), start, end, sixHourBuckets });
  return rows.toArray(new Object[rows.size()][]);
}
Usage of com.linkedin.thirdeye.api.TimeGranularity in project pinot (by LinkedIn): class ThirdEyeUtils, method getTimeSpecFromDatasetConfig.
// Builds a TimeSpec (time column + granularity + format) from a dataset config.
// When the configured format is a SIMPLE_DATE_FORMAT variant, it is reduced to
// the bare SDF pattern before being stored in the spec.
public static TimeSpec getTimeSpecFromDatasetConfig(DatasetConfigDTO datasetConfig) {
  String format = datasetConfig.getTimeFormat();
  if (format.startsWith(TimeFormat.SIMPLE_DATE_FORMAT.toString())) {
    format = getSDFPatternFromTimeFormat(format);
  }
  TimeGranularity granularity =
      new TimeGranularity(datasetConfig.getTimeDuration(), datasetConfig.getTimeUnit());
  return new TimeSpec(datasetConfig.getTimeColumn(), granularity, format);
}
Usage of com.linkedin.thirdeye.api.TimeGranularity in project pinot (by LinkedIn): class TestDetectionJobSchedulerUtils, method testGetNewEntriesForDetectionSchedulerMinuteLevel.
// Verifies getNewEntries() for a 5-minute-granularity dataset with a 15-minute
// detection frequency, across four last-entry scenarios.
@Test
public void testGetNewEntriesForDetectionSchedulerMinuteLevel() throws Exception {
  // Dataset with 5-minute time granularity.
  DatasetConfigDTO datasetConfig = new DatasetConfigDTO();
  datasetConfig.setTimeColumn("Date");
  datasetConfig.setTimeUnit(TimeUnit.MINUTES);
  datasetConfig.setTimeDuration(5);
  DateTimeZone dateTimeZone = DateTimeZone.UTC;

  // Anomaly function scheduled every 15 minutes.
  AnomalyFunctionDTO anomalyFunction = new AnomalyFunctionDTO();
  anomalyFunction.setFrequency(new TimeGranularity(15, TimeUnit.MINUTES));

  DateTimeFormatter dateTimeFormatter = DetectionJobSchedulerUtils.getDateTimeFormatterForDataset(datasetConfig, dateTimeZone);
  String currentDateTimeString = "201702140336";
  // 03:36 rounded down to the 15-minute boundary.
  String currentDateTimeStringRounded = "201702140330";
  DateTime currentDateTime = minuteDateTimeFormatter.parseDateTime(currentDateTimeString);
  DateTime currentDateTimeRounded = dateTimeFormatter.parseDateTime(currentDateTimeStringRounded);

  // Case 1: no previous entry -> exactly one new entry at the rounded current time.
  Map<String, Long> newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, null, anomalyFunction, datasetConfig, dateTimeZone);
  Assert.assertEquals(newEntries.size(), 1);
  Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), Long.valueOf(currentDateTimeRounded.getMillis()));

  // Case 2: last entry equals the rounded current time -> nothing new.
  DetectionStatusDTO lastEntryForFunction = detectionStatus(currentDateTimeStringRounded, currentDateTimeRounded.getMillis());
  newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
  Assert.assertEquals(newEntries.size(), 0);

  // Case 3: last entry 15 minutes before current time -> one new entry.
  String lastEntryDateTimeString = "201702140315";
  DateTime lastEntryDateTime = dateTimeFormatter.parseDateTime(lastEntryDateTimeString);
  lastEntryForFunction = detectionStatus(lastEntryDateTimeString, lastEntryDateTime.getMillis());
  newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
  Assert.assertEquals(newEntries.size(), 1);
  Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), Long.valueOf(currentDateTimeRounded.getMillis()));

  // Case 4: last entry 45 minutes before current time -> three new 15-minute entries.
  lastEntryDateTimeString = "201702140245";
  lastEntryDateTime = dateTimeFormatter.parseDateTime(lastEntryDateTimeString);
  lastEntryForFunction = detectionStatus(lastEntryDateTimeString, lastEntryDateTime.getMillis());
  newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
  Assert.assertEquals(newEntries.size(), 3);
  Assert.assertNotNull(newEntries.get("201702140300"));
  Assert.assertNotNull(newEntries.get("201702140315"));
  Assert.assertNotNull(newEntries.get("201702140330"));
  Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), Long.valueOf(currentDateTimeRounded.getMillis()));
}

// Builds a DetectionStatusDTO carrying the last-checked date both as an SDF string
// and as epoch millis. Extracted to remove the triplicated construction above.
private static DetectionStatusDTO detectionStatus(String dateToCheckSdf, long dateToCheckMs) {
  DetectionStatusDTO status = new DetectionStatusDTO();
  status.setDateToCheckInSDF(dateToCheckSdf);
  status.setDateToCheckInMS(dateToCheckMs);
  return status;
}
Aggregations