
Example 46 with DatasetConfigDTO

Use of com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO in project pinot by linkedin.

From the class AutoloadPinotMetricsServiceTest, the method testRefreshDataset:

@Test(dependsOnMethods = { "testAddNewDataset" })
public void testRefreshDataset() throws Exception {
    // Add a new dimension to the schema and re-run auto-onboarding; the existing dataset config should be updated in place
    DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec("newDimension", DataType.STRING, true);
    schema.addField(dimensionFieldSpec);
    testAutoLoadPinotMetricsService.addPinotDataset(dataset, schema, datasetConfig);
    Assert.assertEquals(datasetConfigDAO.findAll().size(), 1);
    DatasetConfigDTO newDatasetConfig1 = datasetConfigDAO.findByDataset(dataset);
    Assert.assertEquals(newDatasetConfig1.getDataset(), dataset);
    Assert.assertEquals(Sets.newHashSet(newDatasetConfig1.getDimensions()), Sets.newHashSet(schema.getDimensionNames()));
    // Add a new metric and refresh again; a matching metric config and a default dashboard entry should be created
    MetricFieldSpec metricFieldSpec = new MetricFieldSpec("newMetric", DataType.LONG);
    schema.addField(metricFieldSpec);
    testAutoLoadPinotMetricsService.addPinotDataset(dataset, schema, newDatasetConfig1);
    Assert.assertEquals(datasetConfigDAO.findAll().size(), 1);
    List<MetricConfigDTO> metricConfigs = metricConfigDAO.findByDataset(dataset);
    List<String> schemaMetricNames = schema.getMetricNames();
    List<Long> metricIds = new ArrayList<>();
    Assert.assertEquals(metricConfigs.size(), schemaMetricNames.size());
    for (MetricConfigDTO metricConfig : metricConfigs) {
        Assert.assertTrue(schemaMetricNames.contains(metricConfig.getName()));
        metricIds.add(metricConfig.getId());
    }
    DashboardConfigDTO dashboardConfig = dashboardConfigDAO.findByName(DashboardConfigBean.DEFAULT_DASHBOARD_PREFIX + dataset);
    Assert.assertEquals(dashboardConfig.getMetricIds(), metricIds);
}
Also used: DatasetConfigDTO (com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO), MetricConfigDTO (com.linkedin.thirdeye.datalayer.dto.MetricConfigDTO), ArrayList (java.util.ArrayList), MetricFieldSpec (com.linkedin.pinot.common.data.MetricFieldSpec), DashboardConfigDTO (com.linkedin.thirdeye.datalayer.dto.DashboardConfigDTO), DimensionFieldSpec (com.linkedin.pinot.common.data.DimensionFieldSpec), Test (org.testng.annotations.Test)
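For context, the schema argument passed to addPinotDataset appears to be a Pinot schema built from dimension and metric field specs. Below is a minimal, hedged sketch of how such a schema could be assembled up front; it assumes the schema variable is a com.linkedin.pinot.common.data.Schema with a no-arg constructor and setSchemaName, and the dataset and field names are made up. Only the addField calls mirror the test above.

private Schema buildTestSchema() {
    Schema schema = new Schema();
    // hypothetical dataset name (assumption: Schema exposes setSchemaName)
    schema.setSchemaName("test_dataset");
    // one single-valued string dimension and one long metric, as in the test above
    schema.addField(new DimensionFieldSpec("country", DataType.STRING, true));
    schema.addField(new MetricFieldSpec("pageViews", DataType.LONG));
    return schema;
}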

Example 47 with DatasetConfigDTO

Use of com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO in project pinot by linkedin.

From the class ThirdEyeUtils, the method getTimeSpecFromDataset:

public static TimeSpec getTimeSpecFromDataset(String dataset) {
    TimeSpec timespec = null;
    try {
        DatasetConfigDTO datasetConfig = CACHE_REGISTRY.getDatasetConfigCache().get(dataset);
        timespec = getTimeSpecFromDatasetConfig(datasetConfig);
    } catch (ExecutionException e) {
        LOG.error("Exception when fetching datasetconfig from cache", e);
    }
    return timespec;
}
Also used: DatasetConfigDTO (com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO), ExecutionException (java.util.concurrent.ExecutionException), TimeSpec (com.linkedin.thirdeye.api.TimeSpec)
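A hedged usage sketch: because getTimeSpecFromDataset swallows the ExecutionException and returns null when the dataset config cannot be fetched from the cache, callers should guard against a null result. The dataset name below is hypothetical.

TimeSpec timeSpec = ThirdEyeUtils.getTimeSpecFromDataset("sample_dataset");
if (timeSpec == null) {
    // no cached DatasetConfigDTO for this dataset; fall back or fail fast
    throw new IllegalStateException("No dataset config cached for sample_dataset");
}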

Example 48 with DatasetConfigDTO

Use of com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO in project pinot by linkedin.

From the class TestDetectionJobSchedulerUtils, the method testGetNewEntriesForDetectionSchedulerMinuteLevel:

@Test
public void testGetNewEntriesForDetectionSchedulerMinuteLevel() throws Exception {
    DatasetConfigDTO datasetConfig = new DatasetConfigDTO();
    datasetConfig.setTimeColumn("Date");
    datasetConfig.setTimeUnit(TimeUnit.MINUTES);
    datasetConfig.setTimeDuration(5);
    DateTimeZone dateTimeZone = DateTimeZone.UTC;
    AnomalyFunctionDTO anomalyFunction = new AnomalyFunctionDTO();
    anomalyFunction.setFrequency(new TimeGranularity(15, TimeUnit.MINUTES));
    DateTimeFormatter dateTimeFormatter = DetectionJobSchedulerUtils.getDateTimeFormatterForDataset(datasetConfig, dateTimeZone);
    String currentDateTimeString = "201702140336";
    String currentDateTimeStringRounded = "201702140330";
    DateTime currentDateTime = minuteDateTimeFormatter.parseDateTime(currentDateTimeString);
    DateTime currentDateTimeRounded = dateTimeFormatter.parseDateTime(currentDateTimeStringRounded);
    DetectionStatusDTO lastEntryForFunction = null;
    // null last entry
    Map<String, Long> newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
    Assert.assertEquals(newEntries.size(), 1);
    Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), new Long(currentDateTimeRounded.getMillis()));
    // last entry same as current time
    lastEntryForFunction = new DetectionStatusDTO();
    lastEntryForFunction.setDateToCheckInSDF(currentDateTimeStringRounded);
    lastEntryForFunction.setDateToCheckInMS(currentDateTimeRounded.getMillis());
    newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
    Assert.assertEquals(newEntries.size(), 0);
    // last entry 15 MINUTES before current time
    String lastEntryDateTimeString = "201702140315";
    DateTime lastEntryDateTime = dateTimeFormatter.parseDateTime(lastEntryDateTimeString);
    lastEntryForFunction = new DetectionStatusDTO();
    lastEntryForFunction.setDateToCheckInSDF(lastEntryDateTimeString);
    lastEntryForFunction.setDateToCheckInMS(lastEntryDateTime.getMillis());
    newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
    Assert.assertEquals(newEntries.size(), 1);
    Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), new Long(currentDateTimeRounded.getMillis()));
    // last entry 45 MINUTES before current time
    lastEntryDateTimeString = "201702140245";
    lastEntryDateTime = dateTimeFormatter.parseDateTime(lastEntryDateTimeString);
    lastEntryForFunction = new DetectionStatusDTO();
    lastEntryForFunction.setDateToCheckInSDF(lastEntryDateTimeString);
    lastEntryForFunction.setDateToCheckInMS(lastEntryDateTime.getMillis());
    newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
    Assert.assertEquals(newEntries.size(), 3);
    Assert.assertNotNull(newEntries.get("201702140300"));
    Assert.assertNotNull(newEntries.get("201702140315"));
    Assert.assertNotNull(newEntries.get("201702140330"));
    Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), new Long(currentDateTimeRounded.getMillis()));
}
Also used: DatasetConfigDTO (com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO), TimeGranularity (com.linkedin.thirdeye.api.TimeGranularity), AnomalyFunctionDTO (com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO), DetectionStatusDTO (com.linkedin.thirdeye.datalayer.dto.DetectionStatusDTO), DateTimeFormatter (org.joda.time.format.DateTimeFormatter), DateTimeZone (org.joda.time.DateTimeZone), DateTime (org.joda.time.DateTime), Test (org.testng.annotations.Test)
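The assertions encode a simple bucketing rule: the current time is floored to the anomaly function's 15-minute frequency (03:36 rounds down to 03:30), and one entry is produced for every frequency boundary after the last checked time up to that floor. Below is a hedged sketch of that arithmetic, an illustration of the expected behavior rather than the actual DetectionJobSchedulerUtils implementation; currentDateTime and lastEntryDateTime are the Joda DateTime values from the test.

long frequencyMillis = TimeUnit.MINUTES.toMillis(15);
// floor the current time to the detection frequency (UTC millis, so 15-minute buckets align)
long flooredCurrent = (currentDateTime.getMillis() / frequencyMillis) * frequencyMillis; // 03:36 -> 03:30
List<Long> expectedEntryTimes = new ArrayList<>();
for (long t = lastEntryDateTime.getMillis() + frequencyMillis; t <= flooredCurrent; t += frequencyMillis) {
    // with a last entry at 02:45 this collects 03:00, 03:15 and 03:30 -- the three asserted entries
    expectedEntryTimes.add(t);
}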

Example 49 with DatasetConfigDTO

Use of com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO in project pinot by linkedin.

From the class AnomaliesResource, the method constructAnomaliesWrapperFromMergedAnomalies:

/**
   * Constructs an AnomaliesWrapper object from a list of merged anomalies.
   * @param mergedAnomalies merged anomalies to page through and render
   * @param pageNumber 1-based page number to display
   * @return an AnomaliesWrapper containing anomaly details for the requested page
   * @throws ExecutionException
   */
private AnomaliesWrapper constructAnomaliesWrapperFromMergedAnomalies(List<MergedAnomalyResultDTO> mergedAnomalies, int pageNumber) throws ExecutionException {
    AnomaliesWrapper anomaliesWrapper = new AnomaliesWrapper();
    anomaliesWrapper.setTotalAnomalies(mergedAnomalies.size());
    LOG.info("Total anomalies: {}", mergedAnomalies.size());
    // TODO: get page number and page size from client
    int pageSize = DEFAULT_PAGE_SIZE;
    int maxPageNumber = (mergedAnomalies.size() - 1) / pageSize + 1;
    if (pageNumber > maxPageNumber) {
        pageNumber = maxPageNumber;
    }
    if (pageNumber < 1) {
        pageNumber = 1;
    }
    int fromIndex = (pageNumber - 1) * pageSize;
    int toIndex = pageNumber * pageSize;
    if (toIndex > mergedAnomalies.size()) {
        toIndex = mergedAnomalies.size();
    }
    // Show the most recent anomalies first: anomalies with the most recent end time (and, for ties, the largest id) appear at the top
    Collections.sort(mergedAnomalies, new MergedAnomalyEndTimeComparator().reversed());
    List<MergedAnomalyResultDTO> displayedAnomalies = mergedAnomalies.subList(fromIndex, toIndex);
    anomaliesWrapper.setNumAnomaliesOnPage(displayedAnomalies.size());
    LOG.info("Page number: {} Page size: {} Num anomalies on page: {}", pageNumber, pageSize, displayedAnomalies.size());
    // for each anomaly, create anomaly details
    List<Future<AnomalyDetails>> anomalyDetailsListFutures = new ArrayList<>();
    for (MergedAnomalyResultDTO mergedAnomaly : displayedAnomalies) {
        Callable<AnomalyDetails> callable = new Callable<AnomalyDetails>() {

            @Override
            public AnomalyDetails call() throws Exception {
                String dataset = mergedAnomaly.getCollection();
                DatasetConfigDTO datasetConfig = CACHE_REGISTRY.getDatasetConfigCache().get(dataset);
                DateTimeFormatter timeSeriesDateFormatter = DateTimeFormat.forPattern(TIME_SERIES_DATE_FORMAT).withZone(Utils.getDataTimeZone(dataset));
                DateTimeFormatter startEndDateFormatterDays = DateTimeFormat.forPattern(START_END_DATE_FORMAT_DAYS).withZone(Utils.getDataTimeZone(dataset));
                DateTimeFormatter startEndDateFormatterHours = DateTimeFormat.forPattern(START_END_DATE_FORMAT_HOURS).withZone(Utils.getDataTimeZone(dataset));
                return getAnomalyDetails(mergedAnomaly, datasetConfig, timeSeriesDateFormatter, startEndDateFormatterHours, startEndDateFormatterDays, getExternalURL(mergedAnomaly));
            }
        };
        anomalyDetailsListFutures.add(threadPool.submit(callable));
    }
    List<AnomalyDetails> anomalyDetailsList = new ArrayList<>();
    for (Future<AnomalyDetails> anomalyDetailsFuture : anomalyDetailsListFutures) {
        try {
            AnomalyDetails anomalyDetails = anomalyDetailsFuture.get(120, TimeUnit.SECONDS);
            if (anomalyDetails != null) {
                anomalyDetailsList.add(anomalyDetails);
            }
        } catch (InterruptedException | ExecutionException | TimeoutException e) {
            LOG.error("Exception in getting AnomalyDetails", e);
        }
    }
    anomaliesWrapper.setAnomalyDetailsList(anomalyDetailsList);
    return anomaliesWrapper;
}
Also used: ArrayList (java.util.ArrayList), AnomalyDetails (com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomalyDetails), Callable (java.util.concurrent.Callable), DatasetConfigDTO (com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO), MergedAnomalyResultDTO (com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO), Future (java.util.concurrent.Future), AnomaliesWrapper (com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomaliesWrapper), ExecutionException (java.util.concurrent.ExecutionException), DateTimeFormatter (org.joda.time.format.DateTimeFormatter), TimeoutException (java.util.concurrent.TimeoutException)
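The pagination arithmetic above is easy to misread, so here is a hedged worked example of the same math with concrete, hypothetical numbers:

// 23 anomalies with a page size of 10: maxPageNumber = (23 - 1) / 10 + 1 = 3.
// Requesting page 5 is clamped to 3, giving fromIndex = 20 and toIndex = 23.
int totalAnomalies = 23;
int pageSize = 10;
int pageNumber = 5;
int maxPageNumber = (totalAnomalies - 1) / pageSize + 1;       // 3
pageNumber = Math.min(Math.max(pageNumber, 1), maxPageNumber); // 3
int fromIndex = (pageNumber - 1) * pageSize;                   // 20
int toIndex = Math.min(pageNumber * pageSize, totalAnomalies); // 23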

Example 50 with DatasetConfigDTO

Use of com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO in project pinot by linkedin.

From the class DataResource, the method getDataAggregationGranularity:

@GET
@Path("agg/granularity/metric/{metricId}")
public List<String> getDataAggregationGranularity(@PathParam("metricId") Long metricId) {
    List<String> list = new ArrayList<>();
    list.add("DAYS");
    MetricConfigDTO metricConfigDTO = metricConfigDAO.findById(metricId);
    DatasetConfigDTO datasetConfigDTO = datasetConfigDAO.findByDataset(metricConfigDTO.getDataset());
    int dataAggSize = datasetConfigDTO.getTimeDuration();
    String dataGranularity = datasetConfigDTO.getTimeUnit().name();
    if (!dataGranularity.equals("DAYS")) {
        list.add("HOURS");
        if (dataGranularity.equals("MINUTES")) {
            if (dataAggSize == 1) {
                list.add("MINUTES");
            } else {
                list.add(dataAggSize + "_MINUTES");
            }
        }
    }
    return list;
}
Also used: MetricConfigDTO (com.linkedin.thirdeye.datalayer.dto.MetricConfigDTO), DatasetConfigDTO (com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO), ArrayList (java.util.ArrayList), Path (javax.ws.rs.Path), GET (javax.ws.rs.GET)
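To make the branching concrete, here is a hedged summary of what this endpoint returns for a few hypothetical dataset configurations, followed by an illustrative call; the resource instance and metric id are made up.

// timeUnit = DAYS                     -> ["DAYS"]
// timeUnit = HOURS                    -> ["DAYS", "HOURS"]
// timeUnit = MINUTES, timeDuration 1  -> ["DAYS", "HOURS", "MINUTES"]
// timeUnit = MINUTES, timeDuration 5  -> ["DAYS", "HOURS", "5_MINUTES"]
List<String> granularities = dataResource.getDataAggregationGranularity(42L);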

Aggregations

DatasetConfigDTO (com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO): 54 usages
TimeSpec (com.linkedin.thirdeye.api.TimeSpec): 14 usages
DateTime (org.joda.time.DateTime): 14 usages
ArrayList (java.util.ArrayList): 13 usages
Path (javax.ws.rs.Path): 12 usages
ExecutionException (java.util.concurrent.ExecutionException): 11 usages
GET (javax.ws.rs.GET): 10 usages
TimeGranularity (com.linkedin.thirdeye.api.TimeGranularity): 9 usages
AnomalyFunctionDTO (com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO): 9 usages
MetricConfigDTO (com.linkedin.thirdeye.datalayer.dto.MetricConfigDTO): 9 usages
DateTimeZone (org.joda.time.DateTimeZone): 9 usages
Test (org.testng.annotations.Test): 9 usages
DateTimeFormatter (org.joda.time.format.DateTimeFormatter): 7 usages
IOException (java.io.IOException): 6 usages
MetricExpression (com.linkedin.thirdeye.client.MetricExpression): 5 usages
ResultSetGroup (com.linkedin.pinot.client.ResultSetGroup): 4 usages
DetectionStatusDTO (com.linkedin.thirdeye.datalayer.dto.DetectionStatusDTO): 4 usages
JSONException (org.json.JSONException): 4 usages
DashboardConfigDTO (com.linkedin.thirdeye.datalayer.dto.DashboardConfigDTO): 3 usages
NullArgumentException (org.apache.commons.lang.NullArgumentException): 3 usages