Use of com.linkedin.thirdeye.dashboard.views.tabular.TabularViewHandler in project pinot by linkedin.
From class DashboardResource, method getDashboardData:
@GET
@Path(value = "/data/customDashboard")
@Produces(MediaType.APPLICATION_JSON)
public String getDashboardData(@QueryParam("dataset") String collection, @QueryParam("dashboard") String dashboardName,
    @QueryParam("filters") String filterJson, @QueryParam("timeZone") @DefaultValue(DEFAULT_TIMEZONE_ID) String timeZone,
    @QueryParam("baselineStart") Long baselineStart, @QueryParam("baselineEnd") Long baselineEnd,
    @QueryParam("currentStart") Long currentStart, @QueryParam("currentEnd") Long currentEnd,
    @QueryParam("compareMode") String compareMode, @QueryParam("aggTimeGranularity") String aggTimeGranularity) {
  try {
    TabularViewRequest request = new TabularViewRequest();
    request.setCollection(collection);
    List<MetricExpression> metricExpressions = new ArrayList<>();
    DashboardConfigDTO dashboardConfig = dashboardConfigDAO.findByName(dashboardName);
    List<Long> metricIds = dashboardConfig.getMetricIds();
    for (Long metricId : metricIds) {
      MetricConfigDTO metricConfig = metricConfigDAO.findById(metricId);
      MetricExpression metricExpression = ThirdEyeUtils.getMetricExpressionFromMetricConfig(metricConfig);
      metricExpressions.add(metricExpression);
    }
    request.setMetricExpressions(metricExpressions);
    // Clamp the requested window to the latest available data point and shift the baseline end by the same delta.
    long maxDataTime = collectionMaxDataTimeCache.get(collection);
    if (currentEnd > maxDataTime) {
      long delta = currentEnd - maxDataTime;
      currentEnd = currentEnd - delta;
      baselineEnd = baselineEnd - delta;
    }
    // The input start and end times (i.e., currentStart, currentEnd, baselineStart, and baselineEnd)
    // are given in milliseconds since epoch, which is timezone insensitive. On the other hand, the
    // start and end times of the request sent to the backend database (e.g., Pinot) may be converted
    // via SimpleDateFormat, which is timezone sensitive. Therefore, we store the user's start and end
    // times in DateTime objects with the data's timezone, so that the conversion to SimpleDateFormat
    // is always correct regardless of the user's and server's timezones, including daylight saving time.
    DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(collection);
    request.setBaselineStart(new DateTime(baselineStart, timeZoneForCollection));
    request.setBaselineEnd(new DateTime(baselineEnd, timeZoneForCollection));
    request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
    request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
    if (filterJson != null && !filterJson.isEmpty()) {
      filterJson = URLDecoder.decode(filterJson, "UTF-8");
      request.setFilters(ThirdEyeUtils.convertToMultiMap(filterJson));
    }
    request.setTimeGranularity(Utils.getAggregationTimeGranularity(aggTimeGranularity, collection));
    TabularViewHandler handler = new TabularViewHandler(queryCache);
    TabularViewResponse response = handler.process(request);
    String jsonResponse = OBJECT_MAPPER.enable(SerializationFeature.INDENT_OUTPUT).writeValueAsString(response);
    LOG.debug("customDashboard response {}", jsonResponse);
    return jsonResponse;
  } catch (Exception e) {
    LOG.error("Exception while processing /data/customDashboard call", e);
    return "{\"ERROR\": \"" + e.getMessage() + "\"}";
  }
}
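The timezone comment above is easiest to see with a concrete instant. The following standalone sketch (not part of DashboardResource; the class name, instant, and pattern are illustrative) formats the same epoch-millisecond value with Joda-Time in two different zones, which is why the request stores DateTime objects in the data's timezone rather than relying on the server default.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

// Minimal sketch: the same epoch instant renders differently per timezone,
// so date-string conversion must use the data's zone, not the server's.
public class TimezoneSketch {
  public static void main(String[] args) {
    long epochMillis = 1483257600000L; // 2017-01-01T08:00:00Z
    DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");

    DateTime inUtc = new DateTime(epochMillis, DateTimeZone.UTC);
    DateTime inPacific = new DateTime(epochMillis, DateTimeZone.forID("America/Los_Angeles"));

    System.out.println(fmt.print(inUtc));     // 2017-01-01 08:00:00
    System.out.println(fmt.print(inPacific)); // 2017-01-01 00:00:00
  }
}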
Use of com.linkedin.thirdeye.dashboard.views.tabular.TabularViewHandler in project pinot by linkedin.
From class DashboardResource, method getTabularData:
@GET
@Path(value = "/data/tabular")
@Produces(MediaType.APPLICATION_JSON)
public String getTabularData(@QueryParam("dataset") String collection, @QueryParam("filters") String filterJson,
    @QueryParam("timeZone") @DefaultValue(DEFAULT_TIMEZONE_ID) String timeZone,
    @QueryParam("baselineStart") Long baselineStart, @QueryParam("baselineEnd") Long baselineEnd,
    @QueryParam("currentStart") Long currentStart, @QueryParam("currentEnd") Long currentEnd,
    @QueryParam("aggTimeGranularity") String aggTimeGranularity, @QueryParam("metrics") String metricsJson) throws Exception {
  TabularViewRequest request = new TabularViewRequest();
  request.setCollection(collection);
  List<MetricExpression> metricExpressions = Utils.convertToMetricExpressions(metricsJson, MetricAggFunction.SUM, collection);
  request.setMetricExpressions(metricExpressions);
  long maxDataTime = collectionMaxDataTimeCache.get(collection);
  if (currentEnd > maxDataTime) {
    long delta = currentEnd - maxDataTime;
    currentEnd = currentEnd - delta;
    baselineEnd = baselineEnd - delta;
  }
  // See {@link #getDashboardData} for the reason that the start and end times are stored in
  // DateTime objects with the data's timezone.
  DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(collection);
  request.setBaselineStart(new DateTime(baselineStart, timeZoneForCollection));
  request.setBaselineEnd(new DateTime(baselineEnd, timeZoneForCollection));
  request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
  request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
  if (filterJson != null && !filterJson.isEmpty()) {
    filterJson = URLDecoder.decode(filterJson, "UTF-8");
    request.setFilters(ThirdEyeUtils.convertToMultiMap(filterJson));
  }
  request.setTimeGranularity(Utils.getAggregationTimeGranularity(aggTimeGranularity, collection));
  TabularViewHandler handler = new TabularViewHandler(queryCache);
  String jsonResponse = null;
  try {
    TabularViewResponse response = handler.process(request);
    jsonResponse = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(response);
    LOG.debug("Tabular response {}", jsonResponse);
  } catch (Exception e) {
    LOG.error("Exception while processing /data/tabular call", e);
  }
  return jsonResponse;
}
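Both endpoints above clamp the requested window to the latest ingested data point: if currentEnd runs past maxDataTime, the current and baseline end times are shifted back by the same delta so the two windows stay aligned. A standalone sketch of that adjustment follows; the class and method names are illustrative, not part of DashboardResource.

// Minimal sketch of the end-time clamping used by /data/customDashboard and /data/tabular.
// All times are epoch milliseconds; names are illustrative.
final class TimeWindowClamp {
  private TimeWindowClamp() {
  }

  /** Returns {currentEnd, baselineEnd}, shifted back together if currentEnd exceeds maxDataTime. */
  static long[] clampToMaxDataTime(long currentEnd, long baselineEnd, long maxDataTime) {
    if (currentEnd > maxDataTime) {
      long delta = currentEnd - maxDataTime; // how far the request overshoots the data
      currentEnd -= delta;                   // i.e. currentEnd == maxDataTime
      baselineEnd -= delta;                  // keep the baseline the same distance behind
    }
    return new long[] { currentEnd, baselineEnd };
  }

  public static void main(String[] args) {
    long[] clamped = clampToMaxDataTime(1_000_000L, 900_000L, 950_000L);
    System.out.println(clamped[0] + " " + clamped[1]); // 950000 850000
  }
}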
Use of com.linkedin.thirdeye.dashboard.views.tabular.TabularViewHandler in project pinot by linkedin.
From class DataResource, method getWowSummary:
@GET
@Path("dashboard/wowsummary")
public WowSummary getWowSummary(@QueryParam("dashboard") String dashboard, @QueryParam("timeRanges") String timeRanges) {
  WowSummary wowSummary = new WowSummary();
  if (StringUtils.isBlank(dashboard)) {
    return wowSummary;
  }
  List<Long> metricIds = getMetricIdsByDashboard(dashboard);
  List<String> timeRangeLabels = Lists.newArrayList(timeRanges.split(","));
  // Group metric ids and metric expressions by dataset
  Multimap<String, Long> datasetToMetrics = ArrayListMultimap.create();
  Multimap<String, MetricExpression> datasetToMetricExpressions = ArrayListMultimap.create();
  Map<Long, MetricConfigDTO> metricIdToMetricConfig = new HashMap<>();
  for (long metricId : metricIds) {
    MetricConfigDTO metricConfig = metricConfigDAO.findById(metricId);
    metricIdToMetricConfig.put(metricId, metricConfig);
    datasetToMetrics.put(metricConfig.getDataset(), metricId);
    datasetToMetricExpressions.put(metricConfig.getDataset(), ThirdEyeUtils.getMetricExpressionFromMetricConfig(metricConfig));
  }
  Multimap<String, MetricSummary> metricAliasToMetricSummariesMap = ArrayListMultimap.create();
  // Create a query request for each dataset
  for (String dataset : datasetToMetrics.keySet()) {
    TabularViewRequest request = new TabularViewRequest();
    request.setCollection(dataset);
    request.setMetricExpressions(new ArrayList<>(datasetToMetricExpressions.get(dataset)));
    // See the comment in DashboardResource#getDashboardData for why the start and end times are
    // stored in DateTime objects with the data's timezone.
    for (String timeRangeLabel : timeRangeLabels) {
      DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(dataset);
      TimeRange timeRange = getTimeRangeFromLabel(dataset, timeZoneForCollection, timeRangeLabel);
      long currentEnd = timeRange.getEnd();
      long currentStart = timeRange.getStart();
      LOG.debug("{} current start {} end {}", timeRangeLabel, new DateTime(currentStart), new DateTime(currentEnd));
      TimeGranularity timeGranularity = new TimeGranularity(1, TimeUnit.HOURS);
      request.setBaselineStart(new DateTime(currentStart, timeZoneForCollection).minusDays(7));
      request.setBaselineEnd(new DateTime(currentEnd, timeZoneForCollection).minusDays(7));
      request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
      request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
      request.setTimeGranularity(timeGranularity);
      TabularViewHandler handler = new TabularViewHandler(queryCache);
      try {
        TabularViewResponse tabularViewResponse = handler.process(request);
        for (String metric : tabularViewResponse.getMetrics()) {
          MetricDataset metricDataset = new MetricDataset(metric, dataset);
          MetricConfigDTO metricConfig = CACHE_REGISTRY_INSTANCE.getMetricConfigCache().get(metricDataset);
          Long metricId = metricConfig.getId();
          String metricAlias = metricConfig.getAlias();
          GenericResponse response = tabularViewResponse.getData().get(metric);
          MetricSummary metricSummary = new MetricSummary();
          metricSummary.setMetricId(metricId);
          metricSummary.setMetricName(metricConfig.getName());
          metricSummary.setMetricAlias(metricAlias);
          List<String[]> data = response.getResponseData();
          double baselineValue = 0;
          double currentValue = 0;
          for (String[] responseData : data) {
            baselineValue = baselineValue + Double.valueOf(responseData[0]);
            currentValue = currentValue + Double.valueOf(responseData[1]);
          }
          double percentageChange = (currentValue - baselineValue) * 100 / baselineValue;
          metricSummary.setBaselineValue(baselineValue);
          metricSummary.setCurrentValue(currentValue);
          metricSummary.setWowPercentageChange(percentageChange);
          metricAliasToMetricSummariesMap.put(metricAlias, metricSummary);
        }
      } catch (Exception e) {
        LOG.error("Exception while processing /dashboard/wowsummary call", e);
      }
    }
  }
  wowSummary.setMetricAliasToMetricSummariesMap(metricAliasToMetricSummariesMap);
  return wowSummary;
}
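The week-over-week numbers in getWowSummary come from summing the first two columns of each response row (baseline, then current) and taking the relative change; the computation divides by the baseline sum, so a zero baseline yields Infinity or NaN. A small standalone sketch of that aggregation follows; the row layout and names are assumptions drawn from the code above.

import java.util.Arrays;
import java.util.List;

// Minimal sketch of the WoW aggregation in getWowSummary: column 0 holds the baseline
// value and column 1 the current value of each row, as the code above assumes.
public class WowChangeSketch {
  static double wowPercentageChange(List<String[]> rows) {
    double baselineValue = 0;
    double currentValue = 0;
    for (String[] row : rows) {
      baselineValue += Double.valueOf(row[0]);
      currentValue += Double.valueOf(row[1]);
    }
    // Same formula as the resource; a zero baseline would produce Infinity/NaN here.
    return (currentValue - baselineValue) * 100 / baselineValue;
  }

  public static void main(String[] args) {
    List<String[]> rows = Arrays.asList(new String[] { "100", "120" }, new String[] { "50", "45" });
    System.out.println(wowPercentageChange(rows)); // (165 - 150) * 100 / 150 = 10.0
  }
}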
Use of com.linkedin.thirdeye.dashboard.views.tabular.TabularViewHandler in project pinot by linkedin.
From class DataResource, method getMetricSummary:
/**
 * Returns the percentage change between current values and baseline values. The values are
 * aggregated according to the number of buckets; if the bucket count is 1, all values within
 * the given time range fall into that single bucket and are aggregated together.
 *
 * Note: the current implementation assumes the number of buckets is always 1.
 */
@GET
@Path("dashboard/metricsummary")
public List<MetricSummary> getMetricSummary(@QueryParam("dashboard") String dashboard, @QueryParam("timeRange") String timeRange) {
  List<MetricSummary> metricsSummary = new ArrayList<>();
  if (StringUtils.isBlank(dashboard)) {
    return metricsSummary;
  }
  List<Long> metricIds = getMetricIdsByDashboard(dashboard);
  // Group metric ids and metric expressions by dataset
  Multimap<String, Long> datasetToMetrics = ArrayListMultimap.create();
  Multimap<String, MetricExpression> datasetToMetricExpressions = ArrayListMultimap.create();
  Map<Long, MetricConfigDTO> metricIdToMetricConfig = new HashMap<>();
  for (long metricId : metricIds) {
    MetricConfigDTO metricConfig = metricConfigDAO.findById(metricId);
    metricIdToMetricConfig.put(metricId, metricConfig);
    datasetToMetrics.put(metricConfig.getDataset(), metricId);
    datasetToMetricExpressions.put(metricConfig.getDataset(), ThirdEyeUtils.getMetricExpressionFromMetricConfig(metricConfig));
  }
  // Create a query request for each dataset
  for (String dataset : datasetToMetrics.keySet()) {
    TabularViewRequest request = new TabularViewRequest();
    request.setCollection(dataset);
    request.setMetricExpressions(new ArrayList<>(datasetToMetricExpressions.get(dataset)));
    // The input start and end times (i.e., currentStart, currentEnd, baselineStart, and baselineEnd)
    // are given in milliseconds since epoch, which is timezone insensitive. On the other hand, the
    // start and end times of the request sent to the backend database (e.g., Pinot) may be converted
    // via SimpleDateFormat, which is timezone sensitive. Therefore, we store the user's start and end
    // times in DateTime objects with the data's timezone, so that the conversion to SimpleDateFormat
    // is always correct regardless of the user's and server's timezones, including daylight saving time.
    String[] tokens = timeRange.split("_");
    TimeGranularity timeGranularity = new TimeGranularity(Integer.valueOf(tokens[0]), TimeUnit.valueOf(tokens[1]));
    long currentEnd = Utils.getMaxDataTimeForDataset(dataset);
    long currentStart = currentEnd - TimeUnit.MILLISECONDS.convert(Long.valueOf(tokens[0]), TimeUnit.valueOf(tokens[1]));
    DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(dataset);
    request.setBaselineStart(new DateTime(currentStart, timeZoneForCollection).minusDays(7));
    request.setBaselineEnd(new DateTime(currentEnd, timeZoneForCollection).minusDays(7));
    request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
    request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
    request.setTimeGranularity(timeGranularity);
    TabularViewHandler handler = new TabularViewHandler(queryCache);
    try {
      TabularViewResponse tabularViewResponse = handler.process(request);
      for (String metric : tabularViewResponse.getMetrics()) {
        MetricDataset metricDataset = new MetricDataset(metric, dataset);
        MetricConfigDTO metricConfig = CACHE_REGISTRY_INSTANCE.getMetricConfigCache().get(metricDataset);
        Long metricId = metricConfig.getId();
        GenericResponse response = tabularViewResponse.getData().get(metric);
        MetricSummary metricSummary = new MetricSummary();
        metricSummary.setMetricId(metricId);
        metricSummary.setMetricName(metricConfig.getName());
        metricSummary.setMetricAlias(metricConfig.getAlias());
        String[] responseData = response.getResponseData().get(0);
        double baselineValue = Double.valueOf(responseData[0]);
        double currentValue = Double.valueOf(responseData[1]);
        double percentageChange = (currentValue - baselineValue) * 100 / baselineValue;
        metricSummary.setBaselineValue(baselineValue);
        metricSummary.setCurrentValue(currentValue);
        metricSummary.setWowPercentageChange(percentageChange);
        AnomaliesSummary anomaliesSummary = anomaliesResoure.getAnomalyCountForMetricInRange(metricId, currentStart, currentEnd);
        metricSummary.setAnomaliesSummary(anomaliesSummary);
        metricsSummary.add(metricSummary);
      }
    } catch (Exception e) {
      LOG.error("Exception while processing /dashboard/metricsummary call", e);
    }
  }
  return metricsSummary;
}
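getMetricSummary derives its window from a timeRange string of the form "<size>_<unit>", splitting on the underscore to build a TimeGranularity and to convert the span to milliseconds. The self-contained sketch below shows that parsing using only java.util.concurrent.TimeUnit; the example input value is illustrative.

import java.util.concurrent.TimeUnit;

// Minimal sketch of the timeRange parsing in getMetricSummary: "<size>_<unit>", where
// <unit> is a java.util.concurrent.TimeUnit name. The input value is illustrative.
public class TimeRangeParseSketch {
  public static void main(String[] args) {
    String timeRange = "1_DAYS";
    String[] tokens = timeRange.split("_");
    int size = Integer.valueOf(tokens[0]);
    TimeUnit unit = TimeUnit.valueOf(tokens[1]);

    long windowMillis = TimeUnit.MILLISECONDS.convert(size, unit);
    long currentEnd = System.currentTimeMillis(); // the resource uses the dataset's max data time
    long currentStart = currentEnd - windowMillis;

    System.out.println(windowMillis);             // 86400000 for "1_DAYS"
    System.out.println(currentEnd - currentStart); // same window size
  }
}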
Use of com.linkedin.thirdeye.dashboard.views.tabular.TabularViewHandler in project pinot by linkedin.
From class TimeSeriesResource, method getTabularData:
/**
 * Used when no dimension is passed, i.e., data is requested across all dimensions.
 * @param metricId id of the metric config to query
 * @param currentStart start of the current window, in milliseconds since epoch
 * @param currentEnd end of the current window, in milliseconds since epoch
 * @param baselineStart start of the baseline window, in milliseconds since epoch
 * @param baselineEnd end of the baseline window, in milliseconds since epoch
 * @param filters URL-encoded filter string, converted to a multimap
 * @param granularity aggregation granularity string passed to Utils.getAggregationTimeGranularity
 * @return time series comparison view for the metric across all dimensions
 */
private TimeSeriesCompareMetricView getTabularData(long metricId, long currentStart, long currentEnd,
    long baselineStart, long baselineEnd, String filters, String granularity) {
  TimeSeriesCompareMetricView timeSeriesCompareView = new TimeSeriesCompareMetricView();
  try {
    MetricConfigDTO metricConfigDTO = metricConfigDAO.findById(metricId);
    if (metricConfigDTO != null) {
      String dataset = metricConfigDTO.getDataset();
      TabularViewRequest request = new TabularViewRequest();
      request.setCollection(dataset);
      MetricExpression metricExpression = ThirdEyeUtils.getMetricExpressionFromMetricConfig(metricConfigDTO);
      request.setMetricExpressions(Arrays.asList(metricExpression));
      DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(dataset);
      request.setBaselineStart(new DateTime(baselineStart, timeZoneForCollection));
      request.setBaselineEnd(new DateTime(baselineEnd, timeZoneForCollection));
      request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
      request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
      request.setTimeGranularity(Utils.getAggregationTimeGranularity(granularity, dataset));
      if (filters != null && !filters.isEmpty()) {
        filters = URLDecoder.decode(filters, "UTF-8");
        request.setFilters(ThirdEyeUtils.convertToMultiMap(filters));
      }
      TabularViewHandler handler = new TabularViewHandler(queryCache);
      TabularViewResponse response = handler.process(request);
      timeSeriesCompareView.setStart(currentStart);
      timeSeriesCompareView.setEnd(currentEnd);
      timeSeriesCompareView.setMetricId(metricConfigDTO.getId());
      timeSeriesCompareView.setMetricName(metricConfigDTO.getName());
      List<Long> timeBucketsCurrent = new ArrayList<>();
      List<Long> timeBucketsBaseline = new ArrayList<>();
      int numTimeBuckets = response.getTimeBuckets().size();
      double[] currentValues = new double[numTimeBuckets];
      double[] baselineValues = new double[numTimeBuckets];
      String[] percentageChangeValues = new String[numTimeBuckets];
      double[] cumCurrentValues = new double[numTimeBuckets];
      double[] cumBaselineValues = new double[numTimeBuckets];
      String[] cumPercentageChangeValues = new String[numTimeBuckets];
      int currentValIndex = response.getData().get(metricConfigDTO.getName()).getSchema().getColumnsToIndexMapping().get("currentValue");
      int baselineValIndex = response.getData().get(metricConfigDTO.getName()).getSchema().getColumnsToIndexMapping().get("baselineValue");
      int percentageChangeIndex = response.getData().get(metricConfigDTO.getName()).getSchema().getColumnsToIndexMapping().get("ratio");
      int cumCurrentValIndex = response.getData().get(metricConfigDTO.getName()).getSchema().getColumnsToIndexMapping().get("cumulativeCurrentValue");
      int cumBaselineValIndex = response.getData().get(metricConfigDTO.getName()).getSchema().getColumnsToIndexMapping().get("cumulativeBaselineValue");
      int cumPercentageChangeIndex = response.getData().get(metricConfigDTO.getName()).getSchema().getColumnsToIndexMapping().get("cumulativeRatio");
      for (int i = 0; i < numTimeBuckets; i++) {
        TimeBucket tb = response.getTimeBuckets().get(i);
        timeBucketsCurrent.add(tb.getCurrentStart());
        timeBucketsBaseline.add(tb.getBaselineStart());
        currentValues[i] = Double.valueOf(response.getData().get(metricConfigDTO.getName()).getResponseData().get(i)[currentValIndex]);
        baselineValues[i] = Double.valueOf(response.getData().get(metricConfigDTO.getName()).getResponseData().get(i)[baselineValIndex]);
        percentageChangeValues[i] = response.getData().get(metricConfigDTO.getName()).getResponseData().get(i)[percentageChangeIndex];
        cumCurrentValues[i] = Double.valueOf(response.getData().get(metricConfigDTO.getName()).getResponseData().get(i)[cumCurrentValIndex]);
        cumBaselineValues[i] = Double.valueOf(response.getData().get(metricConfigDTO.getName()).getResponseData().get(i)[cumBaselineValIndex]);
        cumPercentageChangeValues[i] = response.getData().get(metricConfigDTO.getName()).getResponseData().get(i)[cumPercentageChangeIndex];
      }
      timeSeriesCompareView.setTimeBucketsCurrent(timeBucketsCurrent);
      timeSeriesCompareView.setTimeBucketsBaseline(timeBucketsBaseline);
      ValuesContainer values = new ValuesContainer();
      values.setCurrentValues(currentValues);
      values.setBaselineValues(baselineValues);
      values.setPercentageChange(percentageChangeValues);
      values.setCumulativeCurrentValues(cumCurrentValues);
      values.setCumulativeBaselineValues(cumBaselineValues);
      values.setCumulativePercentageChange(cumPercentageChangeValues);
      timeSeriesCompareView.setSubDimensionContributionMap(new LinkedHashMap<>());
      timeSeriesCompareView.getSubDimensionContributionMap().put(ALL, values);
    }
  } catch (Exception e) {
    LOG.error(e.getMessage(), e);
    throw new WebApplicationException(e);
  }
  return timeSeriesCompareView;
}