Use of com.linkedin.thirdeye.client.cache.MetricDataset in project pinot by linkedin.
The class ThirdEyeCacheRegistry, method initCaches.
private static void initCaches(ThirdEyeConfiguration config) {
  ThirdEyeCacheRegistry cacheRegistry = ThirdEyeCacheRegistry.getInstance();
  RemovalListener<PinotQuery, ResultSetGroup> listener = new RemovalListener<PinotQuery, ResultSetGroup>() {
    @Override
    public void onRemoval(RemovalNotification<PinotQuery, ResultSetGroup> notification) {
      LOGGER.info("Expired {}", notification.getKey().getPql());
    }
  };
  // ResultSetGroup Cache. The size of this cache is limited by the total number of buckets in all ResultSetGroup.
  // We estimate that 1 bucket (including overhead) consumes 1KB and this cache is allowed to use up to 50% of max
  // heap space.
  long maxBucketNumber = getApproximateMaxBucketNumber(DEFAULT_HEAP_PERCENTAGE_FOR_RESULTSETGROUP_CACHE);
  LoadingCache<PinotQuery, ResultSetGroup> resultSetGroupCache = CacheBuilder.newBuilder()
      .removalListener(listener)
      .expireAfterAccess(1, TimeUnit.HOURS)
      .maximumWeight(maxBucketNumber)
      .weigher((pinotQuery, resultSetGroup) -> {
        int resultSetCount = resultSetGroup.getResultSetCount();
        int weight = 0;
        for (int idx = 0; idx < resultSetCount; ++idx) {
          com.linkedin.pinot.client.ResultSet resultSet = resultSetGroup.getResultSet(idx);
          weight += (resultSet.getColumnCount() * resultSet.getRowCount());
        }
        return weight;
      })
      .build(new ResultSetGroupCacheLoader(pinotThirdeyeClientConfig));
  cacheRegistry.registerResultSetGroupCache(resultSetGroupCache);
  LOGGER.info("Max bucket number for ResultSetGroup cache is set to {}", maxBucketNumber);
  // CollectionMaxDataTime Cache
  LoadingCache<String, Long> collectionMaxDataTimeCache = CacheBuilder.newBuilder()
      .refreshAfterWrite(5, TimeUnit.MINUTES)
      .build(new CollectionMaxDataTimeCacheLoader(resultSetGroupCache, datasetConfigDAO));
  cacheRegistry.registerCollectionMaxDataTimeCache(collectionMaxDataTimeCache);
  // Query Cache
  QueryCache queryCache = new QueryCache(thirdEyeClient, Executors.newFixedThreadPool(10));
  cacheRegistry.registerQueryCache(queryCache);
  // Dimension Filter cache
  LoadingCache<String, String> dimensionFiltersCache = CacheBuilder.newBuilder()
      .build(new DimensionFiltersCacheLoader(cacheRegistry.getQueryCache()));
  cacheRegistry.registerDimensionFiltersCache(dimensionFiltersCache);
  // Dashboards cache
  LoadingCache<String, String> dashboardsCache = CacheBuilder.newBuilder()
      .build(new DashboardsCacheLoader(dashboardConfigDAO));
  cacheRegistry.registerDashboardsCache(dashboardsCache);
  // Collections cache
  CollectionsCache collectionsCache = new CollectionsCache(datasetConfigDAO, config);
  cacheRegistry.registerCollectionsCache(collectionsCache);
  // DatasetConfig cache
  LoadingCache<String, DatasetConfigDTO> datasetConfigCache = CacheBuilder.newBuilder()
      .build(new DatasetConfigCacheLoader(datasetConfigDAO));
  cacheRegistry.registerDatasetConfigCache(datasetConfigCache);
  // MetricConfig cache
  LoadingCache<MetricDataset, MetricConfigDTO> metricConfigCache = CacheBuilder.newBuilder()
      .build(new MetricConfigCacheLoader(metricConfigDAO));
  cacheRegistry.registerMetricConfigCache(metricConfigCache);
  // DashboardConfigs cache
  LoadingCache<String, List<DashboardConfigDTO>> dashboardConfigsCache = CacheBuilder.newBuilder()
      .build(new DashboardConfigCacheLoader(dashboardConfigDAO));
  cacheRegistry.registerDashboardConfigsCache(dashboardConfigsCache);
}
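
To make the weighted-cache pattern above easier to try in isolation, here is a minimal, self-contained sketch of a Guava LoadingCache with a weigher and a removal listener. It uses plain String keys and values in place of PinotQuery, ResultSetGroup, and ResultSetGroupCacheLoader, so it only illustrates the Guava API, not the actual ThirdEye cache.

import java.util.concurrent.TimeUnit;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.Weigher;

public class WeightedCacheSketch {
  public static void main(String[] args) throws Exception {
    // Log evictions, analogous to logging the expired PQL above.
    RemovalListener<String, String> listener = notification ->
        System.out.println("Expired " + notification.getKey());
    // Weight by value size, analogous to counting result-set cells (rows x columns).
    Weigher<String, String> weigher = (key, value) -> value.length();

    LoadingCache<String, String> cache = CacheBuilder.newBuilder()
        .removalListener(listener)
        .expireAfterAccess(1, TimeUnit.HOURS)
        .maximumWeight(1_000_000) // plays the role of maxBucketNumber above
        .weigher(weigher)
        .build(new CacheLoader<String, String>() {
          @Override
          public String load(String key) {
            // Stand-in for ResultSetGroupCacheLoader: compute the value on a cache miss.
            return "value-for-" + key;
          }
        });

    System.out.println(cache.get("query-1")); // loads and caches
    System.out.println(cache.get("query-1")); // served from cache
  }
}
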
Use of com.linkedin.thirdeye.client.cache.MetricDataset in project pinot by linkedin.
The class HeatMapViewHandler, method process.
@Override
public HeatMapViewResponse process(HeatMapViewRequest request) throws Exception {
  // query 1: everything from baseline start to baseline end
  // query 2: everything from current start to current end
  // for each dimension, group by top 100
  List<String> expressionNames = new ArrayList<>();
  Map<String, String> metricExpressions = new HashMap<>();
  Set<String> metricOrExpressionNames = new HashSet<>();
  for (MetricExpression expression : request.getMetricExpressions()) {
    expressionNames.add(expression.getExpressionName());
    metricExpressions.put(expression.getExpressionName(), expression.getExpression());
    metricOrExpressionNames.add(expression.getExpressionName());
    List<MetricFunction> metricFunctions = expression.computeMetricFunctions();
    for (MetricFunction function : metricFunctions) {
      metricOrExpressionNames.add(function.getMetricName());
    }
  }
  Map<String, HeatMap.Builder> data = new HashMap<>();
  TimeOnTimeComparisonRequest comparisonRequest = generateTimeOnTimeComparisonRequest(request);
  List<String> groupByDimensions = comparisonRequest.getGroupByDimensions();
  final TimeOnTimeComparisonHandler handler = new TimeOnTimeComparisonHandler(queryCache);
  // Totals are tracked per metric and per dimension, to validate that they are the same for each dimension.
  Map<String, Map<String, Double>> baselineTotalPerMetricAndDimension = new HashMap<>();
  Map<String, Map<String, Double>> currentTotalPerMetricAndDimension = new HashMap<>();
  for (String metricOrExpressionName : metricOrExpressionNames) {
    Map<String, Double> baselineTotalMap = new HashMap<>();
    Map<String, Double> currentTotalMap = new HashMap<>();
    baselineTotalPerMetricAndDimension.put(metricOrExpressionName, baselineTotalMap);
    currentTotalPerMetricAndDimension.put(metricOrExpressionName, currentTotalMap);
    for (String dimension : groupByDimensions) {
      baselineTotalMap.put(dimension, 0d);
      currentTotalMap.put(dimension, 0d);
    }
  }
  List<Future<TimeOnTimeComparisonResponse>> timeOnTimeComparisonResponsesFutures =
      getTimeOnTimeComparisonResponses(groupByDimensions, comparisonRequest, handler);
  for (int groupByDimensionId = 0; groupByDimensionId < groupByDimensions.size(); groupByDimensionId++) {
    String groupByDimension = groupByDimensions.get(groupByDimensionId);
    TimeOnTimeComparisonResponse response = timeOnTimeComparisonResponsesFutures.get(groupByDimensionId).get();
    int numRows = response.getNumRows();
    for (int i = 0; i < numRows; i++) {
      Row row = response.getRow(i);
      String dimensionValue = row.getDimensionValue();
      Map<String, Metric> metricMap = new HashMap<>();
      for (Metric metric : row.getMetrics()) {
        metricMap.put(metric.getMetricName(), metric);
      }
      for (Metric metric : row.getMetrics()) {
        String metricName = metric.getMetricName();
        // update the baselineTotal and currentTotal
        Map<String, Double> baselineTotalMap = baselineTotalPerMetricAndDimension.get(metricName);
        Map<String, Double> currentTotalMap = currentTotalPerMetricAndDimension.get(metricName);
        baselineTotalMap.put(groupByDimension, baselineTotalMap.get(groupByDimension) + metric.getBaselineValue());
        currentTotalMap.put(groupByDimension, currentTotalMap.get(groupByDimension) + metric.getCurrentValue());
        if (!expressionNames.contains(metricName)) {
          continue;
        }
        String dataKey = metricName + "." + groupByDimension;
        HeatMap.Builder heatMapBuilder = data.get(dataKey);
        if (heatMapBuilder == null) {
          heatMapBuilder = new HeatMap.Builder(groupByDimension);
          data.put(dataKey, heatMapBuilder);
        }
        MetricDataset metricDataset = new MetricDataset(metricName, comparisonRequest.getCollectionName());
        MetricConfigDTO metricConfig = CACHE_REGISTRY.getMetricConfigCache().get(metricDataset);
        if (StringUtils.isNotBlank(metricConfig.getCellSizeExpression())) {
          String metricExpression = metricExpressions.get(metricName);
          String[] tokens = metricExpression.split(RATIO_SEPARATOR);
          String numerator = tokens[0];
          String denominator = tokens[1];
          Metric numeratorMetric = metricMap.get(numerator);
          Metric denominatorMetric = metricMap.get(denominator);
          Double numeratorBaseline = numeratorMetric == null ? 0 : numeratorMetric.getBaselineValue();
          Double numeratorCurrent = numeratorMetric == null ? 0 : numeratorMetric.getCurrentValue();
          Double denominatorBaseline = denominatorMetric == null ? 0 : denominatorMetric.getBaselineValue();
          Double denominatorCurrent = denominatorMetric == null ? 0 : denominatorMetric.getCurrentValue();
          Map<String, Double> context = new HashMap<>();
          context.put(numerator, numeratorCurrent);
          context.put(denominator, denominatorCurrent);
          String cellSizeExpression = metricConfig.getCellSizeExpression();
          Double cellSize = MetricExpression.evaluateExpression(cellSizeExpression, context);
          heatMapBuilder.addCell(dimensionValue, metric.getBaselineValue(), metric.getCurrentValue(), cellSize,
              cellSizeExpression, numeratorBaseline, denominatorBaseline, numeratorCurrent, denominatorCurrent);
        } else {
          heatMapBuilder.addCell(dimensionValue, metric.getBaselineValue(), metric.getCurrentValue());
        }
      }
    }
  }
  ResponseSchema schema = new ResponseSchema();
  String[] columns = HeatMapCell.columns();
  for (int i = 0; i < columns.length; i++) {
    String column = columns[i];
    schema.add(column, i);
  }
  Info summary = new Info();
  Map<String, GenericResponse> heatMapViewResponseData = new HashMap<>();
  for (MetricExpression expression : request.getMetricExpressions()) {
    List<MetricFunction> metricFunctions = expression.computeMetricFunctions();
    Double baselineTotal = baselineTotalPerMetricAndDimension.get(expression.getExpressionName()).values().iterator().next();
    Double currentTotal = currentTotalPerMetricAndDimension.get(expression.getExpressionName()).values().iterator().next();
    // check whether the expression is derived (composed of multiple metric functions)
    if (metricFunctions.size() > 1) {
      Map<String, Double> baselineContext = new HashMap<>();
      Map<String, Double> currentContext = new HashMap<>();
      for (String metricOrExpression : metricOrExpressionNames) {
        baselineContext.put(metricOrExpression, baselineTotalPerMetricAndDimension.get(metricOrExpression).values().iterator().next());
        currentContext.put(metricOrExpression, currentTotalPerMetricAndDimension.get(metricOrExpression).values().iterator().next());
      }
      baselineTotal = MetricExpression.evaluateExpression(expression, baselineContext);
      currentTotal = MetricExpression.evaluateExpression(expression, currentContext);
    } else {
      baselineTotal = baselineTotalPerMetricAndDimension.get(expression.getExpressionName()).values().iterator().next();
      currentTotal = currentTotalPerMetricAndDimension.get(expression.getExpressionName()).values().iterator().next();
    }
    summary.addSimpleField("baselineStart", Long.toString(comparisonRequest.getBaselineStart().getMillis()));
    summary.addSimpleField("baselineEnd", Long.toString(comparisonRequest.getBaselineEnd().getMillis()));
    summary.addSimpleField("currentStart", Long.toString(comparisonRequest.getCurrentStart().getMillis()));
    summary.addSimpleField("currentEnd", Long.toString(comparisonRequest.getCurrentEnd().getMillis()));
    summary.addSimpleField("baselineTotal", HeatMapCell.format(baselineTotal));
    summary.addSimpleField("currentTotal", HeatMapCell.format(currentTotal));
    summary.addSimpleField("deltaChange", HeatMapCell.format(currentTotal - baselineTotal));
    summary.addSimpleField("deltaPercentage", HeatMapCell.format((currentTotal - baselineTotal) * 100.0 / baselineTotal));
  }
  for (Entry<String, HeatMap.Builder> entry : data.entrySet()) {
    String dataKey = entry.getKey();
    GenericResponse heatMapResponse = new GenericResponse();
    List<String[]> heatMapResponseData = new ArrayList<>();
    HeatMap.Builder builder = entry.getValue();
    HeatMap heatMap = builder.build();
    for (HeatMapCell cell : heatMap.heatMapCells) {
      String[] newRowData = cell.toArray();
      heatMapResponseData.add(newRowData);
    }
    heatMapResponse.setSchema(schema);
    heatMapResponse.setResponseData(heatMapResponseData);
    heatMapViewResponseData.put(dataKey, heatMapResponse);
  }
  HeatMapViewResponse heatMapViewResponse = new HeatMapViewResponse();
  heatMapViewResponse.setMetrics(expressionNames);
  heatMapViewResponse.setDimensions(groupByDimensions);
  heatMapViewResponse.setData(heatMapViewResponseData);
  heatMapViewResponse.setMetricExpression(metricExpressions);
  heatMapViewResponse.setSummary(summary);
  return heatMapViewResponse;
}
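
Because MetricDataset is the key type of the Guava metricConfigCache used above, it must provide value-based equals and hashCode so that repeated lookups for the same metric and dataset hit the same cache entry. The sketch below is a hypothetical stand-in (deliberately named MetricDatasetKey); the real class's fields and accessors may differ.

import java.util.Objects;

// Hypothetical sketch of a (metric, dataset) cache key; the real
// com.linkedin.thirdeye.client.cache.MetricDataset may differ in fields and API.
public final class MetricDatasetKey {
  private final String metricName;
  private final String dataset;

  public MetricDatasetKey(String metricName, String dataset) {
    this.metricName = metricName;
    this.dataset = dataset;
  }

  public String getMetricName() {
    return metricName;
  }

  public String getDataset() {
    return dataset;
  }

  // Value equality is what lets two lookups for the same (metric, dataset)
  // pair resolve to the same Guava cache entry.
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof MetricDatasetKey)) {
      return false;
    }
    MetricDatasetKey other = (MetricDatasetKey) o;
    return Objects.equals(metricName, other.metricName) && Objects.equals(dataset, other.dataset);
  }

  @Override
  public int hashCode() {
    return Objects.hash(metricName, dataset);
  }
}
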
Use of com.linkedin.thirdeye.client.cache.MetricDataset in project pinot by linkedin.
The class ThirdEyeUtils, method getDerivedMetricExpression.
public static String getDerivedMetricExpression(String metricExpressionName, String dataset) throws ExecutionException {
  String derivedMetricExpression = null;
  MetricDataset metricDataset = new MetricDataset(metricExpressionName, dataset);
  MetricConfigDTO metricConfig = CACHE_REGISTRY.getMetricConfigCache().get(metricDataset);
  if (metricConfig.isDerived()) {
    derivedMetricExpression = metricConfig.getDerivedMetricExpression();
  } else {
    derivedMetricExpression = MetricConfigBean.DERIVED_METRIC_ID_PREFIX + metricConfig.getId();
  }
  return derivedMetricExpression;
}
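
A short, hedged usage sketch of this helper follows. The metric and dataset names are made up, the import path for ThirdEyeUtils is assumed, and the lookup only works after the metric config cache has been registered (see initCaches above).

import java.util.concurrent.ExecutionException;

import com.linkedin.thirdeye.util.ThirdEyeUtils; // assumed package

public class DerivedMetricExpressionExample {
  public static void main(String[] args) throws ExecutionException {
    // Hypothetical metric and dataset names.
    String expression = ThirdEyeUtils.getDerivedMetricExpression("pageViews", "my_dataset");
    // For a derived metric this is the configured derived expression; for a plain metric it is
    // MetricConfigBean.DERIVED_METRIC_ID_PREFIX concatenated with the metric id.
    System.out.println(expression);
  }
}
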
Use of com.linkedin.thirdeye.client.cache.MetricDataset in project pinot by linkedin.
The class DataResource, method getWowSummary.
@GET
@Path("dashboard/wowsummary")
public WowSummary getWowSummary(@QueryParam("dashboard") String dashboard, @QueryParam("timeRanges") String timeRanges) {
  WowSummary wowSummary = new WowSummary();
  if (StringUtils.isBlank(dashboard)) {
    return wowSummary;
  }
  List<Long> metricIds = getMetricIdsByDashboard(dashboard);
  List<String> timeRangeLabels = Lists.newArrayList(timeRanges.split(","));
  // Group metric ids and metric expressions by dataset (collection)
  Multimap<String, Long> datasetToMetrics = ArrayListMultimap.create();
  Multimap<String, MetricExpression> datasetToMetricExpressions = ArrayListMultimap.create();
  Map<Long, MetricConfigDTO> metricIdToMetricConfig = new HashMap<>();
  for (long metricId : metricIds) {
    MetricConfigDTO metricConfig = metricConfigDAO.findById(metricId);
    metricIdToMetricConfig.put(metricId, metricConfig);
    datasetToMetrics.put(metricConfig.getDataset(), metricId);
    datasetToMetricExpressions.put(metricConfig.getDataset(), ThirdEyeUtils.getMetricExpressionFromMetricConfig(metricConfig));
  }
  Multimap<String, MetricSummary> metricAliasToMetricSummariesMap = ArrayListMultimap.create();
  // Create a query request for each collection
  for (String dataset : datasetToMetrics.keySet()) {
    TabularViewRequest request = new TabularViewRequest();
    request.setCollection(dataset);
    request.setMetricExpressions(new ArrayList<>(datasetToMetricExpressions.get(dataset)));
    // Store the start and end times in DateTime objects with the data's timezone so that any conversion to
    // SimpleDateFormat is correct regardless of the user's and server's timezone, including daylight saving time.
    for (String timeRangeLabel : timeRangeLabels) {
      DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(dataset);
      TimeRange timeRange = getTimeRangeFromLabel(dataset, timeZoneForCollection, timeRangeLabel);
      long currentEnd = timeRange.getEnd();
      long currentStart = timeRange.getStart();
      System.out.println(timeRangeLabel + "Current start end " + new DateTime(currentStart) + " " + new DateTime(currentEnd));
      TimeGranularity timeGranularity = new TimeGranularity(1, TimeUnit.HOURS);
      request.setBaselineStart(new DateTime(currentStart, timeZoneForCollection).minusDays(7));
      request.setBaselineEnd(new DateTime(currentEnd, timeZoneForCollection).minusDays(7));
      request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
      request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
      request.setTimeGranularity(timeGranularity);
      TabularViewHandler handler = new TabularViewHandler(queryCache);
      try {
        TabularViewResponse tabularViewResponse = handler.process(request);
        for (String metric : tabularViewResponse.getMetrics()) {
          MetricDataset metricDataset = new MetricDataset(metric, dataset);
          MetricConfigDTO metricConfig = CACHE_REGISTRY_INSTANCE.getMetricConfigCache().get(metricDataset);
          Long metricId = metricConfig.getId();
          String metricAlias = metricConfig.getAlias();
          GenericResponse response = tabularViewResponse.getData().get(metric);
          MetricSummary metricSummary = new MetricSummary();
          metricSummary.setMetricId(metricId);
          metricSummary.setMetricName(metricConfig.getName());
          metricSummary.setMetricAlias(metricAlias);
          List<String[]> data = response.getResponseData();
          double baselineValue = 0;
          double currentValue = 0;
          for (String[] responseData : data) {
            baselineValue = baselineValue + Double.valueOf(responseData[0]);
            currentValue = currentValue + Double.valueOf(responseData[1]);
          }
          double percentageChange = (currentValue - baselineValue) * 100 / baselineValue;
          metricSummary.setBaselineValue(baselineValue);
          metricSummary.setCurrentValue(currentValue);
          metricSummary.setWowPercentageChange(percentageChange);
          metricAliasToMetricSummariesMap.put(metricAlias, metricSummary);
        }
      } catch (Exception e) {
        LOG.error("Exception while processing /data/tabular call", e);
      }
    }
  }
  wowSummary.setMetricAliasToMetricSummariesMap(metricAliasToMetricSummariesMap);
  return wowSummary;
}
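
The inline week-over-week computation above, (currentValue - baselineValue) * 100 / baselineValue, yields Infinity or NaN when the baseline sums to zero. Below is a small hypothetical helper, not part of ThirdEye, that makes the zero-baseline case explicit.

// Hypothetical helper: week-over-week percentage change with an explicit
// guard for a zero baseline.
public final class WowMath {
  private WowMath() {
  }

  public static double percentageChange(double baselineValue, double currentValue) {
    if (baselineValue == 0d) {
      // Caller decides how to render "no baseline"; NaN avoids reporting Infinity.
      return currentValue == 0d ? 0d : Double.NaN;
    }
    return (currentValue - baselineValue) * 100d / baselineValue;
  }

  public static void main(String[] args) {
    System.out.println(percentageChange(200d, 250d)); // 25.0
    System.out.println(percentageChange(0d, 250d));   // NaN instead of Infinity
  }
}
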
Use of com.linkedin.thirdeye.client.cache.MetricDataset in project pinot by linkedin.
The class DataResource, method getMetricSummary.
/**
 * Returns the percentage change between current values and baseline values. The values are
 * aggregated according to the number of buckets. If the bucket number is 1, then all values
 * between the given time ranges are assigned to the corresponding bucket and aggregated.
 *
 * Note: the current implementation assumes the number of buckets is always 1.
 */
@GET
@Path("dashboard/metricsummary")
public List<MetricSummary> getMetricSummary(@QueryParam("dashboard") String dashboard, @QueryParam("timeRange") String timeRange) {
  List<MetricSummary> metricsSummary = new ArrayList<>();
  if (StringUtils.isBlank(dashboard)) {
    return metricsSummary;
  }
  List<Long> metricIds = getMetricIdsByDashboard(dashboard);
  // Group metric ids and metric expressions by dataset (collection)
  Multimap<String, Long> datasetToMetrics = ArrayListMultimap.create();
  Multimap<String, MetricExpression> datasetToMetricExpressions = ArrayListMultimap.create();
  Map<Long, MetricConfigDTO> metricIdToMetricConfig = new HashMap<>();
  for (long metricId : metricIds) {
    MetricConfigDTO metricConfig = metricConfigDAO.findById(metricId);
    metricIdToMetricConfig.put(metricId, metricConfig);
    datasetToMetrics.put(metricConfig.getDataset(), metricId);
    datasetToMetricExpressions.put(metricConfig.getDataset(), ThirdEyeUtils.getMetricExpressionFromMetricConfig(metricConfig));
  }
  // Create a query request for each collection
  for (String dataset : datasetToMetrics.keySet()) {
    TabularViewRequest request = new TabularViewRequest();
    request.setCollection(dataset);
    request.setMetricExpressions(new ArrayList<>(datasetToMetricExpressions.get(dataset)));
    // The input start and end times (i.e., currentStart, currentEnd, baselineStart, and
    // baselineEnd) are given in milliseconds since the epoch, which is timezone insensitive. On the
    // other hand, the start and end times of the request sent to the backend database (e.g.,
    // Pinot) could be converted to SimpleDateFormat, which is timezone sensitive. Therefore,
    // we store the user's start and end times in DateTime objects with the data's timezone
    // to ensure that the conversion to SimpleDateFormat is always correct regardless of the
    // user's and server's timezone, including daylight saving time.
    String[] tokens = timeRange.split("_");
    TimeGranularity timeGranularity = new TimeGranularity(Integer.valueOf(tokens[0]), TimeUnit.valueOf(tokens[1]));
    long currentEnd = Utils.getMaxDataTimeForDataset(dataset);
    long currentStart = currentEnd - TimeUnit.MILLISECONDS.convert(Long.valueOf(tokens[0]), TimeUnit.valueOf(tokens[1]));
    DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(dataset);
    request.setBaselineStart(new DateTime(currentStart, timeZoneForCollection).minusDays(7));
    request.setBaselineEnd(new DateTime(currentEnd, timeZoneForCollection).minusDays(7));
    request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
    request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
    request.setTimeGranularity(timeGranularity);
    TabularViewHandler handler = new TabularViewHandler(queryCache);
    try {
      TabularViewResponse tabularViewResponse = handler.process(request);
      for (String metric : tabularViewResponse.getMetrics()) {
        MetricDataset metricDataset = new MetricDataset(metric, dataset);
        MetricConfigDTO metricConfig = CACHE_REGISTRY_INSTANCE.getMetricConfigCache().get(metricDataset);
        Long metricId = metricConfig.getId();
        GenericResponse response = tabularViewResponse.getData().get(metric);
        MetricSummary metricSummary = new MetricSummary();
        metricSummary.setMetricId(metricId);
        metricSummary.setMetricName(metricConfig.getName());
        metricSummary.setMetricAlias(metricConfig.getAlias());
        String[] responseData = response.getResponseData().get(0);
        double baselineValue = Double.valueOf(responseData[0]);
        double currentValue = Double.valueOf(responseData[1]);
        double percentageChange = (currentValue - baselineValue) * 100 / baselineValue;
        metricSummary.setBaselineValue(baselineValue);
        metricSummary.setCurrentValue(currentValue);
        metricSummary.setWowPercentageChange(percentageChange);
        AnomaliesSummary anomaliesSummary = anomaliesResoure.getAnomalyCountForMetricInRange(metricId, currentStart, currentEnd);
        metricSummary.setAnomaliesSummary(anomaliesSummary);
        metricsSummary.add(metricSummary);
      }
    } catch (Exception e) {
      LOG.error("Exception while processing /data/tabular call", e);
    }
  }
  return metricsSummary;
}
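
For reference, getMetricSummary expects the timeRange query parameter in the form "<amount>_<TimeUnit>" (for example "24_HOURS"), since it splits on "_" and passes the pieces to Integer.valueOf and TimeUnit.valueOf. The stand-alone sketch below mirrors just that parsing step; the example value is made up.

import java.util.concurrent.TimeUnit;

public class TimeRangeTokenExample {
  public static void main(String[] args) {
    // Mirrors the parsing in getMetricSummary: "<amount>_<TimeUnit>", e.g. "24_HOURS".
    String timeRange = "24_HOURS";
    String[] tokens = timeRange.split("_");
    int amount = Integer.valueOf(tokens[0]);
    TimeUnit unit = TimeUnit.valueOf(tokens[1]);
    long windowMillis = TimeUnit.MILLISECONDS.convert(amount, unit);
    // currentStart would then be currentEnd - windowMillis, with currentEnd taken
    // from the dataset's max data time.
    System.out.println(amount + " " + unit + " = " + windowMillis + " ms");
  }
}
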