Use of com.linkedin.thirdeye.api.TimeSpec in project pinot by linkedin.
The class DetectionJobRunner, method alignTimestampsToDataTimezone.
private DateTime alignTimestampsToDataTimezone(DateTime inputDateTime, String collection) {
  try {
    DatasetConfigDTO datasetConfig = DAO_REGISTRY.getDatasetConfigDAO().findByDataset(collection);
    TimeSpec timespec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
    TimeGranularity dataGranularity = timespec.getDataGranularity();
    String timeFormat = timespec.getFormat();
    if (dataGranularity.getUnit().equals(TimeUnit.DAYS)) {
      DateTimeZone dataTimeZone = Utils.getDataTimeZone(collection);
      DateTimeFormatter inputDataDateTimeFormatter = DateTimeFormat.forPattern(timeFormat).withZone(dataTimeZone);
      long inputMillis = inputDateTime.getMillis();
      // Print the instant with the dataset's daily pattern and time zone, then parse it
      // back: this snaps the timestamp to the start of that day in the data time zone.
      String inputDateTimeString = inputDataDateTimeFormatter.print(inputMillis);
      long timeZoneOffsetMillis = inputDataDateTimeFormatter.parseMillis(inputDateTimeString);
      inputDateTime = new DateTime(timeZoneOffsetMillis);
    }
  } catch (Exception e) {
    LOG.error("Exception in aligning timestamp to data time zone", e);
  }
  return inputDateTime;
}
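The print-then-parse round trip is the core trick here: printing with a day-granularity pattern discards everything finer than a day, and parsing the string back yields midnight of that day in the data time zone. A minimal standalone sketch of the same idea, where the "yyyyMMdd" pattern and the America/Los_Angeles zone are illustrative assumptions rather than values read from a dataset config:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class AlignToDataTimezoneSketch {
  public static void main(String[] args) {
    // Assumed daily format and data time zone; in ThirdEye these would come from
    // the dataset's TimeSpec and Utils.getDataTimeZone(collection).
    DateTimeFormatter fmt =
        DateTimeFormat.forPattern("yyyyMMdd").withZone(DateTimeZone.forID("America/Los_Angeles"));
    DateTime input = new DateTime(2017, 3, 15, 10, 30, DateTimeZone.UTC);
    // "20170315" parses back to the start of that day in America/Los_Angeles.
    DateTime aligned = new DateTime(fmt.parseMillis(fmt.print(input.getMillis())));
    System.out.println(aligned);
  }
}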
Use of com.linkedin.thirdeye.api.TimeSpec in project pinot by linkedin.
The class DetectionJobSchedulerUtils, method getDateTimeFormatterForDataset.
/**
 * Get the date-time formatter for a dataset, according to its granularity.
 * This is used to store dates in the DB in the correct SimpleDateFormat pattern.
 * @param datasetConfig dataset whose TimeSpec determines the pattern
 * @param dateTimeZone time zone applied to the formatter
 * @return formatter matching the dataset's granularity
 */
public static DateTimeFormatter getDateTimeFormatterForDataset(DatasetConfigDTO datasetConfig, DateTimeZone dateTimeZone) {
  String pattern = null;
  TimeSpec timeSpec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
  TimeUnit unit = timeSpec.getDataGranularity().getUnit();
  switch (unit) {
    case DAYS:
      pattern = DAY_FORMAT;
      break;
    case MINUTES:
    case SECONDS:
    case MILLISECONDS:
      pattern = MINUTE_FORMAT;
      break;
    case HOURS:
    default:
      pattern = HOUR_FORMAT;
      break;
  }
  return DateTimeFormat.forPattern(pattern).withZone(dateTimeZone);
}
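A hedged usage sketch: for a dataset whose granularity is DAYS, the returned formatter prints with the day pattern in the requested zone. Here datasetConfig stands for whatever DTO the caller already holds, and the "yyyyMMdd" output assumes DAY_FORMAT is the usual day pattern (an assumption about the class constant, not something shown above):

// Hypothetical call site for a daily dataset.
DateTimeFormatter formatter =
    DetectionJobSchedulerUtils.getDateTimeFormatterForDataset(datasetConfig, DateTimeZone.UTC);
String dateForDb = formatter.print(System.currentTimeMillis());
// e.g. "20170315" if DAY_FORMAT is "yyyyMMdd"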
Use of com.linkedin.thirdeye.api.TimeSpec in project pinot by linkedin.
The class DetectionJobSchedulerUtils, method getExpectedCompleteBuckets.
/**
 * Calculates the number of complete buckets that a time period can be divided into,
 * depending on the dataset and function frequency.
 * @param datasetConfig dataset whose TimeSpec determines the bucket size
 * @param startTime period start, in milliseconds
 * @param endTime period end, in milliseconds
 * @return number of complete buckets between startTime and endTime
 */
public static long getExpectedCompleteBuckets(DatasetConfigDTO datasetConfig, long startTime, long endTime) {
  TimeSpec timeSpec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
  // Get the bucket size from DataCompletenessTaskUtils, because that is what
  // determines the number of buckets to check
  long bucketSize = DataCompletenessTaskUtils.getBucketSizeInMSForDataset(timeSpec);
  return (endTime - startTime) / bucketSize;
}
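Since this is integer division, any trailing partial bucket is dropped, which is what makes the result a count of complete buckets. A worked example with assumed hourly buckets:

// Hypothetical values: hourly buckets over a six-and-a-half-hour window.
long bucketSize = 3_600_000L;                            // 1 hour in ms
long startTime = 1_489_536_000_000L;                     // arbitrary epoch ms
long endTime = startTime + 6 * bucketSize + 1_800_000L;  // 6.5 hours later
long numBuckets = (endTime - startTime) / bucketSize;    // = 6, partial bucket dropped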
Use of com.linkedin.thirdeye.api.TimeSpec in project pinot by linkedin.
The class PinotThirdEyeClient, method parseResultSets.
private List<String[]> parseResultSets(ThirdEyeRequest request, List<ResultSet> resultSets,
    List<MetricFunction> metricFunctions, List<String> dimensionNames, DatasetConfigDTO datasetConfig)
    throws ExecutionException {
  int numGroupByKeys = 0;
  boolean hasGroupBy = false;
  if (request.getGroupByTimeGranularity() != null) {
    numGroupByKeys += 1;
  }
  if (request.getGroupBy() != null) {
    numGroupByKeys += request.getGroupBy().size();
  }
  if (numGroupByKeys > 0) {
    hasGroupBy = true;
  }
  int numMetrics = request.getMetricFunctions().size();
  int numCols = numGroupByKeys + numMetrics;
  boolean hasGroupByTime = false;
  String collection = datasetConfig.getDataset();
  long startTime = request.getStartTimeInclusive().getMillis();
  DateTimeZone dateTimeZone = Utils.getDataTimeZone(collection);
  DateTime startDateTime = new DateTime(startTime, dateTimeZone);
  TimeSpec timespec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
  TimeGranularity dataGranularity = timespec.getDataGranularity();
  // If the dataset's time column is not epoch-based, time group keys must be
  // parsed with the dataset's own format and time zone.
  boolean isISOFormat = false;
  DateTimeFormatter inputDataDateTimeFormatter = null;
  String timeFormat = timespec.getFormat();
  if (timeFormat != null && !timeFormat.equals(TimeSpec.SINCE_EPOCH_FORMAT)) {
    isISOFormat = true;
    inputDataDateTimeFormatter = DateTimeFormat.forPattern(timeFormat).withZone(dateTimeZone);
  }
  if (request.getGroupByTimeGranularity() != null) {
    hasGroupByTime = true;
  }
  LinkedHashMap<String, String[]> dataMap = new LinkedHashMap<>();
  for (int i = 0; i < resultSets.size(); i++) {
    ResultSet resultSet = resultSets.get(i);
    int numRows = resultSet.getRowCount();
    for (int r = 0; r < numRows; r++) {
      boolean skipRowDueToError = false;
      String[] groupKeys;
      if (hasGroupBy) {
        groupKeys = new String[resultSet.getGroupKeyLength()];
        for (int grpKeyIdx = 0; grpKeyIdx < resultSet.getGroupKeyLength(); grpKeyIdx++) {
          String groupKeyVal = "";
          try {
            groupKeyVal = resultSet.getGroupKeyString(r, grpKeyIdx);
          } catch (Exception e) {
            // IGNORE FOR NOW, workaround for Pinot bug
          }
          if (hasGroupByTime && grpKeyIdx == 0) {
            // The first group key is the time value; convert it to a bucket index
            // relative to the requested start time.
            int timeBucket;
            long millis;
            if (!isISOFormat) {
              millis = dataGranularity.toMillis(Double.valueOf(groupKeyVal).longValue());
            } else {
              millis = DateTime.parse(groupKeyVal, inputDataDateTimeFormatter).getMillis();
            }
            if (millis < startTime) {
              LOG.error("Data point earlier than requested start time {}: {}", new Date(startTime), new Date(millis));
              skipRowDueToError = true;
              break;
            }
            timeBucket = TimeRangeUtils.computeBucketIndex(request.getGroupByTimeGranularity(), startDateTime,
                new DateTime(millis, dateTimeZone));
            groupKeyVal = String.valueOf(timeBucket);
          }
          groupKeys[grpKeyIdx] = groupKeyVal;
        }
        if (skipRowDueToError) {
          continue;
        }
      } else {
        groupKeys = new String[] {};
      }
      // Build a composite key from all group-by values so rows from different
      // result sets (one per metric) can be merged into a single output row.
      StringBuilder groupKeyBuilder = new StringBuilder();
      for (String grpKey : groupKeys) {
        groupKeyBuilder.append(grpKey).append("|");
      }
      String compositeGroupKey = groupKeyBuilder.toString();
      String[] rowValues = dataMap.get(compositeGroupKey);
      if (rowValues == null) {
        rowValues = new String[numCols];
        Arrays.fill(rowValues, "0");
        System.arraycopy(groupKeys, 0, rowValues, 0, groupKeys.length);
        dataMap.put(compositeGroupKey, rowValues);
      }
      // Accumulate the i-th metric's value into its own column.
      rowValues[groupKeys.length + i] = String.valueOf(
          Double.parseDouble(rowValues[groupKeys.length + i]) + Double.parseDouble(resultSet.getString(r, 0)));
    }
  }
  return new ArrayList<>(dataMap.values());
}
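The merge step hinges on the composite group key: each result set contributes one metric column, and rows sharing the same group-by values fold into a single output row. A standalone sketch of that accumulation pattern, where the nested string arrays are made-up stand-ins for Pinot result sets:

import java.util.Arrays;
import java.util.LinkedHashMap;

public class GroupKeyMergeSketch {
  public static void main(String[] args) {
    // Two "result sets", one per metric, keyed by a single group-by dimension.
    String[][][] resultSets = {
        { { "US", "10" }, { "IN", "20" } },  // metric 0
        { { "US", "1" }, { "IN", "2" } }     // metric 1
    };
    int numGroupByKeys = 1, numMetrics = 2;
    LinkedHashMap<String, String[]> dataMap = new LinkedHashMap<>();
    for (int i = 0; i < resultSets.length; i++) {
      for (String[] row : resultSets[i]) {
        String compositeGroupKey = row[0] + "|";
        String[] rowValues = dataMap.get(compositeGroupKey);
        if (rowValues == null) {
          rowValues = new String[numGroupByKeys + numMetrics];
          Arrays.fill(rowValues, "0");
          rowValues[0] = row[0];
          dataMap.put(compositeGroupKey, rowValues);
        }
        // Accumulate metric i into its own column, as parseResultSets does.
        rowValues[numGroupByKeys + i] = String.valueOf(
            Double.parseDouble(rowValues[numGroupByKeys + i]) + Double.parseDouble(row[1]));
      }
    }
    dataMap.values().forEach(r -> System.out.println(Arrays.toString(r)));
    // [US, 10.0, 1.0]
    // [IN, 20.0, 2.0]
  }
}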
Use of com.linkedin.thirdeye.api.TimeSpec in project pinot by linkedin.
The class AnomaliesResource, method getTimeSeriesData.
/**
 * Get the time series for a metric.
 * @param collection dataset name
 * @param filters dimension filters
 * @param start start time, in milliseconds
 * @param end end time, in milliseconds
 * @param aggTimeGranularity aggregation granularity
 * @param metric metric name
 * @return JSON object with "timeSeriesData", "keys" and "summary" entries
 * @throws Exception if the time series request fails
 */
private JSONObject getTimeSeriesData(String collection, Multimap<String, String> filters, Long start, Long end,
    String aggTimeGranularity, String metric) throws Exception {
  TimeSeriesRequest request = new TimeSeriesRequest();
  request.setCollectionName(collection);
  DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(collection);
  request.setStart(new DateTime(start, timeZoneForCollection));
  request.setEnd(new DateTime(end, timeZoneForCollection));
  request.setFilterSet(filters);
  List<MetricExpression> metricExpressions = Utils.convertToMetricExpressions(metric, MetricAggFunction.SUM, collection);
  request.setMetricExpressions(metricExpressions);
  request.setAggregationTimeGranularity(Utils.getAggregationTimeGranularity(aggTimeGranularity, collection));
  DatasetConfigDTO datasetConfig = CACHE_REGISTRY.getDatasetConfigCache().get(collection);
  TimeSpec timespec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
  if (!request.getAggregationTimeGranularity().getUnit().equals(TimeUnit.DAYS) || !StringUtils.isBlank(timespec.getFormat())) {
    request.setEndDateInclusive(true);
  }
  TimeSeriesHandler handler = new TimeSeriesHandler(CACHE_REGISTRY.getQueryCache());
  JSONObject jsonResponseObject = new JSONObject();
  TimeSeriesResponse response = handler.handle(request);
  JSONObject timeseriesMap = new JSONObject();
  JSONArray timeValueArray = new JSONArray();
  TreeSet<String> keys = new TreeSet<>();
  TreeSet<Long> times = new TreeSet<>();
  // First pass: collect the sorted set of timestamps.
  for (int i = 0; i < response.getNumRows(); i++) {
    TimeSeriesRow timeSeriesRow = response.getRow(i);
    times.add(timeSeriesRow.getStart());
  }
  for (Long time : times) {
    timeValueArray.put(time);
  }
  timeseriesMap.put("time", timeValueArray);
  // Second pass: append each metric's values, one array per metric name.
  for (int i = 0; i < response.getNumRows(); i++) {
    TimeSeriesRow timeSeriesRow = response.getRow(i);
    for (TimeSeriesMetric metricTimeSeries : timeSeriesRow.getMetrics()) {
      String key = metricTimeSeries.getMetricName();
      JSONArray valueArray;
      if (!timeseriesMap.has(key)) {
        valueArray = new JSONArray();
        timeseriesMap.put(key, valueArray);
        keys.add(key);
      } else {
        valueArray = timeseriesMap.getJSONArray(key);
      }
      valueArray.put(metricTimeSeries.getValue());
    }
  }
  JSONObject summaryMap = new JSONObject();
  summaryMap.put("currentStart", start);
  summaryMap.put("currentEnd", end);
  jsonResponseObject.put("timeSeriesData", timeseriesMap);
  jsonResponseObject.put("keys", new JSONArray(keys));
  jsonResponseObject.put("summary", summaryMap);
  LOG.info("Response:{}", jsonResponseObject);
  return jsonResponseObject;
}
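For reference, the shape of the JSON this method returns, with made-up timestamps and a hypothetical "views" metric key (each metric key holds a value array aligned with the "time" array):

{
  "timeSeriesData": {
    "time": [1489536000000, 1489539600000],
    "views": [123.0, 456.0]
  },
  "keys": ["views"],
  "summary": { "currentStart": 1489536000000, "currentEnd": 1489543200000 }
}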