Use of org.joda.time.format.DateTimeFormatter in the project Pinot by LinkedIn, from the class DetectionJobRunner, method alignTimestampsToDataTimezone.
/**
 * Aligns a timestamp to the dataset's own time zone for DAILY-granularity datasets.
 * The instant is printed with the dataset's configured time format/zone and parsed
 * back, which snaps it onto the dataset's day boundary.
 *
 * @param inputDateTime timestamp to align
 * @param collection dataset name used to look up the time spec and time zone
 * @return the aligned timestamp; the original timestamp for non-daily datasets or if any lookup fails
 */
private DateTime alignTimestampsToDataTimezone(DateTime inputDateTime, String collection) {
  try {
    DatasetConfigDTO datasetConfig = DAO_REGISTRY.getDatasetConfigDAO().findByDataset(collection);
    TimeSpec dataTimeSpec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
    if (dataTimeSpec.getDataGranularity().getUnit().equals(TimeUnit.DAYS)) {
      DateTimeZone dataZone = Utils.getDataTimeZone(collection);
      DateTimeFormatter dataFormatter =
          DateTimeFormat.forPattern(dataTimeSpec.getFormat()).withZone(dataZone);
      // Round-trip through the dataset's format: printing drops sub-day precision in the
      // dataset's zone, and parsing yields the millis of that day boundary.
      String printed = dataFormatter.print(inputDateTime.getMillis());
      inputDateTime = new DateTime(dataFormatter.parseMillis(printed));
    }
  } catch (Exception e) {
    // Best-effort alignment: on any failure keep the caller's original timestamp.
    LOG.error("Exception in aligning timestamp to data time zone", e);
  }
  return inputDateTime;
}
Use of org.joda.time.format.DateTimeFormatter in the project Pinot by LinkedIn, from the class DetectionJobSchedulerUtils, method getDateTimeFormatterForDataset.
/**
 * Builds the date-time formatter matching the dataset's granularity, used to render
 * detection timestamps in the db in a consistent string format.
 *
 * <p>DAILY datasets use {@code DAY_FORMAT}; MINUTE-or-finer datasets use
 * {@code MINUTE_FORMAT}; HOURLY and any other granularity fall back to
 * {@code HOUR_FORMAT}.
 *
 * @param datasetConfig dataset whose time spec supplies the granularity
 * @param dateTimeZone zone applied to the returned formatter
 * @return a Joda formatter for the granularity-appropriate pattern, in the given zone
 */
public static DateTimeFormatter getDateTimeFormatterForDataset(DatasetConfigDTO datasetConfig, DateTimeZone dateTimeZone) {
  TimeSpec timeSpec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
  TimeUnit unit = timeSpec.getDataGranularity().getUnit();
  String pattern;
  if (unit == TimeUnit.DAYS) {
    pattern = DAY_FORMAT;
  } else if (unit == TimeUnit.MINUTES || unit == TimeUnit.SECONDS || unit == TimeUnit.MILLISECONDS) {
    pattern = MINUTE_FORMAT;
  } else {
    // HOURS and anything else default to the hourly pattern.
    pattern = HOUR_FORMAT;
  }
  return DateTimeFormat.forPattern(pattern).withZone(dateTimeZone);
}
Use of org.joda.time.format.DateTimeFormatter in the project Pinot by LinkedIn, from the class DetectionJobSchedulerUtils, method getNewEntries.
/**
 * Creates detection-status entries from the last recorded entry up to the current time,
 * one entry per bucket of the dataset/function granularity:
 * HOURLY datasets get one entry per hour, DAILY datasets one per day, and MINUTE-level
 * datasets one per bucket as determined by the anomaly function's frequency.
 *
 * @param currentDateTime current time, aligned to a bucket boundary internally
 * @param lastEntryForFunction most recent existing entry, or null if none exists yet
 * @param anomalyFunction function whose frequency sizes minute-level buckets
 * @param datasetConfig dataset supplying granularity and formats
 * @param dateTimeZone zone used to render entry date strings
 * @return insertion-ordered map of formatted date string to its millis timestamp
 */
public static Map<String, Long> getNewEntries(DateTime currentDateTime, DetectionStatusDTO lastEntryForFunction, AnomalyFunctionDTO anomalyFunction, DatasetConfigDTO datasetConfig, DateTimeZone dateTimeZone) {
  Map<String, Long> newEntries = new LinkedHashMap<>();
  DateTimeFormatter formatter =
      DetectionJobSchedulerUtils.getDateTimeFormatterForDataset(datasetConfig, dateTimeZone);
  long alignedNowMillis =
      DetectionJobSchedulerUtils.getBoundaryAlignedTimeForDataset(datasetConfig, currentDateTime, anomalyFunction);
  // First ever entry for this function: seed with the (aligned) current time only.
  if (lastEntryForFunction == null) {
    String nowString = formatter.print(alignedNowMillis);
    newEntries.put(nowString, formatter.parseMillis(nowString));
    return newEntries;
  }
  // Otherwise emit one entry per bucket from just after the last entry through now.
  long bucketMillis =
      DetectionJobSchedulerUtils.getBucketSizeInMSForDataset(datasetConfig, anomalyFunction);
  for (long cursor = lastEntryForFunction.getDateToCheckInMS(); cursor < alignedNowMillis; ) {
    cursor += bucketMillis;
    newEntries.put(formatter.print(cursor), cursor);
  }
  return newEntries;
}
Use of org.joda.time.format.DateTimeFormatter in the project Pinot by LinkedIn, from the class PinotThirdEyeClient, method parseResultSets.
/**
 * Flattens the Pinot {@link ResultSet}s for a request into rows of
 * (group-by key columns + one column per metric function).
 *
 * <p>Rows are keyed by the concatenation of their group-by values; metric values for
 * the same composite key are summed across result-set rows. When the request groups by
 * time, the first group key is converted from the dataset's raw time representation
 * into a bucket index relative to the request start.
 *
 * <p>NOTE(review): the accumulation at the bottom indexes the metric column by the
 * result-set index {@code i} — presumably one ResultSet per metric function, in the
 * same order as {@code request.getMetricFunctions()}; confirm against the caller.
 *
 * @param request original request (group-by spec, metrics, start time)
 * @param resultSets Pinot result sets to parse
 * @param metricFunctions metric functions of the request (unused directly here)
 * @param dimensionNames dimension names of the request (unused directly here)
 * @param datasetConfig dataset config supplying time spec and zone
 * @return one String[] per distinct composite group key, values as stringified doubles
 * @throws ExecutionException propagated from dataset/time-zone lookups
 */
private List<String[]> parseResultSets(ThirdEyeRequest request, List<ResultSet> resultSets, List<MetricFunction> metricFunctions, List<String> dimensionNames, DatasetConfigDTO datasetConfig) throws ExecutionException {
  // Row layout: [group-by time bucket?][group-by dimensions...][metric values...]
  int numGroupByKeys = 0;
  boolean hasGroupBy = false;
  if (request.getGroupByTimeGranularity() != null) {
    numGroupByKeys += 1;
  }
  if (request.getGroupBy() != null) {
    numGroupByKeys += request.getGroupBy().size();
  }
  if (numGroupByKeys > 0) {
    hasGroupBy = true;
  }
  int numMetrics = request.getMetricFunctions().size();
  int numCols = numGroupByKeys + numMetrics;
  boolean hasGroupByTime = false;
  String collection = datasetConfig.getDataset();
  TimeGranularity dataGranularity = null;
  long startTime = request.getStartTimeInclusive().getMillis();
  DateTimeZone dateTimeZone = Utils.getDataTimeZone(collection);
  DateTime startDateTime = new DateTime(startTime, dateTimeZone);
  TimeSpec timespec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
  dataGranularity = timespec.getDataGranularity();
  boolean isISOFormat = false;
  DateTimeFormatter inputDataDateTimeFormatter = null;
  String timeFormat = timespec.getFormat();
  // Non-epoch time columns are strings in the dataset's own format; build a parser for them.
  if (timeFormat != null && !timeFormat.equals(TimeSpec.SINCE_EPOCH_FORMAT)) {
    isISOFormat = true;
    inputDataDateTimeFormatter = DateTimeFormat.forPattern(timeFormat).withZone(dateTimeZone);
  }
  if (request.getGroupByTimeGranularity() != null) {
    hasGroupByTime = true;
  }
  // Composite group key -> accumulated row; LinkedHashMap keeps first-seen order.
  LinkedHashMap<String, String[]> dataMap = new LinkedHashMap<>();
  for (int i = 0; i < resultSets.size(); i++) {
    ResultSet resultSet = resultSets.get(i);
    int numRows = resultSet.getRowCount();
    for (int r = 0; r < numRows; r++) {
      boolean skipRowDueToError = false;
      String[] groupKeys;
      if (hasGroupBy) {
        groupKeys = new String[resultSet.getGroupKeyLength()];
        for (int grpKeyIdx = 0; grpKeyIdx < resultSet.getGroupKeyLength(); grpKeyIdx++) {
          String groupKeyVal = "";
          try {
            groupKeyVal = resultSet.getGroupKeyString(r, grpKeyIdx);
          } catch (Exception e) {
            // IGNORE FOR NOW, workaround for Pinot Bug
          }
          // When grouping by time, the time value is always the first group key.
          if (hasGroupByTime && grpKeyIdx == 0) {
            int timeBucket;
            long millis;
            if (!isISOFormat) {
              // Epoch format: value is a count of granularity units since epoch.
              millis = dataGranularity.toMillis(Double.valueOf(groupKeyVal).longValue());
            } else {
              millis = DateTime.parse(groupKeyVal, inputDataDateTimeFormatter).getMillis();
            }
            // Drop rows before the requested window; bucket index would be negative.
            if (millis < startTime) {
              LOG.error("Data point earlier than requested start time {}: {}", new Date(startTime), new Date(millis));
              skipRowDueToError = true;
              break;
            }
            // Replace the raw time value with its bucket index relative to start.
            timeBucket = TimeRangeUtils.computeBucketIndex(request.getGroupByTimeGranularity(), startDateTime, new DateTime(millis, dateTimeZone));
            groupKeyVal = String.valueOf(timeBucket);
          }
          groupKeys[grpKeyIdx] = groupKeyVal;
        }
        if (skipRowDueToError) {
          continue;
        }
      } else {
        groupKeys = new String[] {};
      }
      // Composite key is the '|'-joined group values (e.g. "3|US|mobile|").
      StringBuilder groupKeyBuilder = new StringBuilder("");
      for (String grpKey : groupKeys) {
        groupKeyBuilder.append(grpKey).append("|");
      }
      String compositeGroupKey = groupKeyBuilder.toString();
      String[] rowValues = dataMap.get(compositeGroupKey);
      if (rowValues == null) {
        // First time this key is seen: metric columns start at "0", key columns copied in.
        rowValues = new String[numCols];
        Arrays.fill(rowValues, "0");
        System.arraycopy(groupKeys, 0, rowValues, 0, groupKeys.length);
        dataMap.put(compositeGroupKey, rowValues);
      }
      // Accumulate this result set's value into metric column i for the key.
      rowValues[groupKeys.length + i] = String.valueOf(Double.parseDouble(rowValues[groupKeys.length + i]) + Double.parseDouble(resultSet.getString(r, 0)));
    }
  }
  List<String[]> rows = new ArrayList<>();
  rows.addAll(dataMap.values());
  return rows;
}
Use of org.joda.time.format.DateTimeFormatter in the project Pinot by LinkedIn, from the class PqlUtils, method getBetweenClause.
/**
 * Builds the PQL time-range predicate for [start, endExclusive) on the dataset's
 * time column.
 *
 * <p>Epoch-style datasets (null or SINCE_EPOCH format): both endpoints are aligned to
 * the data granularity — start rounded up, end rounded down — then expressed as
 * granularity-unit counts. A window that spans exactly one unit collapses to equality.
 * String-formatted datasets: endpoints are printed with the dataset's own
 * format/time zone, unaligned.
 *
 * @param start inclusive window start
 * @param endExclusive exclusive window end
 * @param timeFieldSpec time column spec (name, granularity, format)
 * @param collection dataset name, used to look up the data time zone
 * @return a predicate like " col >= a AND col < b", or " col = a" when a equals b
 * @throws ExecutionException propagated from the time-zone lookup
 */
static String getBetweenClause(DateTime start, DateTime endExclusive, TimeSpec timeFieldSpec, String collection) throws ExecutionException {
  TimeGranularity dataGranularity = timeFieldSpec.getDataGranularity();
  String timeField = timeFieldSpec.getColumnName();
  String timeFormat = timeFieldSpec.getFormat();
  boolean isEpochFormat = (timeFormat == null || TimeSpec.SINCE_EPOCH_FORMAT.equals(timeFormat));
  String startQueryTime;
  String endQueryTimeExclusive;
  if (isEpochFormat) {
    long granularityMillis = dataGranularity.toMillis();
    long startMillis = start.getMillis();
    long endMillis = endExclusive.getMillis();
    // Shrink the window to whole granularity buckets: start up, end down.
    long startRemainder = startMillis % granularityMillis;
    if (startRemainder != 0) {
      startMillis = startMillis + granularityMillis - startRemainder;
    }
    long endRemainder = endMillis % granularityMillis;
    if (endRemainder != 0) {
      endMillis = endMillis - endRemainder;
    }
    long startUnits = dataGranularity.convertToUnit(startMillis);
    long endUnits = dataGranularity.convertToUnit(endMillis);
    startQueryTime = String.valueOf(startUnits);
    // A one-bucket window degenerates to a single time value (equality below).
    endQueryTimeExclusive =
        (endUnits == startUnits + 1) ? startQueryTime : String.valueOf(endUnits);
  } else {
    DateTimeFormatter formatter =
        DateTimeFormat.forPattern(timeFormat).withZone(Utils.getDataTimeZone(collection));
    startQueryTime = formatter.print(start);
    endQueryTimeExclusive = formatter.print(endExclusive);
  }
  if (startQueryTime.equals(endQueryTimeExclusive)) {
    return String.format(" %s = %s", timeField, startQueryTime);
  }
  return String.format(" %s >= %s AND %s < %s", timeField, startQueryTime, timeField, endQueryTimeExclusive);
}
Aggregations