Use of org.joda.time.DateTimeZone in the pinot project (by LinkedIn).
Class TimeSeriesResource, method getContributorDataForDimension:
// Builds a current-vs-baseline comparison view for every observed value ("sub-dimension")
// of the given dimension, plus an aggregate "ALL" series summed across sub-dimensions.
// Times are interpreted in the dataset's own time zone; filters arrive URL-encoded.
// Any failure is logged and rethrown as a WebApplicationException.
private TimeSeriesCompareMetricView getContributorDataForDimension(long metricId, long currentStart, long currentEnd, long baselineStart, long baselineEnd, String dimension, String filters, String granularity) {
MetricConfigDTO metricConfigDTO = metricConfigDAO.findById(metricId);
// NOTE(review): metricConfigDTO is dereferenced immediately below; an unknown metricId
// would NPE here rather than produce a clean error — confirm findById never returns null.
TimeSeriesCompareMetricView timeSeriesCompareMetricView = new TimeSeriesCompareMetricView(metricConfigDTO.getName(), metricId, currentStart, currentEnd);
try {
String dataset = metricConfigDTO.getDataset();
ContributorViewRequest request = new ContributorViewRequest();
request.setCollection(dataset);
MetricExpression metricExpression = ThirdEyeUtils.getMetricExpressionFromMetricConfig(metricConfigDTO);
request.setMetricExpressions(Arrays.asList(metricExpression));
// Interpret the epoch-millis window boundaries in the dataset's configured time zone.
DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(dataset);
request.setBaselineStart(new DateTime(baselineStart, timeZoneForCollection));
request.setBaselineEnd(new DateTime(baselineEnd, timeZoneForCollection));
request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
request.setTimeGranularity(Utils.getAggregationTimeGranularity(granularity, dataset));
if (filters != null && !filters.isEmpty()) {
// Filters come in URL-encoded; decode before converting to the multimap form.
filters = URLDecoder.decode(filters, "UTF-8");
request.setFilters(ThirdEyeUtils.convertToMultiMap(filters));
}
request.setGroupByDimensions(Arrays.asList(dimension));
ContributorViewHandler handler = new ContributorViewHandler(queryCache);
ContributorViewResponse response = handler.process(request);
// Assign the time buckets
List<Long> timeBucketsCurrent = new ArrayList<>();
List<Long> timeBucketsBaseline = new ArrayList<>();
timeSeriesCompareMetricView.setTimeBucketsCurrent(timeBucketsCurrent);
timeSeriesCompareMetricView.setTimeBucketsBaseline(timeBucketsBaseline);
// LinkedHashMap keeps the "ALL" entry first and preserves sub-dimension discovery order.
Map<String, ValuesContainer> subDimensionValuesMap = new LinkedHashMap<>();
timeSeriesCompareMetricView.setSubDimensionContributionMap(subDimensionValuesMap);
int timeBuckets = response.getTimeBuckets().size();
// this is for over all values
ValuesContainer vw = new ValuesContainer();
subDimensionValuesMap.put(ALL, vw);
vw.setCurrentValues(new double[timeBuckets]);
vw.setBaselineValues(new double[timeBuckets]);
vw.setPercentageChange(new String[timeBuckets]);
vw.setCumulativeCurrentValues(new double[timeBuckets]);
vw.setCumulativeBaselineValues(new double[timeBuckets]);
vw.setCumulativePercentageChange(new String[timeBuckets]);
// lets find the indices
// Resolve column positions once from the response schema instead of per row.
int subDimensionIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("dimensionValue");
int currentValueIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("currentValue");
int baselineValueIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("baselineValue");
int percentageChangeIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("percentageChange");
int cumCurrentValueIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("cumulativeCurrentValue");
int cumBaselineValueIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("cumulativeBaselineValue");
int cumPercentageChangeIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("cumulativePercentageChange");
// populate current and baseline time buckets
for (int i = 0; i < timeBuckets; i++) {
TimeBucket tb = response.getTimeBuckets().get(i);
timeBucketsCurrent.add(tb.getCurrentStart());
timeBucketsBaseline.add(tb.getBaselineStart());
}
// set current and baseline values for sub dimensions
// NOTE(review): the index math below (i % timeBuckets) assumes the response rows are
// grouped by sub-dimension with exactly `timeBuckets` consecutive rows each, in bucket
// order — confirm against ContributorViewHandler's row layout.
for (int i = 0; i < response.getResponseData().getResponseData().size(); i++) {
String[] data = response.getResponseData().getResponseData().get(i);
String subDimension = data[subDimensionIndex];
Double currentVal = Double.valueOf(data[currentValueIndex]);
Double baselineVal = Double.valueOf(data[baselineValueIndex]);
Double percentageChangeVal = Double.valueOf(data[percentageChangeIndex]);
Double cumCurrentVal = Double.valueOf(data[cumCurrentValueIndex]);
Double cumBaselineVal = Double.valueOf(data[cumBaselineValueIndex]);
Double cumPercentageChangeVal = Double.valueOf(data[cumPercentageChangeIndex]);
int index = i % timeBuckets;
// set overAll values
// "ALL" accumulates (+=) across sub-dimensions; percentage fields for ALL are
// recomputed after the loop rather than summed.
vw.getCurrentValues()[index] += currentVal;
vw.getBaselineValues()[index] += baselineVal;
vw.getCumulativeCurrentValues()[index] += cumCurrentVal;
vw.getCumulativeBaselineValues()[index] += cumBaselineVal;
// set individual sub-dimension values
if (!subDimensionValuesMap.containsKey(subDimension)) {
// First row seen for this sub-dimension: allocate its per-bucket arrays.
ValuesContainer subDimVals = new ValuesContainer();
subDimVals.setCurrentValues(new double[timeBuckets]);
subDimVals.setBaselineValues(new double[timeBuckets]);
subDimVals.setPercentageChange(new String[timeBuckets]);
subDimVals.setCumulativeCurrentValues(new double[timeBuckets]);
subDimVals.setCumulativeBaselineValues(new double[timeBuckets]);
subDimVals.setCumulativePercentageChange(new String[timeBuckets]);
subDimensionValuesMap.put(subDimension, subDimVals);
}
// Per-sub-dimension values are assigned (=), not accumulated: one row per bucket.
subDimensionValuesMap.get(subDimension).getCurrentValues()[index] = currentVal;
subDimensionValuesMap.get(subDimension).getBaselineValues()[index] = baselineVal;
subDimensionValuesMap.get(subDimension).getPercentageChange()[index] = String.format(DECIMAL_FORMAT, percentageChangeVal);
subDimensionValuesMap.get(subDimension).getCumulativeCurrentValues()[index] = cumCurrentVal;
subDimensionValuesMap.get(subDimension).getCumulativeBaselineValues()[index] = cumBaselineVal;
subDimensionValuesMap.get(subDimension).getCumulativePercentageChange()[index] = String.format(DECIMAL_FORMAT, cumPercentageChangeVal);
}
// TODO : compute cumulative values for all
// Derive the ALL percentage-change strings from the summed values above.
for (int i = 0; i < vw.getCurrentValues().length; i++) {
vw.getPercentageChange()[i] = String.format(DECIMAL_FORMAT, getPercentageChange(vw.getCurrentValues()[i], vw.getBaselineValues()[i]));
vw.getCumulativePercentageChange()[i] = String.format(DECIMAL_FORMAT, getPercentageChange(vw.getCumulativeCurrentValues()[i], vw.getCumulativeBaselineValues()[i]));
}
} catch (Exception e) {
LOG.error(e.getMessage(), e);
throw new WebApplicationException(e);
}
return timeSeriesCompareMetricView;
}
Use of org.joda.time.DateTimeZone in the pinot project (by LinkedIn).
Class TestDetectionJobSchedulerUtils, method testGetNewEntriesForDetectionSchedulerDaily:
@Test
public void testGetNewEntriesForDetectionSchedulerDaily() throws Exception {
// Daily dataset: one detection entry expected per calendar day (UTC).
DatasetConfigDTO datasetConfig = new DatasetConfigDTO();
datasetConfig.setTimeColumn("Date");
datasetConfig.setTimeUnit(TimeUnit.DAYS);
datasetConfig.setTimeDuration(1);
DateTimeZone dateTimeZone = DateTimeZone.UTC;
AnomalyFunctionDTO anomalyFunction = new AnomalyFunctionDTO();
DateTimeFormatter dateTimeFormatter = DetectionJobSchedulerUtils.getDateTimeFormatterForDataset(datasetConfig, dateTimeZone);
// "now" is 2017-02-14 03:37; for a daily dataset it rounds down to 2017-02-14.
String currentDateTimeString = "201702140337";
String currentDateTimeStringRounded = "20170214";
DateTime currentDateTime = minuteDateTimeFormatter.parseDateTime(currentDateTimeString);
DateTime currentDateTimeRounded = dateTimeFormatter.parseDateTime(currentDateTimeStringRounded);
DetectionStatusDTO lastEntryForFunction = null;
// null last entry: exactly one new entry, for the current (rounded) day
Map<String, Long> newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
Assert.assertEquals(newEntries.size(), 1);
// Long.valueOf instead of the deprecated new Long(...) constructor
Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), Long.valueOf(currentDateTimeRounded.getMillis()));
// last entry same as current time: nothing new to schedule
lastEntryForFunction = new DetectionStatusDTO();
lastEntryForFunction.setDateToCheckInSDF(currentDateTimeStringRounded);
lastEntryForFunction.setDateToCheckInMS(currentDateTimeRounded.getMillis());
newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
Assert.assertEquals(newEntries.size(), 0);
// last entry 1 day before current time: one catch-up entry
String lastEntryDateTimeString = "20170213";
DateTime lastEntryDateTime = dateTimeFormatter.parseDateTime(lastEntryDateTimeString);
lastEntryForFunction = new DetectionStatusDTO();
lastEntryForFunction.setDateToCheckInSDF(lastEntryDateTimeString);
lastEntryForFunction.setDateToCheckInMS(lastEntryDateTime.getMillis());
newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
Assert.assertEquals(newEntries.size(), 1);
Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), Long.valueOf(currentDateTimeRounded.getMillis()));
// last entry 3 days before current time: one entry per missed day
lastEntryDateTimeString = "20170211";
lastEntryDateTime = dateTimeFormatter.parseDateTime(lastEntryDateTimeString);
lastEntryForFunction = new DetectionStatusDTO();
lastEntryForFunction.setDateToCheckInSDF(lastEntryDateTimeString);
lastEntryForFunction.setDateToCheckInMS(lastEntryDateTime.getMillis());
newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
Assert.assertEquals(newEntries.size(), 3);
Assert.assertNotNull(newEntries.get("20170212"));
Assert.assertNotNull(newEntries.get("20170213"));
Assert.assertNotNull(newEntries.get("20170214"));
Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), Long.valueOf(currentDateTimeRounded.getMillis()));
}
Use of org.joda.time.DateTimeZone in the pinot project (by LinkedIn).
Class DataCompletenessTaskUtilsTest, method testGetDateTimeFormatterForDataset:
@Test
public void testGetDateTimeFormatterForDataset() {
// Formatter output must round timestamps down to the dataset's bucket granularity,
// rendered in the dataset's time zone.
DateTimeZone zone = DateTimeZone.UTC;
// NOTE: avoid leading-zero int literals (e.g. 01, 05) — Java parses them as octal.
long dateTimeInMS = new DateTime(2017, 1, 12, 15, 30, zone).getMillis();
String columnName = "Date";
// DAYS bucket
TimeGranularity timeGranularity = new TimeGranularity(1, TimeUnit.DAYS);
String timeFormat = TimeSpec.SINCE_EPOCH_FORMAT;
TimeSpec timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
DateTimeFormatter dateTimeFormatter = DataCompletenessTaskUtils.getDateTimeFormatterForDataset(timeSpec, zone);
Assert.assertEquals(dateTimeFormatter.print(dateTimeInMS), "20170112");
// DAYS bucket, non-UTC zone: the printed day follows the dataset zone
zone = DateTimeZone.forID("America/Los_Angeles");
long dateTimeInMS1 = new DateTime(2017, 1, 12, 5, 30, zone).getMillis();
timeGranularity = new TimeGranularity(1, TimeUnit.DAYS);
timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
dateTimeFormatter = DataCompletenessTaskUtils.getDateTimeFormatterForDataset(timeSpec, zone);
Assert.assertEquals(dateTimeFormatter.print(dateTimeInMS1), "20170112");
// HOURS bucket
zone = DateTimeZone.UTC;
dateTimeInMS = new DateTime(2017, 1, 12, 15, 30, zone).getMillis();
timeGranularity = new TimeGranularity(1, TimeUnit.HOURS);
timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
dateTimeFormatter = DataCompletenessTaskUtils.getDateTimeFormatterForDataset(timeSpec, zone);
Assert.assertEquals(dateTimeFormatter.print(dateTimeInMS), "2017011215");
// MINUTES bucket
timeGranularity = new TimeGranularity(1, TimeUnit.MINUTES);
timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
dateTimeFormatter = DataCompletenessTaskUtils.getDateTimeFormatterForDataset(timeSpec, zone);
Assert.assertEquals(dateTimeFormatter.print(dateTimeInMS), "201701121530");
// DEFAULT bucket (unsupported unit falls back to the hourly format)
timeGranularity = new TimeGranularity(1, TimeUnit.MILLISECONDS);
timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
dateTimeFormatter = DataCompletenessTaskUtils.getDateTimeFormatterForDataset(timeSpec, zone);
Assert.assertEquals(dateTimeFormatter.print(dateTimeInMS), "2017011215");
}
Use of org.joda.time.DateTimeZone in the pinot project (by LinkedIn).
Class TestDetectionJobSchedulerUtils, method testGetNewEntriesForDetectionSchedulerHourly:
@Test
public void testGetNewEntriesForDetectionSchedulerHourly() throws Exception {
// Hourly dataset: one detection entry expected per hour (UTC).
DatasetConfigDTO datasetConfig = new DatasetConfigDTO();
datasetConfig.setTimeColumn("Date");
datasetConfig.setTimeUnit(TimeUnit.HOURS);
datasetConfig.setTimeDuration(1);
DateTimeZone dateTimeZone = DateTimeZone.UTC;
AnomalyFunctionDTO anomalyFunction = new AnomalyFunctionDTO();
DateTimeFormatter dateTimeFormatter = DetectionJobSchedulerUtils.getDateTimeFormatterForDataset(datasetConfig, dateTimeZone);
// "now" is 2017-02-14 03:36; for an hourly dataset it rounds down to hour 03.
String currentDateTimeString = "201702140336";
String currentDateTimeStringRounded = "2017021403";
DateTime currentDateTime = minuteDateTimeFormatter.parseDateTime(currentDateTimeString);
DateTime currentDateTimeRounded = dateTimeFormatter.parseDateTime(currentDateTimeStringRounded);
DetectionStatusDTO lastEntryForFunction = null;
// null last entry: exactly one new entry, for the current (rounded) hour
Map<String, Long> newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
Assert.assertEquals(newEntries.size(), 1);
// Long.valueOf instead of the deprecated new Long(...) constructor
Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), Long.valueOf(currentDateTimeRounded.getMillis()));
// last entry same as current time: nothing new to schedule
lastEntryForFunction = new DetectionStatusDTO();
lastEntryForFunction.setDateToCheckInSDF(currentDateTimeStringRounded);
lastEntryForFunction.setDateToCheckInMS(currentDateTimeRounded.getMillis());
newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
Assert.assertEquals(newEntries.size(), 0);
// last entry 1 hour before current time: one catch-up entry
String lastEntryDateTimeString = "2017021402";
DateTime lastEntryDateTime = dateTimeFormatter.parseDateTime(lastEntryDateTimeString);
lastEntryForFunction = new DetectionStatusDTO();
lastEntryForFunction.setDateToCheckInSDF(lastEntryDateTimeString);
lastEntryForFunction.setDateToCheckInMS(lastEntryDateTime.getMillis());
newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
Assert.assertEquals(newEntries.size(), 1);
Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), Long.valueOf(currentDateTimeRounded.getMillis()));
// last entry 3 hours before current time: one entry per missed hour
lastEntryDateTimeString = "2017021400";
lastEntryDateTime = dateTimeFormatter.parseDateTime(lastEntryDateTimeString);
lastEntryForFunction = new DetectionStatusDTO();
lastEntryForFunction.setDateToCheckInSDF(lastEntryDateTimeString);
lastEntryForFunction.setDateToCheckInMS(lastEntryDateTime.getMillis());
newEntries = DetectionJobSchedulerUtils.getNewEntries(currentDateTime, lastEntryForFunction, anomalyFunction, datasetConfig, dateTimeZone);
Assert.assertEquals(newEntries.size(), 3);
Assert.assertNotNull(newEntries.get("2017021401"));
Assert.assertNotNull(newEntries.get("2017021402"));
Assert.assertNotNull(newEntries.get("2017021403"));
Assert.assertEquals(newEntries.get(currentDateTimeStringRounded), Long.valueOf(currentDateTimeRounded.getMillis()));
}
Use of org.joda.time.DateTimeZone in the pinot project (by LinkedIn).
Class FetchMetricDataAndExistingAnomaliesTool, method fetchMetric:
/**
 * Fetch metric time series from thirdeye over HTTP and parse the response.
 * @param host host name (includes http://)
 * @param port port number
 * @param dataset dataset/collection name
 * @param metric metric name
 * @param startTime start time of requested data in DateTime
 * @param endTime end time of requested data in DateTime
 * @param timeGranularity the time granularity
 * @param dimensions the list of dimensions; may be null or empty to omit
 * @param filterJson filters, in JSON; may be null or empty to omit
 * @param timezone time zone id (e.g. "America/Los_Angeles") applied to start/end times
 * @return {dimension -> {timestamp in millis -> value}}
 * @throws IOException if the request fails or the response body cannot be read
 */
public Map<String, Map<Long, String>> fetchMetric(String host, int port, String dataset, String metric, DateTime startTime, DateTime endTime, TimeGranularity timeGranularity, String dimensions, String filterJson, String timezone) throws IOException {
HttpClient client = HttpClientBuilder.create().build();
// Re-interpret the requested window in the caller-supplied time zone.
DateTimeZone dateTimeZone = DateTimeZone.forID(timezone);
startTime = new DateTime(startTime, dateTimeZone);
endTime = new DateTime(endTime, dateTimeZone);
// format http GET command
StringBuilder urlBuilder = new StringBuilder(host + ":" + port + DEFAULT_PATH_TO_TIMESERIES);
urlBuilder.append(DATASET + EQUALS + dataset + AND);
urlBuilder.append(METRIC + EQUALS + metric + AND);
urlBuilder.append(VIEW + EQUALS + DEFAULT_VIEW + AND);
urlBuilder.append(TIME_START + EQUALS + Long.toString(startTime.getMillis()) + AND);
urlBuilder.append(TIME_END + EQUALS + Long.toString(endTime.getMillis()) + AND);
urlBuilder.append(GRANULARITY + EQUALS + timeGranularity.toString() + AND);
// BUGFIX: was (dimensions != null || !dimensions.isEmpty()) — that NPEs when
// dimensions is null and appends an empty parameter when it is "". Same for filterJson.
if (dimensions != null && !dimensions.isEmpty()) {
urlBuilder.append(DIMENSIONS + EQUALS + dimensions + AND);
}
if (filterJson != null && !filterJson.isEmpty()) {
urlBuilder.append(FILTERS + EQUALS + URLEncoder.encode(filterJson, "UTF-8"));
}
HttpGet httpGet = new HttpGet(urlBuilder.toString());
// Execute GET command
httpGet.addHeader("User-Agent", "User");
HttpResponse response = client.execute(httpGet);
LOG.info("Response Code : {}", response.getStatusLine().getStatusCode());
// Read the whole response body. Use an explicit charset (the old code used the
// platform default) and try-with-resources so the reader is always closed.
StringBuilder content = new StringBuilder();
try (BufferedReader rd = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), "UTF-8"))) {
String line;
while ((line = rd.readLine()) != null) {
content.append(line);
}
}
Map<String, Map<Long, String>> resultMap = null;
try {
// Response shape: {"timeSeriesData": {"time": [t0, t1, ...], "<dim>": [v0, v1, ...], ...}}
JSONObject jsonObject = new JSONObject(content.toString());
JSONObject timeSeriesData = (JSONObject) jsonObject.get("timeSeriesData");
JSONArray timeArray = (JSONArray) timeSeriesData.get("time");
resultMap = new HashMap<>();
Iterator<String> timeSeriesDataIterator = timeSeriesData.keys();
while (timeSeriesDataIterator.hasNext()) {
String key = timeSeriesDataIterator.next();
// "time" holds the shared timestamps, not a series of its own.
if (key.equalsIgnoreCase("time")) {
continue;
}
Map<Long, String> entry = new HashMap<>();
JSONArray observed = (JSONArray) timeSeriesData.get(key);
for (int i = 0; i < timeArray.length(); i++) {
long timestamp = (long) timeArray.get(i);
String observedValue = observed.get(i).toString();
entry.put(timestamp, observedValue);
}
resultMap.put(key, entry);
}
} catch (JSONException e) {
// BUGFIX: the old call bound the exception to the "{}" placeholder instead of
// passing it as the throwable, which discarded the stack trace.
LOG.error("Unable to resolve JSON string", e);
}
return resultMap;
}
Aggregations