Use of org.joda.time.DateTimeZone in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testGroupByWithTimeZone.
@Test
public void testGroupByWithTimeZone() {
DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles");
GroupByQuery query = GroupByQuery.builder()
    .setDataSource(QueryRunnerTestHelper.dataSource)
    .setInterval("2011-03-31T00:00:00-07:00/2011-04-02T00:00:00-07:00")
    .setDimensions(Lists.newArrayList((DimensionSpec) new DefaultDimensionSpec("quality", "alias")))
    .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
    .setGranularity(new PeriodGranularity(new Period("P1D"), null, tz))
    .build();
List<Row> expectedResults = Arrays.asList(
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-03-31", tz), "alias", "automotive", "rows", 1L, "idx", 135L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-03-31", tz), "alias", "business", "rows", 1L, "idx", 118L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-03-31", tz), "alias", "entertainment", "rows", 1L, "idx", 158L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-03-31", tz), "alias", "health", "rows", 1L, "idx", 120L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-03-31", tz), "alias", "mezzanine", "rows", 3L, "idx", 2870L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-03-31", tz), "alias", "news", "rows", 1L, "idx", 121L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-03-31", tz), "alias", "premium", "rows", 3L, "idx", 2900L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-03-31", tz), "alias", "technology", "rows", 1L, "idx", 78L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-03-31", tz), "alias", "travel", "rows", 1L, "idx", 119L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-04-01", tz), "alias", "automotive", "rows", 1L, "idx", 147L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-04-01", tz), "alias", "business", "rows", 1L, "idx", 112L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-04-01", tz), "alias", "entertainment", "rows", 1L, "idx", 166L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-04-01", tz), "alias", "health", "rows", 1L, "idx", 113L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-04-01", tz), "alias", "mezzanine", "rows", 3L, "idx", 2447L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-04-01", tz), "alias", "news", "rows", 1L, "idx", 114L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-04-01", tz), "alias", "premium", "rows", 3L, "idx", 2505L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-04-01", tz), "alias", "technology", "rows", 1L, "idx", 97L),
    GroupByQueryRunnerTestHelper.createExpectedRow(new DateTime("2011-04-01", tz), "alias", "travel", "rows", 1L, "idx", 126L));
Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
TestHelper.assertExpectedObjects(expectedResults, results, "");
}
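The only joda-time piece in this test is the named zone handed to PeriodGranularity. A minimal, Druid-independent sketch of what that zone does to the day boundary (the class name is illustrative, not part of the test):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class LosAngelesDayBoundary {
    public static void main(String[] args) {
        DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles");
        // Midnight local time on 2011-03-31 (PDT, UTC-7 on that date) ...
        DateTime dayStart = new DateTime(2011, 3, 31, 0, 0, tz);
        System.out.println(dayStart);                             // 2011-03-31T00:00:00.000-07:00
        // ... corresponds to 07:00 UTC, which is why P1D granularity in this zone
        // produces day buckets shifted seven hours from the UTC day boundary.
        System.out.println(dayStart.withZone(DateTimeZone.UTC));  // 2011-03-31T07:00:00.000Z
    }
}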
Use of org.joda.time.DateTimeZone in project elasticsearch by elastic.
From the class DateHistogramIT, method testSingleValuedFieldWithTimeZone.
public void testSingleValuedFieldWithTimeZone() throws Exception {
SearchResponse response = client().prepareSearch("idx")
    .addAggregation(dateHistogram("histo")
        .field("date")
        .dateHistogramInterval(DateHistogramInterval.DAY)
        .minDocCount(1)
        .timeZone(DateTimeZone.forID("+01:00")))
    .execute().actionGet();
DateTimeZone tz = DateTimeZone.forID("+01:00");
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(6));
DateTime key = new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(4);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(5);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
}
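The expected keys sit at 23:00 UTC because daily buckets in a +01:00 zone start at local midnight, which is 23:00 UTC on the previous calendar day. A minimal joda-time-only illustration of that boundary (not Elasticsearch code):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class PlusOneDayBoundary {
    public static void main(String[] args) {
        DateTimeZone plusOne = DateTimeZone.forID("+01:00");
        // Local midnight in +01:00 is 23:00 UTC the previous day, matching the
        // first expected bucket key above (2012-01-01T23:00Z).
        DateTime localMidnight = new DateTime(2012, 1, 2, 0, 0, plusOne);
        System.out.println(localMidnight.withZone(DateTimeZone.UTC)); // 2012-01-01T23:00:00.000Z
    }
}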
Use of org.joda.time.DateTimeZone in project elasticsearch by elastic.
From the class DateHistogramIT, method testSingleValued_timeZone_epoch.
public void testSingleValued_timeZone_epoch() throws Exception {
String format = randomBoolean() ? "epoch_millis" : "epoch_second";
int millisDivider = format.equals("epoch_millis") ? 1 : 1000;
if (randomBoolean()) {
format = format + "||date_optional_time";
}
DateTimeZone tz = DateTimeZone.forID("+01:00");
SearchResponse response = client().prepareSearch("idx")
    .addAggregation(dateHistogram("histo")
        .field("date")
        .dateHistogramInterval(DateHistogramInterval.DAY)
        .minDocCount(1)
        .timeZone(tz)
        .format(format))
    .execute().actionGet();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(6));
List<DateTime> expectedKeys = new ArrayList<>();
expectedKeys.add(new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC));
expectedKeys.add(new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC));
expectedKeys.add(new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC));
expectedKeys.add(new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC));
expectedKeys.add(new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC));
expectedKeys.add(new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC));
Iterator<DateTime> keyIterator = expectedKeys.iterator();
for (Histogram.Bucket bucket : buckets) {
assertThat(bucket, notNullValue());
DateTime expectedKey = keyIterator.next();
assertThat(bucket.getKeyAsString(), equalTo(Long.toString(expectedKey.getMillis() / millisDivider)));
assertThat(((DateTime) bucket.getKey()), equalTo(expectedKey));
assertThat(bucket.getDocCount(), equalTo(1L));
}
}
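The epoch formats only change how the key is printed; the underlying instant is the same in every zone. A small standalone joda-time sketch (variable names are illustrative, not from the test):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class EpochKeySketch {
    public static void main(String[] args) {
        // getMillis() is the offset from the UTC epoch and does not depend on the
        // zone the DateTime is expressed in, so epoch_millis / epoch_second keys
        // carry no time-zone information.
        DateTime keyUtc = new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC);
        DateTime keyPlusOne = keyUtc.withZone(DateTimeZone.forID("+01:00"));
        System.out.println(keyUtc.getMillis());         // 1325458800000
        System.out.println(keyPlusOne.getMillis());     // same value
        System.out.println(keyUtc.getMillis() / 1000);  // epoch_second equivalent
    }
}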
Use of org.joda.time.DateTimeZone in project elasticsearch by elastic.
From the class DateHistogramIT, method testIssue6965.
public void testIssue6965() {
SearchResponse response = client().prepareSearch("idx")
    .addAggregation(dateHistogram("histo")
        .field("date")
        .timeZone(DateTimeZone.forID("+01:00"))
        .dateHistogramInterval(DateHistogramInterval.MONTH)
        .minDocCount(0))
    .execute().actionGet();
assertSearchResponse(response);
DateTimeZone tz = DateTimeZone.forID("+01:00");
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
DateTime key = new DateTime(2011, 12, 31, 23, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 1, 31, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
key = new DateTime(2012, 2, 29, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
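The third monthly key falls on 2012-02-29T23:00Z because March 2012 begins at local midnight in the +01:00 zone and 2012 is a leap year. A standalone joda-time check of that boundary (illustrative only, not part of the test):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class MonthlyBucketBoundary {
    public static void main(String[] args) {
        DateTimeZone plusOne = DateTimeZone.forID("+01:00");
        // Start of March 2012 in the +01:00 zone, viewed in UTC: the leap day at 23:00.
        DateTime marchStartLocal = new DateTime(2012, 3, 1, 0, 0, plusOne);
        System.out.println(marchStartLocal.withZone(DateTimeZone.UTC)); // 2012-02-29T23:00:00.000Z
    }
}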
Use of org.joda.time.DateTimeZone in project elasticsearch by elastic.
From the class DateHistogramIT, method testSingleValueFieldWithExtendedBoundsTimezone.
/**
* Test date histogram aggregation with hour interval, timezone shift and
* extended bounds (see https://github.com/elastic/elasticsearch/issues/12278)
*/
public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception {
String index = "test12278";
prepareCreate(index)
    .setSettings(Settings.builder()
        .put(indexSettings())
        .put("index.number_of_shards", 1)
        .put("index.number_of_replicas", 0))
    .execute().actionGet();
DateMathParser parser = new DateMathParser(Joda.getStrictStandardDateFormatter());
// we pick a random timezone offset of +12/-12 hours and insert two documents
// one at 00:00 in that time zone and one at 12:00
List<IndexRequestBuilder> builders = new ArrayList<>();
int timeZoneHourOffset = randomIntBetween(-12, 12);
DateTimeZone timezone = DateTimeZone.forOffsetHours(timeZoneHourOffset);
DateTime timeZoneStartToday = new DateTime(parser.parse("now/d", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
DateTime timeZoneNoonToday = new DateTime(parser.parse("now/d+12h", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
builders.add(indexDoc(index, timeZoneStartToday, 1));
builders.add(indexDoc(index, timeZoneNoonToday, 2));
indexRandom(true, builders);
ensureSearchable(index);
SearchResponse response = null;
// retrieve those docs with the same time zone and extended bounds
response = client().prepareSearch(index)
    .setQuery(QueryBuilders.rangeQuery("date")
        .from("now/d").to("now/d")
        .includeLower(true).includeUpper(true)
        .timeZone(timezone.getID()))
    .addAggregation(dateHistogram("histo")
        .field("date")
        .dateHistogramInterval(DateHistogramInterval.hours(1))
        .timeZone(timezone)
        .minDocCount(0)
        .extendedBounds(new ExtendedBounds("now/d", "now/d+23h")))
    .execute().actionGet();
assertSearchResponse(response);
assertThat("Expected 24 buckets for one day aggregation with hourly interval", response.getHits().getTotalHits(), equalTo(2L));
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(24));
for (int i = 0; i < buckets.size(); i++) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat("InternalBucket " + i + " had wrong key", (DateTime) bucket.getKey(), equalTo(new DateTime(timeZoneStartToday.getMillis() + (i * 60 * 60 * 1000), DateTimeZone.UTC)));
if (i == 0 || i == 12) {
assertThat(bucket.getDocCount(), equalTo(1L));
} else {
assertThat(bucket.getDocCount(), equalTo(0L));
}
}
internalCluster().wipeIndices("test12278");
}
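The test derives "now/d" in a random fixed-offset zone through Elasticsearch's DateMathParser. A joda-time-only analogue of that rounding, useful for seeing what the bucket start instant looks like (the +5 offset and class name are arbitrary examples, not from the test):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class StartOfDayInZone {
    public static void main(String[] args) {
        DateTimeZone tz = DateTimeZone.forOffsetHours(5);
        // Current instant expressed in the fixed-offset zone, truncated to the
        // start of that zone's day, then re-expressed in UTC for comparison.
        DateTime startOfDayLocal = new DateTime(tz).withTimeAtStartOfDay();
        DateTime startOfDayUtc = startOfDayLocal.withZone(DateTimeZone.UTC);
        System.out.println(startOfDayLocal);
        System.out.println(startOfDayUtc);
    }
}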