Use of org.joda.time.DateTimeZone in project elasticsearch by elastic.
In the class DateHistogramIT, the method testSingleValueFieldWithExtendedBoundsTimezone.
/**
* Test date histogram aggregation with hour interval, timezone shift and
* extended bounds (see https://github.com/elastic/elasticsearch/issues/12278)
*/
public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception {
    String index = "test12278";
    prepareCreate(index)
            .setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0))
            .execute().actionGet();
    DateMathParser parser = new DateMathParser(Joda.getStrictStandardDateFormatter());
    // we pick a random timezone offset of +12/-12 hours and insert two documents
    // one at 00:00 in that time zone and one at 12:00
    List<IndexRequestBuilder> builders = new ArrayList<>();
    int timeZoneHourOffset = randomIntBetween(-12, 12);
    DateTimeZone timezone = DateTimeZone.forOffsetHours(timeZoneHourOffset);
    DateTime timeZoneStartToday = new DateTime(parser.parse("now/d", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
    DateTime timeZoneNoonToday = new DateTime(parser.parse("now/d+12h", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
    builders.add(indexDoc(index, timeZoneStartToday, 1));
    builders.add(indexDoc(index, timeZoneNoonToday, 2));
    indexRandom(true, builders);
    ensureSearchable(index);
    SearchResponse response = null;
    // retrieve those docs with the same time zone and extended bounds
    response = client().prepareSearch(index)
            .setQuery(QueryBuilders.rangeQuery("date").from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getID()))
            .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.hours(1))
                    .timeZone(timezone).minDocCount(0).extendedBounds(new ExtendedBounds("now/d", "now/d+23h")))
            .execute().actionGet();
    assertSearchResponse(response);
assertThat("Expected 24 buckets for one day aggregation with hourly interval", response.getHits().getTotalHits(), equalTo(2L));
    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(24));
    for (int i = 0; i < buckets.size(); i++) {
        Histogram.Bucket bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        assertThat("InternalBucket " + i + " had wrong key", (DateTime) bucket.getKey(),
                equalTo(new DateTime(timeZoneStartToday.getMillis() + (i * 60 * 60 * 1000), DateTimeZone.UTC)));
        if (i == 0 || i == 12) {
            assertThat(bucket.getDocCount(), equalTo(1L));
        } else {
            assertThat(bucket.getDocCount(), equalTo(0L));
        }
    }
    internalCluster().wipeIndices("test12278");
}
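For readers unfamiliar with fixed-offset zones in Joda-Time, the sketch below shows the date math this test depends on: building a zone with DateTimeZone.forOffsetHours, rounding "now" down to that zone's start of day, and re-expressing the instant in UTC. It is a minimal illustration assuming only the joda-time library; it does not use the Elasticsearch-internal DateMathParser from the test, and the class name OffsetZoneRoundingSketch and the hard-coded -7 offset are invented for the example.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class OffsetZoneRoundingSketch {
    public static void main(String[] args) {
        // Fixed-offset zone comparable to the randomized one in the test.
        DateTimeZone zone = DateTimeZone.forOffsetHours(-7);

        // "now/d" in that zone: round the current instant down to the zone's start of day...
        DateTime startOfDayInZone = new DateTime(zone).withTimeAtStartOfDay();
        // ...and re-express the same instant in UTC, as the test does for its expected bucket keys.
        DateTime startOfDayUtc = startOfDayInZone.withZone(DateTimeZone.UTC);

        // "now/d+12h" for the second document.
        DateTime noonUtc = startOfDayInZone.plusHours(12).withZone(DateTimeZone.UTC);

        System.out.println("start of day (zone): " + startOfDayInZone);
        System.out.println("start of day (UTC):  " + startOfDayUtc);
        System.out.println("noon (UTC):          " + noonUtc);
    }
}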
Use of org.joda.time.DateTimeZone in project elasticsearch by elastic.
In the class DateDerivativeIT, the method testSingleValuedFieldNormalised_timeZone_CET_DstEnd.
/**
* Do a derivative on a date histogram with time zone CET at DST end
*/
public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Exception {
    createIndex(IDX_DST_END);
    DateTimeZone timezone = DateTimeZone.forID("CET");
    List<IndexRequestBuilder> builders = new ArrayList<>();
    addNTimes(1, IDX_DST_END, new DateTime("2012-10-27T01:00:00", timezone), builders);
    // day with dst shift -1h, 25h long
    addNTimes(2, IDX_DST_END, new DateTime("2012-10-28T01:00:00", timezone), builders);
    addNTimes(3, IDX_DST_END, new DateTime("2012-10-29T01:00:00", timezone), builders);
    addNTimes(4, IDX_DST_END, new DateTime("2012-10-30T01:00:00", timezone), builders);
    indexRandom(true, builders);
    ensureSearchable();
    SearchResponse response = client().prepareSearch(IDX_DST_END)
            .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY)
                    .timeZone(timezone).minDocCount(0)
                    .subAggregation(derivative("deriv", "_count").unit(DateHistogramInterval.HOUR)))
            .execute().actionGet();
    assertSearchResponse(response);
    Histogram deriv = response.getAggregations().get("histo");
    assertThat(deriv, notNullValue());
    assertThat(deriv.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = deriv.getBuckets();
    assertThat(buckets.size(), equalTo(4));
    assertBucket(buckets.get(0), new DateTime("2012-10-27", timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null, null);
    assertBucket(buckets.get(1), new DateTime("2012-10-28", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d, 1d / 24d);
    // the following is normalized using a 25h bucket width
    assertBucket(buckets.get(2), new DateTime("2012-10-29", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d, 1d / 25d);
    assertBucket(buckets.get(3), new DateTime("2012-10-30", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d, 1d / 24d);
}
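The 1d / 25d normalization above comes straight from the length of the local day on which CET leaves DST: 2012-10-28 is 25 hours long because clocks go back one hour. A standalone Joda-Time check of that fact, assuming only the joda-time library (the class name DstEndBucketWidthSketch is invented for the example):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;

public class DstEndBucketWidthSketch {
    public static void main(String[] args) {
        DateTimeZone cet = DateTimeZone.forID("CET");

        // Local day boundaries around the 2012 DST end (clocks go back one hour on 2012-10-28).
        DateTime oct28 = new DateTime(2012, 10, 28, 0, 0, cet);
        DateTime oct29 = new DateTime(2012, 10, 29, 0, 0, cet);
        DateTime oct30 = new DateTime(2012, 10, 30, 0, 0, cet);

        // 25 hours: the bucket width the derivative normalizes by (1d / 25d above).
        System.out.println(new Duration(oct28, oct29).getStandardHours()); // 25
        // 24 hours for an ordinary day.
        System.out.println(new Duration(oct29, oct30).getStandardHours()); // 24
    }
}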
Use of org.joda.time.DateTimeZone in project elasticsearch by elastic.
In the class DateDerivativeIT, the method testSingleValuedFieldNormalised_timeZone_AsiaKathmandu.
/**
 * Also check for time zone shifts that are not a full hour, e.g.
 * Asia/Kathmandu, 1 Jan 1986 - Time Zone Change (IST → NPT): at 00:00:00 clocks were turned forward 15 minutes.
*/
public void testSingleValuedFieldNormalised_timeZone_AsiaKathmandu() throws Exception {
    createIndex(IDX_DST_KATHMANDU);
    DateTimeZone timezone = DateTimeZone.forID("Asia/Kathmandu");
    List<IndexRequestBuilder> builders = new ArrayList<>();
    addNTimes(1, IDX_DST_KATHMANDU, new DateTime("1985-12-31T22:30:00", timezone), builders);
    // the shift happens during the next bucket, which includes the 45min that do not start on the full hour
    addNTimes(2, IDX_DST_KATHMANDU, new DateTime("1985-12-31T23:30:00", timezone), builders);
    addNTimes(3, IDX_DST_KATHMANDU, new DateTime("1986-01-01T01:30:00", timezone), builders);
    addNTimes(4, IDX_DST_KATHMANDU, new DateTime("1986-01-01T02:30:00", timezone), builders);
    indexRandom(true, builders);
    ensureSearchable();
    SearchResponse response = client().prepareSearch(IDX_DST_KATHMANDU)
            .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.HOUR)
                    .timeZone(timezone).minDocCount(0)
                    .subAggregation(derivative("deriv", "_count").unit(DateHistogramInterval.MINUTE)))
            .execute().actionGet();
    assertSearchResponse(response);
    Histogram deriv = response.getAggregations().get("histo");
    assertThat(deriv, notNullValue());
    assertThat(deriv.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = deriv.getBuckets();
    assertThat(buckets.size(), equalTo(4));
    assertBucket(buckets.get(0), new DateTime("1985-12-31T22:00:00", timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null, null);
    assertBucket(buckets.get(1), new DateTime("1985-12-31T23:00:00", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d, 1d / 60d);
    // the following is normalized using a 105min bucket width
    assertBucket(buckets.get(2), new DateTime("1986-01-01T01:00:00", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d, 1d / 105d);
    assertBucket(buckets.get(3), new DateTime("1986-01-01T02:00:00", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d, 1d / 60d);
}
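The 105-minute width comes from the Kathmandu offset change itself: the zone moved from UTC+05:30 (IST) to UTC+05:45 (NPT), so the hourly bucket that spans the transition covers 60 + 45 wall-clock minutes. A minimal Joda-Time check of those offsets, assuming only the joda-time library (the class name KathmanduOffsetChangeSketch is invented for the example):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class KathmanduOffsetChangeSketch {
    public static void main(String[] args) {
        DateTimeZone kathmandu = DateTimeZone.forID("Asia/Kathmandu");

        DateTime beforeShift = new DateTime(1985, 12, 31, 23, 30, kathmandu);
        DateTime afterShift = new DateTime(1986, 1, 1, 1, 30, kathmandu);

        // +05:30 before the change, +05:45 afterwards: the offset moved by 15 minutes,
        // which is why the histogram bucket spanning the transition is 105 minutes long (60 + 45),
        // matching the 1d / 105d normalization asserted above.
        System.out.println(kathmandu.getOffset(beforeShift) / 60000); // 330
        System.out.println(kathmandu.getOffset(afterShift) / 60000);  // 345
    }
}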
Use of org.joda.time.DateTimeZone in project openhab1-addons by openhab.
In the class CalDavLoaderImpl, the method updated.
@Override
public void updated(Dictionary<String, ?> config) throws ConfigurationException {
    if (config == null) {
        log.debug("Update was called with a null configuration for CalDAV IO.");
        return;
    }
    log.debug("Update was called for CalDAV IO.");
    CompatibilityHints.setHintEnabled(CompatibilityHints.KEY_RELAXED_PARSING, true);
    Map<String, CalDavConfig> configMap = new HashMap<String, CalDavConfig>();
    Enumeration<String> iter = config.keys();
    while (iter.hasMoreElements()) {
        String key = iter.nextElement();
        if (key.equals("service.pid")) {
            continue;
        }
        log.trace("processing configuration parameter: {}", key);
        if (key.equals(PROP_TIMEZONE)) {
            String newTimeZoneStr = Objects.toString(config.get(key), null);
            if (StringUtils.isBlank(newTimeZoneStr)) {
                log.info("The {} setting was configured with an empty value. Default value '{}' will be used instead.", PROP_TIMEZONE, defaultTimeZone);
                continue;
            }
            DateTimeZone newTimeZone = DateTimeZone.forID(newTimeZoneStr);
            if (newTimeZone == null) {
                log.warn("Invalid timezone value: {}", newTimeZoneStr);
                throw new ConfigurationException(PROP_TIMEZONE, "Invalid timezone value: " + newTimeZoneStr);
            }
            log.debug("Overriding default timezone {} with {}", defaultTimeZone, newTimeZone);
            defaultTimeZone = newTimeZone;
            continue;
        }
        String[] keys = key.split(":");
        if (keys.length != 2) {
            log.warn("Unable to parse configuration parameter: {}", key);
            throw new ConfigurationException("CalDAV IO", "Unable to parse configuration parameter: " + key);
        }
        String id = keys[0];
        String paramKey = keys[1];
        CalDavConfig calDavConfig = configMap.get(id);
        if (calDavConfig == null) {
            calDavConfig = new CalDavConfig();
            configMap.put(id, calDavConfig);
        }
        String value = Objects.toString(config.get(key), null);
        calDavConfig.setKey(id);
        if (paramKey.equals(PROP_USERNAME)) {
            calDavConfig.setUsername(value);
        } else if (paramKey.equals(PROP_PASSWORD)) {
            calDavConfig.setPassword(value);
        } else if (paramKey.equals(PROP_URL)) {
            calDavConfig.setUrl(value);
        } else if (paramKey.equals(PROP_RELOAD_INTERVAL)) {
            calDavConfig.setReloadMinutes(Integer.parseInt(value));
        } else if (paramKey.equals(PROP_PRELOAD_TIME)) {
            calDavConfig.setPreloadMinutes(Integer.parseInt(value));
        } else if (paramKey.equals(PROP_HISTORIC_LOAD_TIME)) {
            calDavConfig.setHistoricLoadMinutes(Integer.parseInt(value));
        } else if (paramKey.equals(PROP_LAST_MODIFIED_TIMESTAMP_VALID)) {
            calDavConfig.setLastModifiedFileTimeStampValid(BooleanUtils.toBoolean(value));
        } else if (paramKey.equals(PROP_DISABLE_CERTIFICATE_VERIFICATION)) {
            calDavConfig.setDisableCertificateVerification(BooleanUtils.toBoolean(value));
        } else if (paramKey.equals(PROP_CHARSET)) {
            try {
                Charset.forName(value);
                calDavConfig.setCharset(value);
            } catch (UnsupportedCharsetException e) {
                log.warn("Character set not valid: {}", value);
            }
        }
    }
    // verify if all required parameters are set
    for (String id : configMap.keySet()) {
        if (StringUtils.isEmpty(configMap.get(id).getUrl())) {
            log.warn("A URL must be configured for calendar '{}'", id);
            throw new ConfigurationException("CalDAV IO", "A URL must be configured for calendar '" + id + "'");
        }
        log.trace("config for calendar '{}': {}", id, configMap.get(id));
    }
    // initialize event cache
    for (CalDavConfig calDavConfig : configMap.values()) {
        final CalendarRuntime eventRuntime = new CalendarRuntime();
        eventRuntime.setConfig(calDavConfig);
        File cachePath = Util.getCachePath(calDavConfig.getKey());
        if (!cachePath.exists() && !cachePath.mkdirs()) {
            log.warn("cannot create directory ({}) for calendar caching (missing rights?)", cachePath.getAbsoluteFile());
            continue;
        }
        EventStorage.getInstance().getEventCache().put(calDavConfig.getKey(), eventRuntime);
    }
    log.info("CalDAV IO is properly configured.");
    setProperlyConfigured(true);
}
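One detail worth noting about the timezone handling above: DateTimeZone.forID(String) never returns null for a non-null argument; it throws IllegalArgumentException when the ID is not recognized (and returns the default zone when passed null), so the null check in the code guards a case that cannot occur. Below is a hedged sketch of the same validation written around that behaviour, assuming only the joda-time library; the helper resolveTimeZone and the class TimeZoneConfigSketch are hypothetical and not part of the openHAB binding.

import org.joda.time.DateTimeZone;

public class TimeZoneConfigSketch {
    /**
     * Resolve a configured time zone ID, falling back to a default.
     * DateTimeZone.forID(String) throws IllegalArgumentException for unknown IDs,
     * so the error case is handled with try/catch rather than a null check.
     */
    static DateTimeZone resolveTimeZone(String configuredId, DateTimeZone fallback) {
        if (configuredId == null || configuredId.trim().isEmpty()) {
            return fallback;
        }
        try {
            return DateTimeZone.forID(configuredId);
        } catch (IllegalArgumentException e) {
            // e.g. "Not/AZone" -- surface the problem but keep a usable default.
            System.err.println("Invalid timezone value: " + configuredId);
            return fallback;
        }
    }

    public static void main(String[] args) {
        System.out.println(resolveTimeZone("Europe/Berlin", DateTimeZone.UTC)); // Europe/Berlin
        System.out.println(resolveTimeZone("Not/AZone", DateTimeZone.UTC));     // falls back to UTC
    }
}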
Use of org.joda.time.DateTimeZone in project killbill by killbill.
In the class TestDefaultSubscriptionBundleTimeline, the method testOneEntitlementWithInitialBlockingStateImpl.
private void testOneEntitlementWithInitialBlockingStateImpl(final boolean regressionFlagForOlderVersionThan_0_17_X) throws CatalogApiException {
    clock.setDay(new LocalDate(2013, 1, 1));
    final DateTimeZone accountTimeZone = DateTimeZone.UTC;
    final UUID accountId = UUID.randomUUID();
    final UUID bundleId = UUID.randomUUID();
    final String externalKey = "foo";
    final UUID entitlementId = UUID.randomUUID();
    final List<SubscriptionBaseTransition> allTransitions = new ArrayList<SubscriptionBaseTransition>();
    final List<BlockingState> blockingStates = new ArrayList<BlockingState>();
    final BlockingState bs1 = new DefaultBlockingState(UUID.randomUUID(), entitlementId, BlockingStateType.SUBSCRIPTION,
            DefaultEntitlementApi.ENT_STATE_BLOCKED, DefaultEntitlementService.ENTITLEMENT_SERVICE_NAME,
            true, true, false, clock.getUTCNow(), clock.getUTCNow(), clock.getUTCNow(), 0L);
    blockingStates.add(bs1);
    clock.addDays(1);
    DateTime effectiveDate = new DateTime(2013, 1, 1, 15, 43, 25, 0, DateTimeZone.UTC);
    final SubscriptionBaseTransition tr1 = createTransition(entitlementId, EventType.API_USER, ApiEventType.CREATE, effectiveDate, clock.getUTCNow(), null, "trial");
    allTransitions.add(tr1);
    if (!regressionFlagForOlderVersionThan_0_17_X) {
        final BlockingState bsCreate = new DefaultBlockingState(UUID.randomUUID(), entitlementId, BlockingStateType.SUBSCRIPTION,
                DefaultEntitlementApi.ENT_STATE_START, DefaultEntitlementService.ENTITLEMENT_SERVICE_NAME,
                false, false, false, effectiveDate, clock.getUTCNow(), clock.getUTCNow(), 0L);
        blockingStates.add(bsCreate);
    }
    effectiveDate = effectiveDate.plusDays(30);
    clock.addDays(30);
    final SubscriptionBaseTransition tr2 = createTransition(entitlementId, EventType.PHASE, null, effectiveDate, clock.getUTCNow(), "trial", "phase");
    allTransitions.add(tr2);
    final String service = "boo";
    final BlockingState bs2 = new DefaultBlockingState(UUID.randomUUID(), entitlementId, BlockingStateType.SUBSCRIPTION,
            "NothingUseful", service,
            false, false, false, clock.getUTCNow(), clock.getUTCNow(), clock.getUTCNow(), 1L);
    blockingStates.add(bs2);
    effectiveDate = effectiveDate.plusDays(15);
    clock.addDays(15);
    final SubscriptionBaseTransition tr3 = createTransition(entitlementId, EventType.API_USER, ApiEventType.CANCEL, effectiveDate, clock.getUTCNow(), "phase", null);
    allTransitions.add(tr3);
    final List<Entitlement> entitlements = new ArrayList<Entitlement>();
    final Entitlement entitlement = createEntitlement(entitlementId, allTransitions, blockingStates);
    entitlements.add(entitlement);
    final SubscriptionBundleTimeline timeline = new DefaultSubscriptionBundleTimeline(accountId, bundleId, externalKey, entitlements, internalCallContext);
    assertEquals(timeline.getAccountId(), accountId);
    assertEquals(timeline.getBundleId(), bundleId);
    assertEquals(timeline.getExternalKey(), externalKey);
    final List<SubscriptionEvent> events = timeline.getSubscriptionEvents();
    assertEquals(events.size(), 5);
    assertEquals(events.get(0).getEffectiveDate().compareTo(new LocalDate(tr1.getEffectiveTransitionTime(), accountTimeZone)), 0);
    assertEquals(events.get(1).getEffectiveDate().compareTo(new LocalDate(tr1.getEffectiveTransitionTime(), accountTimeZone)), 0);
    assertEquals(events.get(2).getEffectiveDate().compareTo(new LocalDate(tr2.getEffectiveTransitionTime(), accountTimeZone)), 0);
    assertEquals(events.get(3).getEffectiveDate().compareTo(new LocalDate(bs2.getEffectiveDate(), accountTimeZone)), 0);
    assertEquals(events.get(4).getEffectiveDate().compareTo(new LocalDate(tr3.getEffectiveTransitionTime(), accountTimeZone)), 0);
    assertEquals(events.get(0).getSubscriptionEventType(), SubscriptionEventType.START_ENTITLEMENT);
    assertEquals(events.get(1).getSubscriptionEventType(), SubscriptionEventType.START_BILLING);
    assertEquals(events.get(2).getSubscriptionEventType(), SubscriptionEventType.PHASE);
    assertEquals(events.get(3).getSubscriptionEventType(), SubscriptionEventType.SERVICE_STATE_CHANGE);
    assertEquals(events.get(4).getSubscriptionEventType(), SubscriptionEventType.STOP_BILLING);
    assertEquals(events.get(0).getServiceName(), DefaultEntitlementService.ENTITLEMENT_SERVICE_NAME);
    assertEquals(events.get(1).getServiceName(), EntitlementOrderingBase.BILLING_SERVICE_NAME);
    assertEquals(events.get(2).getServiceName(), EntitlementOrderingBase.ENT_BILLING_SERVICE_NAME);
    assertEquals(events.get(3).getServiceName(), service);
    assertEquals(events.get(4).getServiceName(), EntitlementOrderingBase.BILLING_SERVICE_NAME);
    assertNull(events.get(0).getPrevPhase());
    assertNull(events.get(1).getPrevPhase());
    assertEquals(events.get(1).getNextPhase().getName(), "trial");
    assertEquals(events.get(2).getPrevPhase().getName(), "trial");
    assertEquals(events.get(2).getNextPhase().getName(), "phase");
    assertEquals(events.get(3).getPrevPhase().getName(), "phase");
    assertEquals(events.get(3).getNextPhase().getName(), "phase");
    assertEquals(events.get(4).getPrevPhase().getName(), "phase");
    assertNull(events.get(4).getNextPhase());
}
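The effective-date assertions above rely on Joda-Time's LocalDate(Object instant, DateTimeZone zone) constructor, which projects a UTC instant into the account's time zone and keeps only the calendar date. A minimal sketch of that conversion, assuming only the joda-time library (the class name EffectiveLocalDateSketch and the sample zones are invented for the example):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDate;

public class EffectiveLocalDateSketch {
    public static void main(String[] args) {
        DateTimeZone accountTimeZone = DateTimeZone.UTC;

        // A transition's effective time, stored as a UTC instant (as in the test above).
        DateTime effectiveTransitionTime = new DateTime(2013, 1, 1, 15, 43, 25, 0, DateTimeZone.UTC);

        // Project the instant into the account's zone and keep only the calendar date --
        // the value the timeline events are compared against.
        LocalDate effectiveDate = new LocalDate(effectiveTransitionTime, accountTimeZone);
        System.out.println(effectiveDate); // 2013-01-01

        // With a different account zone, the same instant can fall on another calendar day.
        System.out.println(new LocalDate(effectiveTransitionTime, DateTimeZone.forID("Pacific/Auckland"))); // 2013-01-02
    }
}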