Use of org.opensearch.common.time.DateMathParser in project OpenSearch by opensearch-project.
From the class JavaJodaTimeDuellingTests, method dateMathToMillis.
private long dateMathToMillis(String text, DateFormatter dateFormatter) {
    DateFormatter javaFormatter = dateFormatter.withLocale(randomLocale(random()));
    DateMathParser javaDateMath = javaFormatter.toDateMathParser();
    return javaDateMath.parse(text, () -> 0, true, (ZoneId) null).toEpochMilli();
}
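For reference, a minimal standalone sketch of the same call pattern, using only the DateFormatter and DateMathParser calls that appear in the snippets on this page; the pattern string, the fixed now supplier, and the class name are illustrative rather than taken from the test.

import java.time.ZoneId;

import org.opensearch.common.time.DateFormatter;
import org.opensearch.common.time.DateMathParser;

public final class DateMathToMillisSketch {
    public static void main(String[] args) {
        // Build a parser from an explicit pattern instead of the randomized-locale formatter above.
        DateMathParser parser = DateFormatter.forPattern("yyyy/MM/dd||epoch_millis").toDateMathParser();
        // Resolve a date-math expression against a fixed "now" of epoch 0; roundUp=true picks the
        // end of the rounded interval, as the boolean flag does in dateMathToMillis above.
        long millis = parser.parse("2015/10/12||+1d/d", () -> 0, true, (ZoneId) null).toEpochMilli();
        System.out.println(millis);
    }
}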
Use of org.opensearch.common.time.DateMathParser in project OpenSearch by opensearch-project.
From the class DateFieldTypeTests, method isFieldWithinRangeTestCase.
public void isFieldWithinRangeTestCase(DateFieldType ft) throws IOException {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
    Document doc = new Document();
    LongPoint field = new LongPoint("my_date", ft.parse("2015-10-12"));
    doc.add(field);
    w.addDocument(doc);
    field.setLongValue(ft.parse("2016-04-03"));
    w.addDocument(doc);
    DirectoryReader reader = DirectoryReader.open(w);
    DateMathParser alternateFormat = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser();
    doTestIsFieldWithinQuery(ft, reader, null, null);
    doTestIsFieldWithinQuery(ft, reader, null, alternateFormat);
    doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null);
    doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, alternateFormat);
    QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> nowInMillis);
    // Fields with no value indexed.
    DateFieldType ft2 = new DateFieldType("my_date2");
    assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null, context));
    IOUtils.close(reader, w, dir);
}
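The alternate-format parser built from DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER can also be exercised on its own. A hedged sketch, reusing only calls visible above; the bound strings, the fixed now supplier, and the import path for DateFieldMapper are assumptions for illustration.

import java.time.Instant;
import java.time.ZoneId;

import org.opensearch.common.time.DateMathParser;
import org.opensearch.index.mapper.DateFieldMapper;

public final class AlternateFormatBoundsSketch {
    public static void main(String[] args) {
        // Same parser the test hands to doTestIsFieldWithinQuery as "alternateFormat".
        DateMathParser alternateFormat = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser();
        // Resolve the bounds used in the DISJOINT assertion above; the lower bound is
        // resolved with roundUp=false and the upper bound with roundUp=true.
        Instant lower = alternateFormat.parse("2015-10-09", () -> 0, false, (ZoneId) null);
        Instant upper = alternateFormat.parse("2016-01-02", () -> 0, true, (ZoneId) null);
        System.out.println(lower + " .. " + upper);
    }
}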
Use of org.opensearch.common.time.DateMathParser in project OpenSearch by opensearch-project.
From the class RangeQueryBuilder, method doToQuery.
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
    if (from == null && to == null) {
        /**
         * Open bounds on both sides; we can rewrite to an exists query
         * if the {@link FieldNamesFieldMapper} is enabled.
         */
        final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context
            .getMapperService()
            .fieldType(FieldNamesFieldMapper.NAME);
        if (fieldNamesFieldType == null) {
            return new MatchNoDocsQuery("No mappings yet");
        }
        // The exists query would fail if the fieldNames field is disabled.
        if (fieldNamesFieldType.isEnabled()) {
            return ExistsQueryBuilder.newFilter(context, fieldName, false);
        }
    }
    MappedFieldType mapper = context.fieldMapper(this.fieldName);
    if (mapper == null) {
        throw new IllegalStateException("Rewrite first");
    }
    DateMathParser forcedDateParser = getForceDateParser();
    return mapper.rangeQuery(from, to, includeLower, includeUpper, relation, timeZone, forcedDateParser, context);
}
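The forced parser above comes into play when the query carries its own date format rather than relying on the field mapping. A hedged client-side sketch, assuming the fluent RangeQueryBuilder setters seen elsewhere on this page plus a format(String) setter; the field name and pattern are illustrative.

import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.RangeQueryBuilder;

public final class ForcedFormatRangeQuerySketch {
    public static void main(String[] args) {
        // A range query whose custom format, if set, is presumably what getForceDateParser()
        // turns into the DateMathParser passed to MappedFieldType#rangeQuery above.
        RangeQueryBuilder query = QueryBuilders.rangeQuery("date")
            .from("2015/10/12")
            .to("now/d")
            .includeLower(true)
            .includeUpper(true)
            .format("yyyy/MM/dd||epoch_millis"); // assumption: format(String) overrides the mapping's date format
        System.out.println(query);
    }
}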
Use of org.opensearch.common.time.DateMathParser in project OpenSearch by opensearch-project.
From the class RangeQueryBuilder, method getRelation.
// Overridable for testing only
protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
    QueryShardContext shardContext = queryRewriteContext.convertToShardContext();
    if (shardContext != null) {
        final MappedFieldType fieldType = shardContext.fieldMapper(fieldName);
        if (fieldType == null) {
            return MappedFieldType.Relation.DISJOINT;
        }
        if (shardContext.getIndexReader() == null) {
            // No reader; this may happen e.g. for percolator queries.
            return MappedFieldType.Relation.INTERSECTS;
        }
        DateMathParser dateMathParser = getForceDateParser();
        final MappedFieldType.Relation relation = fieldType.isFieldWithinQuery(
            shardContext.getIndexReader(),
            from,
            to,
            includeLower,
            includeUpper,
            timeZone,
            dateMathParser,
            queryRewriteContext
        );
        // For validation, always assume that there is an intersection.
        if (relation == MappedFieldType.Relation.DISJOINT && shardContext.validate()) {
            return MappedFieldType.Relation.INTERSECTS;
        }
        return relation;
    }
    // Not on the shard, so we have no way to know what the relation is.
    return MappedFieldType.Relation.INTERSECTS;
}
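To see what the dateMathParser contributes to that relation check, the query bounds can be resolved independently. A minimal sketch using only the parse call seen throughout this page; the pattern, the pinned now value, and the expressions are illustrative.

import java.time.Instant;
import java.time.ZoneOffset;

import org.opensearch.common.time.DateFormatter;
import org.opensearch.common.time.DateMathParser;

public final class RelationBoundsSketch {
    public static void main(String[] args) {
        DateMathParser dateMathParser = DateFormatter.forPattern("strict_date_optional_time||epoch_millis").toDateMathParser();
        long now = 1600000000000L; // pinned "now" so the two resolutions are comparable
        // Lower bound resolved with roundUp=false, upper bound with roundUp=true; these are
        // roughly the concrete instants a relation check would compare against the index data.
        Instant lower = dateMathParser.parse("now-7d/d", () -> now, false, ZoneOffset.UTC);
        Instant upper = dateMathParser.parse("now/d", () -> now, true, ZoneOffset.UTC);
        System.out.println(lower + " .. " + upper);
    }
}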
Use of org.opensearch.common.time.DateMathParser in project OpenSearch by opensearch-project.
From the class DateHistogramIT, method testSingleValueFieldWithExtendedBoundsTimezone.
/**
 * Test date histogram aggregation with hour interval, timezone shift and
 * extended bounds (see https://github.com/elastic/elasticsearch/issues/12278)
 */
public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception {
    String index = "test12278";
    prepareCreate(index).setSettings(
        Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0)
    ).get();
    DateMathParser parser = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis").toDateMathParser();
    // we pick a random timezone offset of +12/-12 hours and insert two documents
    // one at 00:00 in that time zone and one at 12:00
    List<IndexRequestBuilder> builders = new ArrayList<>();
    int timeZoneHourOffset = randomIntBetween(-12, 12);
    ZoneId timezone = ZoneOffset.ofHours(timeZoneHourOffset);
    ZonedDateTime timeZoneStartToday = parser.parse("now/d", System::currentTimeMillis, false, timezone).atZone(ZoneOffset.UTC);
    ZonedDateTime timeZoneNoonToday = parser.parse("now/d+12h", System::currentTimeMillis, false, timezone).atZone(ZoneOffset.UTC);
    builders.add(indexDoc(index, timeZoneStartToday, 1));
    builders.add(indexDoc(index, timeZoneNoonToday, 2));
    indexRandom(true, builders);
    ensureSearchable(index);
    SearchResponse response = null;
    // retrieve those docs with the same time zone and extended bounds
    response = client().prepareSearch(index)
        .setQuery(
            QueryBuilders.rangeQuery("date")
                .from("now/d")
                .to("now/d")
                .includeLower(true)
                .includeUpper(true)
                .timeZone(timezone.getId())
        )
        .addAggregation(
            dateHistogram("histo").field("date")
                .dateHistogramInterval(DateHistogramInterval.hours(1))
                .timeZone(timezone)
                .minDocCount(0)
                .extendedBounds(new LongBounds("now/d", "now/d+23h"))
        )
        .get();
    assertSearchResponse(response);
assertThat("Expected 24 buckets for one day aggregation with hourly interval", response.getHits().getTotalHits().value, equalTo(2L));
    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat("Expected 24 buckets for one day aggregation with hourly interval", buckets.size(), equalTo(24));
    for (int i = 0; i < buckets.size(); i++) {
        Histogram.Bucket bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        ZonedDateTime zonedDateTime = timeZoneStartToday.plus(i * 60 * 60 * 1000, ChronoUnit.MILLIS);
        assertThat("InternalBucket " + i + " had wrong key", (ZonedDateTime) bucket.getKey(), equalTo(zonedDateTime));
        if (i == 0 || i == 12) {
            assertThat(bucket.getDocCount(), equalTo(1L));
        } else {
            assertThat(bucket.getDocCount(), equalTo(0L));
        }
    }
    internalCluster().wipeIndices(index);
}
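The timezone-sensitive rounding this test depends on can be shown in isolation. A hedged sketch reusing the parser construction from the test; the pinned now value is illustrative.

import java.time.Instant;
import java.time.ZoneOffset;

import org.opensearch.common.time.DateFormatter;
import org.opensearch.common.time.DateMathParser;

public final class TimezoneRoundingSketch {
    public static void main(String[] args) {
        DateMathParser parser = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis").toDateMathParser();
        long now = 1600000000000L; // pinned "now" so both calls see the same clock
        // "now/d" rounds down to the start of the current day in the supplied zone, so the
        // resolved instant shifts with the offset; the zones may even be on different calendar days.
        Instant utcStartOfDay = parser.parse("now/d", () -> now, false, ZoneOffset.UTC);
        Instant plus12StartOfDay = parser.parse("now/d", () -> now, false, ZoneOffset.ofHours(12));
        System.out.println(utcStartOfDay + " vs " + plus12StartOfDay);
    }
}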