Use of org.joda.time.Interval in project druid by druid-io.
In the class CachingClusteredClientTest, the method testIfNoneMatch:
@Test
public void testIfNoneMatch() throws Exception {
    Interval interval = new Interval("2016/2017");
    final DataSegment dataSegment = new DataSegment(
        "dataSource",
        interval,
        "ver",
        ImmutableMap.<String, Object>of("type", "hdfs", "path", "/tmp"),
        ImmutableList.of("product"),
        ImmutableList.of("visited_sum"),
        NoneShardSpec.instance(),
        9,
        12334
    );
    final ServerSelector selector = new ServerSelector(
        dataSegment,
        new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())
    );
    selector.addServerAndUpdateSegment(new QueryableDruidServer(servers[0], null), dataSegment);
    timeline.add(interval, "ver", new SingleElementPartitionChunk<>(selector));
    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(interval)))
        .context(ImmutableMap.<String, Object>of("If-None-Match", "aVJV29CJY93rszVW/QBy0arWZo0="))
        .build();
    Map<String, String> responseContext = new HashMap<>();
    client.run(query, responseContext);
    Assert.assertEquals("Z/eS4rQz5v477iq7Aashr6JPZa0=", responseContext.get("ETag"));
}
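As a side note, the interval string used above ("2016/2017") relies on Joda-Time's ISO-8601 interval parsing in the default time zone. A minimal standalone sketch, not part of the Druid test; the class name is illustrative only:

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class IntervalParseSketch {
    public static void main(String[] args) {
        // "2016/2017" parses to [2016-01-01T00:00, 2017-01-01T00:00) in the default zone
        Interval interval = new Interval("2016/2017");
        System.out.println(interval.getStart());                        // start instant (inclusive)
        System.out.println(interval.getEnd());                          // end instant (exclusive)
        System.out.println(interval.contains(new DateTime("2016-06-15"))); // true
    }
}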
Use of org.joda.time.Interval in project druid by druid-io.
In the class CachingClusteredClientTest, the method testTimeseriesCachingTimeZone:
@Test
@SuppressWarnings("unchecked")
public void testTimeseriesCachingTimeZone() throws Exception {
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(SEG_SPEC)
        .filters(DIM_FILTER)
        .granularity(PT1H_TZ_GRANULARITY)
        .aggregators(AGGS)
        .postAggregators(POST_AGGS)
        .context(CONTEXT);
    QueryRunner runner = new FinalizeResultsQueryRunner(
        client,
        new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator())
    );
    testQueryCaching(
        runner,
        builder.build(),
        new Interval("2011-11-04/2011-11-08"),
        makeTimeResults(
            new DateTime("2011-11-04", TIMEZONE), 50, 5000,
            new DateTime("2011-11-05", TIMEZONE), 30, 6000,
            new DateTime("2011-11-06", TIMEZONE), 23, 85312,
            new DateTime("2011-11-07", TIMEZONE), 85, 102
        )
    );
    HashMap<String, List> context = new HashMap<String, List>();
    TestHelper.assertExpectedResults(
        makeRenamedTimeResults(
            new DateTime("2011-11-04", TIMEZONE), 50, 5000,
            new DateTime("2011-11-05", TIMEZONE), 30, 6000,
            new DateTime("2011-11-06", TIMEZONE), 23, 85312,
            new DateTime("2011-11-07", TIMEZONE), 85, 102
        ),
        runner.run(
            builder.intervals("2011-11-04/2011-11-08")
                .aggregators(RENAMED_AGGS)
                .postAggregators(RENAMED_POST_AGGS)
                .build(),
            context
        )
    );
}
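For reference, a minimal sketch of how the zone-aware DateTime arguments above behave. "America/Los_Angeles" is only an illustrative value here; the test's TIMEZONE constant may be a different zone, and the class name is hypothetical:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;

public class TimeZoneIntervalSketch {
    public static void main(String[] args) {
        DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles"); // assumed zone, for illustration only
        DateTime bucketStart = new DateTime("2011-11-04", tz);       // midnight in that zone
        Interval day = new Interval(bucketStart, bucketStart.plusDays(1));
        System.out.println(day.contains(new DateTime("2011-11-04T12:00", tz))); // true
    }
}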
Use of org.joda.time.Interval in project druid by druid-io.
In the class CachingClusteredClientTest, the method testTimeBoundaryCachingWhenTimeIsInteger:
@Test
public void testTimeBoundaryCachingWhenTimeIsInteger() throws Exception {
    testQueryCaching(
        client,
        Druids.newTimeBoundaryQueryBuilder()
            .dataSource(CachingClusteredClientTest.DATA_SOURCE)
            .intervals(CachingClusteredClientTest.SEG_SPEC)
            .context(CachingClusteredClientTest.CONTEXT)
            .build(),
        new Interval("1970-01-01/1970-01-02"),
        makeTimeBoundaryResult(new DateTime("1970-01-01"), new DateTime("1970-01-01"), new DateTime("1970-01-02")),
        new Interval("1970-01-01/2011-01-03"),
        makeTimeBoundaryResult(new DateTime("1970-01-02"), new DateTime("1970-01-02"), new DateTime("1970-01-03")),
        new Interval("1970-01-01/2011-01-10"),
        makeTimeBoundaryResult(new DateTime("1970-01-05"), new DateTime("1970-01-05"), new DateTime("1970-01-10")),
        new Interval("1970-01-01/2011-01-10"),
        makeTimeBoundaryResult(new DateTime("1970-01-05T01"), new DateTime("1970-01-05T01"), new DateTime("1970-01-10"))
    );
    testQueryCaching(
        client,
        Druids.newTimeBoundaryQueryBuilder()
            .dataSource(CachingClusteredClientTest.DATA_SOURCE)
            .intervals(CachingClusteredClientTest.SEG_SPEC)
            .context(CachingClusteredClientTest.CONTEXT)
            .bound(TimeBoundaryQuery.MAX_TIME)
            .build(),
        new Interval("1970-01-01/2011-01-02"),
        makeTimeBoundaryResult(new DateTime("1970-01-01"), null, new DateTime("1970-01-02")),
        new Interval("1970-01-01/2011-01-03"),
        makeTimeBoundaryResult(new DateTime("1970-01-02"), null, new DateTime("1970-01-03")),
        new Interval("1970-01-01/2011-01-10"),
        makeTimeBoundaryResult(new DateTime("1970-01-05"), null, new DateTime("1970-01-10")),
        new Interval("1970-01-01/2011-01-10"),
        makeTimeBoundaryResult(new DateTime("1970-01-05T01"), null, new DateTime("1970-01-10"))
    );
    testQueryCaching(
        client,
        Druids.newTimeBoundaryQueryBuilder()
            .dataSource(CachingClusteredClientTest.DATA_SOURCE)
            .intervals(CachingClusteredClientTest.SEG_SPEC)
            .context(CachingClusteredClientTest.CONTEXT)
            .bound(TimeBoundaryQuery.MIN_TIME)
            .build(),
        new Interval("1970-01-01/2011-01-02"),
        makeTimeBoundaryResult(new DateTime("1970-01-01"), new DateTime("1970-01-01"), null),
        new Interval("1970-01-01/2011-01-03"),
        makeTimeBoundaryResult(new DateTime("1970-01-02"), new DateTime("1970-01-02"), null),
        new Interval("1970-01-01/1970-01-10"),
        makeTimeBoundaryResult(new DateTime("1970-01-05"), new DateTime("1970-01-05"), null),
        new Interval("1970-01-01/2011-01-10"),
        makeTimeBoundaryResult(new DateTime("1970-01-05T01"), new DateTime("1970-01-05T01"), null)
    );
}
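The overlapping query intervals above are plain Joda-Time Intervals. A small standalone sketch of the overlap behavior they rely on (not Druid code; the class name is illustrative):

import org.joda.time.Interval;

public class IntervalOverlapSketch {
    public static void main(String[] args) {
        Interval shorter = new Interval("1970-01-01/2011-01-03");
        Interval longer = new Interval("1970-01-01/2011-01-10");
        System.out.println(shorter.overlaps(longer)); // true
        System.out.println(shorter.overlap(longer));  // the shared sub-interval, 1970-01-01/2011-01-03
        System.out.println(longer.contains(shorter)); // true, the longer interval fully covers the shorter one
    }
}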
Use of org.joda.time.Interval in project druid by druid-io.
In the class CachingClusteredClientTest, the method testTimeSeriesWithFilter:
@Test
public void testTimeSeriesWithFilter() throws Exception {
    DimFilter filter = Druids.newAndDimFilterBuilder()
        .fields(
            Arrays.asList(
                Druids.newOrDimFilterBuilder()
                    .fields(
                        Arrays.asList(
                            new SelectorDimFilter("dim0", "1", null),
                            new BoundDimFilter("dim0", "222", "333", false, false, false, null, StringComparators.LEXICOGRAPHIC)
                        )
                    )
                    .build(),
                Druids.newAndDimFilterBuilder()
                    .fields(
                        Arrays.asList(
                            new InDimFilter("dim1", Arrays.asList("0", "1", "2", "3", "4"), null),
                            new BoundDimFilter("dim1", "0", "3", false, true, false, null, StringComparators.LEXICOGRAPHIC),
                            new BoundDimFilter("dim1", "1", "9999", true, false, false, null, StringComparators.LEXICOGRAPHIC)
                        )
                    )
                    .build()
            )
        )
        .build();
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(SEG_SPEC)
        .filters(filter)
        .granularity(GRANULARITY)
        .aggregators(AGGS)
        .postAggregators(POST_AGGS)
        .context(CONTEXT);
    QueryRunner runner = new FinalizeResultsQueryRunner(
        client,
        new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator())
    );
    /*
    For dim0 (2011-01-01/2011-01-05), the combined range is {[1,1], [222,333]}, so segments [-inf,1], [1,2], [2,3], and
    [3,4] are needed.
    For dim1 (2011-01-06/2011-01-10), the combined range for the bound filters is {(1,3)}; combining this with the in
    filter results in {[2,2]}, so segments [1,2] and [2,3] are needed.
    */
    List<Iterable<Result<TimeseriesResultValue>>> expectedResult = Arrays.asList(
        makeTimeResults(
            new DateTime("2011-01-01"), 50, 5000,
            new DateTime("2011-01-02"), 10, 1252,
            new DateTime("2011-01-03"), 20, 6213,
            new DateTime("2011-01-04"), 30, 743
        ),
        makeTimeResults(
            new DateTime("2011-01-07"), 60, 6020,
            new DateTime("2011-01-08"), 70, 250
        )
    );
    testQueryCachingWithFilter(
        runner,
        3,
        builder.build(),
        expectedResult,
        new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000),
        new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-02"), 10, 1252),
        new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-03"), 20, 6213),
        new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-04"), 30, 743),
        new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-05"), 40, 6000),
        new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-06"), 50, 425),
        new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-07"), 60, 6020),
        new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-08"), 70, 250),
        new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-09"), 23, 85312),
        new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-10"), 100, 512)
    );
}
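The two per-interval groups above cover disjoint, non-abutting ranges. A quick standalone sketch of how Joda-Time reports that relationship (illustrative class name, not Druid code):

import org.joda.time.Interval;

public class IntervalGapSketch {
    public static void main(String[] args) {
        Interval first = new Interval("2011-01-01/2011-01-05");
        Interval second = new Interval("2011-01-06/2011-01-10");
        System.out.println(first.abuts(second));    // false, the intervals do not touch
        System.out.println(first.gap(second));      // the uncovered day, 2011-01-05/2011-01-06
        System.out.println(first.overlaps(second)); // false
    }
}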
Use of org.joda.time.Interval in project druid by druid-io.
In the class CachingClusteredClientTest, the method testGroupByCaching:
@Test
public void testGroupByCaching() throws Exception {
    List<AggregatorFactory> aggsWithUniques = ImmutableList.<AggregatorFactory>builder()
        .addAll(AGGS)
        .add(new HyperUniquesAggregatorFactory("uniques", "uniques"))
        .build();
    final HashFunction hashFn = Hashing.murmur3_128();
    GroupByQuery.Builder builder = new GroupByQuery.Builder()
        .setDataSource(DATA_SOURCE)
        .setQuerySegmentSpec(SEG_SPEC)
        .setDimFilter(DIM_FILTER)
        .setGranularity(GRANULARITY)
        .setDimensions(Arrays.<DimensionSpec>asList(new DefaultDimensionSpec("a", "a")))
        .setAggregatorSpecs(aggsWithUniques)
        .setPostAggregatorSpecs(POST_AGGS)
        .setContext(CONTEXT);
    final HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
    collector.add(hashFn.hashString("abc123", Charsets.UTF_8).asBytes());
    collector.add(hashFn.hashString("123abc", Charsets.UTF_8).asBytes());
    testQueryCaching(
        client,
        builder.build(),
        new Interval("2011-01-01/2011-01-02"),
        makeGroupByResults(
            new DateTime("2011-01-01"), ImmutableMap.of("a", "a", "rows", 1, "imps", 1, "impers", 1, "uniques", collector)
        ),
        new Interval("2011-01-02/2011-01-03"),
        makeGroupByResults(
            new DateTime("2011-01-02"), ImmutableMap.of("a", "b", "rows", 2, "imps", 2, "impers", 2, "uniques", collector)
        ),
        new Interval("2011-01-05/2011-01-10"),
        makeGroupByResults(
            new DateTime("2011-01-05"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector),
            new DateTime("2011-01-06"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector),
            new DateTime("2011-01-07"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector),
            new DateTime("2011-01-08"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector),
            new DateTime("2011-01-09"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector)
        ),
        new Interval("2011-01-05/2011-01-10"),
        makeGroupByResults(
            new DateTime("2011-01-05T01"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector),
            new DateTime("2011-01-06T01"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector),
            new DateTime("2011-01-07T01"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector),
            new DateTime("2011-01-08T01"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector),
            new DateTime("2011-01-09T01"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector)
        )
    );
    QueryRunner runner = new FinalizeResultsQueryRunner(
        client,
        GroupByQueryRunnerTest.makeQueryRunnerFactory(new GroupByQueryConfig()).getToolchest()
    );
    HashMap<String, Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedObjects(
        makeGroupByResults(
            new DateTime("2011-01-05T"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector),
            new DateTime("2011-01-05T01"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector),
            new DateTime("2011-01-06T"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector),
            new DateTime("2011-01-06T01"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector),
            new DateTime("2011-01-07T"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector),
            new DateTime("2011-01-07T01"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector),
            new DateTime("2011-01-08T"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector),
            new DateTime("2011-01-08T01"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector),
            new DateTime("2011-01-09T"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector),
            new DateTime("2011-01-09T01"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector)
        ),
        runner.run(builder.setInterval("2011-01-05/2011-01-10").build(), context),
        ""
    );
}
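A rough standalone sketch of the "uniques" collector built above: adding two distinct hashed values should yield a cardinality estimate close to 2. The io.druid.hll import path and the estimateCardinality accessor are assumptions for this Druid version, and the class name is illustrative:

import com.google.common.base.Charsets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import io.druid.hll.HyperLogLogCollector; // assumed package for this Druid version

public class HyperUniquesSketch {
    public static void main(String[] args) {
        HashFunction hashFn = Hashing.murmur3_128();
        HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
        collector.add(hashFn.hashString("abc123", Charsets.UTF_8).asBytes());
        collector.add(hashFn.hashString("123abc", Charsets.UTF_8).asBytes());
        System.out.println(collector.estimateCardinality()); // approximately 2.0
    }
}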