Use of org.apache.druid.query.Result in project druid by druid-io.
From the class TimeBoundaryQueryRunnerTest, method testFilteredTimeBoundaryQuery:
@Test
@SuppressWarnings("unchecked")
public void testFilteredTimeBoundaryQuery() throws IOException
{
  QueryRunner customRunner = getCustomRunner();
  TimeBoundaryQuery timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder()
                                              .dataSource("testing")
                                              .filters("quality", "automotive")
                                              .build();
  Assert.assertTrue(timeBoundaryQuery.hasFilters());

  List<Result<TimeBoundaryResultValue>> results =
      customRunner.run(QueryPlus.wrap(timeBoundaryQuery)).toList();
  Assert.assertTrue(Iterables.size(results) > 0);

  TimeBoundaryResultValue val = results.iterator().next().getValue();
  DateTime minTime = val.getMinTime();
  DateTime maxTime = val.getMaxTime();

  Assert.assertEquals(DateTimes.of("2011-01-13T00:00:00.000Z"), minTime);
  Assert.assertEquals(DateTimes.of("2011-01-16T00:00:00.000Z"), maxTime);
}
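The asserted bounds are the min and max timestamps of only the rows matching the quality = automotive filter. For reference, the wrapped value can also be built and read back directly; a minimal sketch, assuming the minTime/maxTime map layout and the TimeBoundaryQuery key constants (not part of the test above):

  // Hedged sketch: constructing a time-boundary Result by hand and reading it back.
  Result<TimeBoundaryResultValue> r = new Result<>(
      DateTimes.of("2011-01-13"),
      new TimeBoundaryResultValue(
          ImmutableMap.of(
              TimeBoundaryQuery.MIN_TIME, DateTimes.of("2011-01-13"),  // assumed map keys
              TimeBoundaryQuery.MAX_TIME, DateTimes.of("2011-01-16")
          )
      )
  );
  Assert.assertEquals(DateTimes.of("2011-01-13"), r.getValue().getMinTime());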
Use of org.apache.druid.query.Result in project druid by druid-io.
From the class SearchQueryQueryToolChestTest, method testCacheStrategy:
@Test
public void testCacheStrategy() throws Exception
{
  CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> strategy =
      new SearchQueryQueryToolChest(null, null).getCacheStrategy(
          new SearchQuery(
              new TableDataSource("dummy"),
              null,
              Granularities.ALL,
              1,
              new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
              ImmutableList.of(Druids.DIMENSION_IDENTITY.apply("dim1")),
              new FragmentSearchQuerySpec(ImmutableList.of("a", "b")),
              null,
              null
          )
      );

  final Result<SearchResultValue> result = new Result<>(
      DateTimes.utc(123L),
      new SearchResultValue(ImmutableList.of(new SearchHit("dim1", "a")))
  );

  Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result);

  ObjectMapper objectMapper = new DefaultObjectMapper();
  Object fromCacheValue = objectMapper.readValue(
      objectMapper.writeValueAsBytes(preparedValue),
      strategy.getCacheObjectClazz()
  );

  Result<SearchResultValue> fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);

  Assert.assertEquals(result, fromCacheResult);
}
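The round trip above only exercises the value codec; the key side of the CacheStrategy can be checked the same way. A minimal sketch, assuming the SearchQuery built inline above is extracted into a local variable named query (a name introduced here for illustration):

  // Hedged sketch: cache keys must be deterministic for identical queries,
  // otherwise identical queries would never produce cache hits.
  byte[] key1 = strategy.computeCacheKey(query);
  byte[] key2 = strategy.computeCacheKey(query);
  Assert.assertArrayEquals(key1, key2);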
Use of org.apache.druid.query.Result in project druid by druid-io.
From the class SearchBinaryFn, method apply:
@Override
public Result<SearchResultValue> apply(Result<SearchResultValue> arg1, Result<SearchResultValue> arg2)
{
  // A null side means there is nothing to merge; pass the other side through.
  if (arg1 == null) {
    return arg2;
  }
  if (arg2 == null) {
    return arg1;
  }

  // The limit only applies when everything collapses into a single ALL bucket;
  // per-bucket merges are not truncated.
  final int limit = gran instanceof AllGranularity ? this.limit : -1;

  SearchResultValue arg1Vals = arg1.getValue();
  SearchResultValue arg2Vals = arg2.getValue();
  // Both sides are already sorted, so a sorted merge keeps equal hits adjacent.
  Iterable<SearchHit> merged = Iterables.mergeSorted(Arrays.asList(arg1Vals, arg2Vals), searchSortSpec.getComparator());

  int maxSize = arg1Vals.getValue().size() + arg2Vals.getValue().size();
  if (limit > 0) {
    maxSize = Math.min(limit, maxSize);
  }

  List<SearchHit> results = Lists.newArrayListWithExpectedSize(maxSize);

  // Collapse adjacent equal hits, summing counts when both sides carry one.
  SearchHit prev = null;
  for (SearchHit searchHit : merged) {
    if (prev == null) {
      prev = searchHit;
      continue;
    }
    if (prev.equals(searchHit)) {
      if (prev.getCount() != null && searchHit.getCount() != null) {
        prev = new SearchHit(prev.getDimension(), prev.getValue(), prev.getCount() + searchHit.getCount());
      } else {
        prev = new SearchHit(prev.getDimension(), prev.getValue());
      }
    } else {
      results.add(prev);
      prev = searchHit;
      if (limit > 0 && results.size() >= limit) {
        break;
      }
    }
  }
  // Flush the last pending hit if the limit still allows it.
  if (prev != null && (limit < 0 || results.size() < limit)) {
    results.add(prev);
  }

  // With ALL granularity the first timestamp stands in for the whole result;
  // otherwise align the merged result to the start of its granularity bucket.
  final DateTime timestamp = gran instanceof AllGranularity ? arg1.getTimestamp() : gran.bucketStart(arg1.getTimestamp());
  return new Result<>(timestamp, new SearchResultValue(results));
}
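A short usage sketch of the merge above, assuming the constructor takes the sort spec, granularity, and limit that back the fields used in apply (verify against the actual signature):

  // Hedged sketch: two results for the same ALL-granularity bucket holding an equal hit.
  DateTime time = DateTimes.of("2011-01-01");
  SearchBinaryFn fn = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularities.ALL, 1000);
  Result<SearchResultValue> merged = fn.apply(
      new Result<>(time, new SearchResultValue(ImmutableList.of(new SearchHit("dim1", "a", 1)))),
      new Result<>(time, new SearchResultValue(ImmutableList.of(new SearchHit("dim1", "a", 2))))
  );
  // Equal hits collapse into one SearchHit("dim1", "a") whose count is 1 + 2 = 3.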
Use of org.apache.druid.query.Result in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testBySegmentResultsOptimizedDimextraction:
@Test
public void testBySegmentResultsOptimizedDimextraction()
{
  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new ExtractionDimensionSpec(
          "quality",
          "alias",
          new LookupExtractionFn(
              new MapLookupExtractor(ImmutableMap.of("mezzanine", "mezzanine0"), false),
              false, null, true, false
          )
      ))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
      .overrideContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
  final GroupByQuery fullQuery = builder.build();

  int segmentCount = 32;
  Result<BySegmentResultValue<ResultRow>> singleSegmentResult = new Result<>(
      DateTimes.of("2011-01-12T00:00:00.000Z"),
      new BySegmentResultValueClass<>(
          Collections.singletonList(
              makeRow(fullQuery, "2011-04-01", "alias", "mezzanine0", "rows", 6L, "idx", 4420L)
          ),
          QueryRunnerTestHelper.SEGMENT_ID.toString(),
          Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z")
      )
  );
  List<Result> bySegmentResults = new ArrayList<>();
  for (int i = 0; i < segmentCount; i++) {
    bySegmentResults.add(singleSegmentResult);
  }

  QueryToolChest toolChest = factory.getToolchest();
  List<QueryRunner<ResultRow>> singleSegmentRunners = new ArrayList<>();
  for (int i = 0; i < segmentCount; i++) {
    singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
  }

  ExecutorService exec = Executors.newCachedThreadPool();
  // Merge on the pool created above so shutdownNow() below stops the pool that was used.
  QueryRunner theRunner = toolChest.postMergeQueryDecoration(
      new FinalizeResultsQueryRunner<>(
          toolChest.mergeResults(factory.mergeRunners(exec, singleSegmentRunners)),
          toolChest
      )
  );

  TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery)), "bySegment-dim-extraction");
  exec.shutdownNow();
}
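With bySegment set, every element the runner emits wraps the unmerged per-segment rows rather than final merged rows. A minimal sketch of unwrapping one element, assuming the usual BySegmentResultValue accessors:

  // Hedged sketch: reading one bySegment-wrapped result.
  BySegmentResultValue<ResultRow> perSegment = singleSegmentResult.getValue();
  String segmentId = perSegment.getSegmentId();   // which segment produced the rows
  List<ResultRow> rows = perSegment.getResults(); // the per-segment result rows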
Use of org.apache.druid.query.Result in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testBySegmentResultsWithAllFiltersWithExtractionFns:
@Test
public void testBySegmentResultsWithAllFiltersWithExtractionFns()
{
  String extractionJsFn = "function(str) { return 'super-' + str; }";
  String jsFn = "function(x) { return(x === 'super-mezzanine') }";
  ExtractionFn extractionFn = new JavaScriptExtractionFn(extractionJsFn, false, JavaScriptConfig.getEnabledInstance());

  // Six different filter types, all matching the same rows once extractionFn is applied.
  List<DimFilter> superFilterList = new ArrayList<>();
  superFilterList.add(new SelectorDimFilter("quality", "super-mezzanine", extractionFn));
  superFilterList.add(new InDimFilter("quality", Arrays.asList("not-super-mezzanine", "FOOBAR", "super-mezzanine"), extractionFn));
  superFilterList.add(new BoundDimFilter("quality", "super-mezzanine", "super-mezzanine", false, false, true, extractionFn, StringComparators.ALPHANUMERIC));
  superFilterList.add(new RegexDimFilter("quality", "super-mezzanine", extractionFn));
  superFilterList.add(new SearchQueryDimFilter("quality", new ContainsSearchQuerySpec("super-mezzanine", true), extractionFn));
  superFilterList.add(new JavaScriptDimFilter("quality", jsFn, extractionFn, JavaScriptConfig.getEnabledInstance()));
  DimFilter superFilter = new AndDimFilter(superFilterList);

  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setDimFilter(superFilter)
      .overrideContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
  final GroupByQuery fullQuery = builder.build();

  int segmentCount = 32;
  Result<BySegmentResultValue> singleSegmentResult = new Result<>(
      DateTimes.of("2011-01-12T00:00:00.000Z"),
      new BySegmentResultValueClass<>(
          Collections.singletonList(
              makeRow(fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L)
          ),
          QueryRunnerTestHelper.SEGMENT_ID.toString(),
          Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z")
      )
  );
  List<Result> bySegmentResults = new ArrayList<>();
  for (int i = 0; i < segmentCount; i++) {
    bySegmentResults.add(singleSegmentResult);
  }

  QueryToolChest toolChest = factory.getToolchest();
  List<QueryRunner<ResultRow>> singleSegmentRunners = new ArrayList<>();
  for (int i = 0; i < segmentCount; i++) {
    singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
  }

  ExecutorService exec = Executors.newCachedThreadPool();
  // Merge on the pool created above so shutdownNow() below stops the pool that was used.
  QueryRunner theRunner = toolChest.postMergeQueryDecoration(
      new FinalizeResultsQueryRunner<>(
          toolChest.mergeResults(factory.mergeRunners(exec, singleSegmentRunners)),
          toolChest
      )
  );

  TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery)), "bySegment-filter");
  exec.shutdownNow();
}
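All six filters in the AND match the same rows because each one applies extractionFn before comparing, so the raw value "mezzanine" is seen as "super-mezzanine". A minimal sketch of that transformation on its own:

  // Hedged sketch: the JavaScript extraction fn prefixes raw dimension values.
  ExtractionFn fn = new JavaScriptExtractionFn(
      "function(str) { return 'super-' + str; }", false, JavaScriptConfig.getEnabledInstance());
  Assert.assertEquals("super-mezzanine", fn.apply("mezzanine"));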