use of io.druid.query.Result in project druid by druid-io.
In the class TimeBoundaryQueryQueryToolChestTest, the method testCacheStrategy verifies that a time-boundary result survives a round trip through the cache strategy's serialized form.
@Test
public void testCacheStrategy() throws Exception {
  final TimeBoundaryQuery query = new TimeBoundaryQuery(
      new TableDataSource("dummy"),
      new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
      null,
      null,
      null
  );
  CacheStrategy<Result<TimeBoundaryResultValue>, Object, TimeBoundaryQuery> strategy =
      new TimeBoundaryQueryQueryToolChest().getCacheStrategy(query);

  final Result<TimeBoundaryResultValue> result = new Result<>(
      new DateTime(123L),
      new TimeBoundaryResultValue(
          ImmutableMap.of(
              TimeBoundaryQuery.MIN_TIME, new DateTime(0L).toString(),
              TimeBoundaryQuery.MAX_TIME, new DateTime("2015-01-01").toString()
          )
      )
  );

  // Round-trip the result through the cache representation: prepare it for
  // caching, serialize and deserialize it with Jackson, then pull it back out.
  Object preparedValue = strategy.prepareForCache().apply(result);
  ObjectMapper objectMapper = new DefaultObjectMapper();
  Object fromCacheValue = objectMapper.readValue(
      objectMapper.writeValueAsBytes(preparedValue),
      strategy.getCacheObjectClazz()
  );
  Result<TimeBoundaryResultValue> fromCacheResult = strategy.pullFromCache().apply(fromCacheValue);

  Assert.assertEquals(result, fromCacheResult);
}
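A natural companion check, not part of the original test: the key produced by computeCacheKey should be deterministic for a fixed query. A minimal sketch, assuming the same imports as the test above:

// Hypothetical follow-up assertion (not in the original test): two
// independently constructed toolchests should agree on the cache key
// for the same query.
Assert.assertArrayEquals(
    new TimeBoundaryQueryQueryToolChest().getCacheStrategy(query).computeCacheKey(query),
    new TimeBoundaryQueryQueryToolChest().getCacheStrategy(query).computeCacheKey(query)
);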
use of io.druid.query.Result in project druid by druid-io.
In the class TopNQueryQueryToolChestTest, the method testComputeCacheKeyWithDifferentPostAgg verifies that two queries differing only in their post-aggregations produce different cache keys.
@Test
public void testComputeCacheKeyWithDifferentPostAgg() throws Exception {
  // Two queries that are identical except for the post-aggregator: a constant
  // vs. an arithmetic post-aggregator, both named "post".
  final TopNQuery query1 = new TopNQuery(
      new TableDataSource("dummy"),
      VirtualColumns.EMPTY,
      new DefaultDimensionSpec("test", "test"),
      new NumericTopNMetricSpec("post"),
      3,
      new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
      null,
      Granularities.ALL,
      ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("metric1")),
      ImmutableList.<PostAggregator>of(new ConstantPostAggregator("post", 10)),
      null
  );
  final TopNQuery query2 = new TopNQuery(
      new TableDataSource("dummy"),
      VirtualColumns.EMPTY,
      new DefaultDimensionSpec("test", "test"),
      new NumericTopNMetricSpec("post"),
      3,
      new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
      null,
      Granularities.ALL,
      ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("metric1")),
      ImmutableList.<PostAggregator>of(
          new ArithmeticPostAggregator(
              "post",
              "+",
              ImmutableList.<PostAggregator>of(
                  new FieldAccessPostAggregator(null, "metric1"),
                  new FieldAccessPostAggregator(null, "metric1")
              )
          )
      ),
      null
  );

  final CacheStrategy<Result<TopNResultValue>, Object, TopNQuery> strategy1 =
      new TopNQueryQueryToolChest(null, null).getCacheStrategy(query1);
  final CacheStrategy<Result<TopNResultValue>, Object, TopNQuery> strategy2 =
      new TopNQueryQueryToolChest(null, null).getCacheStrategy(query2);

  // Different post-aggregations must yield different cache keys; otherwise
  // the two queries could collide in the result cache.
  Assert.assertFalse(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2)));
}
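The inverse property is implied but not asserted here. A minimal hedged sketch, reusing query1 and strategy1 from above (not part of the original test):

// Hypothetical companion (not in the original test): the same query should
// yield the same key from two independently built strategies.
Assert.assertArrayEquals(
    strategy1.computeCacheKey(query1),
    new TopNQueryQueryToolChest(null, null).getCacheStrategy(query1).computeCacheKey(query1)
);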
use of io.druid.query.Result in project druid by druid-io.
In the class TopNQueryRunnerTest, the method testTopNDimExtractionToOne verifies that an extraction function mapping every dimension value to a single constant collapses the TopN results into one row.
@Test
public void testTopNDimExtractionToOne() throws IOException {
  // Every market value is rewritten to the single literal "POTATO", so all
  // rows should merge into one TopN entry.
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension(
          new ExtractionDimensionSpec(
              QueryRunnerTestHelper.marketDimension,
              QueryRunnerTestHelper.marketDimension,
              new JavaScriptExtractionFn("function(f) { return \"POTATO\"; }", false, JavaScriptConfig.getEnabledInstance())
          )
      )
      .metric("rows")
      .threshold(10)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(QueryRunnerTestHelper.commonAggregators)
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();
  Granularity gran = Granularities.DAY;
  // Note: tsQuery is constructed here but not used by the assertions below.
  TimeseriesQuery tsQuery = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(gran)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(Arrays.asList(QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.indexDoubleSum, QueryRunnerTestHelper.qualityUniques))
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(
              Arrays.<Map<String, Object>>asList(
                  ImmutableMap.<String, Object>of(
                      "addRowsIndexConstant", 504542.5071372986D,
                      "index", 503332.5071372986D,
                      QueryRunnerTestHelper.marketDimension, "POTATO",
                      "uniques", QueryRunnerTestHelper.UNIQUES_9,
                      "rows", 1209L
                  )
              )
          )
      )
  );
  List<Result<TopNResultValue>> list = Sequences.toList(runWithMerge(query), new ArrayList<Result<TopNResultValue>>());
  // JUnit's assertEquals takes the expected value first.
  Assert.assertEquals(1, list.size());
  Assert.assertEquals("Didn't merge results", 1, list.get(0).getValue().getValue().size());
  TestHelper.assertExpectedResults(expectedResults, list, "Failed to match");
}
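The collapse happens because the JavaScript extraction function maps every input to the same literal. A minimal sketch of that behavior in isolation (not part of the original test; assumes ExtractionFn's apply(String) as used in this Druid version):

// Sketch: the extraction function sends every market value to one bucket,
// which is why the TopN above merges down to a single row.
JavaScriptExtractionFn fn = new JavaScriptExtractionFn(
    "function(f) { return \"POTATO\"; }", false, JavaScriptConfig.getEnabledInstance());
Assert.assertEquals("POTATO", fn.apply("spot"));
Assert.assertEquals("POTATO", fn.apply("total_market"));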
use of io.druid.query.Result in project druid by druid-io.
In the class TopNQueryRunnerTest, the method testNumericDimensionTopNWithNullPreviousStop verifies numeric ordering of dimension values when no previousStop value is supplied.
@Test
public void testNumericDimensionTopNWithNullPreviousStop() {
  // A null previousStop means the numeric dimension ordering starts from the
  // beginning rather than resuming after a given value.
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(Granularities.ALL)
      .dimension(QueryRunnerTestHelper.marketDimension)
      .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
      .threshold(2)
      .intervals(QueryRunnerTestHelper.secondOnly)
      .aggregators(duplicateAggregators(QueryRunnerTestHelper.rowsCount, new CountAggregatorFactory("rows1")))
      .build();
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2011-04-02T00:00:00.000Z"),
          new TopNResultValue(
              withDuplicateResults(
                  Arrays.asList(
                      ImmutableMap.<String, Object>of("market", "spot", "rows", 9L),
                      ImmutableMap.<String, Object>of("market", "total_market", "rows", 2L)
                  ),
                  "rows",
                  "rows1"
              )
          )
      )
  );
  TestHelper.assertExpectedResults(expectedResults, runner.run(query, new HashMap<String, Object>()));
}
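For context on the NUMERIC spec, a small hedged sketch (not from the test) contrasting numeric and lexicographic ordering of dimension values:

// Sketch: StringComparators.NUMERIC orders values as numbers, so "9" sorts
// before "10"; the lexicographic comparator would order them the other way.
Assert.assertTrue(StringComparators.NUMERIC.compare("9", "10") < 0);
Assert.assertTrue(StringComparators.LEXICOGRAPHIC.compare("9", "10") > 0);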
use of io.druid.query.Result in project druid by druid-io.
In the class TopNQueryRunnerTest, the method testTopNWithExtractionFilter verifies filtering on a lookup-extracted dimension value.
@Test
public void testTopNWithExtractionFilter() {
  // The lookup maps "spot" to "spot0"; the filter below matches rows whose
  // extracted market value equals "spot0".
  Map<String, String> extractionMap = new HashMap<>();
  extractionMap.put("spot", "spot0");
  MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
  LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false);
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension(QueryRunnerTestHelper.marketDimension)
      .metric("rows")
      .threshold(3)
      .intervals(QueryRunnerTestHelper.firstToThird)
      .aggregators(QueryRunnerTestHelper.commonAggregators)
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .filters(new ExtractionDimFilter(QueryRunnerTestHelper.marketDimension, "spot0", lookupExtractionFn, null))
      .build();
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2011-04-01T00:00:00.000Z"),
          new TopNResultValue(
              Arrays.<Map<String, Object>>asList(
                  ImmutableMap.<String, Object>of(
                      QueryRunnerTestHelper.marketDimension, "spot",
                      "rows", 18L,
                      "index", 2231.8768157958984D,
                      "addRowsIndexConstant", 2250.8768157958984D,
                      "uniques", QueryRunnerTestHelper.UNIQUES_9
                  )
              )
          )
      )
  );
  assertExpectedResults(expectedResults, query);
  // Assert the optimization path as well
  final Sequence<Result<TopNResultValue>> retval = runWithPreMergeAndMerge(query);
  TestHelper.assertExpectedResults(expectedResults, retval);
}
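As a hedged side note (not part of the original test), the lookup's behavior in isolation: reading the constructor flags above as retainMissingValue=false and replaceMissingValueWith=null, mapped keys are rewritten while unmapped values come back null, so only "spot" rows can match the "spot0" filter.

// Sketch under the assumption stated in the lead-in: with
// retainMissingValue=false and replaceMissingValueWith=null, unmapped
// inputs extract to null.
Assert.assertEquals("spot0", lookupExtractionFn.apply("spot"));
Assert.assertNull(lookupExtractionFn.apply("total_market"));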