Usage example of org.apache.druid.query.aggregation.LongSumAggregatorFactory from the druid-io/druid project: class GroupByQueryRunnerTest, method testGroupByFloatColumnDescending.
@Test
public void testGroupByFloatColumnDescending() {
  // GroupBy v1 cannot produce non-STRING dimension output types, so it must fail fast.
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("GroupBy v1 only supports dimensions with an outputType of STRING.");
  }

  // Group on a FLOAT-typed dimension and order it descending with a numeric comparator.
  GroupByQuery query = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("qualityFloat", "qf_alias", ColumnType.FLOAT))
      .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), null))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .addOrderByColumn(new OrderByColumnSpec("qf_alias", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC))
      .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
      .build();

  // The limit spec must be a real reordering transform, not the identity function.
  Assert.assertNotEquals(Functions.<Sequence<ResultRow>>identity(), query.getLimitSpec().build(query));

  List<ResultRow> expected = Arrays.asList(
      makeRow(query, "2011-04-01", "qf_alias", 17000.0f, "rows", 2L, "idx", 175L),
      makeRow(query, "2011-04-01", "qf_alias", 12000.0f, "rows", 2L, "idx", 324L)
  );
  Iterable<ResultRow> actual = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expected, actual, "float");
}
Usage example of org.apache.druid.query.aggregation.LongSumAggregatorFactory from the druid-io/druid project: class GroupByQueryRunnerTest, method testGroupByNoMatchingPrefilter.
@Test
public void testGroupByNoMatchingPrefilter() {
  // Filter on market=spot, then sum "index" only over rows where longNumericNull is
  // NOT null — a filtered aggregator whose prefilter matches nothing on some rows.
  GroupByQuery query = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("quality", "quality"))
      .setDimFilter(new SelectorDimFilter("market", "spot", null, null))
      .setAggregatorSpecs(
          QueryRunnerTestHelper.ROWS_COUNT,
          new FilteredAggregatorFactory(
              new LongSumAggregatorFactory("index", "index"),
              new NotDimFilter(new SelectorDimFilter("longNumericNull", null, null))
          )
      )
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setLimit(1)
      .build();

  List<ResultRow> expected = ImmutableList.of(
      makeRow(query, "2011-04-01", "quality", "automotive", "rows", 1L, "index", 135L)
  );
  Iterable<ResultRow> actual = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expected, actual, "groupBy");
}
Usage example of org.apache.druid.query.aggregation.LongSumAggregatorFactory from the druid-io/druid project: class GroupByQueryRunnerTest, method doTestMergeResultsWithValidLimit.
/**
 * Runs a monthly-granularity groupBy with the given limit/offset and checks that the
 * merged results equal the full expected result list after applying skip(offset) and
 * limit(limit) — i.e. the limit spec behaves like Iterables.skip + Iterables.limit.
 */
private void doTestMergeResultsWithValidLimit(final int limit, final int offset) {
  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setLimitSpec(DefaultLimitSpec.builder().limit(limit).offset(offset).build());
  final GroupByQuery fullQuery = builder.build();

  // Full (unlimited) result set; the limit/offset view is computed from this below.
  List<ResultRow> allRows = Arrays.asList(
      makeRow(fullQuery, "2011-04-01", "alias", "automotive", "rows", 2L, "idx", 269L),
      makeRow(fullQuery, "2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
      makeRow(fullQuery, "2011-04-01", "alias", "entertainment", "rows", 2L, "idx", 319L),
      makeRow(fullQuery, "2011-04-01", "alias", "health", "rows", 2L, "idx", 216L),
      makeRow(fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
      makeRow(fullQuery, "2011-04-01", "alias", "news", "rows", 2L, "idx", 221L),
      makeRow(fullQuery, "2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L),
      makeRow(fullQuery, "2011-04-01", "alias", "technology", "rows", 2L, "idx", 177L),
      makeRow(fullQuery, "2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L)
  );

  QueryRunner<ResultRow> mergeRunner = factory.getToolchest().mergeResults(runner);
  TestHelper.assertExpectedObjects(
      Iterables.limit(Iterables.skip(allRows, offset), limit),
      mergeRunner.run(QueryPlus.wrap(fullQuery)),
      StringUtils.format("limit: %d", limit)
  );
}
Usage example of org.apache.druid.query.aggregation.LongSumAggregatorFactory from the druid-io/druid project: class GroupByQueryRunnerTest, method testBySegmentResultsWithAllFiltersWithExtractionFns.
@Test
public void testBySegmentResultsWithAllFiltersWithExtractionFns() {
  // The extraction fn prefixes every dimension value with "super-"; the JS predicate
  // then matches only the extracted value "super-mezzanine".
  String extractionJsFn = "function(str) { return 'super-' + str; }";
  String jsFn = "function(x) { return(x === 'super-mezzanine') }";
  ExtractionFn extractionFn = new JavaScriptExtractionFn(extractionJsFn, false, JavaScriptConfig.getEnabledInstance());

  // AND together one of every filter type, each driven by the same extraction fn and
  // each selecting "mezzanine" (post-extraction: "super-mezzanine").
  List<DimFilter> superFilterList = new ArrayList<>();
  superFilterList.add(new SelectorDimFilter("quality", "super-mezzanine", extractionFn));
  superFilterList.add(new InDimFilter("quality", Arrays.asList("not-super-mezzanine", "FOOBAR", "super-mezzanine"), extractionFn));
  superFilterList.add(new BoundDimFilter("quality", "super-mezzanine", "super-mezzanine", false, false, true, extractionFn, StringComparators.ALPHANUMERIC));
  superFilterList.add(new RegexDimFilter("quality", "super-mezzanine", extractionFn));
  superFilterList.add(new SearchQueryDimFilter("quality", new ContainsSearchQuerySpec("super-mezzanine", true), extractionFn));
  superFilterList.add(new JavaScriptDimFilter("quality", jsFn, extractionFn, JavaScriptConfig.getEnabledInstance()));
  DimFilter superFilter = new AndDimFilter(superFilterList);

  GroupByQuery.Builder builder = makeQueryBuilder().setDataSource(QueryRunnerTestHelper.DATA_SOURCE).setInterval("2011-04-02/2011-04-04").setDimensions(new DefaultDimensionSpec("quality", "alias")).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")).setGranularity(new PeriodGranularity(new Period("P1M"), null, null)).setDimFilter(superFilter).overrideContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
  final GroupByQuery fullQuery = builder.build();

  // With bySegment=true, each of the 32 segment runners reports the same per-segment row.
  int segmentCount = 32;
  Result<BySegmentResultValue> singleSegmentResult = new Result<>(DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass<>(Collections.singletonList(makeRow(fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L)), QueryRunnerTestHelper.SEGMENT_ID.toString(), Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z")));
  List<Result> bySegmentResults = new ArrayList<>();
  for (int i = 0; i < segmentCount; i++) {
    bySegmentResults.add(singleSegmentResult);
  }

  QueryToolChest toolChest = factory.getToolchest();
  List<QueryRunner<ResultRow>> singleSegmentRunners = new ArrayList<>();
  for (int i = 0; i < segmentCount; i++) {
    singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
  }

  ExecutorService exec = Executors.newCachedThreadPool();
  try {
    // FIX: previously a second Executors.newCachedThreadPool() was created inline and
    // passed to mergeRunners while "exec" went unused — shutdownNow() then stopped the
    // wrong (idle) pool and the pool that actually ran the query leaked. Reuse "exec"
    // so the shutdown in the finally block stops the pool that did the work.
    QueryRunner theRunner = toolChest.postMergeQueryDecoration(
        new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(factory.mergeRunners(exec, singleSegmentRunners)),
            toolChest
        )
    );
    TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery)), "bySegment-filter");
  } finally {
    exec.shutdownNow();
  }
}
Usage example of org.apache.druid.query.aggregation.LongSumAggregatorFactory from the druid-io/druid project: class GroupByQueryRunnerTest, method testGroupByWithTimeZone.
@Test
public void testGroupByWithTimeZone() {
  // Daily granularity anchored to America/Los_Angeles; expected bucket timestamps are
  // therefore local midnights in that zone, not UTC.
  DateTimeZone tz = DateTimes.inferTzFromString("America/Los_Angeles");

  GroupByQuery query = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-03-31T00:00:00-07:00/2011-04-02T00:00:00-07:00")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(new PeriodGranularity(new Period("P1D"), null, tz))
      .build();

  List<ResultRow> expected = Arrays.asList(
      makeRow(query, new DateTime("2011-03-31", tz), "alias", "automotive", "rows", 1L, "idx", 135L),
      makeRow(query, new DateTime("2011-03-31", tz), "alias", "business", "rows", 1L, "idx", 118L),
      makeRow(query, new DateTime("2011-03-31", tz), "alias", "entertainment", "rows", 1L, "idx", 158L),
      makeRow(query, new DateTime("2011-03-31", tz), "alias", "health", "rows", 1L, "idx", 120L),
      makeRow(query, new DateTime("2011-03-31", tz), "alias", "mezzanine", "rows", 3L, "idx", 2870L),
      makeRow(query, new DateTime("2011-03-31", tz), "alias", "news", "rows", 1L, "idx", 121L),
      makeRow(query, new DateTime("2011-03-31", tz), "alias", "premium", "rows", 3L, "idx", 2900L),
      makeRow(query, new DateTime("2011-03-31", tz), "alias", "technology", "rows", 1L, "idx", 78L),
      makeRow(query, new DateTime("2011-03-31", tz), "alias", "travel", "rows", 1L, "idx", 119L),
      makeRow(query, new DateTime("2011-04-01", tz), "alias", "automotive", "rows", 1L, "idx", 147L),
      makeRow(query, new DateTime("2011-04-01", tz), "alias", "business", "rows", 1L, "idx", 112L),
      makeRow(query, new DateTime("2011-04-01", tz), "alias", "entertainment", "rows", 1L, "idx", 166L),
      makeRow(query, new DateTime("2011-04-01", tz), "alias", "health", "rows", 1L, "idx", 113L),
      makeRow(query, new DateTime("2011-04-01", tz), "alias", "mezzanine", "rows", 3L, "idx", 2447L),
      makeRow(query, new DateTime("2011-04-01", tz), "alias", "news", "rows", 1L, "idx", 114L),
      makeRow(query, new DateTime("2011-04-01", tz), "alias", "premium", "rows", 3L, "idx", 2505L),
      makeRow(query, new DateTime("2011-04-01", tz), "alias", "technology", "rows", 1L, "idx", 97L),
      makeRow(query, new DateTime("2011-04-01", tz), "alias", "travel", "rows", 1L, "idx", 126L)
  );
  Iterable<ResultRow> actual = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expected, actual, "timezone");
}
Aggregations