Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.
In class GroupByQueryRunnerTest, method testGroupByWithOrderLimit2:
@Test
public void testGroupByWithOrderLimit2() {
GroupByQuery.Builder builder = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setInterval("2011-04-02/2011-04-04")
    .setDimensions(new DefaultDimensionSpec("quality", "alias"))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
    .addOrderByColumn("rows", OrderByColumnSpec.Direction.DESCENDING)
    .addOrderByColumn("alias", OrderByColumnSpec.Direction.DESCENDING)
    .setGranularity(new PeriodGranularity(new Period("P1M"), null, null));
final GroupByQuery query = builder.build();
List<ResultRow> expectedResults = Arrays.asList(
    makeRow(query, "2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L),
    makeRow(query, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
    makeRow(query, "2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L),
    makeRow(query, "2011-04-01", "alias", "technology", "rows", 2L, "idx", 177L),
    makeRow(query, "2011-04-01", "alias", "news", "rows", 2L, "idx", 221L),
    makeRow(query, "2011-04-01", "alias", "health", "rows", 2L, "idx", 216L),
    makeRow(query, "2011-04-01", "alias", "entertainment", "rows", 2L, "idx", 319L),
    makeRow(query, "2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
    makeRow(query, "2011-04-01", "alias", "automotive", "rows", 2L, "idx", 269L));
QueryRunner<ResultRow> mergeRunner = factory.getToolchest().mergeResults(runner);
TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(query)), "no-limit");
TestHelper.assertExpectedObjects(Iterables.limit(expectedResults, 5), mergeRunner.run(QueryPlus.wrap(builder.setLimit(5).build())), "limited");
}
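The P1M granularity is why every expected row above is stamped 2011-04-01 even though the query interval is 2011-04-02/2011-04-04: a PeriodGranularity with no origin snaps each row's timestamp to the start of its calendar month. A minimal sketch of that bucketing (imports as in the surrounding tests plus org.joda.time.DateTime; assuming the Granularity#bucketStart(DateTime) method exposed by this version of the codebase):
// Month-sized buckets, no origin, default (UTC) time zone -- the same granularity as the query above.
PeriodGranularity monthly = new PeriodGranularity(new Period("P1M"), null, null);
// Any timestamp inside 2011-04-02/2011-04-04 falls into the April bucket,
// which starts at 2011-04-01T00:00:00Z -- the timestamp carried by the expected rows.
DateTime bucket = monthly.bucketStart(DateTimes.of("2011-04-02T14:30:00Z"));
// bucket is expected to be 2011-04-01T00:00:00.000Z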
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.
In class GroupByQueryRunnerTest, method testGroupByOrderLimitNumeric:
@Test
public void testGroupByOrderLimitNumeric() {
GroupByQuery.Builder builder = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setInterval("2011-04-02/2011-04-04")
    .setDimensions(new DefaultDimensionSpec("quality", "alias"))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
    .addOrderByColumn(new OrderByColumnSpec("rows", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC))
    .addOrderByColumn(new OrderByColumnSpec("alias", OrderByColumnSpec.Direction.ASCENDING, StringComparators.NUMERIC))
    .setGranularity(new PeriodGranularity(new Period("P1M"), null, null));
final GroupByQuery query = builder.build();
List<ResultRow> expectedResults = Arrays.asList(
    makeRow(query, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
    makeRow(query, "2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L),
    makeRow(query, "2011-04-01", "alias", "automotive", "rows", 2L, "idx", 269L),
    makeRow(query, "2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
    makeRow(query, "2011-04-01", "alias", "entertainment", "rows", 2L, "idx", 319L),
    makeRow(query, "2011-04-01", "alias", "health", "rows", 2L, "idx", 216L),
    makeRow(query, "2011-04-01", "alias", "news", "rows", 2L, "idx", 221L),
    makeRow(query, "2011-04-01", "alias", "technology", "rows", 2L, "idx", 177L),
    makeRow(query, "2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L));
QueryRunner<ResultRow> mergeRunner = factory.getToolchest().mergeResults(runner);
TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(query)), "no-limit");
TestHelper.assertExpectedObjects(Iterables.limit(expectedResults, 5), mergeRunner.run(QueryPlus.wrap(builder.setLimit(5).build())), "limited");
}
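The only change from the previous test is the explicit OrderByColumnSpec with StringComparators.NUMERIC, which compares values as numbers when both sides parse as numbers; for values that do not parse, such as the quality strings here, it is expected to fall back to plain string comparison, which is why the aliases come out in alphabetical order within each rows value. A small sketch of that comparator (the fallback behaviour for non-numeric inputs is an assumption about this comparator, not something the test asserts):
// Both sides numeric: compared by value, so "9" sorts before "10".
int numericOrder = StringComparators.NUMERIC.compare("9", "10");              // expected < 0
// Neither side numeric: assumed to behave like an ordinary string comparison.
int fallbackOrder = StringComparators.NUMERIC.compare("automotive", "business");  // expected < 0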
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.
In class GroupByQueryRunnerTest, method testGroupByOrderLimit:
@Test
public void testGroupByOrderLimit() {
GroupByQuery.Builder builder = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setInterval("2011-04-02/2011-04-04")
    .setDimensions(new DefaultDimensionSpec("quality", "alias"))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
    .addOrderByColumn("rows")
    .addOrderByColumn("alias", OrderByColumnSpec.Direction.DESCENDING)
    .setGranularity(new PeriodGranularity(new Period("P1M"), null, null));
final GroupByQuery query = builder.build();
List<ResultRow> expectedResults = Arrays.asList(
    makeRow(query, "2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L),
    makeRow(query, "2011-04-01", "alias", "technology", "rows", 2L, "idx", 177L),
    makeRow(query, "2011-04-01", "alias", "news", "rows", 2L, "idx", 221L),
    makeRow(query, "2011-04-01", "alias", "health", "rows", 2L, "idx", 216L),
    makeRow(query, "2011-04-01", "alias", "entertainment", "rows", 2L, "idx", 319L),
    makeRow(query, "2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
    makeRow(query, "2011-04-01", "alias", "automotive", "rows", 2L, "idx", 269L),
    makeRow(query, "2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L),
    makeRow(query, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L));
QueryRunner<ResultRow> mergeRunner = factory.getToolchest().mergeResults(runner);
TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(query)), "no-limit");
TestHelper.assertExpectedObjects(Iterables.limit(expectedResults, 5), mergeRunner.run(QueryPlus.wrap(builder.setLimit(5).build())), "limited");
// Now try it with an expression based aggregator.
List<AggregatorFactory> aggregatorSpecs = Arrays.asList(
    QueryRunnerTestHelper.ROWS_COUNT,
    new DoubleSumAggregatorFactory("idx", null, "index / 2 + indexMin", TestExprMacroTable.INSTANCE));
builder.setLimit(Integer.MAX_VALUE).setAggregatorSpecs(aggregatorSpecs);
expectedResults = makeRows(
    builder.build(),
    new String[] { "__time", "alias", "rows", "idx" },
    new Object[] { "2011-04-01", "travel", 2L, 365.4876403808594D },
    new Object[] { "2011-04-01", "technology", 2L, 267.3737487792969D },
    new Object[] { "2011-04-01", "news", 2L, 333.3147277832031D },
    new Object[] { "2011-04-01", "health", 2L, 325.467529296875D },
    new Object[] { "2011-04-01", "entertainment", 2L, 479.916015625D },
    new Object[] { "2011-04-01", "business", 2L, 328.083740234375D },
    new Object[] { "2011-04-01", "automotive", 2L, 405.5966796875D },
    new Object[] { "2011-04-01", "premium", 6L, 6627.927734375D },
    new Object[] { "2011-04-01", "mezzanine", 6L, 6635.47998046875D });
TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(builder.build())), "no-limit");
TestHelper.assertExpectedObjects(Iterables.limit(expectedResults, 5), mergeRunner.run(QueryPlus.wrap(builder.setLimit(5).build())), "limited");
// Now try it with an expression virtual column.
ExpressionVirtualColumn expressionVirtualColumn = new ExpressionVirtualColumn("expr", "index / 2 + indexMin", ColumnType.FLOAT, TestExprMacroTable.INSTANCE);
List<AggregatorFactory> aggregatorSpecs2 = Arrays.asList(QueryRunnerTestHelper.ROWS_COUNT, new DoubleSumAggregatorFactory("idx", "expr"));
builder.setLimit(Integer.MAX_VALUE).setVirtualColumns(expressionVirtualColumn).setAggregatorSpecs(aggregatorSpecs2);
TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(builder.build())), "no-limit");
TestHelper.assertExpectedObjects(Iterables.limit(expectedResults, 5), mergeRunner.run(QueryPlus.wrap(builder.setLimit(5).build())), "limited");
}
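Here addOrderByColumn("rows") gives no explicit direction, so the ordering relies on the builder's defaults; assuming that call expands to an ascending, lexicographically compared OrderByColumnSpec (an assumption about the defaults in this version, not something the test asserts), the rows=2 groups sort before the rows=6 groups, and the second order-by column then sorts aliases descending within each rows value:
// Assumed expansion of addOrderByColumn("rows"): ascending direction, lexicographic comparator.
OrderByColumnSpec assumedDefault =
    new OrderByColumnSpec("rows", OrderByColumnSpec.Direction.ASCENDING, StringComparators.LEXICOGRAPHIC);
// "2" < "6" both lexicographically and numerically, so the seven rows=2 groups
// precede premium and mezzanine (rows=6) in the expected results above.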
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.
In class VarianceGroupByQueryTest, method testPostAggHavingSpec:
@Test
public void testPostAggHavingSpec() {
VarianceTestHelper.RowBuilder expect = new VarianceTestHelper.RowBuilder(new String[] { "alias", "rows", "index", "index_var", "index_stddev" });
GroupByQuery query = queryBuilder
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setInterval("2011-04-02/2011-04-04")
    .setDimensions(new DefaultDimensionSpec("quality", "alias"))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_LONG_SUM, VarianceTestHelper.INDEX_VARIANCE_AGGR)
    .setPostAggregatorSpecs(ImmutableList.of(VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR))
    .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
    .setHavingSpec(new OrHavingSpec(ImmutableList.of(
        // 3 rows
        new GreaterThanHavingSpec(VarianceTestHelper.STD_DEV_OF_INDEX_METRIC, 15L))))
    .build();
List<ResultRow> expectedResults = expect
    .add("2011-04-01", "automotive", 2L, 269L, 299.0009819048282, 17.29164485827847)
    .add("2011-04-01", "mezzanine", 6L, 4420L, 254083.76447001836, 504.06722217380724)
    .add("2011-04-01", "premium", 6L, 4416L, 252279.2020389339, 502.27403082275106)
    .build(query);
Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
TestHelper.assertExpectedObjects(expectedResults, results, "havingSpec");
query = query.withLimitSpec(new DefaultLimitSpec(Collections.singletonList(OrderByColumnSpec.asc(VarianceTestHelper.STD_DEV_OF_INDEX_METRIC)), 2));
expectedResults = expect.add("2011-04-01", "automotive", 2L, 269L, 299.0009819048282, 17.29164485827847).add("2011-04-01", "premium", 6L, 4416L, 252279.2020389339, 502.27403082275106).build(query);
results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
TestHelper.assertExpectedObjects(expectedResults, results, "limitSpec");
}
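The index_stddev column asserted here comes from the standard-deviation post-aggregator defined over the variance aggregate, so each expected index_stddev value is simply the square root of the corresponding index_var value; the GreaterThanHavingSpec(..., 15L) then keeps every group whose stddev exceeds 15, which is all three groups (hence the "3 rows" comment). A quick arithmetic check of the expected rows:
// index_stddev == sqrt(index_var) for each expected row above.
Math.sqrt(299.0009819048282);    // ~17.29164485827847  (automotive)
Math.sqrt(254083.76447001836);   // ~504.06722217380724 (mezzanine)
Math.sqrt(252279.2020389339);    // ~502.27403082275106 (premium)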
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.
In class TimeseriesQueryRunnerTest, method testTimeseriesQueryGranularityNotAlignedWithRollupGranularity:
@Test
public void testTimeseriesQueryGranularityNotAlignedWithRollupGranularity() {
TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market")
    .granularity(new PeriodGranularity(new Period("PT1H"), DateTimes.utc(60000), DateTimeZone.UTC))
    .intervals(Collections.singletonList(Intervals.of("2011-04-15T00:00:00.000Z/2012")))
    .aggregators(Arrays.asList(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")))
    .descending(descending)
    .context(makeContext())
    .build();
List<Result<TimeseriesResultValue>> expectedResults1 = Collections.singletonList(
    new Result<>(DateTimes.of("2011-04-14T23:01Z"),
        new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 4717L))));
Iterable<Result<TimeseriesResultValue>> results1 = runner.run(QueryPlus.wrap(query1)).toList();
assertExpectedResults(expectedResults1, results1);
}
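Here the PT1H granularity is anchored at DateTimes.utc(60000), one minute past the epoch, so the hourly buckets are expected to start at one minute past each hour rather than on the hour; the bucket covering the start of the queried interval therefore begins at 2011-04-14T23:01Z, the timestamp of the single expected result. A minimal sketch, again assuming Granularity#bucketStart(DateTime) as in the earlier sketch:
// Hourly buckets offset by the origin 1970-01-01T00:01:00Z, evaluated in UTC.
PeriodGranularity hourlyOffset = new PeriodGranularity(new Period("PT1H"), DateTimes.utc(60000), DateTimeZone.UTC);
// 2011-04-15T00:00:00Z falls in the bucket [2011-04-14T23:01Z, 2011-04-15T00:01Z).
DateTime bucket = hourlyOffset.bucketStart(DateTimes.of("2011-04-15T00:00:00Z"));
// bucket is expected to be 2011-04-14T23:01:00.000Z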