Use of io.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
From class SearchQueryTest, method testEquals:
@Test
public void testEquals() {
  Query query1 = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .dimensions(new DefaultDimensionSpec(QueryRunnerTestHelper.qualityDimension, QueryRunnerTestHelper.qualityDimension))
      .query("a")
      .build();
  Query query2 = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .dimensions(new DefaultDimensionSpec(QueryRunnerTestHelper.qualityDimension, QueryRunnerTestHelper.qualityDimension))
      .query("a")
      .build();
  Assert.assertEquals(query1, query2);
}
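The equality check above ultimately relies on DefaultDimensionSpec providing value equality over its dimension and output name. A minimal illustrative sketch, not taken from the Druid test suite, assuming the usual two-argument constructor:

DimensionSpec specA = new DefaultDimensionSpec("quality", "quality");
DimensionSpec specB = new DefaultDimensionSpec("quality", "quality");
// Two specs built from the same dimension and output name compare equal.
Assert.assertEquals(specA, specB);
Assert.assertEquals("quality", specA.getDimension());
Assert.assertEquals("quality", specA.getOutputName());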
Use of io.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
From class DefaultLimitSpecTest, method testBuildWithExplicitOrder:
@Test
public void testBuildWithExplicitOrder() {
  DefaultLimitSpec limitSpec = new DefaultLimitSpec(
      ImmutableList.of(new OrderByColumnSpec("k1", OrderByColumnSpec.Direction.ASCENDING)),
      2
  );
  Function<Sequence<Row>, Sequence<Row>> limitFn = limitSpec.build(
      ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
      ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k2", "k2")),
      ImmutableList.<PostAggregator>of(new ConstantPostAggregator("k3", 1L))
  );
  Assert.assertEquals(
      ImmutableList.of(testRowsList.get(0), testRowsList.get(1)),
      Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
  );
  // if there is an aggregator with the same name, it is used to build the ordering
  limitFn = limitSpec.build(
      ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
      ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k1", "k1")),
      ImmutableList.<PostAggregator>of(new ConstantPostAggregator("k3", 1L))
  );
  Assert.assertEquals(
      ImmutableList.of(testRowsList.get(2), testRowsList.get(0)),
      Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
  );
  // if there is a post-aggregator with the same name, it is used to build the ordering
  limitFn = limitSpec.build(
      ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
      ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k2", "k2")),
      ImmutableList.<PostAggregator>of(new ArithmeticPostAggregator("k1", "+", ImmutableList.<PostAggregator>of(new ConstantPostAggregator("x", 1), new ConstantPostAggregator("y", 1))))
  );
  Assert.assertEquals(
      (List) ImmutableList.of(testRowsList.get(2), testRowsList.get(0)),
      (List) Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
  );
  // an ExpressionPostAggregator with the same name produces the same ordering
  limitFn = limitSpec.build(
      ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
      ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k2", "k2")),
      ImmutableList.<PostAggregator>of(new ExpressionPostAggregator("k1", "1 + 1"))
  );
  Assert.assertEquals(
      (List) ImmutableList.of(testRowsList.get(2), testRowsList.get(0)),
      (List) Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
  );
}
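The testRowsList and testRowsSequence fixtures are defined elsewhere in the test class and are not shown here. Purely as a hypothetical sketch of the kind of rows they hold (MapBasedRow is Druid's standard in-memory Row implementation; the values below are made up for illustration):

// Hypothetical example row with a "k1" dimension and "k2"/"k3" metric values.
Row row = new MapBasedRow(
    new DateTime("2011-04-01"),
    ImmutableMap.<String, Object>of("k1", "a", "k2", 2L, "k3", 3L)
);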
Use of io.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
From class SelectQueryRunnerTest, method testFullOnSelectWithDimensionSpec:
@Test
public void testFullOnSelectWithDimensionSpec() {
  Map<String, String> map = new HashMap<>();
  map.put("automotive", "automotive0");
  map.put("business", "business0");
  map.put("entertainment", "entertainment0");
  map.put("health", "health0");
  map.put("mezzanine", "mezzanine0");
  map.put("news", "news0");
  map.put("premium", "premium0");
  map.put("technology", "technology0");
  map.put("travel", "travel0");
  SelectQuery query = newTestQuery()
      .dimensionSpecs(Arrays.<DimensionSpec>asList(
          new DefaultDimensionSpec(QueryRunnerTestHelper.marketDimension, "mar"),
          new ExtractionDimensionSpec(QueryRunnerTestHelper.qualityDimension, "qual", new LookupExtractionFn(new MapLookupExtractor(map, true), false, null, true, false)),
          new DefaultDimensionSpec(QueryRunnerTestHelper.placementDimension, "place")
      ))
      .build();
  HashMap<String, Object> context = new HashMap<String, Object>();
  Iterable<Result<SelectResultValue>> results = Sequences.toList(
      runner.run(query, context),
      Lists.<Result<SelectResultValue>>newArrayList()
  );
  List<Result<SelectResultValue>> expectedResultsAsc = Arrays.asList(
      new Result<SelectResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new SelectResultValue(
              ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2),
              Sets.newHashSet("mar", "qual", "place"),
              Sets.newHashSet("index", "quality_uniques", "indexMin", "indexMaxPlusTen"),
              Arrays.asList(
                  new EventHolder(QueryRunnerTestHelper.segmentId, 0, new ImmutableMap.Builder<String, Object>()
                      .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z"))
                      .put("mar", "spot").put("qual", "automotive0").put("place", "preferred")
                      .put(QueryRunnerTestHelper.indexMetric, 100.000000F).build()),
                  new EventHolder(QueryRunnerTestHelper.segmentId, 1, new ImmutableMap.Builder<String, Object>()
                      .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z"))
                      .put("mar", "spot").put("qual", "business0").put("place", "preferred")
                      .put(QueryRunnerTestHelper.indexMetric, 100.000000F).build()),
                  new EventHolder(QueryRunnerTestHelper.segmentId, 2, new ImmutableMap.Builder<String, Object>()
                      .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z"))
                      .put("mar", "spot").put("qual", "entertainment0").put("place", "preferred")
                      .put(QueryRunnerTestHelper.indexMetric, 100.000000F).build())
              )
          )
      )
  );
  List<Result<SelectResultValue>> expectedResultsDsc = Arrays.asList(
      new Result<SelectResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new SelectResultValue(
              ImmutableMap.of(QueryRunnerTestHelper.segmentId, -3),
              Sets.newHashSet("mar", "qual", "place"),
              Sets.newHashSet("index", "quality_uniques", "indexMin", "indexMaxPlusTen"),
              Arrays.asList(
                  new EventHolder(QueryRunnerTestHelper.segmentId, -1, new ImmutableMap.Builder<String, Object>()
                      .put(EventHolder.timestampKey, new DateTime("2011-04-15T00:00:00.000Z"))
                      .put("mar", "upfront").put("qual", "premium0").put("place", "preferred")
                      .put(QueryRunnerTestHelper.indexMetric, 780.27197265625F).build()),
                  new EventHolder(QueryRunnerTestHelper.segmentId, -2, new ImmutableMap.Builder<String, Object>()
                      .put(EventHolder.timestampKey, new DateTime("2011-04-15T00:00:00.000Z"))
                      .put("mar", "upfront").put("qual", "mezzanine0").put("place", "preferred")
                      .put(QueryRunnerTestHelper.indexMetric, 962.731201171875F).build()),
                  new EventHolder(QueryRunnerTestHelper.segmentId, -3, new ImmutableMap.Builder<String, Object>()
                      .put(EventHolder.timestampKey, new DateTime("2011-04-15T00:00:00.000Z"))
                      .put("mar", "total_market").put("qual", "premium0").put("place", "preferred")
                      .put(QueryRunnerTestHelper.indexMetric, 1029.0570068359375F).build())
              )
          )
      )
  );
  verify(descending ? expectedResultsDsc : expectedResultsAsc, results);
}
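The LookupExtractionFn built from the map above is what rewrites each quality value, which is why the expected "qual" values carry a trailing 0. A minimal illustrative check, not part of the original test, assuming ExtractionFn.apply(String):

ExtractionFn lookupFn = new LookupExtractionFn(new MapLookupExtractor(map, true), false, null, true, false);
// "automotive" is rewritten to "automotive0" by the lookup.
Assert.assertEquals("automotive0", lookupFn.apply("automotive"));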
Use of io.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
From class TopNBinaryFnBenchmark, method setUp:
@Override
protected void setUp() throws Exception {
  final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L);
  final FieldAccessPostAggregator rowsPostAgg = new FieldAccessPostAggregator("rows", "rows");
  final FieldAccessPostAggregator indexPostAgg = new FieldAccessPostAggregator("index", "index");
  final List<AggregatorFactory> aggregatorFactories = new ArrayList<>();
  aggregatorFactories.add(new CountAggregatorFactory("rows"));
  aggregatorFactories.add(new LongSumAggregatorFactory("index", "index"));
  for (int i = 1; i < aggCount; i++) {
    aggregatorFactories.add(new CountAggregatorFactory("rows" + i));
  }
  final List<PostAggregator> postAggregators = new ArrayList<>();
  for (int i = 0; i < postAggCount; i++) {
    postAggregators.add(new ArithmeticPostAggregator(
        "addrowsindexconstant" + i,
        "+",
        Lists.newArrayList(constant, rowsPostAgg, indexPostAgg)
    ));
  }
  final DateTime currTime = new DateTime();
  List<Map<String, Object>> list = new ArrayList<>();
  for (int i = 0; i < threshold; i++) {
    Map<String, Object> res = new HashMap<>();
    res.put("testdim", "" + i);
    res.put("rows", 1L);
    for (int j = 0; j < aggCount; j++) {
      res.put("rows" + j, 1L);
    }
    res.put("index", 1L);
    list.add(res);
  }
  result1 = new Result<>(currTime, new TopNResultValue(list));
  List<Map<String, Object>> list2 = new ArrayList<>();
  for (int i = 0; i < threshold; i++) {
    Map<String, Object> res = new HashMap<>();
    res.put("testdim", "" + i);
    res.put("rows", 2L);
    for (int j = 0; j < aggCount; j++) {
      res.put("rows" + j, 2L);
    }
    res.put("index", 2L);
    list2.add(res);
  }
  result2 = new Result<>(currTime, new TopNResultValue(list2));
  fn = new TopNBinaryFn(
      TopNResultMerger.identity,
      Granularities.ALL,
      new DefaultDimensionSpec("testdim", null),
      new NumericTopNMetricSpec("index"),
      100,
      aggregatorFactories,
      postAggregators
  );
}
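setUp only prepares the two partial results; the timed benchmark methods themselves are not shown here. As an illustrative sketch, not taken from the original class, the merge they exercise boils down to a single call:

// Merge the two partial top-N results into one combined result.
Result<TopNResultValue> merged = fn.apply(result1, result2);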
Use of io.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
From class TopNQueryQueryToolChestTest, method testComputeCacheKeyWithDifferentPostAgg:
@Test
public void testComputeCacheKeyWithDifferentPostAgg() throws Exception {
  final TopNQuery query1 = new TopNQuery(
      new TableDataSource("dummy"),
      VirtualColumns.EMPTY,
      new DefaultDimensionSpec("test", "test"),
      new NumericTopNMetricSpec("post"),
      3,
      new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
      null,
      Granularities.ALL,
      ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("metric1")),
      ImmutableList.<PostAggregator>of(new ConstantPostAggregator("post", 10)),
      null
  );
  final TopNQuery query2 = new TopNQuery(
      new TableDataSource("dummy"),
      VirtualColumns.EMPTY,
      new DefaultDimensionSpec("test", "test"),
      new NumericTopNMetricSpec("post"),
      3,
      new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("2015-01-01/2015-01-02"))),
      null,
      Granularities.ALL,
      ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("metric1")),
      ImmutableList.<PostAggregator>of(
          new ArithmeticPostAggregator(
              "post",
              "+",
              ImmutableList.<PostAggregator>of(new FieldAccessPostAggregator(null, "metric1"), new FieldAccessPostAggregator(null, "metric1"))
          )
      ),
      null
  );
  final CacheStrategy<Result<TopNResultValue>, Object, TopNQuery> strategy1 = new TopNQueryQueryToolChest(null, null).getCacheStrategy(query1);
  final CacheStrategy<Result<TopNResultValue>, Object, TopNQuery> strategy2 = new TopNQueryQueryToolChest(null, null).getCacheStrategy(query2);
  Assert.assertFalse(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2)));
}
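A hypothetical complementary check, not in the original test: the cache key is expected to be a deterministic function of the query, so only the differing post-aggregators above should change it.

// Re-deriving the strategy for query1 should reproduce its cache key byte-for-byte.
Assert.assertArrayEquals(
    strategy1.computeCacheKey(query1),
    new TopNQueryQueryToolChest(null, null).getCacheStrategy(query1).computeCacheKey(query1)
);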