Use of io.druid.query.filter.SelectorDimFilter in project druid by druid-io: class SelectQueryRunnerTest, method testFullOnSelectWithFilterOnVirtualColumn.
@Test
public void testFullOnSelectWithFilterOnVirtualColumn() {
  // select rows where market = 'spot' AND the virtual column expr (= index / 10.0) >= 11.1
  SelectQuery query = newTestQuery()
      .intervals("2011-01-13/2011-01-14")
      .filters(new AndDimFilter(Arrays.asList(
          new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null),
          new BoundDimFilter("expr", "11.1", null, false, false, null, null, StringComparators.NUMERIC))))
      .granularity(QueryRunnerTestHelper.allGran)
      .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension))
      .metrics(Lists.<String>newArrayList(QueryRunnerTestHelper.indexMetric))
      .pagingSpec(new PagingSpec(null, 10, true))
      .virtualColumns(new ExpressionVirtualColumn("expr", "index / 10.0"))
      .build();
  HashMap<String, Object> context = new HashMap<String, Object>();
  Iterable<Result<SelectResultValue>> results = Sequences.toList(
      runner.run(query, context),
      Lists.<Result<SelectResultValue>>newArrayList());
  // filtered values with all granularity
  final List<List<Map<String, Object>>> events = toEvents(
      new String[]{
          EventHolder.timestampKey + ":TIME", null,
          QueryRunnerTestHelper.qualityDimension + ":STRING", null, null,
          QueryRunnerTestHelper.indexMetric + ":FLOAT"
      },
      new String[]{
          "2011-01-13T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t114.947403",
          "2011-01-13T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t111.356672"
      });
  PagingOffset offset = query.getPagingOffset(QueryRunnerTestHelper.segmentId);
  List<Result<SelectResultValue>> expectedResults = toExpected(
      events,
      Lists.newArrayList("quality"),
      Lists.<String>newArrayList("index"),
      offset.startOffset(),
      offset.threshold());
  verify(expectedResults, results);
}
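Why those two rows survive the filter: the virtual column evaluates to index / 10.0, and the numeric bound keeps rows with expr >= 11.1, i.e. index >= 111.0, which both expected events (index 114.947403 and 111.356672) satisfy. For reference, the filter tree can be built on its own; a minimal sketch reusing the classes already imported by the test (the "market" literal is a stand-in for QueryRunnerTestHelper.marketDimension):

// market = 'spot' AND expr >= 11.1, compared numerically rather than lexicographically
DimFilter filter = new AndDimFilter(Arrays.asList(
    new SelectorDimFilter("market", "spot", null),
    new BoundDimFilter("expr", "11.1", null, false, false, null, null, StringComparators.NUMERIC)));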
Use of io.druid.query.filter.SelectorDimFilter in project druid by druid-io: class SelectQueryRunnerTest, method testSelectWithFilterLookupExtractionFn.
@Test
public void testSelectWithFilterLookupExtractionFn() {
  // "total_market" -> "replaced"; with retainMissingValue = false and no replacement value,
  // every other market value extracts to null
  Map<String, String> extractionMap = new HashMap<>();
  extractionMap.put("total_market", "replaced");
  MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
  LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
  SelectQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "replaced", lookupExtractionFn))
      .granularity(QueryRunnerTestHelper.dayGran)
      .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension))
      .metrics(Lists.<String>newArrayList(QueryRunnerTestHelper.indexMetric))
      .build();
  Iterable<Result<SelectResultValue>> results = Sequences.toList(
      runner.run(query, Maps.newHashMap()),
      Lists.<Result<SelectResultValue>>newArrayList());
  Iterable<Result<SelectResultValue>> resultsOptimize = Sequences.toList(
      toolChest.postMergeQueryDecoration(toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)))
               .run(query, Maps.<String, Object>newHashMap()),
      Lists.<Result<SelectResultValue>>newArrayList());
  // filtered values with day granularity
  final List<List<Map<String, Object>>> events = toEvents(
      new String[]{
          EventHolder.timestampKey + ":TIME", null,
          QueryRunnerTestHelper.qualityDimension + ":STRING", null, null,
          QueryRunnerTestHelper.indexMetric + ":FLOAT"
      },
      new String[]{
          "2011-01-12T00:00:00.000Z\ttotal_market\tmezzanine\tpreferred\tmpreferred\t1000.000000",
          "2011-01-12T00:00:00.000Z\ttotal_market\tpremium\tpreferred\tppreferred\t1000.000000"
      },
      new String[]{
          "2011-01-13T00:00:00.000Z\ttotal_market\tmezzanine\tpreferred\tmpreferred\t1040.945505",
          "2011-01-13T00:00:00.000Z\ttotal_market\tpremium\tpreferred\tppreferred\t1689.012875"
      });
  PagingOffset offset = query.getPagingOffset(QueryRunnerTestHelper.segmentId);
  List<Result<SelectResultValue>> expectedResults = toExpected(
      events,
      Lists.newArrayList(QueryRunnerTestHelper.qualityDimension),
      Lists.<String>newArrayList(QueryRunnerTestHelper.indexMetric),
      offset.startOffset(),
      offset.threshold());
  verify(expectedResults, results);
  verify(expectedResults, resultsOptimize);
}
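The selector compares extracted values, so matching "replaced" here is equivalent to matching the raw value "total_market". A minimal sketch of the extraction alone, reusing the classes already imported by the test and assuming the usual ExtractionFn.apply(String) behavior:

Map<String, String> lookup = new HashMap<>();
lookup.put("total_market", "replaced");
LookupExtractionFn fn = new LookupExtractionFn(new MapLookupExtractor(lookup, false), false, null, true, true);
fn.apply("total_market");  // "replaced" -- the selector's target value
fn.apply("spot");          // null -- unmapped values never match the selector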
Use of io.druid.query.filter.SelectorDimFilter in project druid by druid-io: class TopNQueryRunnerTest, method testTopNOverNullDimensionWithFilter.
@Test
public void testTopNOverNullDimensionWithFilter() {
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension("null_column")
      .filters(new SelectorDimFilter("null_column", null, null))
      .metric(QueryRunnerTestHelper.indexMetric)
      .threshold(4)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(Lists.newArrayList(Iterables.concat(
          QueryRunnerTestHelper.commonAggregators,
          Lists.newArrayList(
              new DoubleMaxAggregatorFactory("maxIndex", "index"),
              new DoubleMinAggregatorFactory("minIndex", "index")))))
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();
  // expected: a single entry with a null dimension value covering all 1209 input rows
  Map<String, Object> map = Maps.newHashMap();
  map.put("null_column", null);
  map.put("rows", 1209L);
  map.put("index", 503332.5071372986D);
  map.put("addRowsIndexConstant", 504542.5071372986D);
  map.put("uniques", QueryRunnerTestHelper.UNIQUES_9);
  map.put("maxIndex", 1870.06103515625D);
  map.put("minIndex", 59.02102279663086D);
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(Arrays.asList(map))));
  assertExpectedResults(expectedResults, query);
}
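The semantics this test leans on: a SelectorDimFilter with a null value matches rows where the dimension is null or absent, so on the nonexistent "null_column" it passes all 1209 rows, which collapse into a single topN entry whose dimension value is null. A minimal sketch of the contrast, reusing the test's classes:

DimFilter matchesMissing = new SelectorDimFilter("null_column", null, null);  // matches every row here
DimFilter matchesNothing = new SelectorDimFilter("null_column", "x", null);   // would match no rows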
Use of io.druid.query.filter.SelectorDimFilter in project druid by druid-io: class TimeseriesQueryRunnerTest, method testTimeseriesWithInvertedFilterOnNonExistentDimension.
@Test
public void testTimeseriesWithInvertedFilterOnNonExistentDimension() {
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.dayGran)
      .filters(new NotDimFilter(new SelectorDimFilter("bobby", "sally", null)))
      .intervals(QueryRunnerTestHelper.firstToThird)
      .aggregators(QueryRunnerTestHelper.commonAggregators)
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .descending(descending)
      .build();
  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2011-04-01"),
          new TimeseriesResultValue(ImmutableMap.<String, Object>of(
              "rows", 13L,
              "index", 6626.151596069336,
              "addRowsIndexConstant", 6640.151596069336,
              "uniques", QueryRunnerTestHelper.UNIQUES_9))),
      new Result<>(
          new DateTime("2011-04-02"),
          new TimeseriesResultValue(ImmutableMap.<String, Object>of(
              "rows", 13L,
              "index", 5833.2095947265625,
              "addRowsIndexConstant", 5847.2095947265625,
              "uniques", QueryRunnerTestHelper.UNIQUES_9))));
  Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
      runner.run(query, new HashMap<String, Object>()),
      Lists.<Result<TimeseriesResultValue>>newArrayList());
  assertExpectedResults(expectedResults, results);
}
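The expected values are those of an unfiltered day-granularity query: the dimension "bobby" does not exist, so the inner selector matches no rows and its NotDimFilter negation matches all of them. The filter in isolation, reusing the test's classes:

// NOT(bobby = 'sally'); since 'bobby' is never 'sally', this matches every row
DimFilter filter = new NotDimFilter(new SelectorDimFilter("bobby", "sally", null));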
Use of io.druid.query.filter.SelectorDimFilter in project druid by druid-io: class TimeFilteringTest, method testTimeFilterWithTimeFormatExtractionFn.
@Test
public void testTimeFilterWithTimeFormatExtractionFn() {
  // format __time as its full weekday name ("EEEE") in the America/New_York zone
  ExtractionFn exfn = new TimeFormatExtractionFn("EEEE", DateTimeZone.forID("America/New_York"), "en", null, false);
  assertFilterMatches(
      new SelectorDimFilter(Column.TIME_COLUMN_NAME, "Wednesday", exfn),
      ImmutableList.<String>of("0", "1", "2", "3", "4", "5"));
}
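Here the extraction runs against the __time column: "EEEE" renders each timestamp as its full weekday name in America/New_York, and the selector keeps rows rendering as "Wednesday"; per the assertion, all six test rows' timestamps fall on a Wednesday in that zone. A quick illustration of the extraction alone, reusing the test's classes plus joda-time, and assuming ExtractionFn.apply(long) behaves as elsewhere in this codebase:

ExtractionFn weekday = new TimeFormatExtractionFn("EEEE", DateTimeZone.forID("America/New_York"), "en", null, false);
// 2000-01-05T12:00Z is 07:00 EST on Wednesday, January 5th, in New York
weekday.apply(new DateTime("2000-01-05T12:00:00Z").getMillis());  // "Wednesday"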