Use of org.apache.druid.query.lookup.LookupExtractionFn in project druid by druid-io.
From the class DefaultGroupByQueryMetricsTest, method testDefaultGroupByQueryMetricsQuery:
/**
* Tests that, when passed a query, {@link DefaultGroupByQueryMetrics} produces events with a certain set of dimensions,
* no more, no less.
*/
@Test
public void testDefaultGroupByQueryMetricsQuery() {
CachingEmitter cachingEmitter = new CachingEmitter();
ServiceEmitter serviceEmitter = new ServiceEmitter("", "", cachingEmitter);
DefaultGroupByQueryMetrics queryMetrics = new DefaultGroupByQueryMetrics();
GroupByQuery.Builder builder = GroupByQuery.builder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setInterval("2011-04-02/2011-04-04")
    .setDimensions(new ExtractionDimensionSpec(
        "quality",
        "alias",
        new LookupExtractionFn(
            new MapLookupExtractor(ImmutableMap.of("mezzanine", "mezzanine0"), false),
            false, null, true, false)))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
    .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
    .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
    .setContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
GroupByQuery query = builder.build();
queryMetrics.query(query);
queryMetrics.reportQueryTime(0).emit(serviceEmitter);
Map<String, Object> actualEvent = cachingEmitter.getLastEmittedEvent().toMap();
Assert.assertEquals(16, actualEvent.size());
Assert.assertTrue(actualEvent.containsKey("feed"));
Assert.assertTrue(actualEvent.containsKey("timestamp"));
Assert.assertEquals("", actualEvent.get("host"));
Assert.assertEquals("", actualEvent.get("service"));
Assert.assertEquals(QueryRunnerTestHelper.DATA_SOURCE, actualEvent.get(DruidMetrics.DATASOURCE));
Assert.assertEquals(query.getType(), actualEvent.get(DruidMetrics.TYPE));
Interval expectedInterval = Intervals.of("2011-04-02/2011-04-04");
Assert.assertEquals(Collections.singletonList(expectedInterval.toString()), actualEvent.get(DruidMetrics.INTERVAL));
Assert.assertEquals("true", actualEvent.get("hasFilters"));
Assert.assertEquals(expectedInterval.toDuration().toString(), actualEvent.get("duration"));
Assert.assertEquals("", actualEvent.get(DruidMetrics.ID));
Assert.assertEquals(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true), actualEvent.get("context"));
// GroupBy-specific dimensions
Assert.assertEquals("1", actualEvent.get("numDimensions"));
Assert.assertEquals("2", actualEvent.get("numMetrics"));
Assert.assertEquals("0", actualEvent.get("numComplexMetrics"));
// Metric
Assert.assertEquals("query/time", actualEvent.get("metric"));
Assert.assertEquals(0L, actualEvent.get("value"));
}
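Every example on this page passes the same five positional arguments to LookupExtractionFn: the lookup itself, retainMissingValue, replaceMissingValueWith, injective, and optimize. A minimal sketch of what the first three mean, assuming the same test classpath as above (the variable names are illustrative, not from the tests):

MapLookupExtractor lookup = new MapLookupExtractor(ImmutableMap.of("spot", "spot0"), false);
// retainMissingValue = true: keys absent from the map pass through unchanged.
LookupExtractionFn retaining = new LookupExtractionFn(lookup, true, null, false, false);
String kept = retaining.apply("upfront"); // "upfront" (unmapped, retained)
// retainMissingValue = false plus a replacement: absent keys become "MISSING".
LookupExtractionFn replacing = new LookupExtractionFn(lookup, false, "MISSING", false, false);
String replaced = replacing.apply("upfront"); // "MISSING"

The last two arguments declare whether the lookup is injective (one-to-one), which query engines may exploit, and whether the extraction may be optimized at query rewrite time.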
Use of org.apache.druid.query.lookup.LookupExtractionFn in project druid by druid-io.
From the class TimeseriesQueryRunnerTest, method testTimeSeriesWithSelectionFilterLookupExtractionFn:
@Test
public void testTimeSeriesWithSelectionFilterLookupExtractionFn() {
Map<String, String> extractionMap = new HashMap<>();
extractionMap.put("spot", "upfront");
MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, true, null, true, true);
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .granularity(QueryRunnerTestHelper.DAY_GRAN)
    .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", lookupExtractionFn))
    .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .aggregators(
        QueryRunnerTestHelper.ROWS_COUNT,
        QueryRunnerTestHelper.INDEX_LONG_SUM,
        QueryRunnerTestHelper.QUALITY_UNIQUES)
    .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
    .context(makeContext())
    .build();
List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
    new Result<>(
        DateTimes.of("2011-04-01"),
        new TimeseriesResultValue(ImmutableMap.of(
            "rows", 11L,
            "index", 3783L,
            "addRowsIndexConstant", 3795.0,
            "uniques", QueryRunnerTestHelper.UNIQUES_9))),
    new Result<>(
        DateTimes.of("2011-04-02"),
        new TimeseriesResultValue(ImmutableMap.of(
            "rows", 11L,
            "index", 3313L,
            "addRowsIndexConstant", 3325.0,
            "uniques", QueryRunnerTestHelper.UNIQUES_9))));
Iterable<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
TestHelper.assertExpectedResults(expectedResults, results);
QueryToolChest<Result<TimeseriesResultValue>, TimeseriesQuery> toolChest = new TimeseriesQueryQueryToolChest();
QueryRunner<Result<TimeseriesResultValue>> optimizedRunner = toolChest.postMergeQueryDecoration(toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)));
Iterable<Result<TimeseriesResultValue>> results2 = new FinalizeResultsQueryRunner(optimizedRunner, toolChest).run(QueryPlus.wrap(query)).toList();
TestHelper.assertExpectedResults(expectedResults, results2);
}
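Note the interaction between the single-entry map and retainMissingValue = true in this test: the extraction sends "spot" to "upfront" and leaves the unmapped "upfront" untouched, so the selector filter on "upfront" matches both markets. A sketch of this, if appended to the test method above (the row counts assume Druid's standard test dataset, where "spot" carries nine rows per day and "upfront" two, hence rows = 11):

String s1 = lookupExtractionFn.apply("spot");         // "upfront" (mapped, matches the filter)
String s2 = lookupExtractionFn.apply("upfront");      // "upfront" (unmapped but retained, also matches)
String s3 = lookupExtractionFn.apply("total_market"); // "total_market" (filtered out)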
Use of org.apache.druid.query.lookup.LookupExtractionFn in project druid by druid-io.
From the class TopNQueryRunnerTest, method testTopNLexicographicDimExtractionUnOptimalNamespace:
@Test
public void testTopNLexicographicDimExtractionUnOptimalNamespace() {
TopNQuery query = new TopNQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .granularity(QueryRunnerTestHelper.ALL_GRAN)
    .dimension(new ExtractionDimensionSpec(
        QueryRunnerTestHelper.MARKET_DIMENSION,
        QueryRunnerTestHelper.MARKET_DIMENSION,
        new LookupExtractionFn(
            new MapLookupExtractor(
                ImmutableMap.of("spot", "2spot", "total_market", "3total_market", "upfront", "1upfront"),
                false),
            true, null, false, false)))
    .metric(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC))
    .threshold(4)
    .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .aggregators(commonAggregators)
    .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
    .build();
List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
    new Result<>(
        DateTimes.of("2011-04-01T00:00:00.000Z"),
        new TopNResultValue(Arrays.<Map<String, Object>>asList(
            ImmutableMap.of(
                QueryRunnerTestHelper.MARKET_DIMENSION, "1upfront",
                "rows", 4L,
                "index", 4875.669692D,
                "addRowsIndexConstant", 4880.669692D,
                "uniques", QueryRunnerTestHelper.UNIQUES_2),
            ImmutableMap.of(
                QueryRunnerTestHelper.MARKET_DIMENSION, "2spot",
                "rows", 18L,
                "index", 2231.876812D,
                "addRowsIndexConstant", 2250.876812D,
                "uniques", QueryRunnerTestHelper.UNIQUES_9),
            ImmutableMap.of(
                QueryRunnerTestHelper.MARKET_DIMENSION, "3total_market",
                "rows", 4L,
                "index", 5351.814783D,
                "addRowsIndexConstant", 5356.814783D,
                "uniques", QueryRunnerTestHelper.UNIQUES_2)))));
assertExpectedResults(expectedResults, query);
}
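Two things drive the expected order here. The lexicographic DimensionTopNMetricSpec sorts on the extracted values, so the numeric prefixes in the lookup map ("1upfront", "2spot", "3total_market") dictate the output order. And the "UnOptimal" in the test name appears to come from the fourth constructor argument (injective) being false, which tells the engine the lookup may be many-to-one, forcing it to re-bucket extracted values rather than take the one-to-one shortcut. A sketch of the flag's effect (illustrative variable names):

MapLookupExtractor extractor = new MapLookupExtractor(ImmutableMap.of("spot", "2spot"), false);
LookupExtractionFn manyToOne = new LookupExtractionFn(extractor, true, null, false, false);
LookupExtractionFn oneToOne = new LookupExtractionFn(extractor, true, null, true, false);
// manyToOne.getExtractionType() -> ExtractionType.MANY_TO_ONE (must rebucket)
// oneToOne.getExtractionType()  -> ExtractionType.ONE_TO_ONE  (fast path eligible)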
Use of org.apache.druid.query.lookup.LookupExtractionFn in project druid by druid-io.
From the class TopNQueryRunnerTest, method testTopNDimExtractionFastTopNUnOptimal:
@Test
// Tests the query path that must rebucket the data.
public void testTopNDimExtractionFastTopNUnOptimal() {
TopNQuery query = new TopNQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .granularity(QueryRunnerTestHelper.ALL_GRAN)
    .dimension(new ExtractionDimensionSpec(
        QueryRunnerTestHelper.MARKET_DIMENSION,
        QueryRunnerTestHelper.MARKET_DIMENSION,
        new LookupExtractionFn(
            new MapLookupExtractor(
                ImmutableMap.of("spot", "spot0", "total_market", "total_market0", "upfront", "upfront0"),
                false),
            true, null, false, false)))
    .metric("rows")
    .threshold(4)
    .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .aggregators(commonAggregators)
    .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
    .build();
List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
    new Result<>(
        DateTimes.of("2011-04-01T00:00:00.000Z"),
        new TopNResultValue(Arrays.<Map<String, Object>>asList(
            ImmutableMap.of(
                QueryRunnerTestHelper.MARKET_DIMENSION, "spot0",
                "rows", 18L,
                "index", 2231.876812D,
                "addRowsIndexConstant", 2250.876812D,
                "uniques", QueryRunnerTestHelper.UNIQUES_9),
            ImmutableMap.of(
                QueryRunnerTestHelper.MARKET_DIMENSION, "total_market0",
                "rows", 4L,
                "index", 5351.814783D,
                "addRowsIndexConstant", 5356.814783D,
                "uniques", QueryRunnerTestHelper.UNIQUES_2),
            ImmutableMap.of(
                QueryRunnerTestHelper.MARKET_DIMENSION, "upfront0",
                "rows", 4L,
                "index", 4875.669692D,
                "addRowsIndexConstant", 4880.669692D,
                "uniques", QueryRunnerTestHelper.UNIQUES_2)))));
assertExpectedResults(expectedResults, query);
}
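Unlike the previous example, this query ranks by the "rows" aggregate (a numeric TopN metric spec, descending by default) rather than by the extracted dimension value, which is why "spot0" with 18 rows comes first; the extraction itself is the same non-injective, retain-missing configuration that forces the rebucketing path.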
Use of org.apache.druid.query.lookup.LookupExtractionFn in project druid by druid-io.
From the class TopNQueryRunnerTest, method testTopNWithExtractionFilterNoExistingValue:
@Test
public void testTopNWithExtractionFilterNoExistingValue() {
Map<String, String> extractionMap = new HashMap<>();
MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
LookupExtractionFn lookupExtractionFn;
if (NullHandling.replaceWithDefault()) {
    // In default-value mode a null dimension value is read back as the empty
    // string, so map "" -> "NULL" and let the lookup itself produce the match.
    lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
    extractionMap.put("", "NULL");
} else {
    // In SQL-compatible mode nulls miss the lookup entirely, so rely on
    // replaceMissingValueWith to supply "NULL" instead.
    extractionMap.put("", "NOT_USED");
    lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, "NULL", true, true);
}
DimFilter extractionFilter = new ExtractionDimFilter("null_column", "NULL", lookupExtractionFn, null);
TopNQueryBuilder topNQueryBuilder = new TopNQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .granularity(QueryRunnerTestHelper.ALL_GRAN)
    .dimension("null_column")
    .metric(QueryRunnerTestHelper.INDEX_METRIC)
    .threshold(4)
    .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
    .aggregators(Lists.newArrayList(Iterables.concat(
        commonAggregators,
        Lists.newArrayList(
            new FilteredAggregatorFactory(new DoubleMaxAggregatorFactory("maxIndex", "index"), extractionFilter),
            new DoubleMinAggregatorFactory("minIndex", "index")))))
    .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT);
TopNQuery topNQueryWithNULLValueExtraction = topNQueryBuilder.filters(extractionFilter).build();
Map<String, Object> map = new HashMap<>();
map.put("null_column", null);
map.put("rows", 1209L);
map.put("index", 503332.5071372986D);
map.put("addRowsIndexConstant", 504542.5071372986D);
map.put("uniques", QueryRunnerTestHelper.UNIQUES_9);
map.put("maxIndex", 1870.061029D);
map.put("minIndex", 59.02102279663086D);
List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
    new Result<>(
        DateTimes.of("2011-01-12T00:00:00.000Z"),
        new TopNResultValue(Collections.singletonList(map))));
assertExpectedResults(expectedResults, topNQueryWithNULLValueExtraction);
// Assert the optimization path as well
final Sequence<Result<TopNResultValue>> retval = runWithPreMergeAndMerge(topNQueryWithNULLValueExtraction);
TestHelper.assertExpectedResults(expectedResults, retval);
}
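Both branches converge on the same behavior: "null_column" does not exist in the datasource, so every row yields null for that dimension, and the extraction turns that null into "NULL" (via the "" -> "NULL" map entry in default-value mode, or via replaceMissingValueWith in SQL-compatible mode). The ExtractionDimFilter on "NULL" therefore matches the entire datasource, which is the 1209 rows asserted above. A one-line check that should hold in either mode, if appended to the test:

String extracted = lookupExtractionFn.apply((String) null); // "NULL" in both null-handling modes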