Use of io.druid.query.dimension.ExtractionDimensionSpec in project druid by druid-io.
In class TopNQueryRunnerTest, the method testTopNDimExtractionToOne:
@Test
public void testTopNDimExtractionToOne() throws IOException {
  // The JavaScript extraction function maps every market value to the constant
  // "POTATO", so the whole dimension collapses into a single TopN bucket.
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension(
          new ExtractionDimensionSpec(
              QueryRunnerTestHelper.marketDimension,
              QueryRunnerTestHelper.marketDimension,
              new JavaScriptExtractionFn("function(f) { return \"POTATO\"; }", false, JavaScriptConfig.getEnabledInstance())
          )
      )
      .metric("rows")
      .threshold(10)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(QueryRunnerTestHelper.commonAggregators)
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();
  // Timeseries query built by the original test; it is not executed below.
  Granularity gran = Granularities.DAY;
  TimeseriesQuery tsQuery = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(gran)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(
          Arrays.asList(
              QueryRunnerTestHelper.rowsCount,
              QueryRunnerTestHelper.indexDoubleSum,
              QueryRunnerTestHelper.qualityUniques
          )
      )
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();
  // Expected: one merged row containing all rows and all uniques under "POTATO".
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(
              Arrays.<Map<String, Object>>asList(
                  ImmutableMap.<String, Object>of(
                      "addRowsIndexConstant", 504542.5071372986D,
                      "index", 503332.5071372986D,
                      QueryRunnerTestHelper.marketDimension, "POTATO",
                      "uniques", QueryRunnerTestHelper.UNIQUES_9,
                      "rows", 1209L
                  )
              )
          )
      )
  );
  List<Result<TopNResultValue>> list = Sequences.toList(runWithMerge(query), new ArrayList<Result<TopNResultValue>>());
  Assert.assertEquals(list.size(), 1);
  Assert.assertEquals("Didn't merge results", list.get(0).getValue().getValue().size(), 1);
  TestHelper.assertExpectedResults(expectedResults, list, "Failed to match");
}
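A quick, illustrative sketch of why the query above collapses to a single row: the JavaScript function ignores its input and always returns the same constant. This is not part of the original test; the sample inputs ("spot", "total_market", "upfront") are assumed values from the standard test data set.

// Minimal sketch (not in the original test): every input maps to one output,
// so all market values land in the same TopN bucket.
ExtractionFn toPotato = new JavaScriptExtractionFn(
    "function(f) { return \"POTATO\"; }",
    false,                                  // not injective: many inputs, one output
    JavaScriptConfig.getEnabledInstance()
);
Assert.assertEquals("POTATO", toPotato.apply("spot"));
Assert.assertEquals("POTATO", toPotato.apply("total_market"));
Assert.assertEquals("POTATO", toPotato.apply("upfront"));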
Use of io.druid.query.dimension.ExtractionDimensionSpec in project druid by druid-io.
In class TopNQueryRunnerTest, the method testInvertedTopNLexicographicDimExtraction2:
@Test
public void testInvertedTopNLexicographicDimExtraction2() {
  // The regex "..(.)"  extracts the third character of each market value;
  // InvertedTopNMetricSpec reverses the lexicographic ordering of the buckets.
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension(
          new ExtractionDimensionSpec(
              QueryRunnerTestHelper.marketDimension,
              QueryRunnerTestHelper.marketDimension,
              new RegexDimExtractionFn("..(.)", false, null)
          )
      )
      .metric(new InvertedTopNMetricSpec(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC)))
      .threshold(4)
      .intervals(QueryRunnerTestHelper.firstToThird)
      .aggregators(QueryRunnerTestHelper.commonAggregators)
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<TopNResultValue>(
          new DateTime("2011-04-01T00:00:00.000Z"),
          new TopNResultValue(
              Arrays.<Map<String, Object>>asList(
                  ImmutableMap.<String, Object>of(
                      QueryRunnerTestHelper.marketDimension, "t",
                      "rows", 4L,
                      "index", 5351.814697265625D,
                      "addRowsIndexConstant", 5356.814697265625D,
                      "uniques", QueryRunnerTestHelper.UNIQUES_2
                  ),
                  ImmutableMap.<String, Object>of(
                      QueryRunnerTestHelper.marketDimension, "o",
                      "rows", 18L,
                      "index", 2231.8768157958984D,
                      "addRowsIndexConstant", 2250.8768157958984D,
                      "uniques", QueryRunnerTestHelper.UNIQUES_9
                  ),
                  ImmutableMap.<String, Object>of(
                      QueryRunnerTestHelper.marketDimension, "f",
                      "rows", 4L,
                      "index", 4875.669677734375D,
                      "addRowsIndexConstant", 4880.669677734375D,
                      "uniques", QueryRunnerTestHelper.UNIQUES_2
                  )
              )
          )
      )
  );
  assertExpectedResults(expectedResults, query);
}
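For reference, a short sketch of what the regex "..(.)"  does to the three market values, which is why the expected buckets above are "t", "o" and "f". This is illustrative only and not part of the original test; the input strings are assumed sample values from the standard test data set.

// Illustrative only: the capture group keeps the third character of each market value.
ExtractionFn thirdChar = new RegexDimExtractionFn("..(.)", false, null);
Assert.assertEquals("o", thirdChar.apply("spot"));
Assert.assertEquals("t", thirdChar.apply("total_market"));
Assert.assertEquals("f", thirdChar.apply("upfront"));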
Use of io.druid.query.dimension.ExtractionDimensionSpec in project druid by druid-io.
In class TopNQueryRunnerTest, the method testFullOnTopNDimExtractionAllNulls:
@Test
public void testFullOnTopNDimExtractionAllNulls() {
  // The JavaScript extraction function returns null for every value, so the
  // entire market dimension collapses into a single null bucket.
  String jsFn = "function(str) { return null; }";
  ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance());
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension(new ExtractionDimensionSpec(QueryRunnerTestHelper.marketDimension, QueryRunnerTestHelper.marketDimension, jsExtractionFn))
      .metric(QueryRunnerTestHelper.indexMetric)
      .threshold(4)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(
          Lists.<AggregatorFactory>newArrayList(
              Iterables.concat(
                  QueryRunnerTestHelper.commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();
  // A HashMap is used because the expected dimension value is null,
  // which ImmutableMap does not allow.
  Map<String, Object> expectedMap = new HashMap<>();
  expectedMap.put(QueryRunnerTestHelper.marketDimension, null);
  expectedMap.put("rows", 1209L);
  expectedMap.put("index", 503332.5071372986D);
  expectedMap.put("addRowsIndexConstant", 504542.5071372986D);
  expectedMap.put("uniques", 9.019833517963864);
  expectedMap.put("maxIndex", 1870.06103515625D);
  expectedMap.put("minIndex", 59.02102279663086D);
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<TopNResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(Arrays.<Map<String, Object>>asList(expectedMap))
      )
  );
  assertExpectedResults(expectedResults, query);
}
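As a side note on why the expected row carries a null dimension value: a JavaScript function that returns null for every input maps the whole dimension to null, assuming the extraction surfaces a JavaScript null as a Java null (which is what the expected results above imply). The sketch below is illustrative only; the sample inputs are assumed values from the test data set.

// Illustrative only: every market value ends up in the null bucket,
// hence the single expected row with a null dimension value above.
ExtractionFn toNull = new JavaScriptExtractionFn(
    "function(str) { return null; }", false, JavaScriptConfig.getEnabledInstance());
Assert.assertNull(toNull.apply("spot"));
Assert.assertNull(toNull.apply("upfront"));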
Use of io.druid.query.dimension.ExtractionDimensionSpec in project druid by druid-io.
In class TopNQueryRunnerTest, the method testFullOnTopNStringOutputAsLong:
@Test
public void testFullOnTopNStringOutputAsLong() {
  // StrlenExtractionFn maps each quality string to its length; declaring the
  // output type as ValueType.LONG makes the "alias" dimension a long.
  ExtractionFn strlenFn = StrlenExtractionFn.instance();
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension(new ExtractionDimensionSpec(QueryRunnerTestHelper.qualityDimension, "alias", ValueType.LONG, strlenFn))
      .metric("maxIndex")
      .threshold(4)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(
          Lists.<AggregatorFactory>newArrayList(
              Iterables.concat(
                  QueryRunnerTestHelper.commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<TopNResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(
              Arrays.<Map<String, Object>>asList(
                  ImmutableMap.<String, Object>builder()
                      .put("alias", 9L)
                      .put(QueryRunnerTestHelper.indexMetric, 217725.42022705078D)
                      .put("rows", 279L)
                      .put("addRowsIndexConstant", 218005.42022705078D)
                      .put("uniques", QueryRunnerTestHelper.UNIQUES_1)
                      .put("maxIndex", 1870.06103515625D)
                      .put("minIndex", 91.27055358886719D)
                      .build(),
                  ImmutableMap.<String, Object>builder()
                      .put("alias", 7L)
                      .put(QueryRunnerTestHelper.indexMetric, 210865.67966461182D)
                      .put("rows", 279L)
                      .put("addRowsIndexConstant", 211145.67966461182D)
                      .put("uniques", QueryRunnerTestHelper.UNIQUES_1)
                      .put("maxIndex", 1862.7379150390625D)
                      .put("minIndex", 99.2845230102539D)
                      .build(),
                  ImmutableMap.<String, Object>builder()
                      .put("alias", 10L)
                      .put(QueryRunnerTestHelper.indexMetric, 20479.497562408447D)
                      .put("rows", 186L)
                      .put("addRowsIndexConstant", 20666.497562408447D)
                      .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
                      .put("maxIndex", 277.2735290527344D)
                      .put("minIndex", 59.02102279663086D)
                      .build(),
                  ImmutableMap.<String, Object>builder()
                      .put("alias", 13L)
                      .put(QueryRunnerTestHelper.indexMetric, 12086.472755432129D)
                      .put("rows", 93L)
                      .put("addRowsIndexConstant", 12180.472755432129D)
                      .put("uniques", QueryRunnerTestHelper.UNIQUES_1)
                      .put("maxIndex", 193.78756713867188D)
                      .put("minIndex", 84.71052551269531D)
                      .build()
              )
          )
      )
  );
  assertExpectedResults(expectedResults, query);
}
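To make the "alias" values above concrete, here is an illustrative sketch of StrlenExtractionFn on a few quality strings. The specific inputs ("mezzanine", "premium", "entertainment") are assumed sample values from the standard test data set and are not asserted anywhere in the original test; the point is only that the string length becomes the long-typed output dimension.

// Illustrative only: StrlenExtractionFn returns the length as a string;
// the ValueType.LONG output type is what turns it into the long "alias" values.
ExtractionFn strlen = StrlenExtractionFn.instance();
Assert.assertEquals("9", strlen.apply("mezzanine"));       // alias 9L
Assert.assertEquals("7", strlen.apply("premium"));         // alias 7L
Assert.assertEquals("13", strlen.apply("entertainment"));  // alias 13L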
Use of io.druid.query.dimension.ExtractionDimensionSpec in project druid by druid-io.
In class TopNQueryRunnerTest, the method testTopNLexicographicDimExtraction:
@Test
public void testTopNLexicographicDimExtraction() {
  // The regex "(.)"  extracts the first character of each market value and the
  // buckets are ordered lexicographically.
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension(
          new ExtractionDimensionSpec(
              QueryRunnerTestHelper.marketDimension,
              QueryRunnerTestHelper.marketDimension,
              new RegexDimExtractionFn("(.)", false, null)
          )
      )
      .metric(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC))
      .threshold(4)
      .intervals(QueryRunnerTestHelper.firstToThird)
      .aggregators(QueryRunnerTestHelper.commonAggregators)
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<TopNResultValue>(
          new DateTime("2011-04-01T00:00:00.000Z"),
          new TopNResultValue(
              Arrays.<Map<String, Object>>asList(
                  ImmutableMap.<String, Object>of(
                      QueryRunnerTestHelper.marketDimension, "s",
                      "rows", 18L,
                      "index", 2231.8768157958984D,
                      "addRowsIndexConstant", 2250.8768157958984D,
                      "uniques", QueryRunnerTestHelper.UNIQUES_9
                  ),
                  ImmutableMap.<String, Object>of(
                      QueryRunnerTestHelper.marketDimension, "t",
                      "rows", 4L,
                      "index", 5351.814697265625D,
                      "addRowsIndexConstant", 5356.814697265625D,
                      "uniques", QueryRunnerTestHelper.UNIQUES_2
                  ),
                  ImmutableMap.<String, Object>of(
                      QueryRunnerTestHelper.marketDimension, "u",
                      "rows", 4L,
                      "index", 4875.669677734375D,
                      "addRowsIndexConstant", 4880.669677734375D,
                      "uniques", QueryRunnerTestHelper.UNIQUES_2
                  )
              )
          )
      )
  );
  assertExpectedResults(expectedResults, query);
}
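For comparison with the inverted variant earlier, a brief sketch of what "(.)"  extracts: the first character of each market value, which is why the lexicographic buckets above come back as "s", "t" and "u". Illustrative only; the inputs are assumed sample values from the standard test data set.

// Illustrative only: the capture group keeps the first character of each market value.
ExtractionFn firstChar = new RegexDimExtractionFn("(.)", false, null);
Assert.assertEquals("s", firstChar.apply("spot"));
Assert.assertEquals("t", firstChar.apply("total_market"));
Assert.assertEquals("u", firstChar.apply("upfront"));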