Use of io.druid.query.filter.SelectorDimFilter in project druid by druid-io: class GroupByQueryRunnerTest, method testGroupByLongColumnWithExFn.
@Test
public void testGroupByLongColumnWithExFn() {
  // GroupBy v1 cannot apply an extraction fn to a dimension of unknown cardinality.
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("GroupBy v1 does not support dimension selectors with unknown cardinality.");
  }
  String jsFn = "function(str) { return 'super-' + str; }";
  ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance());
  // Group on the long column "qualityLong" through the JavaScript extraction fn,
  // keeping only rows where "quality" equals "entertainment".
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
      .setDimensions(Lists.<DimensionSpec>newArrayList(new ExtractionDimensionSpec("qualityLong", "ql_alias", jsExtractionFn)))
      .setDimFilter(new SelectorDimFilter("quality", "entertainment", null))
      .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
      .setGranularity(QueryRunnerTestHelper.dayGran)
      .build();
  List<Row> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "ql_alias", "super-1200", "rows", 1L, "idx", 158L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "ql_alias", "super-1200", "rows", 1L, "idx", 166L));
  Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expectedResults, results, "");
}
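For reference, the selector filter used in the query above can be built and round-tripped through JSON on its own, the same serde pattern the DatasourceIngestionSpec tests below rely on. A minimal sketch, assuming Druid's Jackson-based io.druid.jackson.DefaultObjectMapper (the mapper choice is an assumption; this snippet is not part of the original test):

// Standalone round-trip of the same filter; assumes a test method that may throw Exception.
ObjectMapper mapper = new DefaultObjectMapper();
DimFilter filter = new SelectorDimFilter("quality", "entertainment", null);
// DimFilter is polymorphic on its "type" property, so {"type": "selector", ...} maps back to SelectorDimFilter.
DimFilter roundTripped = mapper.readValue(mapper.writeValueAsString(filter), DimFilter.class);
Assert.assertEquals(filter, roundTripped);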
Use of io.druid.query.filter.SelectorDimFilter in project druid by druid-io: class DatasourceIngestionSpecTest, method testMultiIntervalSerde.
@Test
public void testMultiIntervalSerde() throws Exception {
  // defaults
  String jsonStr =
      "{\n" + " \"dataSource\": \"test\",\n" + " \"intervals\": [\"2014/2015\", \"2016/2017\"]\n" + "}\n";
  DatasourceIngestionSpec actual = MAPPER.readValue(
      MAPPER.writeValueAsString(MAPPER.readValue(jsonStr, DatasourceIngestionSpec.class)),
      DatasourceIngestionSpec.class);
  List<Interval> intervals = ImmutableList.of(Interval.parse("2014/2015"), Interval.parse("2016/2017"));
  DatasourceIngestionSpec expected = new DatasourceIngestionSpec("test", null, intervals, null, null, null, null, null, false);
  Assert.assertEquals(expected, actual);
  // non-defaults
  jsonStr = "{\n" + " \"dataSource\": \"test\",\n" + " \"intervals\": [\"2014/2015\", \"2016/2017\"],\n"
      + " \"segments\": [{\n" + " \"dataSource\":\"test\",\n"
      + " \"interval\":\"2014-01-01T00:00:00.000Z/2017-01-01T00:00:00.000Z\",\n"
      + " \"version\":\"v0\",\n" + " \"loadSpec\":null,\n" + " \"dimensions\":\"\",\n" + " \"metrics\":\"\",\n"
      + " \"shardSpec\":{\"type\":\"none\"},\n" + " \"binaryVersion\":9,\n" + " \"size\":128,\n"
      + " \"identifier\":\"test_2014-01-01T00:00:00.000Z_2017-01-01T00:00:00.000Z_v0\"\n" + " }],\n"
      + " \"filter\": { \"type\": \"selector\", \"dimension\": \"dim\", \"value\": \"value\"},\n"
      + " \"granularity\": \"day\",\n" + " \"dimensions\": [\"d1\", \"d2\"],\n"
      + " \"metrics\": [\"m1\", \"m2\", \"m3\"],\n" + " \"ignoreWhenNoSegments\": true\n" + "}\n";
  expected = new DatasourceIngestionSpec(
      "test", null, intervals,
      ImmutableList.of(new DataSegment("test", Interval.parse("2014/2017"), "v0", null, null, null, null, 9, 128)),
      new SelectorDimFilter("dim", "value", null), Granularities.DAY,
      Lists.newArrayList("d1", "d2"), Lists.newArrayList("m1", "m2", "m3"), true);
  actual = MAPPER.readValue(
      MAPPER.writeValueAsString(MAPPER.readValue(jsonStr, DatasourceIngestionSpec.class)),
      DatasourceIngestionSpec.class);
  Assert.assertEquals(expected, actual);
}
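The "filter" fragment of the non-default JSON above is what becomes the SelectorDimFilter in the expected spec. A minimal sketch isolating just that step, reusing the test's MAPPER (this assertion is illustrative and not part of the original test):

// Deserialize only the filter fragment from the spec JSON above.
String filterJson = "{ \"type\": \"selector\", \"dimension\": \"dim\", \"value\": \"value\"}";
DimFilter filter = MAPPER.readValue(filterJson, DimFilter.class);
Assert.assertEquals(new SelectorDimFilter("dim", "value", null), filter);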
Use of io.druid.query.filter.SelectorDimFilter in project druid by druid-io: class DatasourceIngestionSpecTest, method testSingleIntervalSerde.
@Test
public void testSingleIntervalSerde() throws Exception {
  Interval interval = Interval.parse("2014/2015");
  DatasourceIngestionSpec expected = new DatasourceIngestionSpec(
      "test", interval, null, null,
      new SelectorDimFilter("dim", "value", null), Granularities.DAY,
      Lists.newArrayList("d1", "d2"), Lists.newArrayList("m1", "m2", "m3"), false);
  DatasourceIngestionSpec actual = MAPPER.readValue(MAPPER.writeValueAsString(expected), DatasourceIngestionSpec.class);
  // A spec built with a single interval comes back with that interval wrapped in a list.
  Assert.assertEquals(ImmutableList.of(interval), actual.getIntervals());
  Assert.assertEquals(expected, actual);
}
Use of io.druid.query.filter.SelectorDimFilter in project druid by druid-io: class TopNQueryRunnerTest, method testTopNOverPartialNullDimensionWithFilterOnNullValue.
@Test
public void testTopNOverPartialNullDimensionWithFilterOnNullValue() {
  // A selector filter with a null value matches rows where "partial_null_column" is null or absent.
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource).granularity(Granularities.ALL)
      .dimension("partial_null_column").metric(QueryRunnerTestHelper.uniqueMetric)
      .filters(new SelectorDimFilter("partial_null_column", null, null))
      .threshold(1000).intervals(QueryRunnerTestHelper.firstToThird)
      .aggregators(QueryRunnerTestHelper.commonAggregators)
      .build();
  Map<String, Object> map = Maps.newHashMap();
  map.put("partial_null_column", null);
  map.put("rows", 22L);
  map.put("index", 7583.691513061523D);
  map.put("uniques", QueryRunnerTestHelper.UNIQUES_9);
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<>(new DateTime("2011-04-01T00:00:00.000Z"), new TopNResultValue(Arrays.asList(map))));
  assertExpectedResults(expectedResults, query);
}
Use of io.druid.query.filter.SelectorDimFilter in project druid by druid-io: class TopNQueryRunnerTest, method testTopNOverPartialNullDimensionWithFilterOnNOTNullValue.
@Test
public void testTopNOverPartialNullDimensionWithFilterOnNOTNullValue() {
  // The complementary case: the selector matches only rows where "partial_null_column" equals "value".
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource).granularity(Granularities.ALL)
      .dimension("partial_null_column").metric(QueryRunnerTestHelper.uniqueMetric)
      .filters(new SelectorDimFilter("partial_null_column", "value", null))
      .threshold(1000).intervals(QueryRunnerTestHelper.firstToThird)
      .aggregators(QueryRunnerTestHelper.commonAggregators).build();
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(new Result<>(
      new DateTime("2011-04-01T00:00:00.000Z"),
      new TopNResultValue(Arrays.asList(ImmutableMap.<String, Object>of(
          "partial_null_column", "value", "rows", 4L,
          "index", 4875.669677734375D, "uniques", QueryRunnerTestHelper.UNIQUES_2)))));
  assertExpectedResults(expectedResults, query);
}
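Side by side, the only difference between the two TopN tests is the value handed to the selector. A minimal sketch with identifiers copied from the tests above:

// Matches rows where partial_null_column is null or absent (22 rows in the test data above).
DimFilter onNull = new SelectorDimFilter("partial_null_column", null, null);
// Matches rows where partial_null_column equals "value" (4 rows in the test data above).
DimFilter onValue = new SelectorDimFilter("partial_null_column", "value", null);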