Use of org.apache.druid.query.filter.SelectorDimFilter in the druid project by druid-io.
From the class UserCompactionTaskTransformConfigTest, method testSerde.
/**
 * Verifies that a {@code UserCompactionTaskTransformConfig} holding a selector filter
 * survives a JSON round trip: serialize to bytes, deserialize back, and compare equal.
 *
 * @throws IOException if Jackson (de)serialization fails
 */
@Test
public void testSerde() throws IOException {
    // Ensure null-handling statics are initialized before any filter code runs.
    NullHandling.initializeForTests();
    final UserCompactionTaskTransformConfig expected =
        new UserCompactionTaskTransformConfig(new SelectorDimFilter("dim1", "foo", null));
    final ObjectMapper mapper = new DefaultObjectMapper();
    final byte[] json = mapper.writeValueAsBytes(expected);
    // readValue already returns the requested type; the previous explicit cast was redundant.
    final UserCompactionTaskTransformConfig fromJson =
        mapper.readValue(json, UserCompactionTaskTransformConfig.class);
    Assert.assertEquals(expected, fromJson);
}
Use of org.apache.druid.query.filter.SelectorDimFilter in the druid project by druid-io.
From the class SearchQueryRunnerTest, method testSearchMultiAndFilter.
/**
 * Runs a search query filtered by an AND of two selector filters
 * (market = "spot" AND quality = "automotive") and expects exactly one hit
 * on the quality dimension.
 */
@Test
public void testSearchMultiAndFilter() {
    // Conjunction of the two selector filters that together isolate the expected row.
    DimFilter filter = new AndDimFilter(
        new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null),
        new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", null)
    );
    // The single hit we expect the filtered search to produce.
    List<SearchHit> expectedHits = new ArrayList<>();
    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", 93));
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
              .granularity(QueryRunnerTestHelper.ALL_GRAN)
              .filters(filter)
              .dimensions(QueryRunnerTestHelper.QUALITY_DIMENSION)
              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
              .query("a")
              .build(),
        expectedHits
    );
}
Use of org.apache.druid.query.filter.SelectorDimFilter in the druid project by druid-io.
From the class DimFilterHavingSpecTest, method testSerde.
/**
 * Verifies that a {@code DimFilterHavingSpec} round-trips through JSON via the
 * polymorphic {@code HavingSpec} base type and compares equal to the original.
 *
 * @throws Exception if Jackson (de)serialization fails
 */
@Test
public void testSerde() throws Exception {
    final ObjectMapper objectMapper = new DefaultObjectMapper();
    final DimFilterHavingSpec original =
        new DimFilterHavingSpec(new SelectorDimFilter("foo", "1", null), false);
    // Serialize, then read back as the base HavingSpec type to exercise polymorphic deserialization.
    final byte[] serialized = objectMapper.writeValueAsBytes(original);
    final HavingSpec roundTripped = objectMapper.readValue(serialized, HavingSpec.class);
    Assert.assertEquals(original, roundTripped);
}
Use of org.apache.druid.query.filter.SelectorDimFilter in the druid project by druid-io.
From the class DimFilterHavingSpecTest, method testRowSignature.
/**
 * Verifies that the having spec resolves the "foo" column through the query's row
 * signature: a LONG-typed dimension value of 1 matches the selector "foo" = "1",
 * while 2 does not.
 */
@Test
public void testRowSignature() {
    // Query declaring "foo" as a LONG dimension so the filter must coerce types via the signature.
    final GroupByQuery query = GroupByQuery.builder()
                                           .setDataSource("dummy")
                                           .setInterval("1000/3000")
                                           .setGranularity(Granularities.ALL)
                                           .setDimensions(new DefaultDimensionSpec("foo", "foo", ColumnType.LONG))
                                           .build();
    final DimFilterHavingSpec spec =
        new DimFilterHavingSpec(new SelectorDimFilter("foo", "1", null), null);
    spec.setQuery(query);
    Assert.assertTrue(spec.eval(ResultRow.of(1L)));
    Assert.assertFalse(spec.eval(ResultRow.of(2L)));
}
Use of org.apache.druid.query.filter.SelectorDimFilter in the druid project by druid-io.
From the class ScanQueryRunnerTest, method testFullOnSelectWithFilterLimitAndDescendingTimeOrderingListFormat.
/**
 * Runs a filtered scan query (market = "spot") in descending time order across a
 * two-day interval, for several limit values, and checks the results against
 * hard-coded tab-separated fixture rows reversed into descending order.
 * Exercises both the legacy and non-legacy timestamp column naming paths.
 */
@Test
public void testFullOnSelectWithFilterLimitAndDescendingTimeOrderingListFormat() {
// limits shouldn't matter -> all rows should be returned if time-ordering on the broker is occurring
for (int limit : new int[] { 3, 1, 5, 7, 0 }) {
// Scan restricted to "spot" market rows, descending time order, with the current limit.
ScanQuery query = newTestQuery().intervals(I_0112_0114).filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null)).columns(QueryRunnerTestHelper.TIME_DIMENSION, QueryRunnerTestHelper.QUALITY_DIMENSION, QueryRunnerTestHelper.INDEX_METRIC).limit(limit).order(ScanQuery.Order.DESCENDING).build();
Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
// Expected "spot" rows for 2011-01-12, one per quality value, in ascending order.
String[] seg1Results = new String[] { "2011-01-12T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t100.000000", "2011-01-12T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t100.000000", "2011-01-12T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t100.000000", "2011-01-12T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t100.000000", "2011-01-12T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t100.000000", "2011-01-12T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t100.000000", "2011-01-12T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t100.000000", "2011-01-12T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t100.000000", "2011-01-12T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t100.000000" };
// Expected "spot" rows for 2011-01-13.
String[] seg2Results = new String[] { "2011-01-13T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t94.874713", "2011-01-13T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t103.629399", "2011-01-13T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t110.087299", "2011-01-13T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t114.947403", "2011-01-13T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t104.465767", "2011-01-13T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t102.851683", "2011-01-13T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t108.863011", "2011-01-13T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t111.356672", "2011-01-13T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t106.236928" };
// Concatenate both days, then reverse in place to match the DESCENDING query order.
String[] expectedRet = (String[]) ArrayUtils.addAll(seg1Results, seg2Results);
ArrayUtils.reverse(expectedRet);
// Parse the fixture rows into event maps; column names/types depend on the legacy flag.
final List<List<Map<String, Object>>> descendingEvents = toEvents(new String[] { legacy ? getTimestampName() + ":TIME" : ColumnHolder.TIME_COLUMN_NAME, null, QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING", null, null, QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE" }, expectedRet);
if (legacy) {
// Legacy path: events carry a DateTime under "timestamp"; derive "__time" millis from it.
for (List<Map<String, Object>> batch : descendingEvents) {
for (Map<String, Object> event : batch) {
event.put("__time", ((DateTime) event.get("timestamp")).getMillis());
}
}
} else {
// Non-legacy path: "__time" is an ISO string at this point; replace it with epoch millis.
for (List<Map<String, Object>> batch : descendingEvents) {
for (Map<String, Object> event : batch) {
event.put("__time", (DateTimes.of((String) event.get("__time"))).getMillis());
}
}
}
// Build expected ScanResultValues honoring the current limit, then compare to actual results.
List<ScanResultValue> descendingExpectedResults = toExpected(descendingEvents, legacy ? Lists.newArrayList(QueryRunnerTestHelper.TIME_DIMENSION, getTimestampName(), // getTimestampName() always returns the legacy timestamp when legacy is true
"quality", "index") : Lists.newArrayList(QueryRunnerTestHelper.TIME_DIMENSION, "quality", "index"), 0, limit);
verify(descendingExpectedResults, results);
}
}
Aggregations