Use of org.apache.druid.query.filter.SelectorDimFilter in project druid by druid-io.
From class GroupByQueryRunnerTest, method testGroupByNestedDoubleTimeExtractionFnWithLongOutputTypes.
@Test
public void testGroupByNestedDoubleTimeExtractionFnWithLongOutputTypes() {
  // Cannot vectorize due to extraction dimension spec.
  cannotVectorize();
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("GroupBy v1 only supports dimensions with an outputType of STRING.");
  }
  GroupByQuery subquery = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(
          new DefaultDimensionSpec("quality", "alias"),
          new ExtractionDimensionSpec(
              ColumnHolder.TIME_COLUMN_NAME,
              "time_day",
              ColumnType.LONG,
              new TimeFormatExtractionFn(null, null, null, Granularities.DAY, true)
          )
      )
      .setDimFilter(new SelectorDimFilter("quality", "technology", null))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .build();
  GroupByQuery outerQuery = makeQueryBuilder()
      .setDataSource(subquery)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(
          new DefaultDimensionSpec("alias", "alias"),
          new ExtractionDimensionSpec(
              "time_day",
              "time_week",
              ColumnType.LONG,
              new TimeFormatExtractionFn(null, null, null, Granularities.WEEK, true)
          )
      )
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
      .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
      .build();
  List<ResultRow> expectedResults = Collections.singletonList(
      makeRow(outerQuery, "2011-04-01", "alias", "technology", "time_week", 1301270400000L, "rows", 2L)
  );
  Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, outerQuery);
  TestHelper.assertExpectedObjects(expectedResults, results, "extraction-fn");
}
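The SelectorDimFilter used in the subquery above takes a dimension, a value, and an optional ExtractionFn. A minimal sketch of the two common shapes (the null-value variant follows Druid's documented selector semantics for matching missing values):

// Exact match on the raw dimension value; null extractionFn means no transformation.
DimFilter matchTechnology = new SelectorDimFilter("quality", "technology", null);
// A null value matches rows where the dimension is null or missing.
DimFilter matchMissing = new SelectorDimFilter("quality", null, null);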
Use of org.apache.druid.query.filter.SelectorDimFilter in project druid by druid-io.
From class GroupByQueryRunnerTest, method testGroupByWithHavingSpecOnLongAndFloat.
@Test
public void testGroupByWithHavingSpecOnLongAndFloat() {
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("GroupBy v1 only supports dimensions with an outputType of STRING.");
  }
  GroupByQuery query = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(
          new DefaultDimensionSpec("market", "alias"),
          new DefaultDimensionSpec("qualityLong", "ql_alias", ColumnType.LONG),
          new DefaultDimensionSpec("__time", "time_alias", ColumnType.LONG),
          new DefaultDimensionSpec("index", "index_alias", ColumnType.FLOAT)
      )
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
      .setHavingSpec(
          new DimFilterHavingSpec(
              new AndDimFilter(
                  Lists.newArrayList(
                      new SelectorDimFilter("ql_alias", "1400", null),
                      new SelectorDimFilter("time_alias", "1301616000000", null),
                      new BoundDimFilter("index_alias", "1310.0", "1320.0", true, true, null, null, StringComparators.NUMERIC)
                  )
              ),
              null
          )
      )
      .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
      .build();
  List<ResultRow> expectedResults = Collections.singletonList(
      makeRow(query, "2011-04-01", "alias", "total_market", "time_alias", 1301616000000L, "index_alias", 1314.8397, "ql_alias", 1400L, "rows", 1L)
  );
  Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expectedResults, results, "havingspec-long-float");
}
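DimFilterHavingSpec applies ordinary dim filters to the aggregated rows, so even LONG and FLOAT output columns are matched through string-valued filters; StringComparators.NUMERIC makes the bound compare numerically rather than lexicographically. A minimal sketch of the range clause on its own, with the constructor arguments annotated (argument meanings inferred from the call above):

DimFilter indexRange = new BoundDimFilter(
    "index_alias",             // column produced by the query
    "1310.0",                  // lower bound, as a string
    "1320.0",                  // upper bound, as a string
    true,                      // lowerStrict: exclude the lower bound
    true,                      // upperStrict: exclude the upper bound
    null,                      // legacy alphaNumeric flag, superseded by the ordering below
    null,                      // no extraction function
    StringComparators.NUMERIC  // compare the string-encoded values numerically
);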
Use of org.apache.druid.query.filter.SelectorDimFilter in project druid by druid-io.
From class GroupByQueryQueryToolChestTest, method testResultLevelCacheKeyWithHavingDimFilterHavingSpec.
@Test
public void testResultLevelCacheKeyWithHavingDimFilterHavingSpec() {
  final DimFilterHavingSpec havingSpec1 = new DimFilterHavingSpec(
      new AndDimFilter(
          ImmutableList.of(
              new OrDimFilter(
                  ImmutableList.of(
                      new BoundDimFilter("rows", "2", null, true, false, null, null, StringComparators.NUMERIC),
                      new SelectorDimFilter("idx", "217", null)
                  )
              ),
              new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null)
          )
      ),
      null
  );
  final DimFilterHavingSpec havingSpec2 = new DimFilterHavingSpec(
      new AndDimFilter(
          ImmutableList.of(
              new OrDimFilter(
                  ImmutableList.of(
                      new BoundDimFilter("rows", "2", null, true, false, null, null, StringComparators.NUMERIC),
                      new SelectorDimFilter("idx", "317", null)
                  )
              ),
              new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null)
          )
      ),
      null
  );
  final GroupByQuery query1 = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setPostAggregatorSpecs(ImmutableList.of(new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE)))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setLimitSpec(new DefaultLimitSpec(ImmutableList.of(new OrderByColumnSpec("post", OrderByColumnSpec.Direction.DESCENDING)), Integer.MAX_VALUE))
      .setHavingSpec(havingSpec1)
      .build();
  final GroupByQuery query2 = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setPostAggregatorSpecs(ImmutableList.of(new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE)))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setLimitSpec(new DefaultLimitSpec(ImmutableList.of(new OrderByColumnSpec("post", OrderByColumnSpec.Direction.DESCENDING)), Integer.MAX_VALUE))
      .setHavingSpec(havingSpec2)
      .build();
  final CacheStrategy<ResultRow, Object, GroupByQuery> strategy1 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query1);
  final CacheStrategy<ResultRow, Object, GroupByQuery> strategy2 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query2);
  Assert.assertTrue(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2)));
  Assert.assertFalse(Arrays.equals(strategy1.computeResultLevelCacheKey(query1), strategy2.computeResultLevelCacheKey(query2)));
}
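The two queries differ only in one SelectorDimFilter value inside the having spec ("217" vs. "317"), so the per-segment cache key, which ignores the having spec, matches, while the result-level key does not. A minimal sketch of the underlying contract, assuming DimFilter's Cacheable getCacheKey():

DimFilter f1 = new SelectorDimFilter("idx", "217", null);
DimFilter f2 = new SelectorDimFilter("idx", "317", null);
// Filters differing in the matched value must produce different cache keys.
Assert.assertFalse(Arrays.equals(f1.getCacheKey(), f2.getCacheKey()));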
Use of org.apache.druid.query.filter.SelectorDimFilter in project druid by druid-io.
From class DefaultGroupByQueryMetricsTest, method testDefaultGroupByQueryMetricsQuery.
/**
 * Tests that, when passed a query, {@link DefaultGroupByQueryMetrics} produces events with a certain set of
 * dimensions, no more, no less.
 */
@Test
public void testDefaultGroupByQueryMetricsQuery() {
  CachingEmitter cachingEmitter = new CachingEmitter();
  ServiceEmitter serviceEmitter = new ServiceEmitter("", "", cachingEmitter);
  DefaultGroupByQueryMetrics queryMetrics = new DefaultGroupByQueryMetrics();
  GroupByQuery.Builder builder = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(
          new ExtractionDimensionSpec(
              "quality",
              "alias",
              new LookupExtractionFn(
                  new MapLookupExtractor(ImmutableMap.of("mezzanine", "mezzanine0"), false),
                  false, null, true, false
              )
          )
      )
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
      .setContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
  GroupByQuery query = builder.build();
  queryMetrics.query(query);
  queryMetrics.reportQueryTime(0).emit(serviceEmitter);
  Map<String, Object> actualEvent = cachingEmitter.getLastEmittedEvent().toMap();
  Assert.assertEquals(16, actualEvent.size());
  Assert.assertTrue(actualEvent.containsKey("feed"));
  Assert.assertTrue(actualEvent.containsKey("timestamp"));
  Assert.assertEquals("", actualEvent.get("host"));
  Assert.assertEquals("", actualEvent.get("service"));
  Assert.assertEquals(QueryRunnerTestHelper.DATA_SOURCE, actualEvent.get(DruidMetrics.DATASOURCE));
  Assert.assertEquals(query.getType(), actualEvent.get(DruidMetrics.TYPE));
  Interval expectedInterval = Intervals.of("2011-04-02/2011-04-04");
  Assert.assertEquals(Collections.singletonList(expectedInterval.toString()), actualEvent.get(DruidMetrics.INTERVAL));
  Assert.assertEquals("true", actualEvent.get("hasFilters"));
  Assert.assertEquals(expectedInterval.toDuration().toString(), actualEvent.get("duration"));
  Assert.assertEquals("", actualEvent.get(DruidMetrics.ID));
  Assert.assertEquals(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true), actualEvent.get("context"));
  // GroupBy-specific dimensions
  Assert.assertEquals("1", actualEvent.get("numDimensions"));
  Assert.assertEquals("2", actualEvent.get("numMetrics"));
  Assert.assertEquals("0", actualEvent.get("numComplexMetrics"));
  // Metric
  Assert.assertEquals("query/time", actualEvent.get("metric"));
  Assert.assertEquals(0L, actualEvent.get("value"));
}
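The "hasFilters" dimension asserted above comes straight from the query: setting any dimFilter, here a SelectorDimFilter, makes Query#hasFilters() report true. A minimal sketch reusing the builder from the test:

GroupByQuery filtered = builder.build();
Assert.assertTrue(filtered.hasFilters()); // a dim filter is present
Assert.assertTrue(filtered.getDimFilter() instanceof SelectorDimFilter);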
Use of org.apache.druid.query.filter.SelectorDimFilter in project druid by druid-io.
From class DataSchemaTest, method testTransformSpec.
@Test
public void testTransformSpec() {
  Map<String, Object> parserMap = jsonMapper.convertValue(
      new StringInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("time", "auto", null),
              new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2"))),
              null, null, null
          ),
          null
      ),
      JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
  );
  DataSchema schema = new DataSchema(
      IdUtilsTest.VALID_ID_CHARS,
      parserMap,
      new AggregatorFactory[] {
          new DoubleSumAggregatorFactory("metric1", "col1"),
          new DoubleSumAggregatorFactory("metric2", "col2")
      },
      new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
      new TransformSpec(
          new SelectorDimFilter("dimA", "foo", null),
          ImmutableList.of(new ExpressionTransform("expr", "concat(dimA,dimA)", TestExprMacroTable.INSTANCE))
      ),
      jsonMapper
  );
  // Test hack that produces a StringInputRowParser.
  final StringInputRowParser parser = (StringInputRowParser) schema.getParser();
  final InputRow row1bb = parser.parseBatch(ByteBuffer.wrap("{\"time\":\"2000-01-01\",\"dimA\":\"foo\"}".getBytes(StandardCharsets.UTF_8))).get(0);
  Assert.assertEquals(DateTimes.of("2000-01-01"), row1bb.getTimestamp());
  Assert.assertEquals("foo", row1bb.getRaw("dimA"));
  Assert.assertEquals("foofoo", row1bb.getRaw("expr"));
  final InputRow row1string = parser.parse("{\"time\":\"2000-01-01\",\"dimA\":\"foo\"}");
  Assert.assertEquals(DateTimes.of("2000-01-01"), row1string.getTimestamp());
  Assert.assertEquals("foo", row1string.getRaw("dimA"));
  Assert.assertEquals("foofoo", row1string.getRaw("expr"));
  final InputRow row2 = parser.parseBatch(ByteBuffer.wrap("{\"time\":\"2000-01-01\",\"dimA\":\"x\"}".getBytes(StandardCharsets.UTF_8))).get(0);
  Assert.assertNull(row2);
}
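The null row2 follows from the TransformSpec's filter: during parsing, rows that fail the SelectorDimFilter are dropped entirely rather than passed through. A minimal sketch of the same filter in isolation (variable name is illustrative):

// Only rows with dimA == "foo" survive ingestion; dimA == "x" is filtered out,
// which is why parseBatch(...) yields null for row2 above.
// Native JSON form: {"type":"selector","dimension":"dimA","value":"foo"}
DimFilter keepOnlyFoo = new SelectorDimFilter("dimA", "foo", null);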