Usage example of org.apache.druid.query.topn.TopNQuery in the Apache Druid project (druid-io):
class DatasourceOptimizerTest, method testOptimize.
@Test(timeout = 60_000L)
public void testOptimize() throws InterruptedException
{
  // Register metadata declaring "derivative" as a materialized view of "base",
  // covering dimensions {dim1, dim2, dim3} and the single metric {cost}.
  String dataSource = "derivative";
  String baseDataSource = "base";
  Set<String> dims = Sets.newHashSet("dim1", "dim2", "dim3");
  Set<String> metrics = Sets.newHashSet("cost");
  DerivativeDataSourceMetadata metadata = new DerivativeDataSourceMetadata(baseDataSource, dims, metrics);
  metadataStorageCoordinator.insertDataSourceMetadata(dataSource, metadata);

  // Announce five daily base-datasource segments (2011-04-01 .. 2011-04-06).
  // FIX: Lists.transform() returns a *lazy* view, so the side-effecting
  // transformer (segment announcement) would re-run on every traversal of the
  // result list. Wrap in ImmutableList.copyOf(...) to force exactly-once
  // evaluation of the announcements.
  List<Boolean> baseResult = ImmutableList.copyOf(Lists.transform(
      ImmutableList.of(
          "2011-04-01/2011-04-02",
          "2011-04-02/2011-04-03",
          "2011-04-03/2011-04-04",
          "2011-04-04/2011-04-05",
          "2011-04-05/2011-04-06"
      ),
      interval -> {
        final DataSegment segment = createDataSegment(
            "base",
            interval,
            "v1",
            Lists.newArrayList("dim1", "dim2", "dim3", "dim4"),
            1024 * 1024
        );
        try {
          metadataStorageCoordinator.announceHistoricalSegments(Sets.newHashSet(segment));
          announceSegmentForServer(druidServer, segment, zkPathsConfig, jsonMapper);
        }
        catch (IOException e) {
          return false;
        }
        return true;
      }
  ));

  // Announce three daily derivative segments covering only 2011-04-01 .. 2011-04-04.
  // Same eager-copy fix as above.
  List<Boolean> derivativeResult = ImmutableList.copyOf(Lists.transform(
      ImmutableList.of(
          "2011-04-01/2011-04-02",
          "2011-04-02/2011-04-03",
          "2011-04-03/2011-04-04"
      ),
      interval -> {
        final DataSegment segment = createDataSegment(
            "derivative",
            interval,
            "v1",
            Lists.newArrayList("dim1", "dim2", "dim3"),
            1024
        );
        try {
          metadataStorageCoordinator.announceHistoricalSegments(Sets.newHashSet(segment));
          announceSegmentForServer(druidServer, segment, zkPathsConfig, jsonMapper);
        }
        catch (IOException e) {
          return false;
        }
        return true;
      }
  ));
  Assert.assertFalse(baseResult.contains(false));
  Assert.assertFalse(derivativeResult.contains(false));

  // Start the manager and poll until it has discovered the derivative datasource
  // (the test-level timeout bounds this loop).
  derivativesManager.start();
  while (DerivativeDataSourceManager.getAllDerivatives().isEmpty()) {
    TimeUnit.SECONDS.sleep(1L);
  }

  // A topN over "base" for 2011-04-01/2011-04-06 should be split by the optimizer:
  // the first three days are served by the cheaper "derivative" view, the
  // remaining two days (not covered by the view) stay on "base".
  TopNQuery userQuery = new TopNQueryBuilder()
      .dataSource("base")
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .dimension("dim1")
      .metric("cost")
      .threshold(4)
      .intervals("2011-04-01/2011-04-06")
      .aggregators(new LongSumAggregatorFactory("cost", "cost"))
      .build();
  List<Query> expectedQueryAfterOptimizing = Lists.newArrayList(
      new TopNQueryBuilder()
          .dataSource("derivative")
          .granularity(QueryRunnerTestHelper.ALL_GRAN)
          .dimension("dim1")
          .metric("cost")
          .threshold(4)
          .intervals(new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-01/2011-04-04"))))
          .aggregators(new LongSumAggregatorFactory("cost", "cost"))
          .build(),
      new TopNQueryBuilder()
          .dataSource("base")
          .granularity(QueryRunnerTestHelper.ALL_GRAN)
          .dimension("dim1")
          .metric("cost")
          .threshold(4)
          .intervals(new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-04/2011-04-06"))))
          .aggregators(new LongSumAggregatorFactory("cost", "cost"))
          .build()
  );
  Assert.assertEquals(expectedQueryAfterOptimizing, optimizer.optimize(userQuery));
  derivativesManager.stop();
}
Usage example of org.apache.druid.query.topn.TopNQuery in the Apache Druid project (druid-io):
class MaterializedViewQueryTest, method testQuerySerialization.
@Test
public void testQuerySerialization() throws IOException
{
  // Build a topN over the shared test datasource, combining the common double
  // aggregators with max/min over "index", plus the standard post-aggregator.
  TopNQuery topNQuery = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
      .metric(QueryRunnerTestHelper.INDEX_METRIC)
      .threshold(4)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .aggregators(
          Lists.newArrayList(
              Iterables.concat(
                  QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .build();
  MaterializedViewQuery query = new MaterializedViewQuery(topNQuery, optimizer);

  // Round-trip through JSON: the deserialized query must equal the original,
  // and the wrapper must delegate datasource/granularity/intervals to the inner topN.
  String json = JSON_MAPPER.writeValueAsString(query);
  Query serdeQuery = JSON_MAPPER.readValue(json, Query.class);
  Assert.assertEquals(query, serdeQuery);
  Assert.assertEquals(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), query.getDataSource());
  Assert.assertEquals(QueryRunnerTestHelper.ALL_GRAN, query.getGranularity());
  Assert.assertEquals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals(), query.getIntervals());
}
Usage example of org.apache.druid.query.topn.TopNQuery in the Apache Druid project (druid-io):
class MaterializedViewUtilsTest, method testGetRequiredFieldsFromTopNQuery.
// Verifies that MaterializedViewUtils.getRequiredFields() extracts every input
// field a topN query references: the topN dimension ("sample_dim"), the selector
// filter dimensions ("dim1", "dim2"), and the aggregator fieldNames ("count",
// "some_metric"). The post-aggregator's fieldAccess entries reference aggregator
// outputs already in the set, so they contribute nothing new.
@Test
public void testGetRequiredFieldsFromTopNQuery() throws Exception {
// Raw JSON form of a topN query: an AND of two selector filters, a longSum and
// a doubleSum aggregation, and an arithmetic post-aggregation over both.
String queryStr = "{\n" + " \"queryType\": \"topN\",\n" + " \"dataSource\": \"sample_data\",\n" + " \"dimension\": \"sample_dim\",\n" + " \"threshold\": 5,\n" + " \"metric\": \"count\",\n" + " \"granularity\": \"all\",\n" + " \"filter\": {\n" + " \"type\": \"and\",\n" + " \"fields\": [\n" + " {\n" + " \"type\": \"selector\",\n" + " \"dimension\": \"dim1\",\n" + " \"value\": \"some_value\"\n" + " },\n" + " {\n" + " \"type\": \"selector\",\n" + " \"dimension\": \"dim2\",\n" + " \"value\": \"some_other_val\"\n" + " }\n" + " ]\n" + " },\n" + " \"aggregations\": [\n" + " {\n" + " \"type\": \"longSum\",\n" + " \"name\": \"count\",\n" + " \"fieldName\": \"count\"\n" + " },\n" + " {\n" + " \"type\": \"doubleSum\",\n" + " \"name\": \"some_metric\",\n" + " \"fieldName\": \"some_metric\"\n" + " }\n" + " ],\n" + " \"postAggregations\": [\n" + " {\n" + " \"type\": \"arithmetic\",\n" + " \"name\": \"average\",\n" + " \"fn\": \"/\",\n" + " \"fields\": [\n" + " {\n" + " \"type\": \"fieldAccess\",\n" + " \"name\": \"some_metric\",\n" + " \"fieldName\": \"some_metric\"\n" + " },\n" + " {\n" + " \"type\": \"fieldAccess\",\n" + " \"name\": \"count\",\n" + " \"fieldName\": \"count\"\n" + " }\n" + " ]\n" + " }\n" + " ],\n" + " \"intervals\": [\n" + " \"2013-08-31T00:00:00.000/2013-09-03T00:00:00.000\"\n" + " ]\n" + "}";
TopNQuery query = jsonMapper.readValue(queryStr, TopNQuery.class);
Set<String> fields = MaterializedViewUtils.getRequiredFields(query);
Assert.assertEquals(Sets.newHashSet("sample_dim", "dim1", "dim2", "count", "some_metric"), fields);
}
Usage example of org.apache.druid.query.topn.TopNQuery in the Apache Druid project (druid-io):
class DistinctCountTopNQueryTest, method testTopNWithDistinctCountAgg.
@Test
public void testTopNWithDistinctCountAgg() throws Exception
{
  TopNQueryEngine engine = new TopNQueryEngine(pool);

  // In-heap incremental index: per-second query granularity, a single "cnt"
  // count metric, capacity of 1000 rows.
  IncrementalIndex index = new OnheapIncrementalIndex.Builder()
      .setIndexSchema(
          new IncrementalIndexSchema.Builder()
              .withQueryGranularity(Granularities.SECOND)
              .withMetrics(new CountAggregatorFactory("cnt"))
              .build()
      )
      .setMaxRowCount(1000)
      .build();

  String visitorIdDim = "visitor_id";
  String clientTypeDim = "client_type";
  DateTime time = DateTimes.of("2016-03-04T00:00:00.000Z");
  long timestamp = time.getMillis();

  // Three rows at the same instant: two distinct iphone visitors, one android visitor.
  index.add(new MapBasedInputRow(
      timestamp,
      Lists.newArrayList(visitorIdDim, clientTypeDim),
      ImmutableMap.of(visitorIdDim, "0", clientTypeDim, "iphone")
  ));
  index.add(new MapBasedInputRow(
      timestamp,
      Lists.newArrayList(visitorIdDim, clientTypeDim),
      ImmutableMap.of(visitorIdDim, "1", clientTypeDim, "iphone")
  ));
  index.add(new MapBasedInputRow(
      timestamp,
      Lists.newArrayList(visitorIdDim, clientTypeDim),
      ImmutableMap.of(visitorIdDim, "2", clientTypeDim, "android")
  ));

  // TopN on client_type ranked by the distinct-count "UV" of visitor_id.
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .dimension(clientTypeDim)
      .metric("UV")
      .threshold(10)
      .aggregators(
          QueryRunnerTestHelper.ROWS_COUNT,
          new DistinctCountAggregatorFactory("UV", visitorIdDim, null)
      )
      .build();

  final Iterable<Result<TopNResultValue>> results =
      engine.query(query, new IncrementalIndexStorageAdapter(index), null).toList();

  // Expect iphone first (2 distinct visitors over 2 rows), then android (1 and 1).
  List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
      new Result<>(
          time,
          new TopNResultValue(
              Arrays.<Map<String, Object>>asList(
                  ImmutableMap.of(clientTypeDim, "iphone", "UV", 2L, "rows", 2L),
                  ImmutableMap.of(clientTypeDim, "android", "UV", 1L, "rows", 1L)
              )
          )
      )
  );
  TestHelper.assertExpectedResults(expectedResults, results);
}
Usage example of org.apache.druid.query.topn.TopNQuery in the Apache Druid project (druid-io):
class MultiValuedDimensionTest, method testTopNExpression.
@Test
public void testTopNExpression()
{
  // TopN over a virtual column that maps every element of the multi-value
  // "tags" dimension to <tag> + "foo" via an expression.
  TopNQuery query = new TopNQueryBuilder()
      .dataSource("xx")
      .granularity(Granularities.ALL)
      .dimension(new DefaultDimensionSpec("texpr", "texpr"))
      .virtualColumns(
          new ExpressionVirtualColumn(
              "texpr",
              "map(x -> concat(x, 'foo'), tags)",
              ColumnType.STRING,
              TestExprMacroTable.INSTANCE
          )
      )
      .metric("count")
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .aggregators(new CountAggregatorFactory("count"))
      .threshold(15)
      .build();

  try (CloseableStupidPool<ByteBuffer> bufferPool = TestQueryRunners.createDefaultNonBlockingPool()) {
    QueryRunnerFactory factory = new TopNQueryRunnerFactory(
        bufferPool,
        new TopNQueryQueryToolChest(new TopNQueryConfig()),
        QueryRunnerTestHelper.NOOP_QUERYWATCHER
    );
    QueryRunner<Result<TopNResultValue>> runner = QueryRunnerTestHelper.makeQueryRunner(
        factory,
        new QueryableIndexSegment(queryableIndex, SegmentId.dummy("sid1")),
        null
    );
    Sequence<Result<TopNResultValue>> result = runner.run(QueryPlus.wrap(query));

    // This row's "texpr" value depends on SQL-compatible null handling; a plain
    // HashMap is used because ImmutableMap rejects null values.
    Map<String, Object> nullableRow = new HashMap<>();
    nullableRow.put("texpr", NullHandling.sqlCompatible() ? "foo" : null);
    nullableRow.put("count", 1L);

    List<Map<String, Object>> expected = ImmutableList.<Map<String, Object>>builder()
        .add(ImmutableMap.of("texpr", "t3foo", "count", 2L))
        .add(ImmutableMap.of("texpr", "t5foo", "count", 2L))
        .add(nullableRow)
        .add(ImmutableMap.of("texpr", "t1foo", "count", 1L))
        .add(ImmutableMap.of("texpr", "t2foo", "count", 1L))
        .add(ImmutableMap.of("texpr", "t4foo", "count", 1L))
        .add(ImmutableMap.of("texpr", "t6foo", "count", 1L))
        .add(ImmutableMap.of("texpr", "t7foo", "count", 1L))
        .build();
    List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
        new Result<TopNResultValue>(
            DateTimes.of("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(expected)
        )
    );
    TestHelper.assertExpectedObjects(expectedResults, result.toList(), "filteredDim");
  }
}
Aggregations (section label from the original listing; no further examples follow in this excerpt).