Use of org.apache.druid.segment.QueryableIndexSegment in project druid by druid-io.
In class MultiValuedDimensionTest, method testGroupByExpressionArrayFnArg:
@Test
public void testGroupByExpressionArrayFnArg() {
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(RuntimeException.class);
    expectedException.expectMessage("GroupBy v1 does not support dimension selectors with unknown cardinality.");
  }
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(new DefaultDimensionSpec("tt", "tt"))
      .setVirtualColumns(new ExpressionVirtualColumn(
          "tt",
          "array_to_string(map(tags -> concat('foo', tags), tags), ', ')",
          ColumnType.STRING,
          TestExprMacroTable.INSTANCE
      ))
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .setContext(context)
      .build();
  Sequence<ResultRow> result = helper.runQueryOnSegmentsObjs(
      ImmutableList.of(
          new QueryableIndexSegment(queryableIndex, SegmentId.dummy("sid1")),
          new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("sid2"))
      ),
      query
  );
  List<ResultRow> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", NullHandling.replaceWithDefault() ? null : "foo", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", "foot1, foot2, foot3", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", "foot3, foot4, foot5", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", "foot5, foot6, foot7", "count", 2L)
  );
  TestHelper.assertExpectedObjects(expectedResults, result.toList(), "expr-array-fn");
}
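All of the MultiValuedDimensionTest methods shown on this page run the query against the same pair of segments: the persisted test data wrapped in a QueryableIndexSegment and the same rows kept in memory as an IncrementalIndexSegment. A minimal sketch of that shared setup, assuming the test's queryableIndex and incrementalIndex fixtures (the helper name makeTestSegments is hypothetical, not part of the test class):

// Hypothetical helper: builds the segment pair that each test passes to
// helper.runQueryOnSegmentsObjs(...).
private List<Segment> makeTestSegments() {
  return ImmutableList.of(
      // memory-mapped, persisted form of the test data
      new QueryableIndexSegment(queryableIndex, SegmentId.dummy("sid1")),
      // in-memory, incremental form of the same rows
      new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("sid2"))
  );
}

Running the query over both forms exercises the grouping and expression logic on the memory-mapped and incremental code paths with one set of expected rows.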
Use of org.apache.druid.segment.QueryableIndexSegment in project druid by druid-io.
In class MultiValuedDimensionTest, method testGroupByNoFilter:
@Test
public void testGroupByNoFilter() {
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(new DefaultDimensionSpec("tags", "tags"))
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .build();
  Sequence<ResultRow> result = helper.runQueryOnSegmentsObjs(
      ImmutableList.of(
          new QueryableIndexSegment(queryableIndex, SegmentId.dummy("sid1")),
          new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("sid2"))
      ),
      query
  );
  List<ResultRow> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tags", NullHandling.replaceWithDefault() ? null : "", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tags", "t1", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tags", "t2", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tags", "t3", "count", 4L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tags", "t4", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tags", "t5", "count", 4L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tags", "t6", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tags", "t7", "count", 2L)
  );
  TestHelper.assertExpectedObjects(expectedResults, result.toList(), "noFilter");
}
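testGroupByNoFilter groups on the multi-value dimension tags without restricting the rows, so every distinct tag value gets its own result row. For contrast, a hedged sketch of a filtered counterpart, assuming a SelectorDimFilter (the variable name filteredQuery is illustrative):

// Hedged sketch: same grouping, but only rows whose multi-value "tags" contains "t3".
GroupByQuery filteredQuery = GroupByQuery.builder()
    .setDataSource("xx")
    .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
    .setGranularity(Granularities.ALL)
    .setDimensions(new DefaultDimensionSpec("tags", "tags"))
    .setDimFilter(new SelectorDimFilter("tags", "t3", null))
    .setAggregatorSpecs(new CountAggregatorFactory("count"))
    .build();

With a multi-value dimension, the selector filter matches a row when any of its values equals "t3", while the grouping still emits one output row per value of each matching row.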
Use of org.apache.druid.segment.QueryableIndexSegment in project druid by druid-io.
In class MultiValuedDimensionTest, method testGroupByExpressionMultiConflicting:
@Test
public void testGroupByExpressionMultiConflicting() {
  expectedException.expect(RuntimeException.class);
  expectedException.expectMessage(
      "Invalid expression: (concat [(map ([x] -> (concat [x, othertags])), [tags]), tags]); [tags] used as both scalar and array variables"
  );
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(new DefaultDimensionSpec("texpr", "texpr"))
      .setVirtualColumns(new ExpressionVirtualColumn(
          "texpr",
          "concat(map((x) -> concat(x, othertags), tags), tags)",
          ColumnType.STRING,
          TestExprMacroTable.INSTANCE
      ))
      .setLimit(5)
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .setContext(context)
      .build();
  helper.runQueryOnSegmentsObjs(
      ImmutableList.of(
          new QueryableIndexSegment(queryableIndex, SegmentId.dummy("sid1")),
          new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("sid2"))
      ),
      query
  ).toList();
}
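The rejected expression uses tags both as the array argument to map and as a scalar argument to the outer concat, which the expression parser does not allow. A hedged sketch of a non-conflicting rewrite, assuming the same fixtures and using cartesian_map so each multi-value column appears only as an array variable (the variable name nonConflicting is illustrative):

// Hedged sketch: "tags" and "othertags" are each used only inside the lambda as
// array variables, so the scalar/array conflict from the test above disappears.
GroupByQuery nonConflicting = GroupByQuery.builder()
    .setDataSource("xx")
    .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
    .setGranularity(Granularities.ALL)
    .setDimensions(new DefaultDimensionSpec("texpr", "texpr"))
    .setVirtualColumns(new ExpressionVirtualColumn(
        "texpr",
        "cartesian_map((x, y) -> concat(x, y), tags, othertags)",
        ColumnType.STRING,
        TestExprMacroTable.INSTANCE
    ))
    .setAggregatorSpecs(new CountAggregatorFactory("count"))
    .setContext(context)
    .build();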
Use of org.apache.druid.segment.QueryableIndexSegment in project druid by druid-io.
In class GroupByBenchmark, method querySingleQueryableIndex:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndex(Blackhole blackhole, QueryableIndexState state) {
  QueryRunner<ResultRow> runner = QueryBenchmarkUtil.makeQueryRunner(
      factory,
      SegmentId.dummy("qIndex"),
      new QueryableIndexSegment(state.queryableIndexes.get(0), SegmentId.dummy("qIndex"))
  );
  final Sequence<ResultRow> results = GroupByBenchmark.runQuery(factory, runner, query);
  final ResultRow lastRow = results.accumulate(null, (accumulated, in) -> in);
  blackhole.consume(lastRow);
}
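The accumulate call above drains the whole result Sequence but keeps only the last row, which is enough for the Blackhole to prevent dead-code elimination of the query. If the benchmark should hand every row to JMH instead, a hedged alternative using the same Sequence API (only the accumulator changes) would be:

// Hedged sketch: feed each result row to the Blackhole rather than only the last one.
final Sequence<ResultRow> results = GroupByBenchmark.runQuery(factory, runner, query);
results.accumulate(blackhole, (bh, row) -> {
  bh.consume(row);
  return bh;
});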
Use of org.apache.druid.segment.QueryableIndexSegment in project druid by druid-io.
In class GroupByBenchmark, method makeMultiRunners:
private List<QueryRunner<ResultRow>> makeMultiRunners(QueryableIndexState state) {
  List<QueryRunner<ResultRow>> runners = new ArrayList<>();
  for (int i = 0; i < state.numSegments; i++) {
    String segmentName = "qIndex " + i;
    QueryRunner<ResultRow> runner = QueryBenchmarkUtil.makeQueryRunner(
        factory,
        SegmentId.dummy(segmentName),
        new QueryableIndexSegment(state.queryableIndexes.get(i), SegmentId.dummy(segmentName))
    );
    runners.add(factory.getToolchest().preMergeQueryDecoration(runner));
  }
  return runners;
}
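makeMultiRunners only produces the per-segment runners; the benchmark still has to merge them before running the query. A hedged sketch of that consumption, assuming an ExecutorService field named executorService and an ExecutorService-based mergeRunners overload (those names and the overload are assumptions, not shown in the snippet above):

// Hedged sketch: merge the per-segment runners through the factory and toolchest,
// then run the merged runner and materialize the results.
QueryToolChest<ResultRow, GroupByQuery> toolChest = factory.getToolchest();
QueryRunner<ResultRow> mergedRunner = new FinalizeResultsQueryRunner<>(
    toolChest.mergeResults(factory.mergeRunners(executorService, makeMultiRunners(state))),
    (QueryToolChest) toolChest
);
Sequence<ResultRow> queryResult = mergedRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty());
List<ResultRow> results = queryResult.toList();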