Use of org.apache.druid.segment.QueryableIndexSegment in project druid by druid-io.
From the class MultiValuedDimensionTest, method testGroupByWithDimFilter:
@Test
public void testGroupByWithDimFilter() {
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(new DefaultDimensionSpec("tags", "tags"))
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .setDimFilter(new SelectorDimFilter("tags", "t3", null))
      .setContext(context)
      .build();
  Sequence<ResultRow> result = helper.runQueryOnSegmentsObjs(
      ImmutableList.of(
          new QueryableIndexSegment(queryableIndex, SegmentId.dummy("sid1")),
          new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("sid2"))
      ),
      query
  );
  List<ResultRow> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "t1", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "t2", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "t3", "count", 4L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "t4", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "t5", "count", 2L)
  );
  TestHelper.assertExpectedObjects(expectedResults, result.toList(), "dimFilter");
}
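The filter on tags = "t3" keeps whole rows, and grouping on the multi-value dimension then emits one group per value of each kept row, which is why t1, t2, t4, and t5 appear alongside t3. Below is a minimal plain-Java sketch of that filter-then-explode behavior; the rows are hypothetical and not the test fixture's actual data.

import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class MultiValueFilterSketch {
  public static void main(String[] args) {
    // Hypothetical rows; not the fixture data used by MultiValuedDimensionTest.
    List<List<String>> rows = List.of(
        List.of("t1", "t2", "t3"),   // kept: contains "t3"
        List.of("t3", "t4", "t5"),   // kept: contains "t3"
        List.of("t5", "t6", "t7")    // dropped: no "t3"
    );
    Map<String, Long> counts = new TreeMap<>();
    for (List<String> tags : rows) {
      if (!tags.contains("t3")) {
        continue;                    // SelectorDimFilter("tags", "t3", null) keeps or drops whole rows
      }
      for (String tag : tags) {
        counts.merge(tag, 1L, Long::sum);   // group by "tags" counts every value of the kept rows
      }
    }
    System.out.println(counts);      // {t1=1, t2=1, t3=2, t4=1, t5=1}
  }
}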
Use of org.apache.druid.segment.QueryableIndexSegment in project druid by druid-io.
From the class MultiValuedDimensionTest, method testGroupByWithDimFilterNullishResults:
@Test
public void testGroupByWithDimFilterNullishResults() {
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(new DefaultDimensionSpec("tags", "tags"))
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .setDimFilter(new InDimFilter("product", ImmutableList.of("product_5", "product_6", "product_8"), null))
      .setContext(context)
      .build();
  Sequence<ResultRow> result = helper.runQueryOnSegmentsObjs(
      ImmutableList.of(
          new QueryableIndexSegment(queryableIndexNullSampler, SegmentId.dummy("sid1")),
          new IncrementalIndexSegment(incrementalIndexNullSampler, SegmentId.dummy("sid2"))
      ),
      query
  );
  List<ResultRow> expectedResults;
  // grouping input is filtered to [], null, [""]
  if (NullHandling.replaceWithDefault()) {
    // when sql compatible null handling is disabled, the inputs are effectively [], null, [null] and
    // are all grouped as null
    expectedResults = Collections.singletonList(
        GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", null, "count", 6L)
    );
  } else {
    // with sql compatible null handling, null and [] = null, but [""] = ""
    expectedResults = ImmutableList.of(
        GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", null, "count", 4L),
        GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "", "count", 2L)
    );
  }
  TestHelper.assertExpectedObjects(expectedResults, result.toList(), "filter-nullish");
}
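The branch above encodes how the "nullish" inputs ([], null, [""]) collapse into group keys under each null-handling mode. The following plain-Java sketch mirrors only what the test's comments state; the helper groupKeys is hypothetical and stands in for how a multi-value row contributes group keys, not for Druid's actual code path.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class NullishGroupingSketch {
  // Default-value mode: [], null, and [""] all collapse to a single null group key.
  // SQL-compatible mode: null and [] group as null, but [""] keeps "" as its key.
  static List<String> groupKeys(List<String> multiValue, boolean replaceWithDefault) {
    if (multiValue == null || multiValue.isEmpty()) {
      return Collections.<String>singletonList(null);   // null and [] always group as null
    }
    if (!replaceWithDefault) {
      return multiValue;                                 // SQL-compatible: "" stays distinct from null
    }
    List<String> keys = new ArrayList<>();
    for (String value : multiValue) {
      keys.add("".equals(value) ? null : value);         // default mode treats "" as null
    }
    return keys;
  }

  public static void main(String[] args) {
    System.out.println(groupKeys(Collections.singletonList(""), true));   // [null]
    System.out.println(groupKeys(Collections.singletonList(""), false));  // [] (a single empty-string key)
  }
}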
Use of org.apache.druid.segment.QueryableIndexSegment in project druid by druid-io.
From the class MultiValuedDimensionTest, method testGroupByExpressionMultiMulti:
@Test
public void testGroupByExpressionMultiMulti() {
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(RuntimeException.class);
    expectedException.expectMessage("GroupBy v1 does not support dimension selectors with unknown cardinality.");
  }
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(new DefaultDimensionSpec("texpr", "texpr"))
      .setVirtualColumns(
          new ExpressionVirtualColumn(
              "texpr",
              "cartesian_map((x,y) -> concat(x, y), tags, othertags)",
              ColumnType.STRING,
              TestExprMacroTable.INSTANCE
          )
      )
      .setLimit(5)
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .setContext(context)
      .build();
  Sequence<ResultRow> result = helper.runQueryOnSegmentsObjs(
      ImmutableList.of(
          new QueryableIndexSegment(queryableIndex, SegmentId.dummy("sid1")),
          new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("sid2"))
      ),
      query
  );
  List<ResultRow> expectedResults = NullHandling.sqlCompatible()
      ? Arrays.asList(
          GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t1u1", "count", 2L),
          GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t1u2", "count", 2L),
          GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t2u1", "count", 2L),
          GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t2u2", "count", 2L),
          GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t3u1", "count", 2L)
      )
      : Arrays.asList(
          GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", null, "count", 2L),
          GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t1u1", "count", 2L),
          GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t1u2", "count", 2L),
          GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t2u1", "count", 2L),
          GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t2u2", "count", 2L)
      );
  TestHelper.assertExpectedObjects(expectedResults, result.toList(), "expr-multi-multi");
}
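For a single row, the texpr virtual column produces the concatenation of every (tags value, othertags value) pair. The sketch below is a plain-Java illustration of the cartesian_map expression rather than a call into Druid's expression engine, and the row values are hypothetical.

import java.util.ArrayList;
import java.util.List;

public class CartesianMapSketch {
  // cartesian_map((x, y) -> concat(x, y), tags, othertags) applies the lambda to every
  // (tags value, othertags value) pair of a row.
  static List<String> cartesianConcat(List<String> tags, List<String> othertags) {
    List<String> out = new ArrayList<>();
    for (String x : tags) {
      for (String y : othertags) {
        out.add(x + y);   // concat(x, y)
      }
    }
    return out;
  }

  public static void main(String[] args) {
    // For a row with tags = ["t1", "t2"] and othertags = ["u1", "u2"], the group-by
    // sees the four keys printed below.
    System.out.println(cartesianConcat(List.of("t1", "t2"), List.of("u1", "u2")));
    // [t1u1, t1u2, t2u1, t2u2]
  }
}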
Use of org.apache.druid.segment.QueryableIndexSegment in project druid by druid-io.
From the class MultiValuedDimensionTest, method testGroupByExpressionMultiMultiAuto:
@Test
public void testGroupByExpressionMultiMultiAuto() {
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(RuntimeException.class);
    expectedException.expectMessage("GroupBy v1 does not support dimension selectors with unknown cardinality.");
  }
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(new DefaultDimensionSpec("texpr", "texpr"))
      .setVirtualColumns(
          new ExpressionVirtualColumn(
              "texpr",
              "map((x) -> concat(x, othertags), tags)",
              ColumnType.STRING,
              TestExprMacroTable.INSTANCE
          )
      )
      .setLimit(5)
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .setContext(context)
      .build();
  Sequence<ResultRow> result = helper.runQueryOnSegmentsObjs(
      ImmutableList.of(
          new QueryableIndexSegment(queryableIndex, SegmentId.dummy("sid1")),
          new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("sid2"))
      ),
      query
  );
  List<ResultRow> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t1u1", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t1u2", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t2u1", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t2u2", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t3u1", "count", 2L)
  );
  TestHelper.assertExpectedObjects(expectedResults, result.toList(), "expr-multi-multi-auto");
}
Use of org.apache.druid.segment.QueryableIndexSegment in project druid by druid-io.
From the class MultiValuedDimensionTest, method testGroupByExpressionMultiConflictingAlso:
@Test
public void testGroupByExpressionMultiConflictingAlso() {
  expectedException.expect(RuntimeException.class);
  expectedException.expectMessage(
      "Invalid expression: (array_concat [tags, (array_append [othertags, tags])]); [tags] used as both scalar and array variables"
  );
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(new DefaultDimensionSpec("texpr", "texpr"))
      .setVirtualColumns(
          new ExpressionVirtualColumn(
              "texpr",
              "array_concat(tags, (array_append(othertags, tags)))",
              ColumnType.STRING,
              TestExprMacroTable.INSTANCE
          )
      )
      .setLimit(5)
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .setContext(context)
      .build();
  helper.runQueryOnSegmentsObjs(
      ImmutableList.of(
          new QueryableIndexSegment(queryableIndex, SegmentId.dummy("sid1")),
          new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("sid2"))
      ),
      query
  ).toList();
}
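The expression is rejected because array_concat takes tags as an array while array_append(othertags, tags) takes tags as a single scalar element, which is exactly the conflict the error message names. Below is a plain-Java sketch of that shape mismatch; it is illustrative only and does not use Druid's expression engine.

import java.util.ArrayList;
import java.util.List;

public class ScalarVsArraySketch {
  // array_concat(a, b): both arguments are arrays.
  static List<String> arrayConcat(List<String> a, List<String> b) {
    List<String> out = new ArrayList<>(a);
    out.addAll(b);
    return out;
  }

  // array_append(arr, x): the second argument is a single scalar element.
  static List<String> arrayAppend(List<String> arr, String x) {
    List<String> out = new ArrayList<>(arr);
    out.add(x);
    return out;
  }

  public static void main(String[] args) {
    List<String> tags = List.of("t1", "t2");
    List<String> othertags = List.of("u1", "u2");
    // array_concat(tags, array_append(othertags, tags)) would need `tags` to be a List for the
    // outer call and a String for the inner call at the same time; Druid rejects that ambiguity.
    System.out.println(arrayConcat(tags, arrayAppend(othertags, "t1")));   // [t1, t2, u1, u2, t1]
  }
}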