Use of org.apache.druid.segment.IncrementalIndexSegment in project druid by druid-io.
The class MultiValuedDimensionTest, method testGroupByExpressionMultiMultiBackwardsCompat0dot22andOlder.
@Test
public void testGroupByExpressionMultiMultiBackwardsCompat0dot22andOlder()
{
  try {
    // Opt in to the pre-0.23 behavior of homogenizing null multi-value strings.
    ExpressionProcessing.initializeForHomogenizeNullMultiValueStrings();
    if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
      expectedException.expect(RuntimeException.class);
      expectedException.expectMessage("GroupBy v1 does not support dimension selectors with unknown cardinality.");
    }
    GroupByQuery query = GroupByQuery
        .builder()
        .setDataSource("xx")
        .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
        .setGranularity(Granularities.ALL)
        .setDimensions(new DefaultDimensionSpec("texpr", "texpr"))
        .setVirtualColumns(
            new ExpressionVirtualColumn(
                "texpr",
                "cartesian_map((x,y) -> concat(x, y), tags, othertags)",
                ColumnType.STRING,
                TestExprMacroTable.INSTANCE
            )
        )
        .setLimit(5)
        .setAggregatorSpecs(new CountAggregatorFactory("count"))
        .setContext(context)
        .build();
    // Run against both a persisted (QueryableIndex) and an in-memory (IncrementalIndex) segment.
    Sequence<ResultRow> result = helper.runQueryOnSegmentsObjs(
        ImmutableList.of(
            new QueryableIndexSegment(queryableIndex, SegmentId.dummy("sid1")),
            new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("sid2"))
        ),
        query
    );
    List<ResultRow> expectedResults = Arrays.asList(
        GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t1u1", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t1u2", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t2u1", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t2u2", "count", 2L),
        GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "texpr", "t3u1", "count", 2L)
    );
    TestHelper.assertExpectedObjects(expectedResults, result.toList(), "expr-multi-multi");
  }
  finally {
    // Restore default expression-processing config for subsequent tests.
    ExpressionProcessing.initializeForTests(null);
  }
}
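The expected rows follow directly from what cartesian_map produces. A minimal plain-Java sketch of the expression's semantics (illustrative only, not the Druid expression API; the method name is made up):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Mimics cartesian_map((x, y) -> concat(x, y), tags, othertags).
static List<String> cartesianConcat(List<String> tags, List<String> otherTags)
{
  List<String> out = new ArrayList<>();
  for (String x : tags) {
    for (String y : otherTags) {
      out.add(x + y); // concat(x, y)
    }
  }
  return out;
}

// cartesianConcat(Arrays.asList("t1", "t2", "t3"), Arrays.asList("u1", "u2"))
// returns [t1u1, t1u2, t2u1, t2u2, t3u1, t3u2]; setLimit(5) keeps the first
// five, matching expectedResults, and each "count" is 2L presumably because
// the query runs over two segments built from the same data.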
Use of org.apache.druid.segment.IncrementalIndexSegment in project druid by druid-io.
The class MultiValuedDimensionTest, method testGroupByWithDimFilterEmptyResults.
@Test
public void testGroupByWithDimFilterEmptyResults()
{
  GroupByQuery query = GroupByQuery
      .builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(new DefaultDimensionSpec("tags", "tags"))
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .setDimFilter(new InDimFilter("product", ImmutableList.of("product_5"), null))
      .setContext(context)
      .build();
  // Both segments use the null-sampler data, so the matching rows group under a null "tags" value.
  Sequence<ResultRow> result = helper.runQueryOnSegmentsObjs(
      ImmutableList.of(
          new QueryableIndexSegment(queryableIndexNullSampler, SegmentId.dummy("sid1")),
          new IncrementalIndexSegment(incrementalIndexNullSampler, SegmentId.dummy("sid2"))
      ),
      query
  );
  List<ResultRow> expectedResults = Collections.singletonList(
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", null, "count", 2L)
  );
  TestHelper.assertExpectedObjects(expectedResults, result.toList(), "filter-empty");
}
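The createExpectedRow helper takes its trailing varargs as (name, value) pairs. A minimal sketch of that pairing convention (illustrative only; the helper's actual implementation may differ):

import java.util.LinkedHashMap;
import java.util.Map;

static Map<String, Object> pairsToMap(Object... nameValuePairs)
{
  Map<String, Object> row = new LinkedHashMap<>();
  for (int i = 0; i < nameValuePairs.length; i += 2) {
    row.put((String) nameValuePairs[i], nameValuePairs[i + 1]);
  }
  return row;
}

// So ("tags", null, "count", 2L) above describes one result row whose "tags"
// dimension is null and whose "count" aggregate is 2.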
Use of org.apache.druid.segment.IncrementalIndexSegment in project druid by druid-io.
The class GroupByQueryRunnerFactoryTest, method createSegment.
private Segment createSegment() throws Exception
{
  IncrementalIndex incrementalIndex = new OnheapIncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("count"))
      .setConcurrentEventAdd(true)
      .setMaxRowCount(5000)
      .build();
  // Columns are [timestamp, product, tags]; the "\t" argument is the
  // multi-value list delimiter, not the CSV field separator.
  StringInputRowParser parser = new StringInputRowParser(
      new CSVParseSpec(
          new TimestampSpec("timestamp", "iso", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("product", "tags"))),
          "\t",
          ImmutableList.of("timestamp", "product", "tags"),
          false,
          0
      ),
      "UTF-8"
  );
  String[] rows = new String[] {
      "2011-01-12T00:00:00.000Z,product_1,t1",
      "2011-01-13T00:00:00.000Z,product_2,t2",
      "2011-01-14T00:00:00.000Z,product_3,t2"
  };
  for (String row : rows) {
    incrementalIndex.add(parser.parse(row));
  }
  closerRule.closeLater(incrementalIndex);
  return new IncrementalIndexSegment(incrementalIndex, SegmentId.dummy("test"));
}
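For comparison, rows can also be added without a parser. A minimal sketch equivalent in effect to parsing the first CSV row above, using MapBasedInputRow as in the DataSourceMetadataQueryTest example below (assumes Druid's DateTimes utility):

incrementalIndex.add(
    new MapBasedInputRow(
        DateTimes.of("2011-01-12T00:00:00.000Z").getMillis(),
        ImmutableList.of("product", "tags"),
        ImmutableMap.of("product", "product_1", "tags", "t1")
    )
);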
Use of org.apache.druid.segment.IncrementalIndexSegment in project druid by druid-io.
The class DataSourceMetadataQueryTest, method testMaxIngestedEventTime.
@Test
public void testMaxIngestedEventTime() throws Exception
{
  final IncrementalIndex rtIndex = new OnheapIncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("count"))
      .setMaxRowCount(1000)
      .build();
  final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
      new DataSourceMetadataQueryRunnerFactory(
          new DataSourceQueryQueryToolChest(DefaultGenericQueryMetricsFactory.instance()),
          QueryRunnerTestHelper.NOOP_QUERYWATCHER
      ),
      new IncrementalIndexSegment(rtIndex, SegmentId.dummy("test")),
      null
  );
  DateTime timestamp = DateTimes.nowUtc();
  rtIndex.add(new MapBasedInputRow(timestamp.getMillis(), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "x")));
  DataSourceMetadataQuery dataSourceMetadataQuery = Druids.newDataSourceMetadataQueryBuilder()
      .dataSource("testing")
      .build();
  ResponseContext context = ConcurrentResponseContext.createEmpty();
  context.initializeMissingSegments();
  Iterable<Result<DataSourceMetadataResultValue>> results =
      runner.run(QueryPlus.wrap(dataSourceMetadataQuery), context).toList();
  DataSourceMetadataResultValue val = results.iterator().next().getValue();
  DateTime maxIngestedEventTime = val.getMaxIngestedEventTime();
  // Only one row was ingested, so its timestamp must be the max ingested event time.
  Assert.assertEquals(timestamp, maxIngestedEventTime);
}
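Conceptually, maxIngestedEventTime is just the largest event timestamp among the rows added to the index. A trivial sketch (illustrative only, not the query's implementation; ingestedEventTimes is a hypothetical collection of row timestamps):

DateTime max = null;
for (DateTime eventTime : ingestedEventTimes) {
  if (max == null || eventTime.isAfter(max)) {
    max = eventTime;
  }
}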
Use of org.apache.druid.segment.IncrementalIndexSegment in project druid by druid-io.
The class TopNQueryQueryToolChestTest, method testMinTopNThreshold.
@Test
public void testMinTopNThreshold()
{
  TopNQueryConfig config = new TopNQueryConfig();
  final TopNQueryQueryToolChest chest = new TopNQueryQueryToolChest(config);
  try (CloseableStupidPool<ByteBuffer> pool = TestQueryRunners.createDefaultNonBlockingPool()) {
    QueryRunnerFactory factory = new TopNQueryRunnerFactory(pool, chest, QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    QueryRunner<Result<TopNResultValue>> runner = QueryRunnerTestHelper.makeQueryRunner(
        factory,
        new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), SEGMENT_ID),
        null
    );
    Map<String, Object> context = new HashMap<>();
    context.put("minTopNThreshold", 500);
    TopNQueryBuilder builder = new TopNQueryBuilder()
        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .granularity(QueryRunnerTestHelper.ALL_GRAN)
        .dimension(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION)
        .metric(QueryRunnerTestHelper.INDEX_METRIC)
        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .aggregators(QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS);
    // No context: the config default minTopNThreshold (1000) raises the threshold from 10.
    TopNQuery query1 = builder.threshold(10).context(null).build();
    MockQueryRunner mockRunner = new MockQueryRunner(runner);
    new TopNQueryQueryToolChest.ThresholdAdjustingQueryRunner(mockRunner, config).run(QueryPlus.wrap(query1));
    Assert.assertEquals(1000, mockRunner.query.getThreshold());
    // The context entry overrides the minimum down to 500.
    TopNQuery query2 = builder.threshold(10).context(context).build();
    new TopNQueryQueryToolChest.ThresholdAdjustingQueryRunner(mockRunner, config).run(QueryPlus.wrap(query2));
    Assert.assertEquals(500, mockRunner.query.getThreshold());
    // A threshold already above the minimum passes through unchanged.
    TopNQuery query3 = builder.threshold(2000).context(context).build();
    new TopNQueryQueryToolChest.ThresholdAdjustingQueryRunner(mockRunner, config).run(QueryPlus.wrap(query3));
    Assert.assertEquals(2000, mockRunner.query.getThreshold());
  }
}
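The three assertions pin down the adjustment rule. A sketch of that rule as inferred from them (illustrative only, not the actual ThresholdAdjustingQueryRunner source):

// The effective minimum is the query context's "minTopNThreshold" when
// present, else the TopNQueryConfig default (1000); a threshold below the
// minimum is raised to it, and a larger one passes through unchanged.
static int adjustThreshold(int queryThreshold, Integer contextMin, int configMin)
{
  int min = (contextMin != null) ? contextMin : configMin;
  return Math.max(queryThreshold, min);
}

// adjustThreshold(10, null, 1000) == 1000
// adjustThreshold(10, 500, 1000) == 500
// adjustThreshold(2000, 500, 1000) == 2000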