Use of org.apache.druid.query.metadata.SegmentMetadataQueryConfig in project druid by druid-io.
From the class QueryStackTests, method createQueryRunnerFactoryConglomerate:
public static QueryRunnerFactoryConglomerate createQueryRunnerFactoryConglomerate(
    final Closer closer,
    final DruidProcessingConfig processingConfig,
    final Supplier<Integer> minTopNThresholdSupplier) {
  // Processing-buffer pool for the TopN factory; released when the closer runs.
  final CloseableStupidPool<ByteBuffer> stupidPool = new CloseableStupidPool<>(
      "TopNQueryRunnerFactory-bufferPool",
      () -> ByteBuffer.allocate(COMPUTE_BUFFER_SIZE));
  closer.register(stupidPool);
  final Pair<GroupByQueryRunnerFactory, Closer> factoryCloserPair = GroupByQueryRunnerTest.makeQueryRunnerFactory(
      GroupByQueryRunnerTest.DEFAULT_MAPPER,
      new GroupByQueryConfig() {
        @Override
        public String getDefaultStrategy() {
          return GroupByStrategySelector.STRATEGY_V2;
        }
      },
      processingConfig);
  final GroupByQueryRunnerFactory groupByQueryRunnerFactory = factoryCloserPair.lhs;
  closer.register(factoryCloserPair.rhs);
  // One QueryRunnerFactory per query type; segment metadata gets a one-week
  // default history ("P1W").
  final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate(
      ImmutableMap.<Class<? extends Query>, QueryRunnerFactory>builder()
          .put(SegmentMetadataQuery.class, new SegmentMetadataQueryRunnerFactory(
              new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig("P1W")),
              QueryRunnerTestHelper.NOOP_QUERYWATCHER))
          .put(ScanQuery.class, new ScanQueryRunnerFactory(
              new ScanQueryQueryToolChest(new ScanQueryConfig(), new DefaultGenericQueryMetricsFactory()),
              new ScanQueryEngine(),
              new ScanQueryConfig()))
          .put(TimeseriesQuery.class, new TimeseriesQueryRunnerFactory(
              new TimeseriesQueryQueryToolChest(),
              new TimeseriesQueryEngine(),
              QueryRunnerTestHelper.NOOP_QUERYWATCHER))
          .put(TopNQuery.class, new TopNQueryRunnerFactory(
              stupidPool,
              new TopNQueryQueryToolChest(new TopNQueryConfig() {
                @Override
                public int getMinTopNThreshold() {
                  return minTopNThresholdSupplier.get();
                }
              }),
              QueryRunnerTestHelper.NOOP_QUERYWATCHER))
          .put(GroupByQuery.class, groupByQueryRunnerFactory)
          .build());
  return conglomerate;
}
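The "P1W" passed to SegmentMetadataQueryConfig above is an ISO-8601 period string: it sets the default history window applied when a segment metadata query specifies no intervals. A minimal standalone sketch of that constructor and getter (the wrapper class here is hypothetical, not part of the Druid source):

import org.apache.druid.query.metadata.SegmentMetadataQueryConfig;

public class DefaultHistorySketch {
  public static void main(String[] args) {
    // The single-argument constructor parses its argument as an ISO-8601 period.
    SegmentMetadataQueryConfig config = new SegmentMetadataQueryConfig("P1W");
    // getDefaultHistory() returns the parsed period; its toString() round-trips
    // to "P1W" (the serde test below compares exactly this string form).
    System.out.println(config.getDefaultHistory());
  }
}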
Use of org.apache.druid.query.metadata.SegmentMetadataQueryConfig in project druid by druid-io.
From the class SegmentMetadataQueryConfigTest, method testSerdeSegmentMetadataQueryConfig:
@Test
public void testSerdeSegmentMetadataQueryConfig() {
  Injector injector = Guice.createInjector(new Module() {
    @Override
    public void configure(Binder binder) {
      binder.install(new PropertiesModule(Collections.singletonList("test.runtime.properties")));
      binder.install(new ConfigModule());
      binder.install(new DruidGuiceExtensions());
      JsonConfigProvider.bind(binder, "druid.query.segmentMetadata", SegmentMetadataQueryConfig.class);
    }

    @Provides
    @LazySingleton
    public ObjectMapper jsonMapper() {
      return new DefaultObjectMapper();
    }
  });
  Properties props = injector.getInstance(Properties.class);
  SegmentMetadataQueryConfig config = injector.getInstance(SegmentMetadataQueryConfig.class);
  EnumSet<SegmentMetadataQuery.AnalysisType> expectedDefaultAnalysis = config.getDefaultAnalysisTypes();
  String actualDefaultAnalysis = props.getProperty("druid.query.segmentMetadata.defaultAnalysisTypes");
  // Rebuild the bracketed, quoted, comma-separated string form of the
  // analysis-type list from the config, then compare it with the raw property value.
  Iterator<SegmentMetadataQuery.AnalysisType> it = expectedDefaultAnalysis.iterator();
  StringBuilder expectedDefaultAnalysisAsStringBuilder = new StringBuilder();
  expectedDefaultAnalysisAsStringBuilder.append('[');
  while (it.hasNext()) {
    SegmentMetadataQuery.AnalysisType e = it.next();
    expectedDefaultAnalysisAsStringBuilder.append('"').append(e).append('"');
    if (it.hasNext()) {
      expectedDefaultAnalysisAsStringBuilder.append(", ");
    }
  }
  expectedDefaultAnalysisAsStringBuilder.append(']');
  Assert.assertEquals(expectedDefaultAnalysisAsStringBuilder.toString(), actualDefaultAnalysis);
  Assert.assertEquals(props.getProperty("druid.query.segmentMetadata.defaultHistory"), config.getDefaultHistory().toString());
}
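Because the test binds SegmentMetadataQueryConfig through JsonConfigProvider, the runtime property suffixes (defaultHistory, defaultAnalysisTypes) double as Jackson field names. Under that assumption, the same config can be deserialized straight from JSON without Guice; a minimal sketch with an illustrative value:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.query.metadata.SegmentMetadataQueryConfig;

public class ConfigSerdeSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new DefaultObjectMapper();
    // "defaultHistory" mirrors the druid.query.segmentMetadata.defaultHistory
    // runtime property asserted in the test above; "P2W" is an arbitrary example.
    SegmentMetadataQueryConfig config = mapper.readValue(
        "{\"defaultHistory\": \"P2W\"}",
        SegmentMetadataQueryConfig.class);
    System.out.println(config.getDefaultHistory()); // P2W
  }
}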
Use of org.apache.druid.query.metadata.SegmentMetadataQueryConfig in project druid by druid-io.
From the class ClientInfoResourceTest, method testGetDatasourceWithConfiguredDefaultInterval:
@Test
public void testGetDatasourceWithConfiguredDefaultInterval() {
  ClientInfoResource defaultResource = getResourceTestHelper(
      serverInventoryView,
      timelineServerView,
      new SegmentMetadataQueryConfig("P100Y"));
  Map<String, Object> expected = ImmutableMap.of(
      KEY_DIMENSIONS, ImmutableSet.of("d1", "d2", "d3", "d4", "d5"),
      KEY_METRICS, ImmutableSet.of("m1", "m2", "m3", "m4", "m5"));
  Map<String, Object> actual = defaultResource.getDatasource(dataSource, null, null);
  Assert.assertEquals(expected, actual);
}
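The "P100Y" default history is what pulls the 1960-61 segment (d5/m5 in the setup below) into the result even though the interval argument is null. With the no-argument config used in setup, that old segment would fall outside the default window; a hypothetical variation (not a test in the actual class) that reaches it via an explicit interval instead:

// Hypothetical: the resource built with the default config in setup(), queried
// with the 1960-61 interval spelled out. Only the d5/m5 segment lies in range.
Map<String, Object> actualOld = resource.getDatasource(dataSource, "1960-02-13/1961-02-14", null);
Assert.assertEquals(
    ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d5"), KEY_METRICS, ImmutableSet.of("m5")),
    actualOld);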
Use of org.apache.druid.query.metadata.SegmentMetadataQueryConfig in project druid by druid-io.
From the class ClientInfoResourceTest, method setup:
@Before
public void setup() {
  VersionedIntervalTimeline<String, ServerSelector> timeline = new VersionedIntervalTimeline<>(Ordering.natural());
  DruidServer server = new DruidServer("name", "host", null, 1234, ServerType.HISTORICAL, "tier", 0);

  // a lone segment far in the past, only reachable when the default history window is wide enough
  addSegment(timeline, server, "1960-02-13/1961-02-14", ImmutableList.of("d5"), ImmutableList.of("m5"), "v0");

  // segments within [2014-02-13, 2014-02-18]
  addSegment(timeline, server, "2014-02-13/2014-02-14", ImmutableList.of("d1"), ImmutableList.of("m1"), "v0");
  addSegment(timeline, server, "2014-02-14/2014-02-15", ImmutableList.of("d1"), ImmutableList.of("m1"), "v0");
  addSegment(timeline, server, "2014-02-16/2014-02-17", ImmutableList.of("d1"), ImmutableList.of("m1"), "v0");
  addSegment(timeline, server, "2014-02-17/2014-02-18", ImmutableList.of("d2"), ImmutableList.of("m2"), "v0");

  // segments within [2015-02-01, 2015-02-13]
  addSegment(timeline, server, "2015-02-01/2015-02-07", ImmutableList.of("d1"), ImmutableList.of("m1"), "v1");
  addSegment(timeline, server, "2015-02-07/2015-02-13", ImmutableList.of("d1"), ImmutableList.of("m1"), "v1");
  addSegmentWithShardSpec(timeline, server, "2015-02-03/2015-02-05", ImmutableList.of("d1", "d2"), ImmutableList.of("m1", "m2"), "v2", new NumberedShardSpec(0, 2));
  addSegmentWithShardSpec(timeline, server, "2015-02-03/2015-02-05", ImmutableList.of("d1", "d2", "d3"), ImmutableList.of("m1", "m2", "m3"), "v2", new NumberedShardSpec(1, 2));
  addSegment(timeline, server, "2015-02-09/2015-02-10", ImmutableList.of("d1", "d3"), ImmutableList.of("m1", "m3"), "v2");
  addSegment(timeline, server, "2015-02-11/2015-02-12", ImmutableList.of("d3"), ImmutableList.of("m3"), "v2");

  // segments within [2015-03-13, 2015-03-19]
  addSegment(timeline, server, "2015-03-13/2015-03-19", ImmutableList.of("d1"), ImmutableList.of("m1"), "v3");
  addSegment(timeline, server, "2015-03-13/2015-03-14", ImmutableList.of("d1"), ImmutableList.of("m1"), "v4");
  addSegment(timeline, server, "2015-03-14/2015-03-15", ImmutableList.of("d1"), ImmutableList.of("m1"), "v5");
  addSegment(timeline, server, "2015-03-15/2015-03-16", ImmutableList.of("d1"), ImmutableList.of("m1"), "v6");

  // incomplete segment: only shard 0 of 2 is added
  addSegmentWithShardSpec(timeline, server, "2015-04-03/2015-04-05", ImmutableList.of("d4"), ImmutableList.of("m4"), "v7", new NumberedShardSpec(0, 2));

  serverInventoryView = EasyMock.createMock(FilteredServerInventoryView.class);
  EasyMock.expect(serverInventoryView.getInventory()).andReturn(ImmutableList.of(server)).anyTimes();
  timelineServerView = EasyMock.createMock(TimelineServerView.class);
  EasyMock.expect(timelineServerView.getTimeline(EasyMock.anyObject(DataSourceAnalysis.class))).andReturn((Optional) Optional.of(timeline));
  EasyMock.replay(serverInventoryView, timelineServerView);
  resource = getResourceTestHelper(serverInventoryView, timelineServerView, new SegmentMetadataQueryConfig());
}
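The addSegment and addSegmentWithShardSpec helpers are defined elsewhere in ClientInfoResourceTest. A plausible sketch of the DataSegment construction such a helper performs (the builder values shown are illustrative assumptions, not the helper's actual body):

// Sketch only: build a segment like the first 2014 entry above, before a real
// helper would register it with the server and insert it into the timeline.
DataSegment segment = DataSegment.builder()
    .dataSource(dataSource)
    .interval(Intervals.of("2014-02-13/2014-02-14"))
    .version("v0")
    .dimensions(ImmutableList.of("d1"))
    .metrics(ImmutableList.of("m1"))
    .size(1)
    .build();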