
Example 1 with QueryableDruidServer

Use of org.apache.druid.client.selector.QueryableDruidServer in project druid by druid-io.

From class CachingClusteredClientTest, method testEtagforDifferentQueryInterval.

@Test
public void testEtagforDifferentQueryInterval() {
    final Interval interval = Intervals.of("2016-01-01/2016-01-02");
    final Interval queryInterval = Intervals.of("2016-01-01T14:00:00/2016-01-02T14:00:00");
    final Interval queryInterval2 = Intervals.of("2016-01-01T18:00:00/2016-01-02T18:00:00");
    final DataSegment dataSegment = new DataSegment(
        "dataSource", interval, "ver",
        ImmutableMap.of("type", "hdfs", "path", "/tmp"),
        ImmutableList.of("product"), ImmutableList.of("visited_sum"),
        NoneShardSpec.instance(), 9, 12334
    );
    final ServerSelector selector = new ServerSelector(dataSegment, new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy()));
    selector.addServerAndUpdateSegment(new QueryableDruidServer(servers[0], null), dataSegment);
    timeline.add(interval, "ver", new SingleElementPartitionChunk<>(selector));
    // Two queries over different sub-intervals of the segment, both sent with the same If-None-Match header.
    final TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(queryInterval)))
        .context(ImmutableMap.of("If-None-Match", "aVJV29CJY93rszVW/QBy0arWZo0="))
        .randomQueryId()
        .build();
    final TimeBoundaryQuery query2 = Druids.newTimeBoundaryQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(queryInterval2)))
        .context(ImmutableMap.of("If-None-Match", "aVJV29CJY93rszVW/QBy0arWZo0="))
        .randomQueryId()
        .build();
    final ResponseContext responseContext = initializeResponseContext();
    getDefaultQueryRunner().run(QueryPlus.wrap(query), responseContext);
    final String etag1 = responseContext.getEntityTag();
    getDefaultQueryRunner().run(QueryPlus.wrap(query2), responseContext);
    final String etag2 = responseContext.getEntityTag();
    // Different query intervals must yield different entity tags.
    Assert.assertNotEquals(etag1, etag2);
}
Also used : ServerSelector(org.apache.druid.client.selector.ServerSelector) HighestPriorityTierSelectorStrategy(org.apache.druid.client.selector.HighestPriorityTierSelectorStrategy) ResponseContext(org.apache.druid.query.context.ResponseContext) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) TimeBoundaryQuery(org.apache.druid.query.timeboundary.TimeBoundaryQuery) DataSegment(org.apache.druid.timeline.DataSegment) RandomServerSelectorStrategy(org.apache.druid.client.selector.RandomServerSelectorStrategy) Interval(org.joda.time.Interval) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) Test(org.junit.Test)
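
The assertion above hinges on the entity tag the query runner stores in the ResponseContext. As a minimal, hypothetical sketch (not Druid's actual resource code; only ResponseContext.getEntityTag() is taken from the example above, the class and method names are invented), the If-None-Match decision a caller could build on top of that tag looks roughly like this:

import org.apache.druid.query.context.ResponseContext;

// Hypothetical helper: compare the client's If-None-Match value against the tag
// freshly computed for this query; a match means the cached result is still valid.
final class EtagCheckSketch {
    static boolean notModified(String ifNoneMatchHeader, ResponseContext responseContext) {
        final String etag = responseContext.getEntityTag(); // written while the query runs
        return etag != null && etag.equals(ifNoneMatchHeader);
    }
}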

Example 2 with QueryableDruidServer

Use of org.apache.druid.client.selector.QueryableDruidServer in project druid by druid-io.

From class CachingClusteredClientTest, method testCachingOverBulkLimitEnforcesLimit.

@Test
@SuppressWarnings("unchecked")
public void testCachingOverBulkLimitEnforcesLimit() {
    final int limit = 10;
    final Interval interval = Intervals.of("2011-01-01/2011-01-02");
    final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(interval)))
        .filters(DIM_FILTER)
        .granularity(GRANULARITY)
        .aggregators(AGGS)
        .postAggregators(POST_AGGS)
        .context(CONTEXT)
        .randomQueryId()
        .build();
    final ResponseContext context = initializeResponseContext();
    final Cache cache = EasyMock.createStrictMock(Cache.class);
    final Capture<Iterable<Cache.NamedKey>> cacheKeyCapture = EasyMock.newCapture();
    EasyMock.expect(cache.getBulk(EasyMock.capture(cacheKeyCapture))).andReturn(ImmutableMap.of()).once();
    EasyMock.replay(cache);
    // Client whose cache bulk-merge limit is `limit` (10); at most that many keys may be fetched in bulk.
    client = makeClient(new ForegroundCachePopulator(JSON_MAPPER, new CachePopulatorStats(), -1), cache, limit);
    final DruidServer lastServer = servers[random.nextInt(servers.length)];
    final DataSegment dataSegment = EasyMock.createNiceMock(DataSegment.class);
    EasyMock.expect(dataSegment.getId()).andReturn(SegmentId.dummy(DATA_SOURCE)).anyTimes();
    EasyMock.replay(dataSegment);
    final ServerSelector selector = new ServerSelector(dataSegment, new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy()));
    selector.addServerAndUpdateSegment(new QueryableDruidServer(lastServer, null), dataSegment);
    timeline.add(interval, "v", new SingleElementPartitionChunk<>(selector));
    getDefaultQueryRunner().run(QueryPlus.wrap(query), context);
    Assert.assertTrue("Capture cache keys", cacheKeyCapture.hasCaptured());
    Assert.assertTrue("Cache key below limit", ImmutableList.copyOf(cacheKeyCapture.getValue()).size() <= limit);
    EasyMock.verify(cache);
    EasyMock.reset(cache);
    cacheKeyCapture.reset();
    EasyMock.expect(cache.getBulk(EasyMock.capture(cacheKeyCapture))).andReturn(ImmutableMap.of()).once();
    EasyMock.replay(cache);
    // Rebuild the client with a bulk-merge limit of 0: no cache keys should be fetched at all.
    client = makeClient(new ForegroundCachePopulator(JSON_MAPPER, new CachePopulatorStats(), -1), cache, 0);
    getDefaultQueryRunner().run(QueryPlus.wrap(query), context);
    EasyMock.verify(cache);
    EasyMock.verify(dataSegment);
    Assert.assertTrue("Capture cache keys", cacheKeyCapture.hasCaptured());
    Assert.assertTrue("Cache Keys empty", ImmutableList.copyOf(cacheKeyCapture.getValue()).isEmpty());
}
Also used : TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) MergeIterable(org.apache.druid.java.util.common.guava.MergeIterable) FunctionalIterable(org.apache.druid.java.util.common.guava.FunctionalIterable) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) DataSegment(org.apache.druid.timeline.DataSegment) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) ServerSelector(org.apache.druid.client.selector.ServerSelector) CachePopulatorStats(org.apache.druid.client.cache.CachePopulatorStats) ResponseContext(org.apache.druid.query.context.ResponseContext) HighestPriorityTierSelectorStrategy(org.apache.druid.client.selector.HighestPriorityTierSelectorStrategy) ForegroundCachePopulator(org.apache.druid.client.cache.ForegroundCachePopulator) RandomServerSelectorStrategy(org.apache.druid.client.selector.RandomServerSelectorStrategy) Interval(org.joda.time.Interval) MapCache(org.apache.druid.client.cache.MapCache) Cache(org.apache.druid.client.cache.Cache) Test(org.junit.Test)
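
What the two runs above assert is the bulk-limit behaviour: with a limit of 10 at most 10 keys may reach Cache.getBulk, and with a limit of 0 none do. A hypothetical illustration of that rule (not the actual CachingClusteredClient logic; class and method names here are invented) might look like:

import com.google.common.collect.ImmutableList;
import org.apache.druid.client.cache.Cache;
import java.util.List;

// Hypothetical sketch of the bulk-limit rule the test asserts: if a query spans more
// segments than the configured limit, skip the bulk cache lookup entirely.
final class BulkLimitSketch {
    static List<Cache.NamedKey> keysToFetch(List<Cache.NamedKey> segmentKeys, int cacheBulkMergeLimit) {
        if (segmentKeys.size() > cacheBulkMergeLimit) {
            return ImmutableList.of(); // too many segments: fetch nothing from the cache
        }
        return segmentKeys;
    }
}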

Example 3 with QueryableDruidServer

Use of org.apache.druid.client.selector.QueryableDruidServer in project druid by druid-io.

From class DirectDruidClientTest, method testRun.

@Test
public void testRun() throws Exception {
    final URL url = new URL(StringUtils.format("http://%s/druid/v2/", hostName));
    SettableFuture<InputStream> futureResult = SettableFuture.create();
    Capture<Request> capturedRequest = EasyMock.newCapture();
    // The first go() call resolves with a result, the second fails with a read timeout, and any later calls stay pending.
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class)))
        .andReturn(futureResult).times(1);
    SettableFuture futureException = SettableFuture.create();
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class)))
        .andReturn(futureException).times(1);
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class)))
        .andReturn(SettableFuture.create()).atLeastOnce();
    EasyMock.replay(httpClient);
    DirectDruidClient client2 = new DirectDruidClient(
        new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER,
        new DefaultObjectMapper(), httpClient, "http", "foo2", new NoopServiceEmitter()
    );
    QueryableDruidServer queryableDruidServer2 = new QueryableDruidServer(
        new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0),
        client2
    );
    serverSelector.addServerAndUpdateSegment(queryableDruidServer2, serverSelector.getSegment());
    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
    query = query.withOverriddenContext(ImmutableMap.of(DirectDruidClient.QUERY_FAIL_TIME, Long.MAX_VALUE));
    Sequence s1 = client.run(QueryPlus.wrap(query));
    Assert.assertTrue(capturedRequest.hasCaptured());
    Assert.assertEquals(url, capturedRequest.getValue().getUrl());
    Assert.assertEquals(HttpMethod.POST, capturedRequest.getValue().getMethod());
    Assert.assertEquals(1, client.getNumOpenConnections());
    // simulate read timeout
    client.run(QueryPlus.wrap(query));
    Assert.assertEquals(2, client.getNumOpenConnections());
    futureException.setException(new ReadTimeoutException());
    Assert.assertEquals(1, client.getNumOpenConnections());
    // subsequent connections should work
    client.run(QueryPlus.wrap(query));
    client.run(QueryPlus.wrap(query));
    client.run(QueryPlus.wrap(query));
    Assert.assertEquals(4, client.getNumOpenConnections());
    // produce result for first connection
    futureResult.set(new ByteArrayInputStream(StringUtils.toUtf8("[{\"timestamp\":\"2014-01-01T01:02:03Z\", \"result\": 42.0}]")));
    List<Result> results = s1.toList();
    Assert.assertEquals(1, results.size());
    Assert.assertEquals(DateTimes.of("2014-01-01T01:02:03Z"), results.get(0).getTimestamp());
    Assert.assertEquals(3, client.getNumOpenConnections());
    client2.run(QueryPlus.wrap(query));
    client2.run(QueryPlus.wrap(query));
    Assert.assertEquals(2, client2.getNumOpenConnections());
    Assert.assertEquals(serverSelector.pick(null), queryableDruidServer2);
    EasyMock.verify(httpClient);
}
Also used : SettableFuture(com.google.common.util.concurrent.SettableFuture) ByteArrayInputStream(java.io.ByteArrayInputStream) PipedInputStream(java.io.PipedInputStream) InputStream(java.io.InputStream) ReadTimeoutException(org.jboss.netty.handler.timeout.ReadTimeoutException) Request(org.apache.druid.java.util.http.client.Request) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) NoopServiceEmitter(org.apache.druid.server.metrics.NoopServiceEmitter) TimeBoundaryQuery(org.apache.druid.query.timeboundary.TimeBoundaryQuery) Sequence(org.apache.druid.java.util.common.guava.Sequence) URL(java.net.URL) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) Result(org.apache.druid.query.Result) ByteArrayInputStream(java.io.ByteArrayInputStream) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) HttpResponseHandler(org.apache.druid.java.util.http.client.response.HttpResponseHandler) ReflectionQueryToolChestWarehouse(org.apache.druid.query.ReflectionQueryToolChestWarehouse) Test(org.junit.Test)

Example 4 with QueryableDruidServer

Use of org.apache.druid.client.selector.QueryableDruidServer in project druid by druid-io.

From class CachingClusteredClientPerfTest, method testGetQueryRunnerForSegments_singleIntervalLargeSegments.

@Test(timeout = 10_000)
public void testGetQueryRunnerForSegments_singleIntervalLargeSegments() {
    final int segmentCount = 30_000;
    final Interval interval = Intervals.of("2021-02-13/2021-02-14");
    final List<SegmentDescriptor> segmentDescriptors = new ArrayList<>(segmentCount);
    final List<DataSegment> dataSegments = new ArrayList<>(segmentCount);
    final VersionedIntervalTimeline<String, ServerSelector> timeline = new VersionedIntervalTimeline<>(Ordering.natural());
    final DruidServer server = new DruidServer(
        "server", "localhost:9000", null, Long.MAX_VALUE,
        ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, DruidServer.DEFAULT_PRIORITY
    );
    for (int ii = 0; ii < segmentCount; ii++) {
        segmentDescriptors.add(new SegmentDescriptor(interval, "1", ii));
        DataSegment segment = makeDataSegment("test", interval, "1", ii);
        dataSegments.add(segment);
    }
    timeline.addAll(Iterators.transform(dataSegments.iterator(), segment -> {
        ServerSelector ss = new ServerSelector(segment, new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy()));
        ss.addServerAndUpdateSegment(new QueryableDruidServer(server, new MockQueryRunner()), segment);
        return new VersionedIntervalTimeline.PartitionChunkEntry<>(
            segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(ss)
        );
    }));
    TimelineServerView serverView = Mockito.mock(TimelineServerView.class);
    QueryScheduler queryScheduler = Mockito.mock(QueryScheduler.class);
    // mock scheduler to return same sequence as argument
    Mockito.when(queryScheduler.run(any(), any())).thenAnswer(i -> i.getArgument(1));
    Mockito.when(queryScheduler.prioritizeAndLaneQuery(any(), any())).thenAnswer(i -> ((QueryPlus) i.getArgument(0)).getQuery());
    Mockito.doReturn(Optional.of(timeline)).when(serverView).getTimeline(any());
    Mockito.doReturn(new MockQueryRunner()).when(serverView).getQueryRunner(any());
    CachingClusteredClient cachingClusteredClient = new CachingClusteredClient(
        new MockQueryToolChestWareHouse(), serverView, MapCache.create(1024),
        TestHelper.makeJsonMapper(), Mockito.mock(CachePopulator.class), new CacheConfig(),
        Mockito.mock(DruidHttpClientConfig.class), Mockito.mock(DruidProcessingConfig.class),
        ForkJoinPool.commonPool(), queryScheduler, NoopJoinableFactory.INSTANCE, new NoopServiceEmitter()
    );
    Query<SegmentDescriptor> fakeQuery = makeFakeQuery(interval);
    QueryRunner<SegmentDescriptor> queryRunner = cachingClusteredClient.getQueryRunnerForSegments(fakeQuery, segmentDescriptors);
    Sequence<SegmentDescriptor> sequence = queryRunner.run(QueryPlus.wrap(fakeQuery));
    Assert.assertEquals(segmentDescriptors, sequence.toList());
}
Also used : QueryPlus(org.apache.druid.query.QueryPlus) Map(java.util.Map) ServerType(org.apache.druid.server.coordination.ServerType) QueryRunner(org.apache.druid.query.QueryRunner) QueryToolChestWarehouse(org.apache.druid.query.QueryToolChestWarehouse) NoopJoinableFactory(org.apache.druid.segment.join.NoopJoinableFactory) Sequence(org.apache.druid.java.util.common.guava.Sequence) QueryScheduler(org.apache.druid.server.QueryScheduler) ImmutableMap(com.google.common.collect.ImmutableMap) DataSource(org.apache.druid.query.DataSource) CacheConfig(org.apache.druid.client.cache.CacheConfig) DruidProcessingConfig(org.apache.druid.query.DruidProcessingConfig) QuerySegmentSpec(org.apache.druid.query.spec.QuerySegmentSpec) DruidHttpClientConfig(org.apache.druid.guice.http.DruidHttpClientConfig) List(java.util.List) DimFilter(org.apache.druid.query.filter.DimFilter) LinearShardSpec(org.apache.druid.timeline.partition.LinearShardSpec) DataSegment(org.apache.druid.timeline.DataSegment) ServerManagerTest(org.apache.druid.server.coordination.ServerManagerTest) Optional(java.util.Optional) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) MapCache(org.apache.druid.client.cache.MapCache) ArgumentMatchers.any(org.mockito.ArgumentMatchers.any) HighestPriorityTierSelectorStrategy(org.apache.druid.client.selector.HighestPriorityTierSelectorStrategy) Intervals(org.apache.druid.java.util.common.Intervals) TestSequence(org.apache.druid.java.util.common.guava.TestSequence) BaseQuery(org.apache.druid.query.BaseQuery) Iterators(com.google.common.collect.Iterators) ArrayList(java.util.ArrayList) MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) ServerSelector(org.apache.druid.client.selector.ServerSelector) Interval(org.joda.time.Interval) Query(org.apache.druid.query.Query) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) CachePopulator(org.apache.druid.client.cache.CachePopulator) NoopServiceEmitter(org.apache.druid.server.metrics.NoopServiceEmitter) VersionedIntervalTimeline(org.apache.druid.timeline.VersionedIntervalTimeline) ResponseContext(org.apache.druid.query.context.ResponseContext) QueryToolChest(org.apache.druid.query.QueryToolChest) Test(org.junit.Test) TableDataSource(org.apache.druid.query.TableDataSource) Mockito(org.mockito.Mockito) TestHelper(org.apache.druid.segment.TestHelper) RandomServerSelectorStrategy(org.apache.druid.client.selector.RandomServerSelectorStrategy) Ordering(com.google.common.collect.Ordering) ForkJoinPool(java.util.concurrent.ForkJoinPool) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) Assert(org.junit.Assert) Collections(java.util.Collections)

Example 5 with QueryableDruidServer

Use of org.apache.druid.client.selector.QueryableDruidServer in project druid by druid-io.

From class CachingClusteredClientTest, method testIfNoneMatch.

@Test
public void testIfNoneMatch() {
    Interval interval = Intervals.of("2016/2017");
    final DataSegment dataSegment = new DataSegment(
        "dataSource", interval, "ver",
        ImmutableMap.of("type", "hdfs", "path", "/tmp"),
        ImmutableList.of("product"), ImmutableList.of("visited_sum"),
        NoneShardSpec.instance(), 9, 12334
    );
    final ServerSelector selector = new ServerSelector(dataSegment, new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy()));
    selector.addServerAndUpdateSegment(new QueryableDruidServer(servers[0], null), dataSegment);
    timeline.add(interval, "ver", new SingleElementPartitionChunk<>(selector));
    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(interval)))
        .context(ImmutableMap.of("If-None-Match", "aVJV29CJY93rszVW/QBy0arWZo0="))
        .randomQueryId()
        .build();
    final ResponseContext responseContext = initializeResponseContext();
    getDefaultQueryRunner().run(QueryPlus.wrap(query), responseContext);
    Assert.assertEquals("MDs2yIUvYLVzaG6zmwTH1plqaYE=", responseContext.getEntityTag());
}
Also used : ServerSelector(org.apache.druid.client.selector.ServerSelector) HighestPriorityTierSelectorStrategy(org.apache.druid.client.selector.HighestPriorityTierSelectorStrategy) ResponseContext(org.apache.druid.query.context.ResponseContext) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) TimeBoundaryQuery(org.apache.druid.query.timeboundary.TimeBoundaryQuery) DataSegment(org.apache.druid.timeline.DataSegment) RandomServerSelectorStrategy(org.apache.druid.client.selector.RandomServerSelectorStrategy) Interval(org.joda.time.Interval) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) Test(org.junit.Test)

Aggregations

QueryableDruidServer (org.apache.druid.client.selector.QueryableDruidServer): 14
ServerSelector (org.apache.druid.client.selector.ServerSelector): 12
HighestPriorityTierSelectorStrategy (org.apache.druid.client.selector.HighestPriorityTierSelectorStrategy): 8
DataSegment (org.apache.druid.timeline.DataSegment): 8
RandomServerSelectorStrategy (org.apache.druid.client.selector.RandomServerSelectorStrategy): 7
Test (org.junit.Test): 5
ResponseContext (org.apache.druid.query.context.ResponseContext): 4
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 4
Interval (org.joda.time.Interval): 4
TimeBoundaryQuery (org.apache.druid.query.timeboundary.TimeBoundaryQuery): 3
NoopServiceEmitter (org.apache.druid.server.metrics.NoopServiceEmitter): 3
SegmentId (org.apache.druid.timeline.SegmentId): 3
ImmutableMap (com.google.common.collect.ImmutableMap): 2
ArrayList (java.util.ArrayList): 2
Map (java.util.Map): 2
MapCache (org.apache.druid.client.cache.MapCache): 2
DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper): 2
Sequence (org.apache.druid.java.util.common.guava.Sequence): 2
Iterators (com.google.common.collect.Iterators): 1
Ordering (com.google.common.collect.Ordering): 1
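
Taken together, the counts above reflect one recurring setup: wrap a DruidServer in a QueryableDruidServer, register it with a ServerSelector for a segment, and let the tier strategy pick the server to query. A minimal sketch of that pattern, using only constructors and calls that appear in the examples above (the names SelectorSketch and pickServer are illustrative), is:

import org.apache.druid.client.DirectDruidClient;
import org.apache.druid.client.DruidServer;
import org.apache.druid.client.selector.HighestPriorityTierSelectorStrategy;
import org.apache.druid.client.selector.QueryableDruidServer;
import org.apache.druid.client.selector.RandomServerSelectorStrategy;
import org.apache.druid.client.selector.ServerSelector;
import org.apache.druid.timeline.DataSegment;

// Minimal sketch of the setup shared by the examples above; `server`, `client`, and
// `segment` are assumed to be provided by the caller.
final class SelectorSketch {
    static QueryableDruidServer pickServer(DruidServer server, DirectDruidClient client, DataSegment segment) {
        final ServerSelector selector = new ServerSelector(
            segment,
            new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())
        );
        selector.addServerAndUpdateSegment(new QueryableDruidServer(server, client), segment);
        return selector.pick(null); // highest-priority tier first, then a random server within it
    }
}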