
Example 1 with ForegroundCachePopulator

Use of org.apache.druid.client.cache.ForegroundCachePopulator in project druid by druid-io.

In the class QueryRunnerBasedOnClusteredClientTestBase, the setupTestBase method:

@Before
public void setupTestBase() {
    segmentGenerator = new SegmentGenerator();
    httpClient = new TestHttpClient(objectMapper);
    simpleServerView = new SimpleServerView(toolChestWarehouse, objectMapper, httpClient);
    cachingClusteredClient = new CachingClusteredClient(
        toolChestWarehouse,
        simpleServerView,
        MapCache.create(0),
        objectMapper,
        new ForegroundCachePopulator(objectMapper, new CachePopulatorStats(), 0),
        new CacheConfig(),
        new DruidHttpClientConfig(),
        QueryStackTests.getProcessingConfig(USE_PARALLEL_MERGE_POOL_CONFIGURED, DruidProcessingConfig.DEFAULT_NUM_MERGE_BUFFERS),
        ForkJoinPool.commonPool(),
        QueryStackTests.DEFAULT_NOOP_SCHEDULER,
        new MapJoinableFactory(ImmutableSet.of(), ImmutableMap.of()),
        new NoopServiceEmitter()
    );
    servers = new ArrayList<>();
}
Also used: SegmentGenerator (org.apache.druid.segment.generator.SegmentGenerator), CachingClusteredClient (org.apache.druid.client.CachingClusteredClient), CachePopulatorStats (org.apache.druid.client.cache.CachePopulatorStats), NoopServiceEmitter (org.apache.druid.server.metrics.NoopServiceEmitter), ForegroundCachePopulator (org.apache.druid.client.cache.ForegroundCachePopulator), CacheConfig (org.apache.druid.client.cache.CacheConfig), MapJoinableFactory (org.apache.druid.segment.join.MapJoinableFactory), SimpleServerView (org.apache.druid.client.SimpleServerView), TestHttpClient (org.apache.druid.client.TestHttpClient), DruidHttpClientConfig (org.apache.druid.guice.http.DruidHttpClientConfig), Before (org.junit.Before)
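
Every example on this page builds the populator the same way, so it helps to see that call in isolation. A minimal sketch, assuming only the constructor arguments visible above (an ObjectMapper, a fresh CachePopulatorStats, and a long that these tests pass as 0 or -1, apparently a per-entry size cap that is effectively disabled here); the class and method names are hypothetical:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.client.cache.CachePopulator;
import org.apache.druid.client.cache.CachePopulatorStats;
import org.apache.druid.client.cache.ForegroundCachePopulator;

public class ForegroundCachePopulatorSketch {

    // Mirrors the constructor call used in the tests above. Reading the third
    // argument as a per-entry size cap (effectively disabled when <= 0) is an
    // assumption based on the 0 / -1 values these tests pass, not on the source.
    static CachePopulator buildTestPopulator(ObjectMapper mapper) {
        return new ForegroundCachePopulator(mapper, new CachePopulatorStats(), -1);
    }
}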

Example 2 with ForegroundCachePopulator

Use of org.apache.druid.client.cache.ForegroundCachePopulator in project druid by druid-io.

In the class CachingClusteredClientTest, the setUp method:

@Before
public void setUp() {
    timeline = new VersionedIntervalTimeline<>(Ordering.natural());
    serverView = EasyMock.createNiceMock(TimelineServerView.class);
    cache = MapCache.create(100000);
    client = makeClient(new ForegroundCachePopulator(JSON_MAPPER, new CachePopulatorStats(), -1));
    servers = new DruidServer[] {
        new DruidServer("test1", "test1", null, 10, ServerType.HISTORICAL, "bye", 0),
        new DruidServer("test2", "test2", null, 10, ServerType.HISTORICAL, "bye", 0),
        new DruidServer("test3", "test3", null, 10, ServerType.HISTORICAL, "bye", 0),
        new DruidServer("test4", "test4", null, 10, ServerType.HISTORICAL, "bye", 0),
        new DruidServer("test5", "test5", null, 10, ServerType.HISTORICAL, "bye", 0)
    };
}
Also used: CachePopulatorStats (org.apache.druid.client.cache.CachePopulatorStats), QueryableDruidServer (org.apache.druid.client.selector.QueryableDruidServer), ForegroundCachePopulator (org.apache.druid.client.cache.ForegroundCachePopulator), Before (org.junit.Before)

Example 3 with ForegroundCachePopulator

Use of org.apache.druid.client.cache.ForegroundCachePopulator in project druid by druid-io.

In the class CachingClusteredClientTest, the testCachingOverBulkLimitEnforcesLimit method:

@Test
@SuppressWarnings("unchecked")
public void testCachingOverBulkLimitEnforcesLimit() {
    final int limit = 10;
    final Interval interval = Intervals.of("2011-01-01/2011-01-02");
    final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(interval)))
        .filters(DIM_FILTER)
        .granularity(GRANULARITY)
        .aggregators(AGGS)
        .postAggregators(POST_AGGS)
        .context(CONTEXT)
        .randomQueryId()
        .build();
    final ResponseContext context = initializeResponseContext();
    final Cache cache = EasyMock.createStrictMock(Cache.class);
    final Capture<Iterable<Cache.NamedKey>> cacheKeyCapture = EasyMock.newCapture();
    EasyMock.expect(cache.getBulk(EasyMock.capture(cacheKeyCapture))).andReturn(ImmutableMap.of()).once();
    EasyMock.replay(cache);
    client = makeClient(new ForegroundCachePopulator(JSON_MAPPER, new CachePopulatorStats(), -1), cache, limit);
    final DruidServer lastServer = servers[random.nextInt(servers.length)];
    final DataSegment dataSegment = EasyMock.createNiceMock(DataSegment.class);
    EasyMock.expect(dataSegment.getId()).andReturn(SegmentId.dummy(DATA_SOURCE)).anyTimes();
    EasyMock.replay(dataSegment);
    final ServerSelector selector = new ServerSelector(dataSegment, new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy()));
    selector.addServerAndUpdateSegment(new QueryableDruidServer(lastServer, null), dataSegment);
    timeline.add(interval, "v", new SingleElementPartitionChunk<>(selector));
    getDefaultQueryRunner().run(QueryPlus.wrap(query), context);
    Assert.assertTrue("Capture cache keys", cacheKeyCapture.hasCaptured());
    Assert.assertTrue("Cache key below limit", ImmutableList.copyOf(cacheKeyCapture.getValue()).size() <= limit);
    EasyMock.verify(cache);
    EasyMock.reset(cache);
    cacheKeyCapture.reset();
    EasyMock.expect(cache.getBulk(EasyMock.capture(cacheKeyCapture))).andReturn(ImmutableMap.of()).once();
    EasyMock.replay(cache);
    client = makeClient(new ForegroundCachePopulator(JSON_MAPPER, new CachePopulatorStats(), -1), cache, 0);
    getDefaultQueryRunner().run(QueryPlus.wrap(query), context);
    EasyMock.verify(cache);
    EasyMock.verify(dataSegment);
    Assert.assertTrue("Capture cache keys", cacheKeyCapture.hasCaptured());
    Assert.assertTrue("Cache Keys empty", ImmutableList.copyOf(cacheKeyCapture.getValue()).isEmpty());
}
Also used: TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery), MergeIterable (org.apache.druid.java.util.common.guava.MergeIterable), FunctionalIterable (org.apache.druid.java.util.common.guava.FunctionalIterable), QueryableDruidServer (org.apache.druid.client.selector.QueryableDruidServer), MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec), DataSegment (org.apache.druid.timeline.DataSegment), ServerSelector (org.apache.druid.client.selector.ServerSelector), CachePopulatorStats (org.apache.druid.client.cache.CachePopulatorStats), ResponseContext (org.apache.druid.query.context.ResponseContext), HighestPriorityTierSelectorStrategy (org.apache.druid.client.selector.HighestPriorityTierSelectorStrategy), ForegroundCachePopulator (org.apache.druid.client.cache.ForegroundCachePopulator), RandomServerSelectorStrategy (org.apache.druid.client.selector.RandomServerSelectorStrategy), Interval (org.joda.time.Interval), MapCache (org.apache.druid.client.cache.MapCache), Cache (org.apache.druid.client.cache.Cache), Test (org.junit.Test)
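
Stripped of the Druid-specific plumbing, the assertion in this test reduces to an EasyMock capture of whatever key set the client hands to Cache.getBulk, followed by a size check. A distilled sketch of that pattern, with hypothetical helper names; the mocked Cache still has to be wired into the client under test, which is what the makeClient(...) helper does above:

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.client.cache.Cache;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.junit.Assert;

public class BulkKeyCaptureSketch {

    // Prepare a strict Cache mock that records the key set handed to getBulk
    // and returns an empty result, exactly as the test above does.
    static Capture<Iterable<Cache.NamedKey>> expectSingleGetBulk(Cache cache) {
        final Capture<Iterable<Cache.NamedKey>> keys = EasyMock.newCapture();
        EasyMock.expect(cache.getBulk(EasyMock.capture(keys))).andReturn(ImmutableMap.of()).once();
        EasyMock.replay(cache);
        return keys;
    }

    // After the query has run, verify the mock and check that the number of
    // keys requested in one bulk call never exceeded the configured limit.
    static void assertWithinLimit(Cache cache, Capture<Iterable<Cache.NamedKey>> keys, int limit) {
        EasyMock.verify(cache);
        Assert.assertTrue("getBulk was called", keys.hasCaptured());
        Assert.assertTrue("key count within limit", ImmutableList.copyOf(keys.getValue()).size() <= limit);
    }
}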

Example 4 with ForegroundCachePopulator

Use of org.apache.druid.client.cache.ForegroundCachePopulator in project druid by druid-io.

In the class ServerManagerTest, the setUp method:

@Before
public void setUp() {
    EmittingLogger.registerEmitter(new NoopServiceEmitter());
    queryWaitLatch = new CountDownLatch(1);
    queryWaitYieldLatch = new CountDownLatch(1);
    queryNotifyLatch = new CountDownLatch(1);
    factory = new MyQueryRunnerFactory(queryWaitLatch, queryWaitYieldLatch, queryNotifyLatch);
    serverManagerExec = Executors.newFixedThreadPool(2);
    segmentManager = new SegmentManager(new SegmentLoader() {

        @Override
        public ReferenceCountingSegment getSegment(final DataSegment segment, boolean lazy, SegmentLazyLoadFailCallback loadFailCallback) {
            return ReferenceCountingSegment.wrapSegment(
                new SegmentForTesting(
                    MapUtils.getString(segment.getLoadSpec(), "version"),
                    (Interval) segment.getLoadSpec().get("interval")
                ),
                segment.getShardSpec()
            );
        }

        @Override
        public void cleanup(DataSegment segment) {
        }
    });
    serverManager = new ServerManager(new QueryRunnerFactoryConglomerate() {

        @Override
        public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(QueryType query) {
            if (query instanceof SearchQuery) {
                return (QueryRunnerFactory) factory;
            } else {
                return null;
            }
        }
    },
        new NoopServiceEmitter(),
        new ForwardingQueryProcessingPool(serverManagerExec),
        new ForegroundCachePopulator(new DefaultObjectMapper(), new CachePopulatorStats(), -1),
        new DefaultObjectMapper(),
        new LocalCacheProvider().get(),
        new CacheConfig(),
        segmentManager,
        NoopJoinableFactory.INSTANCE,
        new ServerConfig()
    );
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-01"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-02"));
    loadQueryable("test", "2", Intervals.of("P1d/2011-04-02"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-03"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-04"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-05"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T01"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T02"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T03"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T05"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T06"));
    loadQueryable("test2", "1", Intervals.of("P1d/2011-04-01"));
    loadQueryable("test2", "1", Intervals.of("P1d/2011-04-02"));
}
Also used: SearchQuery (org.apache.druid.query.search.SearchQuery), SegmentManager (org.apache.druid.server.SegmentManager), BaseQuery (org.apache.druid.query.BaseQuery), Query (org.apache.druid.query.Query), ForwardingQueryProcessingPool (org.apache.druid.query.ForwardingQueryProcessingPool), SegmentLazyLoadFailCallback (org.apache.druid.segment.SegmentLazyLoadFailCallback), NoopServiceEmitter (org.apache.druid.server.metrics.NoopServiceEmitter), CountDownLatch (java.util.concurrent.CountDownLatch), DataSegment (org.apache.druid.timeline.DataSegment), SegmentLoader (org.apache.druid.segment.loading.SegmentLoader), ServerConfig (org.apache.druid.server.initialization.ServerConfig), QueryRunnerFactoryConglomerate (org.apache.druid.query.QueryRunnerFactoryConglomerate), QueryRunnerFactory (org.apache.druid.query.QueryRunnerFactory), CachePopulatorStats (org.apache.druid.client.cache.CachePopulatorStats), DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper), LocalCacheProvider (org.apache.druid.client.cache.LocalCacheProvider), ForegroundCachePopulator (org.apache.druid.client.cache.ForegroundCachePopulator), CacheConfig (org.apache.druid.client.cache.CacheConfig), Interval (org.joda.time.Interval), Before (org.junit.Before)

Example 5 with ForegroundCachePopulator

Use of org.apache.druid.client.cache.ForegroundCachePopulator in project druid by druid-io.

In the class MovingAverageQueryTest, the testQuery method:

/**
 * Validate that the specified query behaves correctly.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testQuery() throws IOException {
    Query<?> query = jsonMapper.readValue(getQueryString(), Query.class);
    Assert.assertThat(query, IsInstanceOf.instanceOf(getExpectedQueryType()));
    List<MapBasedRow> expectedResults = jsonMapper.readValue(getExpectedResultString(), getExpectedResultType());
    Assert.assertNotNull(expectedResults);
    Assert.assertThat(expectedResults, IsInstanceOf.instanceOf(List.class));
    CachingClusteredClient baseClient = new CachingClusteredClient(warehouse, new TimelineServerView() {

        @Override
        public Optional<? extends TimelineLookup<String, ServerSelector>> getTimeline(DataSourceAnalysis analysis) {
            return Optional.empty();
        }

        @Override
        public List<ImmutableDruidServer> getDruidServers() {
            return null;
        }

        @Override
        public <T> QueryRunner<T> getQueryRunner(DruidServer server) {
            return null;
        }

        @Override
        public void registerTimelineCallback(Executor exec, TimelineCallback callback) {
        }

        @Override
        public void registerSegmentCallback(Executor exec, SegmentCallback callback) {
        }

        @Override
        public void registerServerRemovedCallback(Executor exec, ServerRemovedCallback callback) {
        }
    },
    MapCache.create(100000),
    jsonMapper,
    new ForegroundCachePopulator(jsonMapper, new CachePopulatorStats(), -1),
    new CacheConfig(),
    new DruidHttpClientConfig() {

        @Override
        public long getMaxQueuedBytes() {
            return 0L;
        }
    }, new DruidProcessingConfig() {

        @Override
        public String getFormatString() {
            return null;
        }
    },
    ForkJoinPool.commonPool(),
    QueryStackTests.DEFAULT_NOOP_SCHEDULER,
    new MapJoinableFactory(ImmutableSet.of(), ImmutableMap.of()),
    new NoopServiceEmitter()
    );
    ClientQuerySegmentWalker walker = new ClientQuerySegmentWalker(new ServiceEmitter("", "", null) {

        @Override
        public void emit(Event event) {
        }
    },
    baseClient,
    null, /* local client; unused in this test, so pass in null */
    warehouse,
    new MapJoinableFactory(ImmutableSet.of(), ImmutableMap.of()),
    retryConfig,
    jsonMapper,
    serverConfig,
    null,
    new CacheConfig()
    );
    defineMocks();
    QueryPlus queryPlus = QueryPlus.wrap(query);
    final Sequence<?> res = query.getRunner(walker).run(queryPlus);
    List actualResults = new ArrayList();
    actualResults = (List<MapBasedRow>) res.accumulate(actualResults, Accumulators.list());
    expectedResults = consistentTypeCasting(expectedResults);
    actualResults = consistentTypeCasting(actualResults);
    Assert.assertEquals(expectedResults, actualResults);
}
Also used: ServiceEmitter (org.apache.druid.java.util.emitter.service.ServiceEmitter), NoopServiceEmitter (org.apache.druid.server.metrics.NoopServiceEmitter), ArrayList (java.util.ArrayList), DataSourceAnalysis (org.apache.druid.query.planning.DataSourceAnalysis), DruidHttpClientConfig (org.apache.druid.guice.http.DruidHttpClientConfig), MapBasedRow (org.apache.druid.data.input.MapBasedRow), Executor (java.util.concurrent.Executor), CachePopulatorStats (org.apache.druid.client.cache.CachePopulatorStats), List (java.util.List), TimelineServerView (org.apache.druid.client.TimelineServerView), CacheConfig (org.apache.druid.client.cache.CacheConfig), MapJoinableFactory (org.apache.druid.segment.join.MapJoinableFactory), QueryPlus (org.apache.druid.query.QueryPlus), CachingClusteredClient (org.apache.druid.client.CachingClusteredClient), Optional (java.util.Optional), DruidServer (org.apache.druid.client.DruidServer), ImmutableDruidServer (org.apache.druid.client.ImmutableDruidServer), QueryRunner (org.apache.druid.query.QueryRunner), ClientQuerySegmentWalker (org.apache.druid.server.ClientQuerySegmentWalker), Event (org.apache.druid.java.util.emitter.core.Event), ForegroundCachePopulator (org.apache.druid.client.cache.ForegroundCachePopulator), DruidProcessingConfig (org.apache.druid.query.DruidProcessingConfig), TimelineLookup (org.apache.druid.timeline.TimelineLookup), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
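
Every example above only constructs the populator and hands it to CachingClusteredClient or ServerManager, which perform the actual cache writes while results are streamed. As a rough end-to-end illustration, here is a hedged sketch that exercises the populator directly against an in-memory MapCache. It assumes the CachePopulator#wrap signature (sequence, cacheFn, cache, cacheKey); the key namespace, the identity cacheFn, and the class name are placeholders, and exactly when the entry is written is left to the populator:

import java.util.ArrayList;
import java.util.List;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import org.apache.druid.client.cache.Cache;
import org.apache.druid.client.cache.CachePopulatorStats;
import org.apache.druid.client.cache.ForegroundCachePopulator;
import org.apache.druid.client.cache.MapCache;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.guava.Accumulators;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;

public class ForegroundCachePopulatorWrapSketch {

    public static void main(String[] args) {
        final ObjectMapper mapper = new ObjectMapper();
        final Cache cache = MapCache.create(1024 * 1024);
        final ForegroundCachePopulator populator =
            new ForegroundCachePopulator(mapper, new CachePopulatorStats(), -1);

        // Stand-in result sequence and a made-up cache key, for illustration only.
        final Sequence<String> results = Sequences.simple(ImmutableList.of("a", "b", "c"));
        final Cache.NamedKey key = new Cache.NamedKey("sketch", StringUtils.toUtf8("some-query"));

        // Assumed contract: wrap() returns a sequence that also populates the
        // cache as a side effect of being consumed; the cacheFn here is identity.
        final Sequence<String> wrapped = populator.wrap(results, value -> value, cache, key);
        final List<String> consumed = wrapped.accumulate(new ArrayList<String>(), Accumulators.list());
        System.out.println(consumed);
    }
}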

Aggregations

CachePopulatorStats (org.apache.druid.client.cache.CachePopulatorStats): 8 usages
ForegroundCachePopulator (org.apache.druid.client.cache.ForegroundCachePopulator): 8 usages
CacheConfig (org.apache.druid.client.cache.CacheConfig): 5 usages
NoopServiceEmitter (org.apache.druid.server.metrics.NoopServiceEmitter): 4 usages
CachingClusteredClient (org.apache.druid.client.CachingClusteredClient): 3 usages
DruidHttpClientConfig (org.apache.druid.guice.http.DruidHttpClientConfig): 3 usages
Query (org.apache.druid.query.Query): 3 usages
QueryRunnerFactory (org.apache.druid.query.QueryRunnerFactory): 3 usages
MapJoinableFactory (org.apache.druid.segment.join.MapJoinableFactory): 3 usages
DataSegment (org.apache.druid.timeline.DataSegment): 3 usages
Before (org.junit.Before): 3 usages
Optional (java.util.Optional): 2 usages
Cache (org.apache.druid.client.cache.Cache): 2 usages
QueryableDruidServer (org.apache.druid.client.selector.QueryableDruidServer): 2 usages
FunctionalIterable (org.apache.druid.java.util.common.guava.FunctionalIterable): 2 usages
DruidProcessingConfig (org.apache.druid.query.DruidProcessingConfig): 2 usages
TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery): 2 usages
SegmentGenerator (org.apache.druid.segment.generator.SegmentGenerator): 2 usages
Interval (org.joda.time.Interval): 2 usages
Test (org.junit.Test): 2 usages