
Example 1 with CacheConfig

Use of io.druid.client.cache.CacheConfig in project druid by druid-io, in class ServerManagerTest, method setUp:

@Before
public void setUp() throws IOException {
    EmittingLogger.registerEmitter(new NoopServiceEmitter());
    queryWaitLatch = new CountDownLatch(1);
    queryWaitYieldLatch = new CountDownLatch(1);
    queryNotifyLatch = new CountDownLatch(1);
    factory = new MyQueryRunnerFactory(queryWaitLatch, queryWaitYieldLatch, queryNotifyLatch);
    serverManagerExec = Executors.newFixedThreadPool(2);
    serverManager = new ServerManager(new SegmentLoader() {

        @Override
        public boolean isSegmentLoaded(DataSegment segment) throws SegmentLoadingException {
            // Report nothing as loaded, so the test always materializes segments via getSegment.
            return false;
        }

        @Override
        public Segment getSegment(final DataSegment segment) {
            // Build a test segment whose version and interval are read back out of the load spec.
            return new SegmentForTesting(MapUtils.getString(segment.getLoadSpec(), "version"), (Interval) segment.getLoadSpec().get("interval"));
        }

        @Override
        public File getSegmentFiles(DataSegment segment) throws SegmentLoadingException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void cleanup(DataSegment segment) throws SegmentLoadingException {
        }
    }, new QueryRunnerFactoryConglomerate() {

        @Override
        public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(QueryType query) {
            // Every query type resolves to the single latch-instrumented test factory.
            return (QueryRunnerFactory) factory;
        }
    }, new NoopServiceEmitter(), serverManagerExec, MoreExecutors.sameThreadExecutor(),
    new DefaultObjectMapper(), new LocalCacheProvider().get(), new CacheConfig());
    loadQueryable("test", "1", new Interval("P1d/2011-04-01"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-02"));
    loadQueryable("test", "2", new Interval("P1d/2011-04-02"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-03"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-04"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-05"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T01"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T02"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T03"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T05"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T06"));
    loadQueryable("test2", "1", new Interval("P1d/2011-04-01"));
    loadQueryable("test2", "1", new Interval("P1d/2011-04-02"));
}
Also used: io.druid.query.Query, io.druid.query.search.search.SearchQuery, io.druid.server.metrics.NoopServiceEmitter, java.util.concurrent.CountDownLatch, io.druid.timeline.DataSegment, io.druid.segment.loading.SegmentLoader, io.druid.query.QueryRunnerFactoryConglomerate, io.druid.query.QueryRunnerFactory, io.druid.jackson.DefaultObjectMapper, io.druid.client.cache.LocalCacheProvider, io.druid.client.cache.CacheConfig, org.joda.time.Interval, org.junit.Before
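A note on the interval strings passed to loadQueryable: "P1d/2011-04-01" is the ISO-8601 period/end form, which Joda-Time parses directly (a one-day period ending at the given instant). A minimal illustration, assuming only joda-time on the classpath:

import org.joda.time.Interval;

public class IntervalDemo {

    public static void main(String[] args) {
        // Parsed as the day 2011-03-31T00:00 through 2011-04-01T00:00
        // (in the JVM's default time zone).
        Interval oneDay = new Interval("P1d/2011-04-01");
        System.out.println(oneDay.getStart() + " / " + oneDay.getEnd());
    }
}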

Example 2 with CacheConfig

Use of io.druid.client.cache.CacheConfig in project druid by druid-io, in class KafkaIndexTaskTest, method makeToolboxFactory:

private void makeToolboxFactory() throws IOException {
    directory = tempFolder.newFolder();
    final TestUtils testUtils = new TestUtils();
    final ObjectMapper objectMapper = testUtils.getTestObjectMapper();
    for (Module module : new KafkaIndexTaskModule().getJacksonModules()) {
        objectMapper.registerModule(module);
    }
    final TaskConfig taskConfig = new TaskConfig(new File(directory, "taskBaseDir").getPath(), null, null, 50000, null, false, null, null);
    final TestDerbyConnector derbyConnector = derby.getConnector();
    derbyConnector.createDataSourceTable();
    derbyConnector.createPendingSegmentsTable();
    derbyConnector.createSegmentTable();
    derbyConnector.createRulesTable();
    derbyConnector.createConfigTable();
    derbyConnector.createTaskTables();
    derbyConnector.createAuditTable();
    taskStorage = new MetadataTaskStorage(derbyConnector, new TaskStorageConfig(null), new SQLMetadataStorageActionHandlerFactory(derbyConnector, derby.metadataTablesConfigSupplier().get(), objectMapper));
    metadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator(testUtils.getTestObjectMapper(), derby.metadataTablesConfigSupplier().get(), derbyConnector);
    taskLockbox = new TaskLockbox(taskStorage);
    final TaskActionToolbox taskActionToolbox = new TaskActionToolbox(taskLockbox, metadataStorageCoordinator, emitter, new SupervisorManager(null));
    final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory(taskStorage, taskActionToolbox);
    final SegmentHandoffNotifierFactory handoffNotifierFactory = new SegmentHandoffNotifierFactory() {

        @Override
        public SegmentHandoffNotifier createSegmentHandoffNotifier(String dataSource) {
            return new SegmentHandoffNotifier() {

                @Override
                public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable) {
                    if (doHandoff) {
                        // Simulate immediate handoff
                        exec.execute(handOffRunnable);
                    }
                    return true;
                }

                @Override
                public void start() {
                //Noop
                }

                @Override
                public void close() {
                //Noop
                }
            };
        }
    };
    final LocalDataSegmentPusherConfig dataSegmentPusherConfig = new LocalDataSegmentPusherConfig();
    dataSegmentPusherConfig.storageDirectory = getSegmentDirectory();
    final DataSegmentPusher dataSegmentPusher = new LocalDataSegmentPusher(dataSegmentPusherConfig, objectMapper);
    toolboxFactory = new TaskToolboxFactory(
        taskConfig,
        taskActionClientFactory,
        emitter,
        dataSegmentPusher,
        new TestDataSegmentKiller(),
        null, // DataSegmentMover
        null, // DataSegmentArchiver
        new TestDataSegmentAnnouncer(),
        handoffNotifierFactory,
        makeTimeseriesOnlyConglomerate(),
        MoreExecutors.sameThreadExecutor(), // queryExecutorService
        EasyMock.createMock(MonitorScheduler.class),
        new SegmentLoaderFactory(new SegmentLoaderLocalCacheManager(null, new SegmentLoaderConfig() {

        @Override
        public List<StorageLocationConfig> getLocations() {
            return Lists.newArrayList();
        }
    }, testUtils.getTestObjectMapper())), testUtils.getTestObjectMapper(), testUtils.getTestIndexMerger(), testUtils.getTestIndexIO(), MapCache.create(1024), new CacheConfig(), testUtils.getTestIndexMergerV9());
}
Also used: io.druid.segment.loading.DataSegmentPusher, io.druid.segment.loading.LocalDataSegmentPusher, io.druid.indexing.common.actions.LocalTaskActionClientFactory, io.druid.indexing.common.actions.TaskActionClientFactory, io.druid.indexing.test.TestDataSegmentAnnouncer, io.druid.indexing.common.config.TaskConfig, io.druid.indexing.common.TestUtils, java.util.concurrent.Executor, io.druid.indexing.common.TaskToolboxFactory, io.druid.query.SegmentDescriptor, io.druid.indexing.common.actions.TaskActionToolbox, io.druid.segment.loading.SegmentLoaderConfig, io.druid.indexing.common.SegmentLoaderFactory, io.druid.segment.loading.SegmentLoaderLocalCacheManager, io.druid.client.cache.CacheConfig, io.druid.jackson.DefaultObjectMapper, com.fasterxml.jackson.databind.ObjectMapper, io.druid.metadata.IndexerSQLMetadataStorageCoordinator, io.druid.segment.loading.StorageLocationConfig, io.druid.indexing.common.config.TaskStorageConfig, io.druid.segment.loading.LocalDataSegmentPusherConfig, io.druid.segment.realtime.plumber.SegmentHandoffNotifier, io.druid.metadata.TestDerbyConnector, io.druid.indexing.test.TestDataSegmentKiller, io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory, io.druid.indexing.overlord.supervisor.SupervisorManager, io.druid.metadata.SQLMetadataStorageActionHandlerFactory, io.druid.indexing.overlord.TaskLockbox, com.fasterxml.jackson.databind.Module, io.druid.indexing.overlord.MetadataTaskStorage, java.io.File
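The toolbox here takes the stock new CacheConfig() together with MapCache.create(1024), so cache behavior stays at the defaults. When a test needs specific flags, the pattern (used verbatim in Examples 3 and 4 below) is to override the getters; a sketch with illustrative choices:

// Illustrative only: read from the cache but never write to it.
CacheConfig readOnlyCacheConfig = new CacheConfig() {

    @Override
    public boolean isUseCache() {
        return true;
    }

    @Override
    public boolean isPopulateCache() {
        return false;
    }
};

The resulting toolboxFactory is then consumed per task, as Example 5 does with toolboxFactory.build(task).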

Example 3 with CacheConfig

Use of io.druid.client.cache.CacheConfig in project druid by druid-io, in class CachingQueryRunnerTest, method testCloseAndPopulate:

private void testCloseAndPopulate(List<Result> expectedRes, List<Result> expectedCacheRes, Query query, QueryToolChest toolchest) throws Exception {
    final AssertingClosable closable = new AssertingClosable();
    final Sequence resultSeq = Sequences.wrap(Sequences.simple(expectedRes), new SequenceWrapper() {

        @Override
        public void before() {
            Assert.assertFalse(closable.isClosed());
        }

        @Override
        public void after(boolean isDone, Throwable thrown) throws Exception {
            closable.close();
        }
    });
    final CountDownLatch cacheMustBePutOnce = new CountDownLatch(1);
    Cache cache = new Cache() {

        private final Map<NamedKey, byte[]> baseMap = new ConcurrentHashMap<>();

        @Override
        public byte[] get(NamedKey key) {
            return baseMap.get(key);
        }

        @Override
        public void put(NamedKey key, byte[] value) {
            baseMap.put(key, value);
            cacheMustBePutOnce.countDown();
        }

        @Override
        public Map<NamedKey, byte[]> getBulk(Iterable<NamedKey> keys) {
            // Bulk reads are not exercised by this test.
            return null;
        }

        @Override
        public void close(String namespace) {
        }

        @Override
        public CacheStats getStats() {
            return null;
        }

        @Override
        public boolean isLocal() {
            return true;
        }

        @Override
        public void doMonitor(ServiceEmitter emitter) {
        }
    };
    String segmentIdentifier = "segment";
    SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
    DefaultObjectMapper objectMapper = new DefaultObjectMapper();
    CachingQueryRunner runner = new CachingQueryRunner(segmentIdentifier, segmentDescriptor, objectMapper, cache, toolchest, new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return resultSeq;
        }
    }, backgroundExecutorService, new CacheConfig() {

        @Override
        public boolean isPopulateCache() {
            return true;
        }

        @Override
        public boolean isUseCache() {
            return true;
        }
    });
    CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
    Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
    HashMap<String, Object> context = new HashMap<String, Object>();
    Sequence res = runner.run(query, context);
    // base sequence is not closed yet
    Assert.assertFalse("sequence must not be closed", closable.isClosed());
    Assert.assertNull("cache must be empty", cache.get(cacheKey));
    ArrayList results = Sequences.toList(res, new ArrayList());
    Assert.assertTrue(closable.isClosed());
    Assert.assertEquals(expectedRes.toString(), results.toString());
    // wait for background caching finish
    // wait at most 10 seconds to fail the test to avoid block overall tests
    Assert.assertTrue("cache must be populated", cacheMustBePutOnce.await(10, TimeUnit.SECONDS));
    byte[] cacheValue = cache.get(cacheKey);
    Assert.assertNotNull(cacheValue);
    Function<Object, Result> fn = cacheStrategy.pullFromCache();
    List<Result> cacheResults = Lists.newArrayList(Iterators.transform(objectMapper.readValues(objectMapper.getFactory().createParser(cacheValue), cacheStrategy.getCacheObjectClazz()), fn));
    Assert.assertEquals(expectedCacheRes.toString(), cacheResults.toString());
}
Also used: com.metamx.emitter.service.ServiceEmitter, io.druid.query.timeseries.TimeseriesQuery, io.druid.query.Query, java.util.concurrent.ConcurrentHashMap, java.util.HashMap, java.util.ArrayList, io.druid.query.Result, io.druid.query.SegmentDescriptor, io.druid.client.cache.CacheConfig, io.druid.java.util.common.guava.SequenceWrapper, io.druid.java.util.common.guava.Sequence, java.util.concurrent.CountDownLatch, java.io.IOException, io.druid.query.QueryRunner, io.druid.jackson.DefaultObjectMapper, java.util.Map, com.google.common.collect.ImmutableMap, io.druid.query.CacheStrategy, io.druid.client.cache.Cache, io.druid.client.cache.MapCache, org.joda.time.Interval
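The anonymous Cache above is hand-rolled only so the test can count down a latch inside put(); when no such interception is needed, the stock in-memory implementation the other examples rely on is sufficient:

// In-memory cache bounded by a byte-size budget, matching the other examples.
Cache cache = MapCache.create(1024);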

Example 4 with CacheConfig

Use of io.druid.client.cache.CacheConfig in project druid by druid-io, in class CachingQueryRunnerTest, method testUseCache:

private void testUseCache(List<Result> expectedResults, Query query, QueryToolChest toolchest) throws Exception {
    DefaultObjectMapper objectMapper = new DefaultObjectMapper();
    String segmentIdentifier = "segment";
    SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
    CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
    Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
    Cache cache = MapCache.create(1024 * 1024);
    CacheUtil.populate(cache, objectMapper, cacheKey, Iterables.transform(expectedResults, cacheStrategy.prepareForCache()));
    CachingQueryRunner runner = new CachingQueryRunner(segmentIdentifier, segmentDescriptor, objectMapper, cache, toolchest,
    // The delegate returns an empty sequence, since results should get pulled from the cache.
    new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return Sequences.empty();
        }
    }, backgroundExecutorService, new CacheConfig() {

        @Override
        public boolean isPopulateCache() {
            return true;
        }

        @Override
        public boolean isUseCache() {
            return true;
        }
    });
    HashMap<String, Object> context = new HashMap<String, Object>();
    List<Result> results = Sequences.toList(runner.run(query, context), new ArrayList());
    Assert.assertEquals(expectedResults.toString(), results.toString());
}
Also used: io.druid.query.timeseries.TimeseriesQuery, io.druid.query.Query, java.util.concurrent.ConcurrentHashMap, java.util.HashMap, java.util.ArrayList, io.druid.java.util.common.guava.Sequence, io.druid.query.QueryRunner, io.druid.query.Result, io.druid.query.SegmentDescriptor, io.druid.jackson.DefaultObjectMapper, java.util.Map, com.google.common.collect.ImmutableMap, io.druid.client.cache.CacheConfig, io.druid.query.CacheStrategy, org.joda.time.Interval, io.druid.client.cache.Cache, io.druid.client.cache.MapCache
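The two helpers are complementary: testCloseAndPopulate exercises the write-through path, while testUseCache pre-seeds the cache via CacheUtil.populate and hands the runner an empty delegate, so any results it returns must have come from the cache. A hypothetical driver (the real test class wires up a concrete query and toolchest; the field names below are assumptions):

@Test
public void testCacheRoundTrip() throws Exception {
    // First pass populates the cache through the base runner...
    testCloseAndPopulate(expectedResults, expectedCacheResults, query, toolchest);
    // ...second pass must serve the same results from the cache alone.
    testUseCache(expectedCacheResults, query, toolchest);
}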

Example 5 with CacheConfig

Use of io.druid.client.cache.CacheConfig in project druid by druid-io, in class RealtimeIndexTaskTest, method makeToolbox:

private TaskToolbox makeToolbox(final Task task, final TaskStorage taskStorage, final IndexerMetadataStorageCoordinator mdc, final File directory) {
    final TaskConfig taskConfig = new TaskConfig(directory.getPath(), null, null, 50000, null, false, null, null);
    final TaskLockbox taskLockbox = new TaskLockbox(taskStorage);
    try {
        taskStorage.insert(task, TaskStatus.running(task.getId()));
    } catch (EntryExistsException e) {
    // suppress
    }
    taskLockbox.syncFromStorage();
    final TaskActionToolbox taskActionToolbox = new TaskActionToolbox(taskLockbox, mdc, emitter, EasyMock.createMock(SupervisorManager.class));
    final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory(taskStorage, taskActionToolbox);
    final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate(
        ImmutableMap.<Class<? extends Query>, QueryRunnerFactory>of(
            TimeseriesQuery.class,
            new TimeseriesQueryRunnerFactory(
                new TimeseriesQueryQueryToolChest(new IntervalChunkingQueryRunnerDecorator(null, null, null) {

                    @Override
                    public <T> QueryRunner<T> decorate(QueryRunner<T> delegate, QueryToolChest<T, ? extends Query<T>> toolChest) {
                        // Skip interval chunking entirely; test queries run against the raw delegate.
                        return delegate;
                    }
                }),
                new TimeseriesQueryEngine(),
                new QueryWatcher() {

                    @Override
                    public void registerQuery(Query query, ListenableFuture future) {
                        // do nothing
                    }
                })));
    handOffCallbacks = Maps.newConcurrentMap();
    final SegmentHandoffNotifierFactory handoffNotifierFactory = new SegmentHandoffNotifierFactory() {

        @Override
        public SegmentHandoffNotifier createSegmentHandoffNotifier(String dataSource) {
            return new SegmentHandoffNotifier() {

                @Override
                public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable) {
                    handOffCallbacks.put(descriptor, new Pair<>(exec, handOffRunnable));
                    return true;
                }

                @Override
                public void start() {
                //Noop
                }

                @Override
                public void close() {
                //Noop
                }

                Map<SegmentDescriptor, Pair<Executor, Runnable>> getHandOffCallbacks() {
                    return handOffCallbacks;
                }
            };
        }
    };
    final TestUtils testUtils = new TestUtils();
    final TaskToolboxFactory toolboxFactory = new TaskToolboxFactory(
        taskConfig,
        taskActionClientFactory,
        emitter,
        new TestDataSegmentPusher(),
        new TestDataSegmentKiller(),
        null, // DataSegmentMover
        null, // DataSegmentArchiver
        new TestDataSegmentAnnouncer(),
        handoffNotifierFactory,
        conglomerate,
        MoreExecutors.sameThreadExecutor(), // queryExecutorService
        EasyMock.createMock(MonitorScheduler.class),
        new SegmentLoaderFactory(new SegmentLoaderLocalCacheManager(null, new SegmentLoaderConfig() {

        @Override
        public List<StorageLocationConfig> getLocations() {
            return Lists.newArrayList();
        }
    }, testUtils.getTestObjectMapper())), testUtils.getTestObjectMapper(), testUtils.getTestIndexMerger(), testUtils.getTestIndexIO(), MapCache.create(1024), new CacheConfig(), testUtils.getTestIndexMergerV9());
    return toolboxFactory.build(task);
}
Also used: io.druid.query.timeseries.TimeseriesQuery, io.druid.query.Query, io.druid.query.QueryWatcher, io.druid.indexing.common.actions.LocalTaskActionClientFactory, io.druid.indexing.common.actions.TaskActionClientFactory, io.druid.indexing.test.TestDataSegmentAnnouncer, io.druid.indexing.common.config.TaskConfig, io.druid.query.timeseries.TimeseriesQueryQueryToolChest, io.druid.query.QueryToolChest, io.druid.indexing.common.TestUtils, io.druid.query.DefaultQueryRunnerFactoryConglomerate, io.druid.query.QueryRunnerFactoryConglomerate, io.druid.query.timeseries.TimeseriesQueryEngine, java.util.concurrent.Executor, io.druid.indexing.common.TaskToolboxFactory, io.druid.query.SegmentDescriptor, io.druid.indexing.common.actions.TaskActionToolbox, java.util.ArrayList, java.util.List, com.google.common.collect.ImmutableList, io.druid.query.IntervalChunkingQueryRunnerDecorator, io.druid.segment.loading.SegmentLoaderConfig, io.druid.indexing.common.SegmentLoaderFactory, io.druid.segment.loading.SegmentLoaderLocalCacheManager, io.druid.client.cache.CacheConfig, io.druid.indexing.test.TestDataSegmentPusher, io.druid.java.util.common.Pair, com.metamx.metrics.MonitorScheduler, io.druid.segment.realtime.plumber.SegmentHandoffNotifier, io.druid.metadata.EntryExistsException, io.druid.query.QueryRunner, io.druid.indexing.test.TestDataSegmentKiller, io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory, io.druid.indexing.overlord.supervisor.SupervisorManager, io.druid.query.timeseries.TimeseriesQueryRunnerFactory, io.druid.indexing.overlord.TaskLockbox, com.google.common.util.concurrent.ListenableFuture
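Unlike Example 2, whose notifier runs hand-off immediately, this notifier only records the callbacks in handOffCallbacks and leaves the test to fire them. A minimal sketch of simulating a completed hand-off, assuming a SegmentDescriptor that was registered above (Pair is io.druid.java.util.common.Pair, with public lhs/rhs fields):

// Fire the recorded hand-off callback for one segment descriptor.
Pair<Executor, Runnable> callback = handOffCallbacks.get(descriptor);
callback.lhs.execute(callback.rhs);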

Aggregations

CacheConfig (io.druid.client.cache.CacheConfig): 6 uses
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 5 uses
Query (io.druid.query.Query): 4 uses
SegmentDescriptor (io.druid.query.SegmentDescriptor): 4 uses
QueryRunner (io.druid.query.QueryRunner): 3 uses
TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery): 3 uses
SegmentLoaderConfig (io.druid.segment.loading.SegmentLoaderConfig): 3 uses
ArrayList (java.util.ArrayList): 3 uses
ImmutableMap (com.google.common.collect.ImmutableMap): 2 uses
Cache (io.druid.client.cache.Cache): 2 uses
LocalCacheProvider (io.druid.client.cache.LocalCacheProvider): 2 uses
MapCache (io.druid.client.cache.MapCache): 2 uses
SegmentLoaderFactory (io.druid.indexing.common.SegmentLoaderFactory): 2 uses
TaskToolboxFactory (io.druid.indexing.common.TaskToolboxFactory): 2 uses
TestUtils (io.druid.indexing.common.TestUtils): 2 uses
LocalTaskActionClientFactory (io.druid.indexing.common.actions.LocalTaskActionClientFactory): 2 uses
TaskActionClientFactory (io.druid.indexing.common.actions.TaskActionClientFactory): 2 uses
TaskActionToolbox (io.druid.indexing.common.actions.TaskActionToolbox): 2 uses
TaskConfig (io.druid.indexing.common.config.TaskConfig): 2 uses
TaskLockbox (io.druid.indexing.overlord.TaskLockbox): 2 uses