
Example 1 with SegmentManager

Use of org.apache.druid.server.SegmentManager in project druid by druid-io.

From the class SystemSchemaTest, method setUp:

@Before
public void setUp() throws Exception {
    serverView = EasyMock.createNiceMock(TimelineServerView.class);
    client = EasyMock.createMock(DruidLeaderClient.class);
    coordinatorClient = EasyMock.createMock(DruidLeaderClient.class);
    overlordClient = EasyMock.createMock(DruidLeaderClient.class);
    mapper = TestHelper.makeJsonMapper();
    responseHolder = EasyMock.createMock(StringFullResponseHolder.class);
    responseHandler = EasyMock
        .createMockBuilder(BytesAccumulatingResponseHandler.class)
        .withConstructor()
        .addMockedMethod("handleResponse", HttpResponse.class, HttpResponseHandler.TrafficCop.class)
        .addMockedMethod("getStatus")
        .createMock();
    request = EasyMock.createMock(Request.class);
    authMapper = createAuthMapper();
    final File tmpDir = temporaryFolder.newFolder();
    final QueryableIndex index1 = IndexBuilder
        .create()
        .tmpDir(new File(tmpDir, "1"))
        .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
        .schema(
            new IncrementalIndexSchema.Builder()
                .withMetrics(
                    new CountAggregatorFactory("cnt"),
                    new DoubleSumAggregatorFactory("m1", "m1"),
                    new HyperUniquesAggregatorFactory("unique_dim1", "dim1")
                )
                .withRollup(false)
                .build()
        )
        .rows(ROWS1)
        .buildMMappedIndex();
    final QueryableIndex index2 = IndexBuilder
        .create()
        .tmpDir(new File(tmpDir, "2"))
        .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
        .schema(
            new IncrementalIndexSchema.Builder()
                .withMetrics(new LongSumAggregatorFactory("m1", "m1"))
                .withRollup(false)
                .build()
        )
        .rows(ROWS2)
        .buildMMappedIndex();
    final QueryableIndex index3 = IndexBuilder
        .create()
        .tmpDir(new File(tmpDir, "3"))
        .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
        .schema(
            new IncrementalIndexSchema.Builder()
                .withMetrics(new LongSumAggregatorFactory("m1", "m1"))
                .withRollup(false)
                .build()
        )
        .rows(ROWS3)
        .buildMMappedIndex();
    walker = new SpecificSegmentsQuerySegmentWalker(conglomerate).add(segment1, index1).add(segment2, index2).add(segment3, index3);
    druidSchema = new DruidSchema(
        CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate),
        new TestServerInventoryView(walker.getSegments(), realtimeSegments),
        new SegmentManager(EasyMock.createMock(SegmentLoader.class)),
        new MapJoinableFactory(ImmutableSet.of(), ImmutableMap.of()),
        PLANNER_CONFIG_DEFAULT,
        new NoopEscalator(),
        new BrokerInternalQueryConfig(),
        null
    );
    druidSchema.start();
    druidSchema.awaitInitialization();
    metadataView = EasyMock.createMock(MetadataSegmentView.class);
    druidNodeDiscoveryProvider = EasyMock.createMock(DruidNodeDiscoveryProvider.class);
    serverInventoryView = EasyMock.createMock(FilteredServerInventoryView.class);
    schema = new SystemSchema(
        druidSchema,
        metadataView,
        serverView,
        serverInventoryView,
        EasyMock.createStrictMock(AuthorizerMapper.class),
        client,
        client,
        druidNodeDiscoveryProvider,
        mapper
    );
}
Also used : SegmentManager(org.apache.druid.server.SegmentManager) TestServerInventoryView(org.apache.druid.sql.calcite.util.TestServerInventoryView) IndexBuilder(org.apache.druid.segment.IndexBuilder) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) NoopEscalator(org.apache.druid.server.security.NoopEscalator) TimelineServerView(org.apache.druid.client.TimelineServerView) MapJoinableFactory(org.apache.druid.segment.join.MapJoinableFactory) FilteredServerInventoryView(org.apache.druid.client.FilteredServerInventoryView) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) Request(org.apache.druid.java.util.http.client.Request) DefaultHttpResponse(org.jboss.netty.handler.codec.http.DefaultHttpResponse) HttpResponse(org.jboss.netty.handler.codec.http.HttpResponse) DruidLeaderClient(org.apache.druid.discovery.DruidLeaderClient) SegmentLoader(org.apache.druid.segment.loading.SegmentLoader) StringFullResponseHolder(org.apache.druid.java.util.http.client.response.StringFullResponseHolder) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) SpecificSegmentsQuerySegmentWalker(org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker) QueryableIndex(org.apache.druid.segment.QueryableIndex) DruidNodeDiscoveryProvider(org.apache.druid.discovery.DruidNodeDiscoveryProvider) BrokerInternalQueryConfig(org.apache.druid.client.BrokerInternalQueryConfig) HyperUniquesAggregatorFactory(org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) File(java.io.File) BytesAccumulatingResponseHandler(org.apache.druid.server.coordinator.BytesAccumulatingResponseHandler) Before(org.junit.Before)
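
The only SegmentManager involvement here is the third DruidSchema argument: a real SegmentManager wrapping an EasyMock-mocked SegmentLoader, so this broker-side schema test never loads anything from disk. Below is a minimal sketch of just that wiring, using only the constructor and mock call visible above; the wrapper class and method names are invented for illustration:

import org.apache.druid.segment.loading.SegmentLoader;
import org.apache.druid.server.SegmentManager;
import org.easymock.EasyMock;

class BrokerSchemaTestWiring {
    // A SegmentManager that satisfies DruidSchema's constructor without touching disk;
    // the mocked SegmentLoader is never exercised by the broker-side test.
    static SegmentManager inertSegmentManager() {
        return new SegmentManager(EasyMock.createMock(SegmentLoader.class));
    }
}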

Example 2 with SegmentManager

Use of org.apache.druid.server.SegmentManager in project druid by druid-io.

From the class SegmentLoadDropHandlerCacheTest, method setup:

@Before
public void setup() throws IOException {
    storageLoc = new TestStorageLocation(temporaryFolder);
    SegmentLoaderConfig config = new SegmentLoaderConfig()
        .withLocations(Collections.singletonList(storageLoc.toStorageLocationConfig(MAX_SIZE, null)))
        .withInfoDir(storageLoc.getInfoDir());
    objectMapper = TestHelper.makeJsonMapper();
    objectMapper.registerSubtypes(TestLoadSpec.class);
    objectMapper.registerSubtypes(TestSegmentizerFactory.class);
    SegmentCacheManager cacheManager = new SegmentLocalCacheManager(config, objectMapper);
    SegmentManager segmentManager = new SegmentManager(new SegmentLocalCacheLoader(cacheManager, TestIndex.INDEX_IO, objectMapper));
    segmentAnnouncer = Mockito.mock(DataSegmentAnnouncer.class);
    loadDropHandler = new SegmentLoadDropHandler(
        objectMapper,
        config,
        segmentAnnouncer,
        Mockito.mock(DataSegmentServerAnnouncer.class),
        segmentManager,
        cacheManager,
        new ServerTypeConfig(ServerType.HISTORICAL)
    );
    EmittingLogger.registerEmitter(new NoopServiceEmitter());
}
Also used : SegmentCacheManager(org.apache.druid.segment.loading.SegmentCacheManager) SegmentManager(org.apache.druid.server.SegmentManager) SegmentLocalCacheManager(org.apache.druid.segment.loading.SegmentLocalCacheManager) SegmentLocalCacheLoader(org.apache.druid.segment.loading.SegmentLocalCacheLoader) NoopServiceEmitter(org.apache.druid.server.metrics.NoopServiceEmitter) SegmentLoaderConfig(org.apache.druid.segment.loading.SegmentLoaderConfig) ServerTypeConfig(org.apache.druid.guice.ServerTypeConfig) Before(org.junit.Before)
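
Once wired this way, the SegmentManager resolves segments through the local cache whenever the load-drop handler asks for them. The sketch below is a hedged usage example built only from methods visible elsewhere on this page (loadSegment and getDataSourceNames); the wrapper class, the eager lazy=false flag, and the SegmentLazyLoadFailCallback.NOOP constant are assumptions:

import java.util.Set;

import org.apache.druid.segment.SegmentLazyLoadFailCallback;
import org.apache.druid.server.SegmentManager;
import org.apache.druid.timeline.DataSegment;

class CacheLoadSketch {
    // Loads one segment through the cache-backed manager built above and reports the
    // data sources it now serves. Declares `throws Exception` to cover any checked
    // loading exceptions without pinning down their exact types.
    static Set<String> loadAndList(SegmentManager segmentManager, DataSegment segment) throws Exception {
        segmentManager.loadSegment(segment, false, SegmentLazyLoadFailCallback.NOOP);
        return segmentManager.getDataSourceNames();
    }
}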

Example 3 with SegmentManager

Use of org.apache.druid.server.SegmentManager in project druid by druid-io.

From the class ServerManagerTest, method setUp:

@Before
public void setUp() {
    EmittingLogger.registerEmitter(new NoopServiceEmitter());
    queryWaitLatch = new CountDownLatch(1);
    queryWaitYieldLatch = new CountDownLatch(1);
    queryNotifyLatch = new CountDownLatch(1);
    factory = new MyQueryRunnerFactory(queryWaitLatch, queryWaitYieldLatch, queryNotifyLatch);
    serverManagerExec = Executors.newFixedThreadPool(2);
    segmentManager = new SegmentManager(new SegmentLoader() {

        @Override
        public ReferenceCountingSegment getSegment(final DataSegment segment, boolean lazy, SegmentLazyLoadFailCallback loadFailCallback) {
            return ReferenceCountingSegment.wrapSegment(
                new SegmentForTesting(
                    MapUtils.getString(segment.getLoadSpec(), "version"),
                    (Interval) segment.getLoadSpec().get("interval")
                ),
                segment.getShardSpec()
            );
        }

        @Override
        public void cleanup(DataSegment segment) {
        }
    });
    serverManager = new ServerManager(new QueryRunnerFactoryConglomerate() {

        @Override
        public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(QueryType query) {
            if (query instanceof SearchQuery) {
                return (QueryRunnerFactory) factory;
            } else {
                return null;
            }
        }
    },
        new NoopServiceEmitter(),
        new ForwardingQueryProcessingPool(serverManagerExec),
        new ForegroundCachePopulator(new DefaultObjectMapper(), new CachePopulatorStats(), -1),
        new DefaultObjectMapper(),
        new LocalCacheProvider().get(),
        new CacheConfig(),
        segmentManager,
        NoopJoinableFactory.INSTANCE,
        new ServerConfig()
    );
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-01"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-02"));
    loadQueryable("test", "2", Intervals.of("P1d/2011-04-02"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-03"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-04"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-05"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T01"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T02"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T03"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T05"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T06"));
    loadQueryable("test2", "1", Intervals.of("P1d/2011-04-01"));
    loadQueryable("test2", "1", Intervals.of("P1d/2011-04-02"));
}
Also used : SearchQuery(org.apache.druid.query.search.SearchQuery) SegmentManager(org.apache.druid.server.SegmentManager) BaseQuery(org.apache.druid.query.BaseQuery) Query(org.apache.druid.query.Query) SearchQuery(org.apache.druid.query.search.SearchQuery) ForwardingQueryProcessingPool(org.apache.druid.query.ForwardingQueryProcessingPool) SegmentLazyLoadFailCallback(org.apache.druid.segment.SegmentLazyLoadFailCallback) NoopServiceEmitter(org.apache.druid.server.metrics.NoopServiceEmitter) CountDownLatch(java.util.concurrent.CountDownLatch) DataSegment(org.apache.druid.timeline.DataSegment) SegmentLoader(org.apache.druid.segment.loading.SegmentLoader) ServerConfig(org.apache.druid.server.initialization.ServerConfig) QueryRunnerFactoryConglomerate(org.apache.druid.query.QueryRunnerFactoryConglomerate) QueryRunnerFactory(org.apache.druid.query.QueryRunnerFactory) CachePopulatorStats(org.apache.druid.client.cache.CachePopulatorStats) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) LocalCacheProvider(org.apache.druid.client.cache.LocalCacheProvider) ForegroundCachePopulator(org.apache.druid.client.cache.ForegroundCachePopulator) CacheConfig(org.apache.druid.client.cache.CacheConfig) Interval(org.joda.time.Interval) Before(org.junit.Before)
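
The anonymous SegmentLoader above reconstructs a SegmentForTesting purely from the segment's load spec, reading only the "version" and "interval" keys. The loadQueryable(...) helper that builds those segments is not shown on this page, so the exact spec it passes is an assumption; the sketch below only illustrates the minimal shape the stub relies on:

import java.util.Map;

import com.google.common.collect.ImmutableMap;
import org.joda.time.Interval;

class LoadSpecSketch {
    // Minimal load spec for the stubbed SegmentLoader: "version" is read as a String,
    // "interval" as a Joda Interval, and any other keys are ignored.
    static Map<String, Object> testLoadSpec(String version, Interval interval) {
        return ImmutableMap.of("version", version, "interval", interval);
    }
}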

Example 4 with SegmentManager

Use of org.apache.druid.server.SegmentManager in project druid by druid-io.

From the class SegmentLoadDropHandlerTest, method testProcessBatchDuplicateLoadRequestsWhenFirstRequestFailsSecondRequestShouldSucceed:

@Test(timeout = 60_000L)
public void testProcessBatchDuplicateLoadRequestsWhenFirstRequestFailsSecondRequestShouldSucceed() throws Exception {
    final SegmentManager segmentManager = Mockito.mock(SegmentManager.class);
    Mockito.when(segmentManager.loadSegment(ArgumentMatchers.any(), ArgumentMatchers.anyBoolean(), ArgumentMatchers.any()))
        .thenThrow(new RuntimeException("segment loading failure test"))
        .thenReturn(true);
    final SegmentLoadDropHandler segmentLoadDropHandler = new SegmentLoadDropHandler(
        jsonMapper,
        segmentLoaderConfig,
        announcer,
        Mockito.mock(DataSegmentServerAnnouncer.class),
        segmentManager,
        segmentCacheManager,
        scheduledExecutorFactory.create(5, "SegmentLoadDropHandlerTest-[%d]"),
        new ServerTypeConfig(ServerType.HISTORICAL)
    );
    segmentLoadDropHandler.start();
    DataSegment segment1 = makeSegment("batchtest1", "1", Intervals.of("P1d/2011-04-01"));
    List<DataSegmentChangeRequest> batch = ImmutableList.of(new SegmentChangeRequestLoad(segment1));
    ListenableFuture<List<DataSegmentChangeRequestAndStatus>> future = segmentLoadDropHandler.processBatch(batch);
    for (Runnable runnable : scheduledRunnable) {
        runnable.run();
    }
    List<DataSegmentChangeRequestAndStatus> result = future.get();
    Assert.assertEquals(STATE.FAILED, result.get(0).getStatus().getState());
    future = segmentLoadDropHandler.processBatch(batch);
    for (Runnable runnable : scheduledRunnable) {
        runnable.run();
    }
    result = future.get();
    Assert.assertEquals(STATE.SUCCESS, result.get(0).getStatus().getState());
    segmentLoadDropHandler.stop();
}
Also used : SegmentManager(org.apache.druid.server.SegmentManager) DataSegment(org.apache.druid.timeline.DataSegment) DataSegmentChangeRequestAndStatus(org.apache.druid.server.coordination.SegmentLoadDropHandler.DataSegmentChangeRequestAndStatus) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) ServerTypeConfig(org.apache.druid.guice.ServerTypeConfig) Test(org.junit.Test)
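
The first-fails, second-succeeds behaviour rests on Mockito's consecutive stubbing: a single when(...) chain queues a different answer for each successive call. Here is an isolated sketch of that pattern; the FlakyLoader interface is invented for illustration, and only the Mockito calls themselves mirror the test above:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

class ConsecutiveStubbingSketch {
    // Invented single-method stand-in for SegmentManager.loadSegment(...).
    interface FlakyLoader {
        boolean load(Object segment);
    }

    static FlakyLoader flakyLoader() {
        FlakyLoader loader = mock(FlakyLoader.class);
        // First call throws, every later call returns true: the same sequence the
        // duplicate-load-request test drives through processBatch twice.
        when(loader.load(any()))
            .thenThrow(new RuntimeException("segment loading failure test"))
            .thenReturn(true);
        return loader;
    }
}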

Example 5 with SegmentManager

Use of org.apache.druid.server.SegmentManager in project druid by druid-io.

From the class CalciteTests, method createMockSchema:

private static DruidSchema createMockSchema(final QueryRunnerFactoryConglomerate conglomerate, final SpecificSegmentsQuerySegmentWalker walker, final PlannerConfig plannerConfig, final DruidSchemaManager druidSchemaManager) {
    final DruidSchema schema = new DruidSchema(
        CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate),
        new TestServerInventoryView(walker.getSegments()),
        new SegmentManager(EasyMock.createMock(SegmentLoader.class)) {

        @Override
        public Set<String> getDataSourceNames() {
            return ImmutableSet.of(BROADCAST_DATASOURCE);
        }
    },
        createDefaultJoinableFactory(),
        plannerConfig,
        TEST_AUTHENTICATOR_ESCALATOR,
        new BrokerInternalQueryConfig(),
        druidSchemaManager
    );
    try {
        schema.start();
        schema.awaitInitialization();
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
    schema.stop();
    return schema;
}
Also used : Set(java.util.Set) ImmutableSet(com.google.common.collect.ImmutableSet) HashSet(java.util.HashSet) SegmentManager(org.apache.druid.server.SegmentManager) BrokerInternalQueryConfig(org.apache.druid.client.BrokerInternalQueryConfig) DruidSchema(org.apache.druid.sql.calcite.schema.DruidSchema) NamedDruidSchema(org.apache.druid.sql.calcite.schema.NamedDruidSchema)
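
The anonymous subclass works because SegmentManager is a concrete class and the schema only calls getDataSourceNames() on it. An equivalent single-method stub could be written with Mockito instead; the following is a sketch only, not what CalciteTests actually does, and broadcastDataSource stands in for the BROADCAST_DATASOURCE constant referenced above:

import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

import com.google.common.collect.ImmutableSet;
import org.apache.druid.server.SegmentManager;

class BroadcastSegmentManagerSketch {
    static SegmentManager broadcastOnly(String broadcastDataSource) {
        // Mock the whole class and pin getDataSourceNames(); other methods keep
        // Mockito's defaults, which this schema test never exercises.
        SegmentManager segmentManager = mock(SegmentManager.class);
        doReturn(ImmutableSet.of(broadcastDataSource)).when(segmentManager).getDataSourceNames();
        return segmentManager;
    }
}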

Aggregations

SegmentManager (org.apache.druid.server.SegmentManager): 9 uses
Before (org.junit.Before): 5 uses
ServerTypeConfig (org.apache.druid.guice.ServerTypeConfig): 4 uses
DataSegment (org.apache.druid.timeline.DataSegment): 4 uses
ImmutableList (com.google.common.collect.ImmutableList): 3 uses
ArrayList (java.util.ArrayList): 3 uses
List (java.util.List): 3 uses
BrokerInternalQueryConfig (org.apache.druid.client.BrokerInternalQueryConfig): 3 uses
File (java.io.File): 2 uses
Set (java.util.Set): 2 uses
QueryRunnerFactoryConglomerate (org.apache.druid.query.QueryRunnerFactoryConglomerate): 2 uses
MapJoinableFactory (org.apache.druid.segment.join.MapJoinableFactory): 2 uses
SegmentLoader (org.apache.druid.segment.loading.SegmentLoader): 2 uses
SegmentLoaderConfig (org.apache.druid.segment.loading.SegmentLoaderConfig): 2 uses
NoopServiceEmitter (org.apache.druid.server.metrics.NoopServiceEmitter): 2 uses
Test (org.junit.Test): 2 uses
ImmutableSet (com.google.common.collect.ImmutableSet): 1 use
IOException (java.io.IOException): 1 use
HashSet (java.util.HashSet): 1 use
Optional (java.util.Optional): 1 use