Example 31 with DruidServer

Use of org.apache.druid.client.DruidServer in project druid by druid-io.

From the class BatchServerInventoryViewTest, method testRun.

@Test
public void testRun() throws Exception {
    // Announce the initial segments and wait for the inventory view to sync.
    segmentAnnouncer.announceSegments(testSegments);
    waitForSync(batchServerInventoryView, testSegments);
    DruidServer server = Iterables.get(batchServerInventoryView.getInventory(), 0);
    Set<DataSegment> segments = Sets.newHashSet(server.iterateAllSegments());
    Assert.assertEquals(testSegments, segments);
    // Announce two additional segments and verify they appear on the server.
    DataSegment segment1 = makeSegment(101);
    DataSegment segment2 = makeSegment(102);
    segmentAnnouncer.announceSegment(segment1);
    segmentAnnouncer.announceSegment(segment2);
    testSegments.add(segment1);
    testSegments.add(segment2);
    waitForSync(batchServerInventoryView, testSegments);
    Assert.assertEquals(testSegments, Sets.newHashSet(server.iterateAllSegments()));
    // Unannounce them again and verify they disappear from the inventory.
    segmentAnnouncer.unannounceSegment(segment1);
    segmentAnnouncer.unannounceSegment(segment2);
    testSegments.remove(segment1);
    testSegments.remove(segment2);
    waitForSync(batchServerInventoryView, testSegments);
    Assert.assertEquals(testSegments, Sets.newHashSet(server.iterateAllSegments()));
}
Also used : DruidServer(org.apache.druid.client.DruidServer) DataSegment(org.apache.druid.timeline.DataSegment) Test(org.junit.Test)
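The tests in this class rely on a waitForSync helper that is not part of this listing. A minimal sketch of such a helper, assuming a simple polling loop with a fixed timeout; the timeout value, polling interval, and method shape are illustrative, not the test's exact code:

// Hypothetical helper: polls the view until its first server reports exactly the expected segments.
private static void waitForSync(BatchServerInventoryView view, Set<DataSegment> expectedSegments) throws Exception {
    final long timeoutMillis = 5_000;
    final long start = System.currentTimeMillis();
    while (System.currentTimeMillis() - start < timeoutMillis) {
        final Iterable<DruidServer> inventory = view.getInventory();
        if (!Iterables.isEmpty(inventory)
            && Sets.newHashSet(Iterables.get(inventory, 0).iterateAllSegments()).equals(expectedSegments)) {
            return;
        }
        Thread.sleep(100);
    }
    throw new AssertionError("Timed out waiting for the inventory view to sync");
}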

Example 32 with DruidServer

Use of org.apache.druid.client.DruidServer in project druid by druid-io.

From the class BatchServerInventoryViewTest, method setUp.

@Before
public void setUp() throws Exception {
    // Start an embedded ZooKeeper cluster and a Curator client rooted at TEST_BASE_PATH.
    testingCluster = new TestingCluster(1);
    testingCluster.start();
    cf = CuratorFrameworkFactory
            .builder()
            .connectString(testingCluster.getConnectString())
            .retryPolicy(new ExponentialBackoffRetry(1, 10))
            .compressionProvider(new PotentiallyGzippedCompressionProvider(true))
            .build();
    cf.start();
    cf.blockUntilConnected();
    cf.create().creatingParentsIfNeeded().forPath(TEST_BASE_PATH);
    jsonMapper = TestHelper.makeJsonMapper();
    announcer = new Announcer(cf, Execs.directExecutor());
    announcer.start();
    DruidServerMetadata serverMetadata = new DruidServerMetadata("id", "host", null, Long.MAX_VALUE, ServerType.HISTORICAL, "tier", 0);
    ZkPathsConfig zkPathsConfig = new ZkPathsConfig() {

        @Override
        public String getBase() {
            return TEST_BASE_PATH;
        }
    };
    // Announce the server itself, then create a batch announcer that packs up to 50 segments per node.
    serverAnnouncer = new CuratorDataSegmentServerAnnouncer(serverMetadata, zkPathsConfig, announcer, jsonMapper);
    serverAnnouncer.announce();
    segmentAnnouncer = new BatchDataSegmentAnnouncer(serverMetadata, new BatchDataSegmentAnnouncerConfig() {

        @Override
        public int getSegmentsPerNode() {
            return 50;
        }
    }, zkPathsConfig, announcer, jsonMapper);
    testSegments = Sets.newConcurrentHashSet();
    for (int i = 0; i < INITIAL_SEGMENTS; i++) {
        testSegments.add(makeSegment(i));
    }
    batchServerInventoryView = new BatchServerInventoryView(new ZkPathsConfig() {

        @Override
        public String getBase() {
            return TEST_BASE_PATH;
        }
    }, cf, jsonMapper, Predicates.alwaysTrue(), "test");
    batchServerInventoryView.start();
    inventoryUpdateCounter.set(0);
    // A second view that only accepts segments starting before SEGMENT_INTERVAL_START + INITIAL_SEGMENTS days,
    // and counts inventory updates so tests can wait on them.
    filteredBatchServerInventoryView = new BatchServerInventoryView(new ZkPathsConfig() {

        @Override
        public String getBase() {
            return TEST_BASE_PATH;
        }
    }, cf, jsonMapper, new Predicate<Pair<DruidServerMetadata, DataSegment>>() {

        @Override
        public boolean apply(@Nullable Pair<DruidServerMetadata, DataSegment> input) {
            return input.rhs.getInterval().getStart().isBefore(SEGMENT_INTERVAL_START.plusDays(INITIAL_SEGMENTS));
        }
    }, "test") {

        @Override
        protected DruidServer addInnerInventory(DruidServer container, String inventoryKey, Set<DataSegment> inventory) {
            DruidServer server = super.addInnerInventory(container, inventoryKey, inventory);
            inventoryUpdateCounter.incrementAndGet();
            return server;
        }
    };
    filteredBatchServerInventoryView.start();
}
Also used : BatchServerInventoryView(org.apache.druid.client.BatchServerInventoryView) ExponentialBackoffRetry(org.apache.curator.retry.ExponentialBackoffRetry) BatchDataSegmentAnnouncerConfig(org.apache.druid.server.initialization.BatchDataSegmentAnnouncerConfig) DruidServer(org.apache.druid.client.DruidServer) DruidServerMetadata(org.apache.druid.server.coordination.DruidServerMetadata) PotentiallyGzippedCompressionProvider(org.apache.druid.curator.PotentiallyGzippedCompressionProvider) DataSegment(org.apache.druid.timeline.DataSegment) CuratorDataSegmentServerAnnouncer(org.apache.druid.server.coordination.CuratorDataSegmentServerAnnouncer) Predicate(com.google.common.base.Predicate) TestingCluster(org.apache.curator.test.TestingCluster) Announcer(org.apache.druid.curator.announcement.Announcer) DataSegmentServerAnnouncer(org.apache.druid.server.coordination.DataSegmentServerAnnouncer) BatchDataSegmentAnnouncer(org.apache.druid.server.coordination.BatchDataSegmentAnnouncer) ZkPathsConfig(org.apache.druid.server.initialization.ZkPathsConfig) Nullable(javax.annotation.Nullable) Pair(org.apache.druid.java.util.common.Pair) Before(org.junit.Before)
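setUp seeds testSegments with makeSegment(i), a factory helper that is not shown in this listing. A hedged sketch of what it could look like, assuming SEGMENT_INTERVAL_START is a Joda-Time DateTime and each segment covers one day; the data source name, version, and size below are illustrative:

// Illustrative helper only; the real makeSegment in BatchServerInventoryViewTest may differ.
private DataSegment makeSegment(int offset) {
    return DataSegment.builder()
                      .dataSource("foo")
                      .interval(new Interval(SEGMENT_INTERVAL_START.plusDays(offset), SEGMENT_INTERVAL_START.plusDays(offset + 1)))
                      .version(SEGMENT_INTERVAL_START.toString())
                      .size(0)
                      .build();
}

Defined this way, a segment's start moves forward one day per index, which is what the filtered view's predicate in setUp keys on.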

Example 33 with DruidServer

Use of org.apache.druid.client.DruidServer in project druid by druid-io.

From the class BatchServerInventoryViewTest, method testRunWithFilter.

@Test
public void testRunWithFilter() throws Exception {
    segmentAnnouncer.announceSegments(testSegments);
    waitForSync(filteredBatchServerInventoryView, testSegments);
    DruidServer server = Iterables.get(filteredBatchServerInventoryView.getInventory(), 0);
    Set<DataSegment> segments = Sets.newHashSet(server.iterateAllSegments());
    Assert.assertEquals(testSegments, segments);
    int prevUpdateCount = inventoryUpdateCounter.get();
    // Announce a segment that falls outside the range accepted by the filter predicate.
    DataSegment segment1 = makeSegment(101);
    segmentAnnouncer.announceSegment(segment1);
    testSegments.add(segment1);
    waitForUpdateEvents(prevUpdateCount + 1);
    // The filtered view receives the update event but must not contain the filtered-out segment.
    Assert.assertNull(Iterables.getOnlyElement(filteredBatchServerInventoryView.getInventory()).getSegment(segment1.getId()));
}
Also used : DruidServer(org.apache.druid.client.DruidServer) DataSegment(org.apache.druid.timeline.DataSegment) Test(org.junit.Test)
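testRunWithFilter waits on waitForUpdateEvents, which in turn depends on the inventoryUpdateCounter incremented by the filtered view's addInnerInventory override in setUp. A possible sketch, assuming a timed poll on that counter; the real helper may use a latch or a different timeout:

// Hypothetical helper: waits until at least expectedCount inventory updates have been observed.
private void waitForUpdateEvents(int expectedCount) throws Exception {
    final long timeoutMillis = 5_000;
    final long start = System.currentTimeMillis();
    while (inventoryUpdateCounter.get() < expectedCount) {
        if (System.currentTimeMillis() - start > timeoutMillis) {
            throw new AssertionError("Timed out waiting for inventory update events: expected " + expectedCount + ", saw " + inventoryUpdateCounter.get());
        }
        Thread.sleep(100);
    }
}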

Example 34 with DruidServer

Use of org.apache.druid.client.DruidServer in project druid by druid-io.

From the class TierSelectorStrategyTest, method testLowestPriorityTierSelectorStrategy.

@Test
public void testLowestPriorityTierSelectorStrategy() {
    DirectDruidClient client = EasyMock.createMock(DirectDruidClient.class);
    QueryableDruidServer lowPriority = new QueryableDruidServer(new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), client);
    QueryableDruidServer highPriority = new QueryableDruidServer(new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 1), client);
    testTierSelectorStrategy(new LowestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy()), lowPriority, highPriority);
}
Also used : DirectDruidClient(org.apache.druid.client.DirectDruidClient) DruidServer(org.apache.druid.client.DruidServer) Test(org.junit.Test)
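The assertion itself lives in a testTierSelectorStrategy helper that is not included in this listing. A hedged sketch of the idea, assuming ServerSelector's constructor, addServerAndUpdateSegment, and pick methods behave as in the broker's server-selection path; the segment built here and the helper's signature are illustrative, not the exact test code:

// Illustrative helper: registers the servers with a ServerSelector and checks that the
// strategy's preferred server is picked first. API usage is an assumption, not the exact test code.
private void testTierSelectorStrategy(TierSelectorStrategy strategy, QueryableDruidServer... expectedOrder) {
    final DataSegment segment = DataSegment.builder()
                                           .dataSource("test")
                                           .interval(Intervals.of("2013-01-01/2013-01-02"))
                                           .version("v1")
                                           .size(0)
                                           .build();
    final ServerSelector serverSelector = new ServerSelector(segment, strategy);
    for (QueryableDruidServer server : expectedOrder) {
        serverSelector.addServerAndUpdateSegment(server, segment);
    }
    // With LowestPriorityTierSelectorStrategy, the priority-0 server should be preferred over priority 1.
    Assert.assertEquals(expectedOrder[0], serverSelector.pick(null));
}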

Example 35 with DruidServer

Use of org.apache.druid.client.DruidServer in project druid by druid-io.

From the class TierSelectorStrategyTest, method testServerSelectorStrategyDefaults.

@Test
public void testServerSelectorStrategyDefaults() {
    DirectDruidClient client = EasyMock.createMock(DirectDruidClient.class);
    QueryableDruidServer p0 = new QueryableDruidServer(new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, -1), client);
    Set<QueryableDruidServer> servers = new HashSet<>();
    servers.add(p0);
    RandomServerSelectorStrategy strategy = new RandomServerSelectorStrategy();
    Assert.assertEquals(strategy.pick(servers, EasyMock.createMock(DataSegment.class)), p0);
    Assert.assertEquals(strategy.pick(EasyMock.createMock(Query.class), servers, EasyMock.createMock(DataSegment.class)), p0);
    // Anonymous strategy that implements only the primary pick overload and delegates to the
    // RandomServerSelectorStrategy; the deprecated overloads are exercised through their defaults.
    ServerSelectorStrategy defaultDeprecatedServerSelectorStrategy = new ServerSelectorStrategy() {

        @Override
        public <T> List<QueryableDruidServer> pick(@Nullable Query<T> query, Set<QueryableDruidServer> servers, DataSegment segment, int numServersToPick) {
            return strategy.pick(servers, segment, numServersToPick);
        }
    };
    Assert.assertEquals(defaultDeprecatedServerSelectorStrategy.pick(servers, EasyMock.createMock(DataSegment.class)), p0);
    Assert.assertEquals(defaultDeprecatedServerSelectorStrategy.pick(servers, EasyMock.createMock(DataSegment.class), 1).get(0), p0);
}
Also used : Set(java.util.Set) HashSet(java.util.HashSet) DirectDruidClient(org.apache.druid.client.DirectDruidClient) Query(org.apache.druid.query.Query) DruidServer(org.apache.druid.client.DruidServer) DataSegment(org.apache.druid.timeline.DataSegment) Nullable(javax.annotation.Nullable) Test(org.junit.Test)
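Taken together, the calls in testServerSelectorStrategyDefaults imply that ServerSelectorStrategy declares one primary pick overload plus convenience defaults that funnel into it. The following is a hedged reconstruction of that shape, not a copy of the Druid interface; names, annotations, and null handling may differ in the actual org.apache.druid.client.selector source:

// Hedged reconstruction of the overload structure exercised by the test above.
public interface ServerSelectorStrategy {

    // Primary overload: pick up to numServersToPick servers for a segment, optionally query-aware.
    <T> List<QueryableDruidServer> pick(@Nullable Query<T> query, Set<QueryableDruidServer> servers, DataSegment segment, int numServersToPick);

    default <T> QueryableDruidServer pick(@Nullable Query<T> query, Set<QueryableDruidServer> servers, DataSegment segment) {
        List<QueryableDruidServer> picked = pick(query, servers, segment, 1);
        return picked.isEmpty() ? null : picked.get(0);
    }

    default QueryableDruidServer pick(Set<QueryableDruidServer> servers, DataSegment segment) {
        return pick(null, servers, segment);
    }

    default List<QueryableDruidServer> pick(Set<QueryableDruidServer> servers, DataSegment segment, int numServersToPick) {
        return pick(null, servers, segment, numServersToPick);
    }
}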

Aggregations

DruidServer (org.apache.druid.client.DruidServer): 73
Test (org.junit.Test): 57
DataSegment (org.apache.druid.timeline.DataSegment): 43
ImmutableDruidServer (org.apache.druid.client.ImmutableDruidServer): 21
IntervalLoadRule (org.apache.druid.server.coordinator.rules.IntervalLoadRule): 18
ListeningExecutorService (com.google.common.util.concurrent.ListeningExecutorService): 17
ServerHolder (org.apache.druid.server.coordinator.ServerHolder): 14
CoordinatorStats (org.apache.druid.server.coordinator.CoordinatorStats): 12
DruidCluster (org.apache.druid.server.coordinator.DruidCluster): 12
LoadQueuePeon (org.apache.druid.server.coordinator.LoadQueuePeon): 11
HashMap (java.util.HashMap): 9
Object2LongMap (it.unimi.dsi.fastutil.objects.Object2LongMap): 8
ArrayList (java.util.ArrayList): 8
Response (javax.ws.rs.core.Response): 8
ForeverLoadRule (org.apache.druid.server.coordinator.rules.ForeverLoadRule): 8
HashSet (java.util.HashSet): 7
Map (java.util.Map): 7
CountDownLatch (java.util.concurrent.CountDownLatch): 7
DirectDruidClient (org.apache.druid.client.DirectDruidClient): 7
ImmutableDruidDataSource (org.apache.druid.client.ImmutableDruidDataSource): 7