
Example 1 with DruidCoordinator

Use of org.apache.druid.server.coordinator.DruidCoordinator in project druid by druid-io.

From the class BroadcastDistributionRule, the run method:

@Override
public CoordinatorStats run(DruidCoordinator coordinator, DruidCoordinatorRuntimeParams params, DataSegment segment) {
    final Set<ServerHolder> dropServerHolders = new HashSet<>();
    // Find servers where we need to load the broadcast segments
    final Set<ServerHolder> loadServerHolders = params.getDruidCluster().getAllServers().stream().filter((serverHolder) -> {
        ServerType serverType = serverHolder.getServer().getType();
        if (!serverType.isSegmentBroadcastTarget()) {
            return false;
        }
        final boolean isServingSegment = serverHolder.isServingSegment(segment);
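        // Decommissioning servers must not receive new broadcast loads; if one still
        // serves this segment and is not already dropping it, schedule a drop instead.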
        if (serverHolder.isDecommissioning()) {
            if (isServingSegment && !serverHolder.isDroppingSegment(segment)) {
                dropServerHolders.add(serverHolder);
            }
            return false;
        }
        return !isServingSegment && !serverHolder.isLoadingSegment(segment);
    }).collect(Collectors.toSet());
    final CoordinatorStats stats = new CoordinatorStats();
    return stats.accumulate(assign(loadServerHolders, segment)).accumulate(drop(dropServerHolders, segment));
}
Also used: EmittingLogger (org.apache.druid.java.util.emitter.EmittingLogger), DruidCoordinatorRuntimeParams (org.apache.druid.server.coordinator.DruidCoordinatorRuntimeParams), Set (java.util.Set), HashSet (java.util.HashSet), Map (java.util.Map), Collectors (java.util.stream.Collectors), Object2LongMap (it.unimi.dsi.fastutil.objects.Object2LongMap), Object2LongOpenHashMap (it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap), DruidCoordinator (org.apache.druid.server.coordinator.DruidCoordinator), CoordinatorStats (org.apache.druid.server.coordinator.CoordinatorStats), SegmentReplicantLookup (org.apache.druid.server.coordinator.SegmentReplicantLookup), ServerType (org.apache.druid.server.coordination.ServerType), DataSegment (org.apache.druid.timeline.DataSegment), DruidCluster (org.apache.druid.server.coordinator.DruidCluster), ServerHolder (org.apache.druid.server.coordinator.ServerHolder)
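
The core of run is a single pass that splits eligible servers into a set to load the segment and a set to drop it. The following is a minimal, self-contained sketch of that selection logic; SimpleServer, its flags, and the driver are illustrative stand-ins, not Druid API types:

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

// Hypothetical stand-in for a Druid ServerHolder; illustrative only.
class SimpleServer {
    final String name;
    final boolean broadcastTarget;   // corresponds to ServerType.isSegmentBroadcastTarget()
    final boolean decommissioning;
    final boolean servingSegment;
    final boolean loadingSegment;
    final boolean droppingSegment;

    SimpleServer(String name, boolean broadcastTarget, boolean decommissioning,
                 boolean servingSegment, boolean loadingSegment, boolean droppingSegment) {
        this.name = name;
        this.broadcastTarget = broadcastTarget;
        this.decommissioning = decommissioning;
        this.servingSegment = servingSegment;
        this.loadingSegment = loadingSegment;
        this.droppingSegment = droppingSegment;
    }
}

public class BroadcastSelectionSketch {
    public static void main(String[] args) {
        List<SimpleServer> cluster = List.of(
            new SimpleServer("historical-1", true, false, false, false, false), // needs load
            new SimpleServer("historical-2", true, false, true, false, false),  // already serving
            new SimpleServer("historical-3", true, true, true, false, false),   // decommissioning: drop
            new SimpleServer("broker-1", false, false, false, false, false));   // not a broadcast target

        final Set<SimpleServer> dropServers = new HashSet<>();
        final Set<SimpleServer> loadServers = cluster.stream().filter(server -> {
            if (!server.broadcastTarget) {
                return false; // only broadcast-target server types participate
            }
            if (server.decommissioning) {
                // Decommissioning servers never receive new loads; queue a drop
                // if they still serve the segment and are not already dropping it.
                if (server.servingSegment && !server.droppingSegment) {
                    dropServers.add(server);
                }
                return false;
            }
            // Load only where the segment is neither served nor already queued.
            return !server.servingSegment && !server.loadingSegment;
        }).collect(Collectors.toSet());

        loadServers.forEach(s -> System.out.println("load on " + s.name));   // historical-1
        dropServers.forEach(s -> System.out.println("drop from " + s.name)); // historical-3
    }
}

Collecting the drop set as a side effect of the load filter keeps the work to a single pass over the cluster, which is the same trade-off the Druid method above makes.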

Example 2 with DruidCoordinator

Use of org.apache.druid.server.coordinator.DruidCoordinator in project druid by druid-io.

From the class DataSourcesResourceTest, the testGetDatasourceLoadstatusFullAndComputeUsingClusterView method:

@Test
public void testGetDatasourceLoadstatusFullAndComputeUsingClusterView() {
    DataSegment datasource1Segment1 = new DataSegment("datasource1", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 10);
    DataSegment datasource1Segment2 = new DataSegment("datasource1", Intervals.of("2010-01-22/P1D"), "", null, null, null, null, 0x9, 20);
    List<DataSegment> segments = ImmutableList.of(datasource1Segment1, datasource1Segment2);
    final Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTier = new HashMap<>();
    Object2LongMap<String> tier1 = new Object2LongOpenHashMap<>();
    tier1.put("datasource1", 0L);
    Object2LongMap<String> tier2 = new Object2LongOpenHashMap<>();
    tier2.put("datasource1", 3L);
    underReplicationCountsPerDataSourcePerTier.put("tier1", tier1);
    underReplicationCountsPerDataSourcePerTier.put("tier2", tier2);
    // Test when datasource fully loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())).andReturn(Optional.of(segments)).once();
    DruidCoordinator druidCoordinator = EasyMock.createMock(DruidCoordinator.class);
    EasyMock.expect(druidCoordinator.computeUnderReplicationCountsPerDataSourcePerTierForSegmentsUsingClusterView(segments)).andReturn(underReplicationCountsPerDataSourcePerTier).once();
    EasyMock.replay(segmentsMetadataManager, druidCoordinator);
    DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, druidCoordinator);
    Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, "full", "computeUsingClusterView");
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(2, ((Map) response.getEntity()).size());
    Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier1")).size());
    Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier2")).size());
    Assert.assertEquals(0L, ((Map) ((Map) response.getEntity()).get("tier1")).get("datasource1"));
    Assert.assertEquals(3L, ((Map) ((Map) response.getEntity()).get("tier2")).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager, druidCoordinator);
}
Also used: Response (javax.ws.rs.core.Response), HashMap (java.util.HashMap), TreeMap (java.util.TreeMap), Map (java.util.Map), ImmutableMap (com.google.common.collect.ImmutableMap), Object2LongMap (it.unimi.dsi.fastutil.objects.Object2LongMap), Object2LongOpenHashMap (it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap), DataSegment (org.apache.druid.timeline.DataSegment), DruidCoordinator (org.apache.druid.server.coordinator.DruidCoordinator), Test (org.junit.Test)
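
The assertions above walk a nested map keyed first by tier, then by datasource. A small self-contained sketch of building and reading that tier-to-datasource-to-count structure with fastutil maps, mirroring the test setup (variable names are illustrative):

import it.unimi.dsi.fastutil.objects.Object2LongMap;
import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap;
import java.util.HashMap;
import java.util.Map;

public class UnderReplicationMapSketch {
    public static void main(String[] args) {
        // tier -> (datasource -> number of missing replicas)
        Map<String, Object2LongMap<String>> underReplication = new HashMap<>();

        Object2LongMap<String> tier1 = new Object2LongOpenHashMap<>();
        tier1.put("datasource1", 0L); // fully replicated on tier1
        Object2LongMap<String> tier2 = new Object2LongOpenHashMap<>();
        tier2.put("datasource1", 3L); // three replicas still missing on tier2
        underReplication.put("tier1", tier1);
        underReplication.put("tier2", tier2);

        // Reading the structure back mirrors the test's assertions.
        long missingOnTier2 = underReplication.get("tier2").getLong("datasource1");
        System.out.println(missingOnTier2); // 3
    }
}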

Example 3 with DruidCoordinator

Use of org.apache.druid.server.coordinator.DruidCoordinator in project druid by druid-io.

From the class DataSourcesResourceTest, the testGetDatasourceLoadstatusFull method:

@Test
public void testGetDatasourceLoadstatusFull() {
    DataSegment datasource1Segment1 = new DataSegment("datasource1", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 10);
    DataSegment datasource1Segment2 = new DataSegment("datasource1", Intervals.of("2010-01-22/P1D"), "", null, null, null, null, 0x9, 20);
    List<DataSegment> segments = ImmutableList.of(datasource1Segment1, datasource1Segment2);
    final Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTier = new HashMap<>();
    Object2LongMap<String> tier1 = new Object2LongOpenHashMap<>();
    tier1.put("datasource1", 0L);
    Object2LongMap<String> tier2 = new Object2LongOpenHashMap<>();
    tier2.put("datasource1", 3L);
    underReplicationCountsPerDataSourcePerTier.put("tier1", tier1);
    underReplicationCountsPerDataSourcePerTier.put("tier2", tier2);
    // Test when datasource fully loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())).andReturn(Optional.of(segments)).once();
    DruidCoordinator druidCoordinator = EasyMock.createMock(DruidCoordinator.class);
    EasyMock.expect(druidCoordinator.computeUnderReplicationCountsPerDataSourcePerTierForSegments(segments)).andReturn(underReplicationCountsPerDataSourcePerTier).once();
    EasyMock.replay(segmentsMetadataManager, druidCoordinator);
    DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, druidCoordinator);
    Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, "full", null);
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(2, ((Map) response.getEntity()).size());
    Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier1")).size());
    Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier2")).size());
    Assert.assertEquals(0L, ((Map) ((Map) response.getEntity()).get("tier1")).get("datasource1"));
    Assert.assertEquals(3L, ((Map) ((Map) response.getEntity()).get("tier2")).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager, druidCoordinator);
}
Also used: Response (javax.ws.rs.core.Response), HashMap (java.util.HashMap), TreeMap (java.util.TreeMap), Map (java.util.Map), ImmutableMap (com.google.common.collect.ImmutableMap), Object2LongMap (it.unimi.dsi.fastutil.objects.Object2LongMap), Object2LongOpenHashMap (it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap), DataSegment (org.apache.druid.timeline.DataSegment), DruidCoordinator (org.apache.druid.server.coordinator.DruidCoordinator), Test (org.junit.Test)
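
The two tests differ only in the last argument of getDatasourceLoadstatus ("computeUsingClusterView" versus null) and in which coordinator method is stubbed, yet both repeat the same chain of casts when asserting on the response entity. A hypothetical helper, not part of the Druid test class, that would express those lookups once, assuming the tier-to-datasource-to-count shape shown above:

import java.util.Map;

public class LoadstatusAssertSketch {
    // Hypothetical helper the two tests above could share: walks the
    // tier -> datasource -> count entity and returns a single value.
    @SuppressWarnings("unchecked")
    static long underReplicatedCount(Object entity, String tier, String dataSource) {
        Map<String, Map<String, Long>> byTier = (Map<String, Map<String, Long>>) entity;
        return byTier.get(tier).get(dataSource);
    }

    public static void main(String[] args) {
        // Stand-in for response.getEntity() from the tests above.
        Map<String, Map<String, Long>> entity = Map.of(
            "tier1", Map.of("datasource1", 0L),
            "tier2", Map.of("datasource1", 3L));
        System.out.println(underReplicatedCount(entity, "tier1", "datasource1")); // 0
        System.out.println(underReplicatedCount(entity, "tier2", "datasource1")); // 3
    }
}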

Aggregations

Object2LongMap (it.unimi.dsi.fastutil.objects.Object2LongMap): 3
Object2LongOpenHashMap (it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap): 3
Map (java.util.Map): 3
DruidCoordinator (org.apache.druid.server.coordinator.DruidCoordinator): 3
DataSegment (org.apache.druid.timeline.DataSegment): 3
ImmutableMap (com.google.common.collect.ImmutableMap): 2
HashMap (java.util.HashMap): 2
TreeMap (java.util.TreeMap): 2
Response (javax.ws.rs.core.Response): 2
Test (org.junit.Test): 2
HashSet (java.util.HashSet): 1
Set (java.util.Set): 1
Collectors (java.util.stream.Collectors): 1
EmittingLogger (org.apache.druid.java.util.emitter.EmittingLogger): 1
ServerType (org.apache.druid.server.coordination.ServerType): 1
CoordinatorStats (org.apache.druid.server.coordinator.CoordinatorStats): 1
DruidCluster (org.apache.druid.server.coordinator.DruidCluster): 1
DruidCoordinatorRuntimeParams (org.apache.druid.server.coordinator.DruidCoordinatorRuntimeParams): 1
SegmentReplicantLookup (org.apache.druid.server.coordinator.SegmentReplicantLookup): 1
ServerHolder (org.apache.druid.server.coordinator.ServerHolder): 1