
Example 11 with DruidDataSource

use of org.apache.druid.client.DruidDataSource in project druid by druid-io.

the class DruidCoordinatorTest method testCoordinatorCustomDutyGroupsRunAsExpected.

@Test(timeout = 3000)
public void testCoordinatorCustomDutyGroupsRunAsExpected() throws Exception {
    // Some necessary setup to start the Coordinator
    JacksonConfigManager configManager = EasyMock.createNiceMock(JacksonConfigManager.class);
    EasyMock.expect(configManager.watch(
        EasyMock.eq(CoordinatorDynamicConfig.CONFIG_KEY),
        EasyMock.anyObject(Class.class),
        EasyMock.anyObject()
    )).andReturn(new AtomicReference(CoordinatorDynamicConfig.builder().build())).anyTimes();
    EasyMock.expect(configManager.watch(
        EasyMock.eq(CoordinatorCompactionConfig.CONFIG_KEY),
        EasyMock.anyObject(Class.class),
        EasyMock.anyObject()
    )).andReturn(new AtomicReference(CoordinatorCompactionConfig.empty())).anyTimes();
    EasyMock.replay(configManager);
    EasyMock.expect(segmentsMetadataManager.isPollingDatabasePeriodically()).andReturn(true).anyTimes();
    DruidDataSource dataSource = new DruidDataSource("dataSource1", Collections.emptyMap());
    DataSegment dataSegment = new DataSegment("dataSource1", Intervals.of("2010-01-01/P1D"), "v1", null, null, null, null, 0x9, 0);
    dataSource.addSegment(dataSegment);
    DataSourcesSnapshot dataSourcesSnapshot = new DataSourcesSnapshot(ImmutableMap.of(dataSource.getName(), dataSource.toImmutableDruidDataSource()));
    EasyMock.expect(segmentsMetadataManager.getSnapshotOfDataSourcesWithAllUsedSegments()).andReturn(dataSourcesSnapshot).anyTimes();
    EasyMock.replay(segmentsMetadataManager);
    EasyMock.expect(serverInventoryView.isStarted()).andReturn(true).anyTimes();
    EasyMock.replay(serverInventoryView);
    // Create the CoordinatorCustomDutyGroups:
    // two groups, each containing a single duty
    CountDownLatch latch1 = new CountDownLatch(1);
    CoordinatorCustomDuty duty1 = new CoordinatorCustomDuty() {

        @Override
        public DruidCoordinatorRuntimeParams run(DruidCoordinatorRuntimeParams params) {
            latch1.countDown();
            return params;
        }
    };
    CoordinatorCustomDutyGroup group1 = new CoordinatorCustomDutyGroup("group1", Duration.standardSeconds(1), ImmutableList.of(duty1));
    CountDownLatch latch2 = new CountDownLatch(1);
    CoordinatorCustomDuty duty2 = new CoordinatorCustomDuty() {

        @Override
        public DruidCoordinatorRuntimeParams run(DruidCoordinatorRuntimeParams params) {
            latch2.countDown();
            return params;
        }
    };
    CoordinatorCustomDutyGroup group2 = new CoordinatorCustomDutyGroup("group2", Duration.standardSeconds(1), ImmutableList.of(duty2));
    CoordinatorCustomDutyGroups groups = new CoordinatorCustomDutyGroups(ImmutableSet.of(group1, group2));
    coordinator = new DruidCoordinator(druidCoordinatorConfig, new ZkPathsConfig() {

        @Override
        public String getBase() {
            return "druid";
        }
    }, configManager, segmentsMetadataManager, serverInventoryView, metadataRuleManager,
            () -> curator, serviceEmitter, scheduledExecutorFactory, null, null,
            new NoopServiceAnnouncer() {

        @Override
        public void announce(DruidNode node) {
            // count down when this coordinator becomes the leader
            leaderAnnouncerLatch.countDown();
        }

        @Override
        public void unannounce(DruidNode node) {
            leaderUnannouncerLatch.countDown();
        }
    }, druidNode, loadManagementPeons, null, new HashSet<>(), groups,
            new CostBalancerStrategyFactory(), EasyMock.createNiceMock(LookupCoordinatorManager.class),
            new TestDruidLeaderSelector(), null, ZkEnablementConfig.ENABLED);
    coordinator.start();
    // Wait until the group 1 duty has run and counted down latch1
    latch1.await();
    // Wait until the group 2 duty has run and counted down latch2
    latch2.await();
}
Also used : JacksonConfigManager(org.apache.druid.common.config.JacksonConfigManager) AtomicReference(java.util.concurrent.atomic.AtomicReference) CountDownLatch(java.util.concurrent.CountDownLatch) DruidDataSource(org.apache.druid.client.DruidDataSource) ImmutableDruidDataSource(org.apache.druid.client.ImmutableDruidDataSource) DataSegment(org.apache.druid.timeline.DataSegment) ZkPathsConfig(org.apache.druid.server.initialization.ZkPathsConfig) CoordinatorCustomDuty(org.apache.druid.server.coordinator.duty.CoordinatorCustomDuty) DruidNode(org.apache.druid.server.DruidNode) CoordinatorCustomDutyGroup(org.apache.druid.server.coordinator.duty.CoordinatorCustomDutyGroup) DataSourcesSnapshot(org.apache.druid.client.DataSourcesSnapshot) CoordinatorCustomDutyGroups(org.apache.druid.server.coordinator.duty.CoordinatorCustomDutyGroups) NoopServiceAnnouncer(org.apache.druid.curator.discovery.NoopServiceAnnouncer) HashSet(java.util.HashSet) Test(org.junit.Test)
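
Stripped of the EasyMock scaffolding, the DruidDataSource-specific part of this test is a three-step pattern: build a mutable DruidDataSource, register a DataSegment with it, and publish an immutable view through a DataSourcesSnapshot. A minimal sketch of just that pattern, using the same classes as the example above (the wrapper method is only for illustration):

import java.util.Collections;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.client.DataSourcesSnapshot;
import org.apache.druid.client.DruidDataSource;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.timeline.DataSegment;

private static DataSourcesSnapshot singleSegmentSnapshot() {
    // Mutable, name-keyed container for the segments of one datasource
    DruidDataSource dataSource = new DruidDataSource("dataSource1", Collections.emptyMap());
    // Register one segment; the null-heavy constructor arguments mirror the test above
    DataSegment dataSegment = new DataSegment("dataSource1", Intervals.of("2010-01-01/P1D"), "v1", null, null, null, null, 0x9, 0);
    dataSource.addSegment(dataSegment);
    // Freeze the current state for consumers that expect an immutable view
    return new DataSourcesSnapshot(ImmutableMap.of(dataSource.getName(), dataSource.toImmutableDruidDataSource()));
}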

Example 12 with DruidDataSource

use of org.apache.druid.client.DruidDataSource in project druid by druid-io.

the class DataSourcesResource method getSimpleDatasource.

private Map<String, Map<String, Object>> getSimpleDatasource(String dataSourceName) {
    Map<String, Object> tiers = new HashMap<>();
    Map<String, Object> segments = new HashMap<>();
    Map<String, Map<String, Object>> retVal = ImmutableMap.of("tiers", tiers, "segments", segments);
    Set<SegmentId> totalDistinctSegments = new HashSet<>();
    Map<String, HashSet<Object>> tierDistinctSegments = new HashMap<>();
    long totalSegmentSize = 0;
    long totalReplicatedSize = 0;
    DateTime minTime = DateTimes.MAX;
    DateTime maxTime = DateTimes.MIN;
    String tier;
    for (DruidServer druidServer : serverInventoryView.getInventory()) {
        DruidDataSource druidDataSource = druidServer.getDataSource(dataSourceName);
        tier = druidServer.getTier();
        if (druidDataSource == null) {
            continue;
        }
        tierDistinctSegments.computeIfAbsent(tier, t -> new HashSet<>());
        long dataSourceSegmentSize = 0;
        long replicatedSegmentSize = 0;
        for (DataSegment dataSegment : druidDataSource.getSegments()) {
            // tier segments stats
            if (!tierDistinctSegments.get(tier).contains(dataSegment.getId())) {
                dataSourceSegmentSize += dataSegment.getSize();
                tierDistinctSegments.get(tier).add(dataSegment.getId());
            }
            // total segments stats
            if (totalDistinctSegments.add(dataSegment.getId())) {
                totalSegmentSize += dataSegment.getSize();
                minTime = DateTimes.min(minTime, dataSegment.getInterval().getStart());
                maxTime = DateTimes.max(maxTime, dataSegment.getInterval().getEnd());
            }
            totalReplicatedSize += dataSegment.getSize();
            replicatedSegmentSize += dataSegment.getSize();
        }
        // tier stats
        Map<String, Object> tierStats = (Map) tiers.get(tier);
        if (tierStats == null) {
            tierStats = new HashMap<>();
            tiers.put(druidServer.getTier(), tierStats);
        }
        tierStats.put("segmentCount", tierDistinctSegments.get(tier).size());
        long segmentSize = MapUtils.getLong(tierStats, "size", 0L);
        tierStats.put("size", segmentSize + dataSourceSegmentSize);
        long replicatedSize = MapUtils.getLong(tierStats, "replicatedSize", 0L);
        tierStats.put("replicatedSize", replicatedSize + replicatedSegmentSize);
    }
    segments.put("count", totalDistinctSegments.size());
    segments.put("size", totalSegmentSize);
    segments.put("replicatedSize", totalReplicatedSize);
    segments.put("minTime", minTime);
    segments.put("maxTime", maxTime);
    return retVal;
}
Also used : HashMap(java.util.HashMap) SegmentId(org.apache.druid.timeline.SegmentId) DruidServer(org.apache.druid.client.DruidServer) DruidDataSource(org.apache.druid.client.DruidDataSource) ImmutableDruidDataSource(org.apache.druid.client.ImmutableDruidDataSource) DataSegment(org.apache.druid.timeline.DataSegment) DateTime(org.joda.time.DateTime) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) EnumMap(java.util.EnumMap) SortedMap(java.util.SortedMap) Object2LongMap(it.unimi.dsi.fastutil.objects.Object2LongMap) TreeMap(java.util.TreeMap) HashSet(java.util.HashSet)
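
The double bookkeeping above hinges on Set.add returning false for an already-seen SegmentId: a segment served by several servers contributes to the replicated totals on every visit, but to the distinct size and count only once. A standalone sketch of that idiom, with hypothetical data purely for illustration:

import java.util.HashSet;
import java.util.Set;

public class DistinctVsReplicated {
    public static void main(String[] args) {
        // Two replicas of seg1 (100 bytes each) and one copy of seg2 (50 bytes)
        String[][] replicas = {{"seg1", "100"}, {"seg1", "100"}, {"seg2", "50"}};
        Set<String> seen = new HashSet<>();
        long distinctSize = 0;
        long replicatedSize = 0;
        for (String[] replica : replicas) {
            long size = Long.parseLong(replica[1]);
            if (seen.add(replica[0])) {
                // add() returned true: first sighting, count toward the distinct total
                distinctSize += size;
            }
            // every replica counts toward the replicated total
            replicatedSize += size;
        }
        System.out.println(distinctSize + " " + replicatedSize);  // prints "150 250"
    }
}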

Example 13 with DruidDataSource

use of org.apache.druid.client.DruidDataSource in project druid by druid-io.

the class TiersResource method getTierDataSources.

@GET
@Path("/{tierName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getTierDataSources(@PathParam("tierName") String tierName, @QueryParam("simple") String simple) {
    if (simple != null) {
        Map<String, Map<Interval, Map<IntervalProperties, Object>>> tierToStatsPerInterval = new HashMap<>();
        for (DruidServer druidServer : serverInventoryView.getInventory()) {
            if (druidServer.getTier().equalsIgnoreCase(tierName)) {
                for (DataSegment dataSegment : druidServer.iterateAllSegments()) {
                    Map<IntervalProperties, Object> properties = tierToStatsPerInterval
                        .computeIfAbsent(dataSegment.getDataSource(), dsName -> new HashMap<>())
                        .computeIfAbsent(dataSegment.getInterval(), interval -> new EnumMap<>(IntervalProperties.class));
                    properties.merge(IntervalProperties.size, dataSegment.getSize(), (a, b) -> (Long) a + (Long) b);
                    properties.merge(IntervalProperties.count, 1, (a, b) -> (Integer) a + (Integer) b);
                }
            }
        }
        return Response.ok(tierToStatsPerInterval).build();
    }
    Set<String> retVal = serverInventoryView
        .getInventory()
        .stream()
        .filter(druidServer -> druidServer.getTier().equalsIgnoreCase(tierName))
        .flatMap(druidServer -> druidServer.getDataSources().stream().map(DruidDataSource::getName))
        .collect(Collectors.toSet());
    return Response.ok(retVal).build();
}
Also used : PathParam(javax.ws.rs.PathParam) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET) EnumMap(java.util.EnumMap) Inject(com.google.inject.Inject) DruidServer(org.apache.druid.client.DruidServer) Path(javax.ws.rs.Path) Set(java.util.Set) HashMap(java.util.HashMap) ResourceFilters(com.sun.jersey.spi.container.ResourceFilters) Collectors(java.util.stream.Collectors) InventoryView(org.apache.druid.client.InventoryView) Interval(org.joda.time.Interval) MediaType(javax.ws.rs.core.MediaType) QueryParam(javax.ws.rs.QueryParam) Response(javax.ws.rs.core.Response) Map(java.util.Map) DataSegment(org.apache.druid.timeline.DataSegment) StateResourceFilter(org.apache.druid.server.http.security.StateResourceFilter) DruidDataSource(org.apache.druid.client.DruidDataSource)
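
The simple mode above leans on two Map idioms: computeIfAbsent to lazily build the nested datasource-to-interval structure, and merge to accumulate per-interval totals without explicit null checks. A reduced sketch of the same idioms, with simplified key and value types and hypothetical data:

import java.util.HashMap;
import java.util.Map;

public class IntervalStats {
    public static void main(String[] args) {
        // datasource -> interval -> total segment size
        Map<String, Map<String, Long>> stats = new HashMap<>();
        long[] segmentSizes = {100L, 50L};
        for (long size : segmentSizes) {
            stats.computeIfAbsent("wikipedia", dsName -> new HashMap<>())
                 .merge("2010-01-01/P1D", size, Long::sum);
        }
        System.out.println(stats);  // prints {wikipedia={2010-01-01/P1D=150}}
    }
}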

Example 14 with DruidDataSource

use of org.apache.druid.client.DruidDataSource in project druid by druid-io.

the class DruidCoordinator method getLoadStatus.

public Map<String, Double> getLoadStatus() {
    final Map<String, Double> loadStatus = new HashMap<>();
    final Collection<ImmutableDruidDataSource> dataSources = segmentsMetadataManager.getImmutableDataSourcesWithAllUsedSegments();
    for (ImmutableDruidDataSource dataSource : dataSources) {
        final Set<DataSegment> segments = Sets.newHashSet(dataSource.getSegments());
        final int numPublishedSegments = segments.size();
        // remove loaded segments
        for (DruidServer druidServer : serverInventoryView.getInventory()) {
            final DruidDataSource loadedView = druidServer.getDataSource(dataSource.getName());
            if (loadedView != null) {
                // Please see https://github.com/apache/druid/pull/5632 and LoadStatusBenchmark for more info.
                for (DataSegment serverSegment : loadedView.getSegments()) {
                    segments.remove(serverSegment);
                }
            }
        }
        final int numUnavailableSegments = segments.size();
        loadStatus.put(dataSource.getName(), 100 * ((double) (numPublishedSegments - numUnavailableSegments) / (double) numPublishedSegments));
    }
    return loadStatus;
}
Also used : ImmutableDruidDataSource(org.apache.druid.client.ImmutableDruidDataSource) Object2IntOpenHashMap(it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) DruidServer(org.apache.druid.client.DruidServer) ImmutableDruidServer(org.apache.druid.client.ImmutableDruidServer) DataSegment(org.apache.druid.timeline.DataSegment) DruidDataSource(org.apache.druid.client.DruidDataSource)
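
The value reported per datasource is 100 * (published - unavailable) / published, i.e. the percentage of used segments already served by at least one server. A worked instance of the formula (figures hypothetical):

public class LoadStatusFormula {
    public static void main(String[] args) {
        // 10 published segments, 2 of them not yet loaded by any server
        int numPublishedSegments = 10;
        int numUnavailableSegments = 2;
        double loadPercent = 100 * ((double) (numPublishedSegments - numUnavailableSegments)
                / (double) numPublishedSegments);
        System.out.println(loadPercent);  // prints 80.0
    }
}

Note that the expression evaluates to NaN when numPublishedSegments is 0; presumably getImmutableDataSourcesWithAllUsedSegments only returns datasources that have at least one used segment.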

Example 15 with DruidDataSource

use of org.apache.druid.client.DruidDataSource in project druid by druid-io.

the class DataSourcesResourceTest method testMarkAsUsedNonOvershadowedSegmentsIntervalException.

@Test
public void testMarkAsUsedNonOvershadowedSegmentsIntervalException() {
    DruidDataSource dataSource = new DruidDataSource("datasource1", new HashMap<>());
    Interval interval = Intervals.of("2010-01-22/P1D");
    int numUpdatedSegments = segmentsMetadataManager.markAsUsedNonOvershadowedSegmentsInInterval(EasyMock.eq("datasource1"), EasyMock.eq(interval));
    EasyMock.expect(numUpdatedSegments).andThrow(new RuntimeException("Error!")).once();
    EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).once();
    EasyMock.expect(server.getDataSource("datasource1")).andReturn(dataSource).once();
    EasyMock.replay(segmentsMetadataManager, inventoryView, server);
    DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
    Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments("datasource1", new DataSourcesResource.MarkDataSourceSegmentsPayload(interval, null));
    Assert.assertEquals(500, response.getStatus());
    EasyMock.verify(segmentsMetadataManager, inventoryView, server);
}
Also used : Response(javax.ws.rs.core.Response) DruidDataSource(org.apache.druid.client.DruidDataSource) ImmutableDruidDataSource(org.apache.druid.client.ImmutableDruidDataSource) Interval(org.joda.time.Interval) Test(org.junit.Test)
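
The EasyMock pattern here is worth spelling out: because markAsUsedNonOvershadowedSegmentsInInterval returns a primitive int, the test invokes it on the mock during the record phase (the assignment to numUpdatedSegments merely captures the dummy return value) and then attaches the failure with expect(...).andThrow(...). A self-contained sketch of the same record-then-throw idiom against a hypothetical interface, not the real metadata manager:

import static org.easymock.EasyMock.*;

public class RecordThenThrow {
    // Hypothetical collaborator, standing in for the metadata manager
    interface SegmentUpdater {
        int markUsed(String dataSourceName);
    }

    public static void main(String[] args) {
        SegmentUpdater updater = createMock(SegmentUpdater.class);
        // Record phase: capture the call, then make it throw instead of return
        expect(updater.markUsed(eq("datasource1"))).andThrow(new RuntimeException("Error!")).once();
        replay(updater);
        try {
            updater.markUsed("datasource1");
        } catch (RuntimeException e) {
            // the resource under test translates this into an HTTP 500 response
        }
        verify(updater);
    }
}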

Aggregations

DruidDataSource (org.apache.druid.client.DruidDataSource)22 ImmutableDruidDataSource (org.apache.druid.client.ImmutableDruidDataSource)21 Test (org.junit.Test)17 Response (javax.ws.rs.core.Response)14 DataSegment (org.apache.druid.timeline.DataSegment)10 DruidServer (org.apache.druid.client.DruidServer)7 Interval (org.joda.time.Interval)7 Object2LongMap (it.unimi.dsi.fastutil.objects.Object2LongMap)5 SegmentId (org.apache.druid.timeline.SegmentId)5 HashMap (java.util.HashMap)4 CountDownLatch (java.util.concurrent.CountDownLatch)4 ImmutableDruidServer (org.apache.druid.client.ImmutableDruidServer)4 Map (java.util.Map)3 TreeMap (java.util.TreeMap)3 ForeverBroadcastDistributionRule (org.apache.druid.server.coordinator.rules.ForeverBroadcastDistributionRule)3 ForeverLoadRule (org.apache.druid.server.coordinator.rules.ForeverLoadRule)3 IntervalLoadRule (org.apache.druid.server.coordinator.rules.IntervalLoadRule)3 Rule (org.apache.druid.server.coordinator.rules.Rule)3 ImmutableMap (com.google.common.collect.ImmutableMap)2 EnumMap (java.util.EnumMap)2