Search in sources:

Example 16 with DruidDataSource

use of io.druid.client.DruidDataSource in project druid by druid-io.

From class DatasourcesResourceTest, method testSimpleGetTheDataSource:

/**
 * Verifies the default (non-"full") datasource view returned by
 * {@code DatasourcesResource.getTheDataSource}: per-tier segment counts/sizes
 * plus aggregate segment stats (min/max time, count, size) for a datasource
 * holding a single 10-byte segment spanning 2010-01-01/P1D.
 */
@Test
public void testSimpleGetTheDataSource() throws Exception {
    // Diamond operator instead of a raw HashMap to avoid an unchecked-conversion warning.
    DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap<>());
    dataSource1.addSegment("partition", new DataSegment("datasegment1", new Interval("2010-01-01/P1D"), null, null, null, null, null, 0x9, 10));
    EasyMock.expect(server.getDataSource("datasource1")).andReturn(dataSource1).atLeastOnce();
    // Tier deliberately null: the resource should bucket the stats under the null tier key.
    EasyMock.expect(server.getTier()).andReturn(null).atLeastOnce();
    EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).atLeastOnce();
    EasyMock.replay(inventoryView, server);
    DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig());
    // Second argument null => simple (summary) view rather than the "full" serialization.
    Response response = datasourcesResource.getTheDataSource("datasource1", null);
    Assert.assertEquals(200, response.getStatus());
    Map<String, Map<String, Object>> result = (Map<String, Map<String, Object>>) response.getEntity();
    // Raw Map cast is unavoidable here: the tier entry's value type is not statically known.
    Assert.assertEquals(1, ((Map) (result.get("tiers").get(null))).get("segmentCount"));
    Assert.assertEquals(10L, ((Map) (result.get("tiers").get(null))).get("size"));
    Assert.assertNotNull(result.get("segments"));
    Assert.assertEquals("2010-01-01T00:00:00.000Z", result.get("segments").get("minTime").toString());
    Assert.assertEquals("2010-01-02T00:00:00.000Z", result.get("segments").get("maxTime").toString());
    Assert.assertEquals(1, result.get("segments").get("count"));
    Assert.assertEquals(10L, result.get("segments").get("size"));
    EasyMock.verify(inventoryView, server);
}
Also used : Response(javax.ws.rs.core.Response) HashMap(java.util.HashMap) AuthConfig(io.druid.server.security.AuthConfig) DruidDataSource(io.druid.client.DruidDataSource) DataSegment(io.druid.timeline.DataSegment) HashMap(java.util.HashMap) Map(java.util.Map) TreeMap(java.util.TreeMap) Interval(org.joda.time.Interval) Test(org.junit.Test)

Example 17 with DruidDataSource

use of io.druid.client.DruidDataSource in project druid by druid-io.

From class DatasourcesResourceTest, method testFullGetTheDataSource:

/**
 * Verifies that requesting the "full" view of a datasource returns the
 * {@code DruidDataSource} object itself as the response entity, unmodified.
 */
@Test
public void testFullGetTheDataSource() throws Exception {
    // Diamond operator instead of a raw HashMap to avoid an unchecked-conversion warning.
    DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap<>());
    EasyMock.expect(server.getDataSource("datasource1")).andReturn(dataSource1).atLeastOnce();
    EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).atLeastOnce();
    EasyMock.replay(inventoryView, server);
    DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig());
    // "full" asks for the complete datasource serialization rather than the summary map.
    Response response = datasourcesResource.getTheDataSource("datasource1", "full");
    DruidDataSource result = (DruidDataSource) response.getEntity();
    Assert.assertEquals(200, response.getStatus());
    // The resource should hand back the exact datasource instance the server reported.
    Assert.assertEquals(dataSource1, result);
    EasyMock.verify(inventoryView, server);
}
Also used : Response(javax.ws.rs.core.Response) HashMap(java.util.HashMap) AuthConfig(io.druid.server.security.AuthConfig) DruidDataSource(io.druid.client.DruidDataSource) Test(org.junit.Test)

Example 18 with DruidDataSource

use of io.druid.client.DruidDataSource in project druid by druid-io.

From class ClientInfoResource, method getSegmentsForDatasources:

/**
 * Collects every segment known to the server inventory, grouped by datasource name.
 * A datasource served by multiple servers contributes all of its segments from each
 * server (duplicates across servers are not deduplicated here).
 *
 * @return mutable map from datasource name to the list of its segments
 */
private Map<String, List<DataSegment>> getSegmentsForDatasources() {
    final Map<String, List<DataSegment>> dataSourceMap = Maps.newHashMap();
    for (DruidServer server : serverInventoryView.getInventory()) {
        for (DruidDataSource dataSource : server.getDataSources()) {
            // Single get + null check instead of containsKey followed by get:
            // avoids a redundant second hash lookup per datasource.
            List<DataSegment> segments = dataSourceMap.get(dataSource.getName());
            if (segments == null) {
                segments = Lists.newArrayList();
                dataSourceMap.put(dataSource.getName(), segments);
            }
            segments.addAll(dataSource.getSegments());
        }
    }
    return dataSourceMap;
}
Also used : DruidServer(io.druid.client.DruidServer) List(java.util.List) DruidDataSource(io.druid.client.DruidDataSource) DataSegment(io.druid.timeline.DataSegment)

Example 19 with DruidDataSource

use of io.druid.client.DruidDataSource in project druid by druid-io.

From class DruidCoordinatorLogger, method run:

// Logs a human-readable summary of the just-completed coordinator run and emits the
// corresponding metrics (per-tier segment stats, per-server load queues, per-datasource
// availability/replication and size/count totals). Pure reporting: returns params unchanged.
@Override
public DruidCoordinatorRuntimeParams run(DruidCoordinatorRuntimeParams params) {
    DruidCluster cluster = params.getDruidCluster();
    CoordinatorStats stats = params.getCoordinatorStats();
    ServiceEmitter emitter = params.getEmitter();
    // Per-tier stat maps may be null when no rule produced that stat this run;
    // each is null-checked before logging, and emitTieredStats presumably tolerates
    // null input (it is called unconditionally below) — TODO confirm.
    Map<String, AtomicLong> assigned = stats.getPerTierStats().get("assignedCount");
    if (assigned != null) {
        for (Map.Entry<String, AtomicLong> entry : assigned.entrySet()) {
            log.info("[%s] : Assigned %s segments among %,d servers", entry.getKey(), entry.getValue().get(), cluster.get(entry.getKey()).size());
        }
    }
    emitTieredStats(emitter, "segment/assigned/count", assigned);
    Map<String, AtomicLong> dropped = stats.getPerTierStats().get("droppedCount");
    if (dropped != null) {
        for (Map.Entry<String, AtomicLong> entry : dropped.entrySet()) {
            log.info("[%s] : Dropped %s segments among %,d servers", entry.getKey(), entry.getValue().get(), cluster.get(entry.getKey()).size());
        }
    }
    emitTieredStats(emitter, "segment/dropped/count", dropped);
    // Cost-balancer metrics: raw/normalized cost per tier.
    emitTieredStats(emitter, "segment/cost/raw", stats.getPerTierStats().get("initialCost"));
    emitTieredStats(emitter, "segment/cost/normalization", stats.getPerTierStats().get("normalization"));
    emitTieredStats(emitter, "segment/moved/count", stats.getPerTierStats().get("movedCount"));
    emitTieredStats(emitter, "segment/deleted/count", stats.getPerTierStats().get("deletedCount"));
    Map<String, AtomicLong> normalized = stats.getPerTierStats().get("normalizedInitialCostTimesOneThousand");
    if (normalized != null) {
        // Stat is stored scaled by 1000 (AtomicLong cannot hold fractions);
        // divide back down before emitting the true normalized cost.
        emitTieredStats(emitter, "segment/cost/normalized", Maps.transformEntries(normalized, new Maps.EntryTransformer<String, AtomicLong, Number>() {

            @Override
            public Number transformEntry(String key, AtomicLong value) {
                return value.doubleValue() / 1000d;
            }
        }));
    }
    Map<String, AtomicLong> unneeded = stats.getPerTierStats().get("unneededCount");
    if (unneeded != null) {
        for (Map.Entry<String, AtomicLong> entry : unneeded.entrySet()) {
            log.info("[%s] : Removed %s unneeded segments among %,d servers", entry.getKey(), entry.getValue().get(), cluster.get(entry.getKey()).size());
        }
    }
    emitTieredStats(emitter, "segment/unneeded/count", stats.getPerTierStats().get("unneededCount"));
    // Over-shadowed count is a global (cluster-wide) stat, emitted without a tier dimension.
    emitter.emit(new ServiceMetricEvent.Builder().build("segment/overShadowed/count", stats.getGlobalStats().get("overShadowedCount")));
    Map<String, AtomicLong> moved = stats.getPerTierStats().get("movedCount");
    if (moved != null) {
        for (Map.Entry<String, AtomicLong> entry : moved.entrySet()) {
            log.info("[%s] : Moved %,d segment(s)", entry.getKey(), entry.getValue().get());
        }
    }
    final Map<String, AtomicLong> unmoved = stats.getPerTierStats().get("unmovedCount");
    if (unmoved != null) {
        for (Map.Entry<String, AtomicLong> entry : unmoved.entrySet()) {
            log.info("[%s] : Let alone %,d segment(s)", entry.getKey(), entry.getValue().get());
        }
    }
    // Per-server load/drop queue depths, tier by tier.
    log.info("Load Queues:");
    for (MinMaxPriorityQueue<ServerHolder> serverHolders : cluster.getSortedServersByTier()) {
        for (ServerHolder serverHolder : serverHolders) {
            ImmutableDruidServer server = serverHolder.getServer();
            LoadQueuePeon queuePeon = serverHolder.getPeon();
            log.info("Server[%s, %s, %s] has %,d left to load, %,d left to drop, %,d bytes queued, %,d bytes served.", server.getName(), server.getType(), server.getTier(), queuePeon.getSegmentsToLoad().size(), queuePeon.getSegmentsToDrop().size(), queuePeon.getLoadQueueSize(), server.getCurrSize());
            if (log.isDebugEnabled()) {
                // Per-segment detail only at debug level to avoid flooding the log.
                for (DataSegment segment : queuePeon.getSegmentsToLoad()) {
                    log.debug("Segment to load[%s]", segment);
                }
                for (DataSegment segment : queuePeon.getSegmentsToDrop()) {
                    log.debug("Segment to drop[%s]", segment);
                }
            }
        }
    }
    // Emit coordinator metrics
    final Set<Map.Entry<String, LoadQueuePeon>> peonEntries = params.getLoadManagementPeons().entrySet();
    for (Map.Entry<String, LoadQueuePeon> entry : peonEntries) {
        String serverName = entry.getKey();
        LoadQueuePeon queuePeon = entry.getValue();
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.SERVER, serverName).build("segment/loadQueue/size", queuePeon.getLoadQueueSize()));
        // getAndResetFailedAssignCount: read-and-clear, so the failure count is per-run.
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.SERVER, serverName).build("segment/loadQueue/failed", queuePeon.getAndResetFailedAssignCount()));
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.SERVER, serverName).build("segment/loadQueue/count", queuePeon.getSegmentsToLoad().size()));
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.SERVER, serverName).build("segment/dropQueue/count", queuePeon.getSegmentsToDrop().size()));
    }
    // Per-datasource count of segments not available on any server.
    for (Map.Entry<String, AtomicLong> entry : coordinator.getSegmentAvailability().entrySet()) {
        String datasource = entry.getKey();
        Long count = entry.getValue().get();
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.DATASOURCE, datasource).build("segment/unavailable/count", count));
    }
    // Per-tier, per-datasource count of segments below their configured replication level.
    for (Map.Entry<String, CountingMap<String>> entry : coordinator.getReplicationStatus().entrySet()) {
        String tier = entry.getKey();
        CountingMap<String> datasourceAvailabilities = entry.getValue();
        for (Map.Entry<String, AtomicLong> datasourceAvailability : datasourceAvailabilities.entrySet()) {
            String datasource = datasourceAvailability.getKey();
            Long count = datasourceAvailability.getValue().get();
            emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.TIER, tier).setDimension(DruidMetrics.DATASOURCE, datasource).build("segment/underReplicated/count", count));
        }
    }
    // Emit segment metrics
    // Aggregate total byte size and segment count per datasource in one pass.
    CountingMap<String> segmentSizes = new CountingMap<String>();
    CountingMap<String> segmentCounts = new CountingMap<String>();
    for (DruidDataSource dataSource : params.getDataSources()) {
        for (DataSegment segment : dataSource.getSegments()) {
            segmentSizes.add(dataSource.getName(), segment.getSize());
            segmentCounts.add(dataSource.getName(), 1L);
        }
    }
    for (Map.Entry<String, Long> entry : segmentSizes.snapshot().entrySet()) {
        String dataSource = entry.getKey();
        Long size = entry.getValue();
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.DATASOURCE, dataSource).build("segment/size", size));
    }
    for (Map.Entry<String, Long> entry : segmentCounts.snapshot().entrySet()) {
        String dataSource = entry.getKey();
        Long count = entry.getValue();
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.DATASOURCE, dataSource).build("segment/count", count));
    }
    return params;
}
Also used : ServiceEmitter(com.metamx.emitter.service.ServiceEmitter) CoordinatorStats(io.druid.server.coordinator.CoordinatorStats) DataSegment(io.druid.timeline.DataSegment) LoadQueuePeon(io.druid.server.coordinator.LoadQueuePeon) DruidCluster(io.druid.server.coordinator.DruidCluster) DruidDataSource(io.druid.client.DruidDataSource) CountingMap(io.druid.collections.CountingMap) AtomicLong(java.util.concurrent.atomic.AtomicLong) ServerHolder(io.druid.server.coordinator.ServerHolder) AtomicLong(java.util.concurrent.atomic.AtomicLong) ServiceMetricEvent(com.metamx.emitter.service.ServiceMetricEvent) CountingMap(io.druid.collections.CountingMap) Map(java.util.Map) ImmutableDruidServer(io.druid.client.ImmutableDruidServer)

Example 20 with DruidDataSource

use of io.druid.client.DruidDataSource in project druid by druid-io.

From class DruidCoordinator, method getLoadStatus:

/**
 * Computes, per datasource, the percentage of its metadata-declared segments that
 * are currently loaded somewhere in the server inventory.
 *
 * @return map from datasource name to loaded percentage in the range [0, 100]
 */
public Map<String, Double> getLoadStatus() {
    Map<String, Double> loadStatus = Maps.newHashMap();
    for (DruidDataSource dataSource : metadataSegmentManager.getInventory()) {
        final Set<DataSegment> segments = Sets.newHashSet(dataSource.getSegments());
        final int availableSegmentSize = segments.size();
        // remove loaded segments
        for (DruidServer druidServer : serverInventoryView.getInventory()) {
            final DruidDataSource loadedView = druidServer.getDataSource(dataSource.getName());
            if (loadedView != null) {
                segments.removeAll(loadedView.getSegments());
            }
        }
        final int unloadedSegmentSize = segments.size();
        // Guard the 0/0 case: a datasource with no available segments previously
        // produced Double.NaN here; report it as fully loaded (100%) instead.
        final double loadedPercent = availableSegmentSize == 0
            ? 100.0
            : 100 * ((double) (availableSegmentSize - unloadedSegmentSize) / (double) availableSegmentSize);
        loadStatus.put(dataSource.getName(), loadedPercent);
    }
    return loadStatus;
}
Also used : ImmutableDruidServer(io.druid.client.ImmutableDruidServer) DruidServer(io.druid.client.DruidServer) ImmutableDruidDataSource(io.druid.client.ImmutableDruidDataSource) DruidDataSource(io.druid.client.DruidDataSource) DataSegment(io.druid.timeline.DataSegment)

Aggregations

DruidDataSource (io.druid.client.DruidDataSource)26 DataSegment (io.druid.timeline.DataSegment)19 Map (java.util.Map)9 GET (javax.ws.rs.GET)9 Produces (javax.ws.rs.Produces)9 Interval (org.joda.time.Interval)9 DruidServer (io.druid.client.DruidServer)8 Path (javax.ws.rs.Path)8 HashMap (java.util.HashMap)7 Set (java.util.Set)6 Response (javax.ws.rs.core.Response)6 Test (org.junit.Test)6 ResourceFilters (com.sun.jersey.spi.container.ResourceFilters)5 List (java.util.List)5 ImmutableMap (com.google.common.collect.ImmutableMap)4 AuthConfig (io.druid.server.security.AuthConfig)4 AuthorizationInfo (io.druid.server.security.AuthorizationInfo)4 ImmutableDruidDataSource (io.druid.client.ImmutableDruidDataSource)3 ImmutableDruidServer (io.druid.client.ImmutableDruidServer)3 Access (io.druid.server.security.Access)3