Example 11 with DruidDataSource

Use of io.druid.client.DruidDataSource in project druid by druid-io.

From the class MetadataResource, method getDatabaseSegmentDataSourceSegments:

@GET
@Path("/datasources/{dataSourceName}/segments")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getDatabaseSegmentDataSourceSegments(@PathParam("dataSourceName") String dataSourceName, @QueryParam("full") String full) {
    DruidDataSource dataSource = metadataSegmentManager.getInventoryValue(dataSourceName);
    if (dataSource == null) {
        return Response.status(Response.Status.NOT_FOUND).build();
    }
    Response.ResponseBuilder builder = Response.status(Response.Status.OK);
    // "full" acts as a presence flag: any non-null value returns whole DataSegment
    // objects; otherwise only the segment identifiers are returned.
    if (full != null) {
        return builder.entity(dataSource.getSegments()).build();
    }
    return builder.entity(Iterables.transform(dataSource.getSegments(), new Function<DataSegment, String>() {

        @Override
        public String apply(DataSegment segment) {
            return segment.getIdentifier();
        }
    })).build();
}
Also used: Response (javax.ws.rs.core.Response), DruidDataSource (io.druid.client.DruidDataSource), DataSegment (io.druid.timeline.DataSegment), Path (javax.ws.rs.Path), ResourceFilters (com.sun.jersey.spi.container.ResourceFilters), Produces (javax.ws.rs.Produces), GET (javax.ws.rs.GET)
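
For orientation, a minimal client-side sketch of calling this endpoint. The coordinator address and the /druid/coordinator/v1/metadata mount point are assumptions about the deployment, and "wikipedia" is a placeholder datasource; only the 404 behavior and the meaning of the full parameter come from the resource method above.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class DataSourceSegmentsClient {
    public static void main(String[] args) throws Exception {
        // Assumed coordinator address and mount point; "wikipedia" is a placeholder.
        URL url = new URL("http://localhost:8081/druid/coordinator/v1/metadata/datasources/wikipedia/segments?full");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/json");
        // The resource returns 404 when getInventoryValue() finds no such datasource.
        if (conn.getResponseCode() == 404) {
            System.err.println("datasource not found");
            return;
        }
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}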

Example 12 with DruidDataSource

Use of io.druid.client.DruidDataSource in project druid by druid-io.

From the class TiersResource, method getTierDatasources:

@GET
@Path("/{tierName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getTierDatasources(@PathParam("tierName") String tierName, @QueryParam("simple") String simple) {
    // "simple" is a presence flag: when set, return per-(datasource, interval)
    // size and count rollups; otherwise fall through to the flat set of names below.
    if (simple != null) {
        Table<String, Interval, Map<String, Object>> retVal = HashBasedTable.create();
        for (DruidServer druidServer : serverInventoryView.getInventory()) {
            if (druidServer.getTier().equalsIgnoreCase(tierName)) {
                for (DataSegment dataSegment : druidServer.getSegments().values()) {
                    Map<String, Object> properties = retVal.get(dataSegment.getDataSource(), dataSegment.getInterval());
                    if (properties == null) {
                        properties = Maps.newHashMap();
                        retVal.put(dataSegment.getDataSource(), dataSegment.getInterval(), properties);
                    }
                    properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
                    properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
                }
            }
        }
        return Response.ok(retVal.rowMap()).build();
    }
    Set<String> retVal = Sets.newHashSet();
    for (DruidServer druidServer : serverInventoryView.getInventory()) {
        if (druidServer.getTier().equalsIgnoreCase(tierName)) {
            retVal.addAll(Lists.newArrayList(Iterables.transform(druidServer.getDataSources(), new Function<DruidDataSource, String>() {

                @Override
                public String apply(DruidDataSource input) {
                    return input.getName();
                }
            })));
        }
    }
    return Response.ok(retVal).build();
}
Also used: DruidServer (io.druid.client.DruidServer), Map (java.util.Map), DataSegment (io.druid.timeline.DataSegment), DruidDataSource (io.druid.client.DruidDataSource), Interval (org.joda.time.Interval), Path (javax.ws.rs.Path), Produces (javax.ws.rs.Produces), GET (javax.ws.rs.GET)
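
The core of the simple branch is the Guava Table keyed by (dataSource, interval), whose rowMap() becomes the nested map the client sees. Below is a self-contained sketch of the same rollup, using plain string intervals and hard-coded sample sizes (both assumptions for illustration) so it compiles without Druid on the classpath.

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import java.util.HashMap;
import java.util.Map;

public class TierRollupSketch {
    public static void main(String[] args) {
        // Each row stands in for one DataSegment: {dataSource, interval, size}.
        String[][] segments = {
            {"wikipedia", "2017-01-01/2017-01-02", "100"},
            {"wikipedia", "2017-01-01/2017-01-02", "250"},
            {"metrics", "2017-01-02/2017-01-03", "40"},
        };
        Table<String, String, Map<String, Object>> retVal = HashBasedTable.create();
        for (String[] seg : segments) {
            Map<String, Object> properties = retVal.get(seg[0], seg[1]);
            if (properties == null) {
                properties = new HashMap<>();
                retVal.put(seg[0], seg[1], properties);
            }
            // Accumulate total size and segment count per (dataSource, interval) cell.
            properties.put("size", (Long) properties.getOrDefault("size", 0L) + Long.parseLong(seg[2]));
            properties.put("count", (Integer) properties.getOrDefault("count", 0) + 1);
        }
        // rowMap() nests the table as {dataSource -> {interval -> {size, count}}}.
        System.out.println(retVal.rowMap());
    }
}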

Example 13 with DruidDataSource

Use of io.druid.client.DruidDataSource in project druid by druid-io.

From the class HadoopConverterJobTest, method getDataSegments:

private List<DataSegment> getDataSegments(SQLMetadataSegmentManager manager) throws InterruptedException {
    manager.start();
    // start() begins asynchronous polling of the metadata store; spin until the
    // manager reports started, then force one poll so the inventory is populated.
    while (!manager.isStarted()) {
        Thread.sleep(10);
    }
    manager.poll();
    final DruidDataSource druidDataSource = manager.getInventoryValue(DATASOURCE);
    manager.stop();
    return Lists.newArrayList(druidDataSource.getSegments());
}
Also used: DruidDataSource (io.druid.client.DruidDataSource)
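
A hypothetical caller for this helper, assuming JUnit and the fixtures the real test sets up (manager stands for the configured SQLMetadataSegmentManager and DATASOURCE for the test's datasource constant); the method name and assertions are invented for illustration.

@Test
public void testConverterPublishesSegments() throws InterruptedException {
    // Hypothetical assertion block reusing the helper above.
    List<DataSegment> segments = getDataSegments(manager);
    Assert.assertFalse("expected at least one published segment", segments.isEmpty());
    for (DataSegment segment : segments) {
        Assert.assertEquals(DATASOURCE, segment.getDataSource());
    }
}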

Example 14 with DruidDataSource

Use of io.druid.client.DruidDataSource in project druid by druid-io.

From the class ClientInfoResource, method getSegmentsForDatasources:

private Map<String, List<DataSegment>> getSegmentsForDatasources() {
    final Map<String, List<DataSegment>> dataSourceMap = Maps.newHashMap();
    for (DruidServer server : serverInventoryView.getInventory()) {
        for (DruidDataSource dataSource : server.getDataSources()) {
            if (!dataSourceMap.containsKey(dataSource.getName())) {
                dataSourceMap.put(dataSource.getName(), Lists.<DataSegment>newArrayList());
            }
            List<DataSegment> segments = dataSourceMap.get(dataSource.getName());
            segments.addAll(dataSource.getSegments());
        }
    }
    return dataSourceMap;
}
Also used: DruidServer (io.druid.client.DruidServer), List (java.util.List), DruidDataSource (io.druid.client.DruidDataSource), DataSegment (io.druid.timeline.DataSegment)
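
The containsKey/put/get sequence is the pre-Java-8 grouping idiom. On Java 8 and later the same accumulation collapses to computeIfAbsent; here is a self-contained sketch with strings standing in for DruidServer and DataSegment (the sample data is invented for illustration).

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GroupingSketch {
    public static void main(String[] args) {
        // Each pair stands in for one (dataSource, segment) observation from a server.
        String[][] serverSegments = {
            {"wikipedia", "seg-1"},
            {"wikipedia", "seg-2"},
            {"metrics", "seg-3"},
        };
        Map<String, List<String>> dataSourceMap = new HashMap<>();
        for (String[] pair : serverSegments) {
            // computeIfAbsent replaces the containsKey/put/get sequence in one call.
            dataSourceMap.computeIfAbsent(pair[0], k -> new ArrayList<>()).add(pair[1]);
        }
        // Prints e.g. {wikipedia=[seg-1, seg-2], metrics=[seg-3]} (HashMap order may vary).
        System.out.println(dataSourceMap);
    }
}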

Example 15 with DruidDataSource

Use of io.druid.client.DruidDataSource in project druid by druid-io.

From the class DruidCoordinatorLogger, method run:

@Override
public DruidCoordinatorRuntimeParams run(DruidCoordinatorRuntimeParams params) {
    DruidCluster cluster = params.getDruidCluster();
    CoordinatorStats stats = params.getCoordinatorStats();
    ServiceEmitter emitter = params.getEmitter();
    Map<String, AtomicLong> assigned = stats.getPerTierStats().get("assignedCount");
    if (assigned != null) {
        for (Map.Entry<String, AtomicLong> entry : assigned.entrySet()) {
            log.info("[%s] : Assigned %s segments among %,d servers", entry.getKey(), entry.getValue().get(), cluster.get(entry.getKey()).size());
        }
    }
    emitTieredStats(emitter, "segment/assigned/count", assigned);
    Map<String, AtomicLong> dropped = stats.getPerTierStats().get("droppedCount");
    if (dropped != null) {
        for (Map.Entry<String, AtomicLong> entry : dropped.entrySet()) {
            log.info("[%s] : Dropped %s segments among %,d servers", entry.getKey(), entry.getValue().get(), cluster.get(entry.getKey()).size());
        }
    }
    emitTieredStats(emitter, "segment/dropped/count", dropped);
    emitTieredStats(emitter, "segment/cost/raw", stats.getPerTierStats().get("initialCost"));
    emitTieredStats(emitter, "segment/cost/normalization", stats.getPerTierStats().get("normalization"));
    emitTieredStats(emitter, "segment/moved/count", stats.getPerTierStats().get("movedCount"));
    emitTieredStats(emitter, "segment/deleted/count", stats.getPerTierStats().get("deletedCount"));
    Map<String, AtomicLong> normalized = stats.getPerTierStats().get("normalizedInitialCostTimesOneThousand");
    if (normalized != null) {
        // The stat is stored scaled by 1000 so it fits in an AtomicLong; unscale it
        // back to a double before emitting.
        emitTieredStats(emitter, "segment/cost/normalized", Maps.transformEntries(normalized, new Maps.EntryTransformer<String, AtomicLong, Number>() {

            @Override
            public Number transformEntry(String key, AtomicLong value) {
                return value.doubleValue() / 1000d;
            }
        }));
    }
    Map<String, AtomicLong> unneeded = stats.getPerTierStats().get("unneededCount");
    if (unneeded != null) {
        for (Map.Entry<String, AtomicLong> entry : unneeded.entrySet()) {
            log.info("[%s] : Removed %s unneeded segments among %,d servers", entry.getKey(), entry.getValue().get(), cluster.get(entry.getKey()).size());
        }
    }
    emitTieredStats(emitter, "segment/unneeded/count", stats.getPerTierStats().get("unneededCount"));
    emitter.emit(new ServiceMetricEvent.Builder().build("segment/overShadowed/count", stats.getGlobalStats().get("overShadowedCount")));
    Map<String, AtomicLong> moved = stats.getPerTierStats().get("movedCount");
    if (moved != null) {
        for (Map.Entry<String, AtomicLong> entry : moved.entrySet()) {
            log.info("[%s] : Moved %,d segment(s)", entry.getKey(), entry.getValue().get());
        }
    }
    final Map<String, AtomicLong> unmoved = stats.getPerTierStats().get("unmovedCount");
    if (unmoved != null) {
        for (Map.Entry<String, AtomicLong> entry : unmoved.entrySet()) {
            log.info("[%s] : Let alone %,d segment(s)", entry.getKey(), entry.getValue().get());
        }
    }
    log.info("Load Queues:");
    for (MinMaxPriorityQueue<ServerHolder> serverHolders : cluster.getSortedServersByTier()) {
        for (ServerHolder serverHolder : serverHolders) {
            ImmutableDruidServer server = serverHolder.getServer();
            LoadQueuePeon queuePeon = serverHolder.getPeon();
            log.info("Server[%s, %s, %s] has %,d left to load, %,d left to drop, %,d bytes queued, %,d bytes served.", server.getName(), server.getType(), server.getTier(), queuePeon.getSegmentsToLoad().size(), queuePeon.getSegmentsToDrop().size(), queuePeon.getLoadQueueSize(), server.getCurrSize());
            if (log.isDebugEnabled()) {
                for (DataSegment segment : queuePeon.getSegmentsToLoad()) {
                    log.debug("Segment to load[%s]", segment);
                }
                for (DataSegment segment : queuePeon.getSegmentsToDrop()) {
                    log.debug("Segment to drop[%s]", segment);
                }
            }
        }
    }
    // Emit coordinator metrics
    final Set<Map.Entry<String, LoadQueuePeon>> peonEntries = params.getLoadManagementPeons().entrySet();
    for (Map.Entry<String, LoadQueuePeon> entry : peonEntries) {
        String serverName = entry.getKey();
        LoadQueuePeon queuePeon = entry.getValue();
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.SERVER, serverName).build("segment/loadQueue/size", queuePeon.getLoadQueueSize()));
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.SERVER, serverName).build("segment/loadQueue/failed", queuePeon.getAndResetFailedAssignCount()));
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.SERVER, serverName).build("segment/loadQueue/count", queuePeon.getSegmentsToLoad().size()));
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.SERVER, serverName).build("segment/dropQueue/count", queuePeon.getSegmentsToDrop().size()));
    }
    for (Map.Entry<String, AtomicLong> entry : coordinator.getSegmentAvailability().entrySet()) {
        String datasource = entry.getKey();
        Long count = entry.getValue().get();
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.DATASOURCE, datasource).build("segment/unavailable/count", count));
    }
    for (Map.Entry<String, CountingMap<String>> entry : coordinator.getReplicationStatus().entrySet()) {
        String tier = entry.getKey();
        CountingMap<String> datasourceAvailabilities = entry.getValue();
        for (Map.Entry<String, AtomicLong> datasourceAvailability : datasourceAvailabilities.entrySet()) {
            String datasource = datasourceAvailability.getKey();
            Long count = datasourceAvailability.getValue().get();
            emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.TIER, tier).setDimension(DruidMetrics.DATASOURCE, datasource).build("segment/underReplicated/count", count));
        }
    }
    // Emit segment metrics
    CountingMap<String> segmentSizes = new CountingMap<String>();
    CountingMap<String> segmentCounts = new CountingMap<String>();
    for (DruidDataSource dataSource : params.getDataSources()) {
        for (DataSegment segment : dataSource.getSegments()) {
            segmentSizes.add(dataSource.getName(), segment.getSize());
            segmentCounts.add(dataSource.getName(), 1L);
        }
    }
    for (Map.Entry<String, Long> entry : segmentSizes.snapshot().entrySet()) {
        String dataSource = entry.getKey();
        Long size = entry.getValue();
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.DATASOURCE, dataSource).build("segment/size", size));
    }
    for (Map.Entry<String, Long> entry : segmentCounts.snapshot().entrySet()) {
        String dataSource = entry.getKey();
        Long count = entry.getValue();
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.DATASOURCE, dataSource).build("segment/count", count));
    }
    return params;
}
Also used: ServiceEmitter (com.metamx.emitter.service.ServiceEmitter), CoordinatorStats (io.druid.server.coordinator.CoordinatorStats), DataSegment (io.druid.timeline.DataSegment), LoadQueuePeon (io.druid.server.coordinator.LoadQueuePeon), DruidCluster (io.druid.server.coordinator.DruidCluster), DruidDataSource (io.druid.client.DruidDataSource), CountingMap (io.druid.collections.CountingMap), AtomicLong (java.util.concurrent.atomic.AtomicLong), ServerHolder (io.druid.server.coordinator.ServerHolder), ServiceMetricEvent (com.metamx.emitter.service.ServiceMetricEvent), Map (java.util.Map), ImmutableDruidServer (io.druid.client.ImmutableDruidServer)
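
The four per-server emits repeat the same builder chain with different metric names. A hypothetical extraction is sketched below; the class and method names are invented, and the Builder and emit calls only mirror those already used in run() above.

import com.metamx.emitter.service.ServiceEmitter;
import com.metamx.emitter.service.ServiceMetricEvent;

public final class MetricEmitHelper {
    private MetricEmitHelper() {}

    // Emits one single-dimension metric, mirroring the Builder usage in run().
    public static void emitWithDimension(ServiceEmitter emitter, String dimension, String dimensionValue, String metric, Number value) {
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(dimension, dimensionValue).build(metric, value));
    }
}

Each loop body then collapses to calls such as MetricEmitHelper.emitWithDimension(emitter, DruidMetrics.SERVER, serverName, "segment/loadQueue/size", queuePeon.getLoadQueueSize()).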

Aggregations (types used across these examples, with use counts)

DruidDataSource (io.druid.client.DruidDataSource): 26
DataSegment (io.druid.timeline.DataSegment): 19
Map (java.util.Map): 9
GET (javax.ws.rs.GET): 9
Produces (javax.ws.rs.Produces): 9
Interval (org.joda.time.Interval): 9
DruidServer (io.druid.client.DruidServer): 8
Path (javax.ws.rs.Path): 8
HashMap (java.util.HashMap): 7
Set (java.util.Set): 6
Response (javax.ws.rs.core.Response): 6
Test (org.junit.Test): 6
ResourceFilters (com.sun.jersey.spi.container.ResourceFilters): 5
List (java.util.List): 5
ImmutableMap (com.google.common.collect.ImmutableMap): 4
AuthConfig (io.druid.server.security.AuthConfig): 4
AuthorizationInfo (io.druid.server.security.AuthorizationInfo): 4
ImmutableDruidDataSource (io.druid.client.ImmutableDruidDataSource): 3
ImmutableDruidServer (io.druid.client.ImmutableDruidServer): 3
Access (io.druid.server.security.Access): 3