Use of org.apache.druid.client.DruidServer in project druid by druid-io.
The class TiersResource, method getTierDataSources.
@GET
@Path("/{tierName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getTierDataSources(@PathParam("tierName") String tierName, @QueryParam("simple") String simple)
{
  if (simple != null) {
    // "simple" mode: per-dataSource, per-interval totals of segment size and segment count for this tier
    Map<String, Map<Interval, Map<IntervalProperties, Object>>> tierToStatsPerInterval = new HashMap<>();
    for (DruidServer druidServer : serverInventoryView.getInventory()) {
      if (druidServer.getTier().equalsIgnoreCase(tierName)) {
        for (DataSegment dataSegment : druidServer.iterateAllSegments()) {
          Map<IntervalProperties, Object> properties = tierToStatsPerInterval
              .computeIfAbsent(dataSegment.getDataSource(), dsName -> new HashMap<>())
              .computeIfAbsent(dataSegment.getInterval(), interval -> new EnumMap<>(IntervalProperties.class));
          properties.merge(IntervalProperties.size, dataSegment.getSize(), (a, b) -> (Long) a + (Long) b);
          properties.merge(IntervalProperties.count, 1, (a, b) -> (Integer) a + (Integer) b);
        }
      }
    }
    return Response.ok(tierToStatsPerInterval).build();
  }
  // Default mode: just the names of the data sources served in this tier
  Set<String> retVal = serverInventoryView
      .getInventory()
      .stream()
      .filter(druidServer -> druidServer.getTier().equalsIgnoreCase(tierName))
      .flatMap(druidServer -> druidServer.getDataSources().stream().map(DruidDataSource::getName))
      .collect(Collectors.toSet());
  return Response.ok(retVal).build();
}
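The nested computeIfAbsent/merge calls above accumulate two statistics (total size and segment count) per data source and interval. A minimal, self-contained sketch of the same accumulation pattern, using a plain String interval key and a typed long[] accumulator instead of Druid's Object-valued EnumMap, might look like this (the class and method names here are hypothetical):

import java.util.HashMap;
import java.util.Map;

// Hypothetical standalone sketch of the per-dataSource, per-interval aggregation
// performed in getTierDataSources: sizes are summed and segment counts incremented.
public class IntervalStatsSketch
{
  // dataSource -> interval (as a plain string here, for simplicity) -> [totalSize, count]
  private final Map<String, Map<String, long[]>> stats = new HashMap<>();

  public void addSegment(String dataSource, String interval, long size)
  {
    long[] acc = stats
        .computeIfAbsent(dataSource, ds -> new HashMap<>())
        .computeIfAbsent(interval, iv -> new long[2]);
    acc[0] += size; // total bytes, analogous to IntervalProperties.size
    acc[1] += 1;    // segment count, analogous to IntervalProperties.count
  }

  public Map<String, Map<String, long[]>> snapshot()
  {
    return stats;
  }

  public static void main(String[] args)
  {
    IntervalStatsSketch sketch = new IntervalStatsSketch();
    sketch.addSegment("wikipedia", "2024-01-01/2024-01-02", 1_000L);
    sketch.addSegment("wikipedia", "2024-01-01/2024-01-02", 2_000L);
    // prints 3000: two segments merged into one interval entry
    System.out.println(sketch.snapshot().get("wikipedia").get("2024-01-01/2024-01-02")[0]);
  }
}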
Use of org.apache.druid.client.DruidServer in project druid by druid-io.
The class ServersResource, method getServer.
@GET
@Path("/{serverName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getServer(@PathParam("serverName") String serverName, @QueryParam("simple") String simple)
{
  DruidServer server = serverInventoryView.getInventoryValue(serverName);
  if (server == null) {
    return Response.status(Response.Status.NOT_FOUND).build();
  }
  Response.ResponseBuilder builder = Response.status(Response.Status.OK);
  if (simple != null) {
    return builder.entity(makeSimpleServer(server)).build();
  }
  return builder.entity(makeFullServer(server)).build();
}
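For reference, a handler like this can be exercised over HTTP. The sketch below uses the JDK 11 HttpClient; the host, port, server name, and the /druid/coordinator/v1/servers path prefix are assumptions about a typical Coordinator deployment, not values taken from the snippet above:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Hedged example of calling a "get server" endpoint; URL details are assumptions.
public class GetServerExample
{
  public static void main(String[] args) throws Exception
  {
    HttpClient client = HttpClient.newHttpClient();
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8081/druid/coordinator/v1/servers/historical-host:8083?simple"))
        .GET()
        .build();
    HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
    // 404 means the server name is not in the inventory view; 200 returns the
    // simple or full JSON representation depending on the "simple" query parameter.
    System.out.println(response.statusCode() + ": " + response.body());
  }
}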
Use of org.apache.druid.client.DruidServer in project druid by druid-io.
The class DruidCoordinator, method getLoadStatus.
public Map<String, Double> getLoadStatus()
{
  final Map<String, Double> loadStatus = new HashMap<>();
  final Collection<ImmutableDruidDataSource> dataSources =
      segmentsMetadataManager.getImmutableDataSourcesWithAllUsedSegments();
  for (ImmutableDruidDataSource dataSource : dataSources) {
    final Set<DataSegment> segments = Sets.newHashSet(dataSource.getSegments());
    final int numPublishedSegments = segments.size();
    // remove loaded segments
    for (DruidServer druidServer : serverInventoryView.getInventory()) {
      final DruidDataSource loadedView = druidServer.getDataSource(dataSource.getName());
      if (loadedView != null) {
        // Please see https://github.com/apache/druid/pull/5632 and LoadStatusBenchmark for more info.
        for (DataSegment serverSegment : loadedView.getSegments()) {
          segments.remove(serverSegment);
        }
      }
    }
    final int numUnavailableSegments = segments.size();
    loadStatus.put(
        dataSource.getName(),
        100 * ((double) (numPublishedSegments - numUnavailableSegments) / (double) numPublishedSegments)
    );
  }
  return loadStatus;
}
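The value stored per data source is simply the percentage of published segments that are already loaded on some server. A tiny standalone illustration of that formula (the class and method names here are hypothetical):

// Minimal sketch of the load-percentage formula used in getLoadStatus above.
public class LoadStatusFormula
{
  static double loadPercentage(int numPublishedSegments, int numUnavailableSegments)
  {
    return 100 * ((double) (numPublishedSegments - numUnavailableSegments) / (double) numPublishedSegments);
  }

  public static void main(String[] args)
  {
    // e.g. 200 published segments, 10 of them not yet loaded anywhere -> 95.0
    System.out.println(loadPercentage(200, 10));
  }
}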
Use of org.apache.druid.client.DruidServer in project druid by druid-io.
The class BatchServerInventoryViewTest, method testRunWithFilterCallback.
@Test
public void testRunWithFilterCallback() throws Exception
{
  final CountDownLatch removeCallbackLatch = new CountDownLatch(1);
  segmentAnnouncer.announceSegments(testSegments);
  waitForSync(filteredBatchServerInventoryView, testSegments);
  DruidServer server = Iterables.get(filteredBatchServerInventoryView.getInventory(), 0);
  Set<DataSegment> segments = Sets.newHashSet(server.iterateAllSegments());
  Assert.assertEquals(testSegments, segments);
  ServerView.SegmentCallback callback = EasyMock.createStrictMock(ServerView.SegmentCallback.class);
  Comparator<DataSegment> dataSegmentComparator =
      Comparator.comparing(DataSegment::getInterval, Comparators.intervalsByStartThenEnd());
  EasyMock
      .expect(
          callback.segmentAdded(
              EasyMock.anyObject(),
              EasyMock.cmp(makeSegment(INITIAL_SEGMENTS + 2), dataSegmentComparator, LogicalOperator.EQUAL)
          )
      )
      .andReturn(ServerView.CallbackAction.CONTINUE)
      .times(1);
  EasyMock
      .expect(
          callback.segmentRemoved(
              EasyMock.anyObject(),
              EasyMock.cmp(makeSegment(INITIAL_SEGMENTS + 2), dataSegmentComparator, LogicalOperator.EQUAL)
          )
      )
      .andAnswer(new IAnswer<ServerView.CallbackAction>() {
        @Override
        public ServerView.CallbackAction answer()
        {
          removeCallbackLatch.countDown();
          return ServerView.CallbackAction.CONTINUE;
        }
      })
      .times(1);
  EasyMock.replay(callback);
  filteredBatchServerInventoryView.registerSegmentCallback(
      Execs.directExecutor(),
      callback,
      new Predicate<Pair<DruidServerMetadata, DataSegment>>() {
        @Override
        public boolean apply(@Nullable Pair<DruidServerMetadata, DataSegment> input)
        {
          return input.rhs.getInterval().getStart().equals(SEGMENT_INTERVAL_START.plusDays(INITIAL_SEGMENTS + 2));
        }
      }
  );
  DataSegment segment2 = makeSegment(INITIAL_SEGMENTS + 2);
  segmentAnnouncer.announceSegment(segment2);
  testSegments.add(segment2);
  DataSegment oldSegment = makeSegment(-1);
  segmentAnnouncer.announceSegment(oldSegment);
  testSegments.add(oldSegment);
  segmentAnnouncer.unannounceSegment(oldSegment);
  testSegments.remove(oldSegment);
  waitForSync(filteredBatchServerInventoryView, testSegments);
  segmentAnnouncer.unannounceSegment(segment2);
  testSegments.remove(segment2);
  waitForSync(filteredBatchServerInventoryView, testSegments);
  TIMING.forWaiting().awaitLatch(removeCallbackLatch);
  EasyMock.verify(callback);
}
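The test registers a segment callback together with a predicate, so the callback only fires for the one segment whose interval matches the filter. A simplified, self-contained sketch of that filtered-callback pattern, using java.util.function types instead of the Guava Predicate and Druid-specific pair type (all names below are hypothetical):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Predicate;

// Hypothetical sketch of a filtered-callback registry: a callback is only invoked
// for events that pass the predicate supplied at registration time.
public class FilteredCallbacks<T>
{
  private final List<Predicate<T>> filters = new ArrayList<>();
  private final List<Consumer<T>> callbacks = new ArrayList<>();

  public void register(Predicate<T> filter, Consumer<T> callback)
  {
    filters.add(filter);
    callbacks.add(callback);
  }

  public void publish(T event)
  {
    for (int i = 0; i < callbacks.size(); i++) {
      if (filters.get(i).test(event)) {
        callbacks.get(i).accept(event);
      }
    }
  }

  public static void main(String[] args)
  {
    FilteredCallbacks<String> bus = new FilteredCallbacks<>();
    bus.register(s -> s.startsWith("segment-7"), s -> System.out.println("matched: " + s));
    bus.publish("segment-1"); // filtered out, callback not invoked
    bus.publish("segment-7"); // printed
  }
}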
Use of org.apache.druid.client.DruidServer in project druid by druid-io.
The class BatchServerInventoryViewTest, method testSameTimeZnode.
@Test
public void testSameTimeZnode() throws Exception
{
  final int numThreads = INITIAL_SEGMENTS / 10;
  final ListeningExecutorService executor =
      MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(numThreads));
  segmentAnnouncer.announceSegments(testSegments);
  waitForSync(batchServerInventoryView, testSegments);
  DruidServer server = Iterables.get(batchServerInventoryView.getInventory(), 0);
  final Set<DataSegment> segments = Sets.newHashSet(server.iterateAllSegments());
  Assert.assertEquals(testSegments, segments);
  final CountDownLatch latch = new CountDownLatch(numThreads);
  final List<ListenableFuture<BatchDataSegmentAnnouncer>> futures = new ArrayList<>();
  for (int i = 0; i < numThreads; ++i) {
    final int ii = i;
    futures.add(executor.submit(new Callable<BatchDataSegmentAnnouncer>() {
      @Override
      public BatchDataSegmentAnnouncer call()
      {
        BatchDataSegmentAnnouncer segmentAnnouncer = new BatchDataSegmentAnnouncer(
            new DruidServerMetadata("id", "host", null, Long.MAX_VALUE, ServerType.HISTORICAL, "tier", 0),
            new BatchDataSegmentAnnouncerConfig() {
              @Override
              public int getSegmentsPerNode()
              {
                return 50;
              }
            },
            new ZkPathsConfig() {
              @Override
              public String getBase()
              {
                return TEST_BASE_PATH;
              }
            },
            announcer,
            jsonMapper
        );
        List<DataSegment> segments = new ArrayList<DataSegment>();
        try {
          for (int j = 0; j < INITIAL_SEGMENTS / numThreads; ++j) {
            segments.add(makeSegment(INITIAL_SEGMENTS + ii + numThreads * j));
          }
          latch.countDown();
          latch.await();
          segmentAnnouncer.announceSegments(segments);
          testSegments.addAll(segments);
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
        return segmentAnnouncer;
      }
    }));
  }
  final List<BatchDataSegmentAnnouncer> announcers = Futures.allAsList(futures).get();
  Assert.assertEquals(INITIAL_SEGMENTS * 2, testSegments.size());
  waitForSync(batchServerInventoryView, testSegments);
  Assert.assertEquals(testSegments, Sets.newHashSet(server.iterateAllSegments()));
  for (int i = 0; i < INITIAL_SEGMENTS; ++i) {
    final DataSegment segment = makeSegment(100 + i);
    segmentAnnouncer.unannounceSegment(segment);
    testSegments.remove(segment);
  }
  waitForSync(batchServerInventoryView, testSegments);
  Assert.assertEquals(testSegments, Sets.newHashSet(server.iterateAllSegments()));
}
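The CountDownLatch in the test makes every announcer thread build its segment list first and only announce once all threads have arrived, so the announcements happen at effectively the same time. A minimal standalone sketch of that latch pattern, with a hypothetical class name and plain JDK executors in place of Guava's listening executor:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

// Minimal sketch of the latch pattern used by the test: every worker counts down
// and then awaits, so all of them proceed at (roughly) the same moment.
public class SimultaneousStart
{
  public static void main(String[] args) throws Exception
  {
    final int numThreads = 4;
    final CountDownLatch latch = new CountDownLatch(numThreads);
    ExecutorService executor = Executors.newFixedThreadPool(numThreads);
    List<Future<Long>> futures = new ArrayList<>();
    for (int i = 0; i < numThreads; i++) {
      futures.add(executor.submit((Callable<Long>) () -> {
        latch.countDown();
        latch.await();                     // released only once all threads arrive
        return System.currentTimeMillis(); // stand-in for "announce segments now"
      }));
    }
    for (Future<Long> f : futures) {
      System.out.println(f.get());
    }
    executor.shutdown();
  }
}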