Use of io.druid.client.DruidServer in project druid by druid-io.
From the class DatasourcesResource, method getSegment:
private Pair<DataSegment, Set<String>> getSegment(String segmentId) {
  DataSegment theSegment = null;
  Set<String> servers = Sets.newHashSet();
  // Scan every server known to the inventory view and record each host that serves this segment.
  for (DruidServer druidServer : serverInventoryView.getInventory()) {
    DataSegment currSegment = druidServer.getSegments().get(segmentId);
    if (currSegment != null) {
      theSegment = currSegment;
      servers.add(druidServer.getHost());
    }
  }
  // No server reported the segment: signal "not found" to the caller with null.
  if (theSegment == null) {
    return null;
  }
  return new Pair<>(theSegment, servers);
}
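Callers of getSegment need both halves of the returned pair: the segment metadata and the set of hosting servers. Below is a minimal sketch of how a JAX-RS handler could wrap that result in a JSON response; the endpoint path, the "metadata"/"servers" keys, and the use of Guava's ImmutableMap are assumptions for illustration, not necessarily what DatasourcesResource itself does.

@GET
@Path("/{dataSourceName}/segments/{segmentId}")
@Produces(MediaType.APPLICATION_JSON)
public Response getSegmentDetails(@PathParam("dataSourceName") String dataSourceName, @PathParam("segmentId") String segmentId) {
  // getSegment() returns null when no server in the inventory view reports the segment.
  Pair<DataSegment, Set<String>> pair = getSegment(segmentId);
  if (pair == null) {
    return Response.status(Response.Status.NOT_FOUND).build();
  }
  // Response field names are assumptions for this sketch; pair.lhs is the segment, pair.rhs the hosting servers.
  return Response.ok(ImmutableMap.of("metadata", pair.lhs, "servers", pair.rhs)).build();
}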
Use of io.druid.client.DruidServer in project druid by druid-io.
From the class ServersResource, method getServerSegment:
@GET
@Path("/{serverName}/segments/{segmentId}")
@Produces(MediaType.APPLICATION_JSON)
public Response getServerSegment(@PathParam("serverName") String serverName, @PathParam("segmentId") String segmentId) {
  DruidServer server = serverInventoryView.getInventoryValue(serverName);
  if (server == null) {
    return Response.status(Response.Status.NOT_FOUND).build();
  }
  DataSegment segment = server.getSegment(segmentId);
  if (segment == null) {
    return Response.status(Response.Status.NOT_FOUND).build();
  }
  return Response.status(Response.Status.OK).entity(segment).build();
}
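A quick way to exercise both NOT_FOUND branches is to drive the resource directly against a stubbed inventory view. The sketch below uses EasyMock and JUnit purely for illustration; the constructor wiring of ServersResource is assumed here, and the real Druid test suite may be organized differently.

// Hypothetical unit-test sketch (wiring and mock framework choice are assumptions).
InventoryView inventoryView = EasyMock.createMock(InventoryView.class);
EasyMock.expect(inventoryView.getInventoryValue("unknownServer")).andReturn(null).once();
EasyMock.replay(inventoryView);

ServersResource resource = new ServersResource(inventoryView);
Response response = resource.getServerSegment("unknownServer", "someSegmentId");
Assert.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), response.getStatus());

EasyMock.verify(inventoryView);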
Use of io.druid.client.DruidServer in project druid by druid-io.
From the class ServersResource, method getServer:
@GET
@Path("/{serverName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getServer(@PathParam("serverName") String serverName, @QueryParam("simple") String simple) {
  DruidServer server = serverInventoryView.getInventoryValue(serverName);
  if (server == null) {
    return Response.status(Response.Status.NOT_FOUND).build();
  }
  Response.ResponseBuilder builder = Response.status(Response.Status.OK);
  if (simple != null) {
    return builder.entity(makeSimpleServer(server)).build();
  }
  return builder.entity(makeFullServer(server)).build();
}
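The simple view is meant to omit per-segment detail and return only summary fields. The helper below is a hypothetical sketch of what such a view could contain; makeSimpleServer's actual contents and the accessor names on DruidServer used here (getType, getCurrSize, getMaxSize) are assumptions, not copied from the project.

// Hypothetical stripped-down view of a DruidServer; field and accessor names are assumptions for this sketch.
private Map<String, Object> makeSimpleServerSketch(DruidServer input) {
  Map<String, Object> server = Maps.newHashMap();
  server.put("host", input.getHost());
  server.put("tier", input.getTier());
  server.put("type", input.getType());
  server.put("currSize", input.getCurrSize());
  server.put("maxSize", input.getMaxSize());
  return server;
}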
Use of io.druid.client.DruidServer in project druid by druid-io.
From the class TiersResource, method getTierDatasources:
@GET
@Path("/{tierName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getTierDatasources(@PathParam("tierName") String tierName, @QueryParam("simple") String simple) {
  if (simple != null) {
    // "simple" view: per data source and interval, aggregate total size and segment count across the tier.
    Table<String, Interval, Map<String, Object>> retVal = HashBasedTable.create();
    for (DruidServer druidServer : serverInventoryView.getInventory()) {
      if (druidServer.getTier().equalsIgnoreCase(tierName)) {
        for (DataSegment dataSegment : druidServer.getSegments().values()) {
          Map<String, Object> properties = retVal.get(dataSegment.getDataSource(), dataSegment.getInterval());
          if (properties == null) {
            properties = Maps.newHashMap();
            retVal.put(dataSegment.getDataSource(), dataSegment.getInterval(), properties);
          }
          properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
          properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
        }
      }
    }
    return Response.ok(retVal.rowMap()).build();
  }
  // Default view: just the names of the data sources served by this tier.
  Set<String> retVal = Sets.newHashSet();
  for (DruidServer druidServer : serverInventoryView.getInventory()) {
    if (druidServer.getTier().equalsIgnoreCase(tierName)) {
      retVal.addAll(Lists.newArrayList(Iterables.transform(druidServer.getDataSources(), new Function<DruidDataSource, String>() {
        @Override
        public String apply(DruidDataSource input) {
          return input.getName();
        }
      })));
    }
  }
  return Response.ok(retVal).build();
}
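The default branch only collects data source names for the tier. On Java 8 the anonymous Guava Function above can be replaced by a stream pipeline; the sketch below should behave the same, assuming getInventory() and getDataSources() both return Iterables as in the original code.

// Stream-based sketch of the default branch (Java 8+), equivalent to the Guava Iterables.transform version.
Set<String> dataSourceNames = StreamSupport.stream(serverInventoryView.getInventory().spliterator(), false)
    .filter(druidServer -> druidServer.getTier().equalsIgnoreCase(tierName))
    .flatMap(druidServer -> StreamSupport.stream(druidServer.getDataSources().spliterator(), false))
    .map(DruidDataSource::getName)
    .collect(Collectors.toSet());
return Response.ok(dataSourceNames).build();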
Use of io.druid.client.DruidServer in project druid by druid-io.
From the class BatchServerInventoryViewTest, method setUp:
@Before
public void setUp() throws Exception {
  // Stand up a single-node ZooKeeper test cluster and a Curator client connected to it.
  testingCluster = new TestingCluster(1);
  testingCluster.start();
  cf = CuratorFrameworkFactory.builder()
                              .connectString(testingCluster.getConnectString())
                              .retryPolicy(new ExponentialBackoffRetry(1, 10))
                              .compressionProvider(new PotentiallyGzippedCompressionProvider(true))
                              .build();
  cf.start();
  cf.blockUntilConnected();
  cf.create().creatingParentsIfNeeded().forPath(testBasePath);
  jsonMapper = new DefaultObjectMapper();
  // Announcer that publishes segment metadata to ZooKeeper in batches of up to 50 segments per node.
  announcer = new Announcer(cf, MoreExecutors.sameThreadExecutor());
  announcer.start();
  segmentAnnouncer = new BatchDataSegmentAnnouncer(
      new DruidServerMetadata("id", "host", Long.MAX_VALUE, "type", "tier", 0),
      new BatchDataSegmentAnnouncerConfig() {
        @Override
        public int getSegmentsPerNode() {
          return 50;
        }
      },
      new ZkPathsConfig() {
        @Override
        public String getBase() {
          return testBasePath;
        }
      },
      announcer,
      jsonMapper
  );
  segmentAnnouncer.start();
  testSegments = Sets.newConcurrentHashSet();
  for (int i = 0; i < INITIAL_SEGMENTS; i++) {
    testSegments.add(makeSegment(i));
  }
  // Unfiltered view: accepts every (server, segment) pair.
  batchServerInventoryView = new BatchServerInventoryView(
      new ZkPathsConfig() {
        @Override
        public String getBase() {
          return testBasePath;
        }
      },
      cf,
      jsonMapper,
      Predicates.<Pair<DruidServerMetadata, DataSegment>>alwaysTrue()
  );
  batchServerInventoryView.start();
  inventoryUpdateCounter.set(0);
  // Filtered view: keeps only segments whose interval starts before SEGMENT_INTERVAL_START + INITIAL_SEGMENTS days,
  // and counts inner-inventory updates so the test can assert how often the inventory changes.
  filteredBatchServerInventoryView = new BatchServerInventoryView(
      new ZkPathsConfig() {
        @Override
        public String getBase() {
          return testBasePath;
        }
      },
      cf,
      jsonMapper,
      new Predicate<Pair<DruidServerMetadata, DataSegment>>() {
        @Override
        public boolean apply(@Nullable Pair<DruidServerMetadata, DataSegment> input) {
          return input.rhs.getInterval().getStart().isBefore(SEGMENT_INTERVAL_START.plusDays(INITIAL_SEGMENTS));
        }
      }
  ) {
    @Override
    protected DruidServer addInnerInventory(DruidServer container, String inventoryKey, Set<DataSegment> inventory) {
      DruidServer server = super.addInnerInventory(container, inventoryKey, inventory);
      inventoryUpdateCounter.incrementAndGet();
      return server;
    }
  };
  filteredBatchServerInventoryView.start();
}
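With this setup in place, a typical test announces the prepared segments and then asserts that the inventory view reflects them. The sketch below is simplified: the real test class uses its own synchronization helpers instead of a sleep, and the assertions shown here are only illustrative.

@Test
public void testInventorySeesAnnouncedSegments() throws Exception {
  // Announce everything created in setUp() through the batch announcer.
  segmentAnnouncer.announceSegments(testSegments);

  // Placeholder for the view syncing with ZooKeeper; the actual test waits on a proper sync helper
  // rather than sleeping, which would be flaky in practice.
  Thread.sleep(5000);

  // Expect a single server ("id" from the DruidServerMetadata above) exposing all announced segments.
  DruidServer server = Iterables.getOnlyElement(batchServerInventoryView.getInventory());
  Assert.assertEquals("id", server.getName());
  Assert.assertEquals(testSegments.size(), server.getSegments().size());
}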