Example use of com.sun.jersey.spi.container.ResourceFilters in the druid project (druid-io): class DatasourcesResource, method getSegmentDataSourceSegments.
@GET
@Path("/{dataSourceName}/segments")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getSegmentDataSourceSegments(@PathParam("dataSourceName") String dataSourceName, @QueryParam("full") String full) {
    final DruidDataSource dataSource = getDataSource(dataSourceName);
    if (dataSource == null) {
        // Unknown datasource: reply with 204 No Content.
        return Response.noContent().build();
    }
    final Response.ResponseBuilder builder = Response.ok();
    if (full != null) {
        // "full" query param present: serialize the complete DataSegment objects.
        return builder.entity(dataSource.getSegments()).build();
    }
    // Default view: only the segment identifier strings.
    final Iterable<Object> identifiers = Iterables.transform(dataSource.getSegments(), new Function<DataSegment, Object>() {
        @Override
        public Object apply(DataSegment segment) {
            return segment.getIdentifier();
        }
    });
    return builder.entity(identifiers).build();
}
Example use of com.sun.jersey.spi.container.ResourceFilters in the druid project (druid-io): class MetadataResource, method getDatabaseSegmentDataSourceSegments.
@GET
@Path("/datasources/{dataSourceName}/segments")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getDatabaseSegmentDataSourceSegments(@PathParam("dataSourceName") String dataSourceName, @QueryParam("full") String full) {
    final DruidDataSource dataSource = metadataSegmentManager.getInventoryValue(dataSourceName);
    if (dataSource == null) {
        // Datasource not present in metadata storage: 404.
        return Response.status(Response.Status.NOT_FOUND).build();
    }
    final Response.ResponseBuilder builder = Response.status(Response.Status.OK);
    if (full != null) {
        // "full" query param present: serialize the complete DataSegment objects.
        return builder.entity(dataSource.getSegments()).build();
    }
    // Default view: only the segment identifier strings.
    final Iterable<String> identifiers = Iterables.transform(dataSource.getSegments(), new Function<DataSegment, String>() {
        @Override
        public String apply(DataSegment segment) {
            return segment.getIdentifier();
        }
    });
    return builder.entity(identifiers).build();
}
Example use of com.sun.jersey.spi.container.ResourceFilters in the druid project (druid-io): class ClientInfoResource, method getDatasourceDimensions.
@GET
@Path("/{dataSourceName}/dimensions")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Iterable<String> getDatasourceDimensions(@PathParam("dataSourceName") String dataSourceName, @QueryParam("interval") String interval) {
    final Set<String> dimensions = Sets.newHashSet();
    final List<DataSegment> segments = getSegmentsForDatasources().get(dataSourceName);
    if (segments == null || segments.isEmpty()) {
        // No segments known for this datasource: empty result.
        return dimensions;
    }
    // Resolve the interval to scan: explicit query param, or the configured default history ending now.
    final Interval theInterval;
    if (interval == null || interval.isEmpty()) {
        theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), getCurrentTime());
    } else {
        theInterval = new Interval(interval);
    }
    // Union of dimension names across every segment overlapping the interval.
    for (DataSegment segment : segments) {
        if (theInterval.overlaps(segment.getInterval())) {
            dimensions.addAll(segment.getDimensions());
        }
    }
    return dimensions;
}
Example use of com.sun.jersey.spi.container.ResourceFilters in the druid project (druid-io): class ClientInfoResource, method getDatasource.
@GET
@Path("/{dataSourceName}")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Map<String, Object> getDatasource(@PathParam("dataSourceName") String dataSourceName, @QueryParam("interval") String interval, @QueryParam("full") String full) {
    // Without "full", delegate to the per-column endpoints and return a two-key summary.
    if (full == null) {
        return ImmutableMap.<String, Object>of(KEY_DIMENSIONS, getDatasourceDimensions(dataSourceName, interval), KEY_METRICS, getDatasourceMetrics(dataSourceName, interval));
    }
    // Resolve the interval to scan: explicit query param, or the configured default history ending now.
    Interval theInterval;
    if (interval == null || interval.isEmpty()) {
        DateTime now = getCurrentTime();
        theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now);
    } else {
        theInterval = new Interval(interval);
    }
    TimelineLookup<String, ServerSelector> timeline = timelineServerView.getTimeline(new TableDataSource(dataSourceName));
    Iterable<TimelineObjectHolder<String, ServerSelector>> serversLookup = timeline != null ? timeline.lookup(theInterval) : null;
    if (serversLookup == null || Iterables.isEmpty(serversLookup)) {
        // Type-safe empty map (was raw Collections.EMPTY_MAP, which caused an unchecked assignment).
        return Collections.emptyMap();
    }
    // TreeMap ordered by interval start; overlapping intervals compare as equal so a later
    // overlapping entry replaces the earlier one rather than creating a duplicate key.
    Map<Interval, Object> servedIntervals = new TreeMap<>(new Comparator<Interval>() {
        @Override
        public int compare(Interval o1, Interval o2) {
            if (o1.equals(o2) || o1.overlaps(o2)) {
                return 0;
            } else {
                return o1.isBefore(o2) ? -1 : 1;
            }
        }
    });
    for (TimelineObjectHolder<String, ServerSelector> holder : serversLookup) {
        final Set<Object> dimensions = Sets.newHashSet();
        final Set<Object> metrics = Sets.newHashSet();
        final PartitionHolder<ServerSelector> partitionHolder = holder.getObject();
        // Only fully-served intervals report their columns; incomplete ones get empty sets.
        if (partitionHolder.isComplete()) {
            for (ServerSelector server : partitionHolder.payloads()) {
                final DataSegment segment = server.getSegment();
                dimensions.addAll(segment.getDimensions());
                metrics.addAll(segment.getMetrics());
            }
        }
        servedIntervals.put(holder.getInterval(), ImmutableMap.of(KEY_DIMENSIONS, dimensions, KEY_METRICS, metrics));
    }
    //collapse intervals if they abut and have same set of columns
    Map<String, Object> result = Maps.newLinkedHashMap();
    Interval curr = null;
    Map<String, Set<String>> cols = null;
    for (Map.Entry<Interval, Object> e : servedIntervals.entrySet()) {
        Interval ival = e.getKey();
        if (curr != null && curr.abuts(ival) && cols.equals(e.getValue())) {
            // Same columns and contiguous: extend the current merged interval.
            curr = curr.withEnd(ival.getEnd());
        } else {
            if (curr != null) {
                result.put(curr.toString(), cols);
            }
            curr = ival;
            // Unchecked by construction: values above are always Map-of-KEY_DIMENSIONS/KEY_METRICS.
            cols = (Map<String, Set<String>>) e.getValue();
        }
    }
    //add the last one in
    if (curr != null) {
        result.put(curr.toString(), cols);
    }
    return result;
}
Example use of com.sun.jersey.spi.container.ResourceFilters in the druid project (druid-io): class DatasourcesResource, method getSegmentDataSourceSpecificInterval.
@GET
@Path("/{dataSourceName}/intervals/{interval}")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
// Lists the segments of a datasource that fall within a specific interval.
// The {interval} path segment encodes "/" as "_" (e.g. 2012-01-01_2012-01-02).
// Response shape depends on the query params:
//   full   -> interval -> segmentId -> {metadata, servers}, intervals in reverse start order
//   simple -> interval -> {size, count} aggregates (unordered HashMap)
//   (none) -> flat set of segment identifiers, reverse case-insensitive order
public Response getSegmentDataSourceSpecificInterval(@PathParam("dataSourceName") String dataSourceName, @PathParam("interval") String interval, @QueryParam("simple") String simple, @QueryParam("full") String full) {
final DruidDataSource dataSource = getDataSource(dataSourceName);
// Decode the URL-safe interval before the null check so a bad interval fails fast.
final Interval theInterval = new Interval(interval.replace("_", "/"));
if (dataSource == null) {
// Unknown datasource: 204 No Content.
return Response.noContent().build();
}
// Newest-first ordering for the "full" view below.
final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
if (full != null) {
// Full view: per-interval map of segmentId -> segment metadata plus serving servers.
final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
for (DataSegment dataSegment : dataSource.getSegments()) {
if (theInterval.contains(dataSegment.getInterval())) {
Map<String, Object> segments = retVal.get(dataSegment.getInterval());
if (segments == null) {
segments = Maps.newHashMap();
retVal.put(dataSegment.getInterval(), segments);
}
// getSegment resolves the segment's metadata (lhs) and the servers holding it (rhs).
Pair<DataSegment, Set<String>> val = getSegment(dataSegment.getIdentifier());
segments.put(dataSegment.getIdentifier(), ImmutableMap.of("metadata", val.lhs, "servers", val.rhs));
}
}
return Response.ok(retVal).build();
}
if (simple != null) {
// Simple view: aggregate total byte size and segment count per interval.
// NOTE(review): uses an unordered HashMap, unlike the sorted TreeMap of the
// "full" branch — presumably intentional, but worth confirming.
final Map<Interval, Map<String, Object>> retVal = Maps.newHashMap();
for (DataSegment dataSegment : dataSource.getSegments()) {
if (theInterval.contains(dataSegment.getInterval())) {
Map<String, Object> properties = retVal.get(dataSegment.getInterval());
if (properties == null) {
properties = Maps.newHashMap();
properties.put("size", dataSegment.getSize());
properties.put("count", 1);
retVal.put(dataSegment.getInterval(), properties);
} else {
// Accumulate onto the existing entry for this interval.
properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
}
}
}
return Response.ok(retVal).build();
}
// Default view: just the identifiers of segments fully contained in the interval.
final Set<String> retVal = Sets.newTreeSet(Comparators.inverse(String.CASE_INSENSITIVE_ORDER));
for (DataSegment dataSegment : dataSource.getSegments()) {
if (theInterval.contains(dataSegment.getInterval())) {
retVal.add(dataSegment.getIdentifier());
}
}
return Response.ok(retVal).build();
}
Aggregations