Example 16 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class DatasourcesResource, method getSegmentDataSourceSpecificInterval.

/**
   * Provides a server view for a datasource and interval, giving details about the servers
   * hosting segments within that interval. Used by realtime tasks to fetch a view of the
   * interval they are interested in.
   */
@GET
@Path("/{dataSourceName}/intervals/{interval}/serverview")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getSegmentDataSourceSpecificInterval(
    @PathParam("dataSourceName") String dataSourceName,
    @PathParam("interval") String interval,
    @QueryParam("partial") final boolean partial
) {
    TimelineLookup<String, SegmentLoadInfo> timeline = serverInventoryView.getTimeline(new TableDataSource(dataSourceName));
    final Interval theInterval = new Interval(interval.replace("_", "/"));
    if (timeline == null) {
        log.debug("No timeline found for datasource[%s]", dataSourceName);
        return Response.ok(Lists.<ImmutableSegmentLoadInfo>newArrayList()).build();
    }
    Iterable<TimelineObjectHolder<String, SegmentLoadInfo>> lookup = timeline.lookupWithIncompletePartitions(theInterval);
    FunctionalIterable<ImmutableSegmentLoadInfo> retval = FunctionalIterable
        .create(lookup)
        .transformCat(new Function<TimelineObjectHolder<String, SegmentLoadInfo>, Iterable<ImmutableSegmentLoadInfo>>() {

        @Override
        public Iterable<ImmutableSegmentLoadInfo> apply(TimelineObjectHolder<String, SegmentLoadInfo> input) {
            return Iterables.transform(input.getObject(), new Function<PartitionChunk<SegmentLoadInfo>, ImmutableSegmentLoadInfo>() {

                @Override
                public ImmutableSegmentLoadInfo apply(PartitionChunk<SegmentLoadInfo> chunk) {
                    return chunk.getObject().toImmutableSegmentLoadInfo();
                }
            });
        }
    });
    return Response.ok(retval).build();
}
Also used : FunctionalIterable(io.druid.java.util.common.guava.FunctionalIterable) ImmutableSegmentLoadInfo(io.druid.client.ImmutableSegmentLoadInfo) SegmentLoadInfo(io.druid.client.SegmentLoadInfo) Function(com.google.common.base.Function) TimelineObjectHolder(io.druid.timeline.TimelineObjectHolder) TableDataSource(io.druid.query.TableDataSource) PartitionChunk(io.druid.timeline.partition.PartitionChunk) Interval(org.joda.time.Interval) Path(javax.ws.rs.Path) ResourceFilters(com.sun.jersey.spi.container.ResourceFilters) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
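
A minimal client-side sketch of how the {interval} path segment is encoded. The handler above turns "_" back into "/", so callers substitute "_" for the ISO-8601 separator to keep the interval inside a single path segment. The host, port, and base path below are assumptions for illustration only.

import org.joda.time.Interval;

public class ServerViewUrlExample {
    public static void main(String[] args) {
        Interval interval = new Interval("2014-10-20T00:00:00Z/2014-10-21T00:00:00Z");
        // Swap the ISO-8601 "/" separator for "_" so the interval survives as one path segment;
        // getSegmentDataSourceSpecificInterval() reverses this with interval.replace("_", "/").
        String pathSegment = interval.toString().replace("/", "_");
        // Hypothetical coordinator address and base path, for illustration.
        String url = "http://coordinator.example.com:8081/druid/coordinator/v1/datasources/"
                + "wikipedia/intervals/" + pathSegment + "/serverview";
        System.out.println(url);
    }
}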

Example 17 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class DatasourcesResource, method getSegmentDataSourceIntervals.

@GET
@Path("/{dataSourceName}/intervals")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getSegmentDataSourceIntervals(
    @PathParam("dataSourceName") String dataSourceName,
    @QueryParam("simple") String simple,
    @QueryParam("full") String full
) {
    final DruidDataSource dataSource = getDataSource(dataSourceName);
    if (dataSource == null) {
        return Response.noContent().build();
    }
    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
    if (full != null) {
        final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
        for (DataSegment dataSegment : dataSource.getSegments()) {
            Map<String, Object> segments = retVal.get(dataSegment.getInterval());
            if (segments == null) {
                segments = Maps.newHashMap();
                retVal.put(dataSegment.getInterval(), segments);
            }
            Pair<DataSegment, Set<String>> val = getSegment(dataSegment.getIdentifier());
            segments.put(dataSegment.getIdentifier(), ImmutableMap.of("metadata", val.lhs, "servers", val.rhs));
        }
        return Response.ok(retVal).build();
    }
    if (simple != null) {
        final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
        for (DataSegment dataSegment : dataSource.getSegments()) {
            Map<String, Object> properties = retVal.get(dataSegment.getInterval());
            if (properties == null) {
                properties = Maps.newHashMap();
                properties.put("size", dataSegment.getSize());
                properties.put("count", 1);
                retVal.put(dataSegment.getInterval(), properties);
            } else {
                properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
                properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
            }
        }
        return Response.ok(retVal).build();
    }
    final Set<Interval> intervals = Sets.newTreeSet(comparator);
    for (DataSegment dataSegment : dataSource.getSegments()) {
        intervals.add(dataSegment.getInterval());
    }
    return Response.ok(intervals).build();
}
Also used : HashSet(java.util.HashSet) Set(java.util.Set) DruidDataSource(io.druid.client.DruidDataSource) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) DataSegment(io.druid.timeline.DataSegment) Interval(org.joda.time.Interval) Path(javax.ws.rs.Path) ResourceFilters(com.sun.jersey.spi.container.ResourceFilters) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
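
The simple branch above accumulates per-interval size and count in a newest-first TreeMap. Below is a standalone sketch of that accumulation pattern using only the JDK and Joda-Time; the SegmentStats class and the sample data are hypothetical stand-ins for Druid's DataSegment and MapUtils plumbing.

import java.util.Comparator;
import java.util.Map;
import java.util.TreeMap;
import org.joda.time.Interval;

public class IntervalStatsExample {
    // Hypothetical stand-in for the "size"/"count" map entries built above.
    static class SegmentStats {
        long size;
        int count;

        @Override
        public String toString() {
            return "size=" + size + " count=" + count;
        }
    }

    public static void main(String[] args) {
        // Newest-first ordering, mirroring Comparators.inverse(Comparators.intervalsByStartThenEnd()).
        Comparator<Interval> byStartThenEnd = Comparator
                .comparing((Interval i) -> i.getStart())
                .thenComparing(i -> i.getEnd());
        Map<Interval, SegmentStats> statsByInterval = new TreeMap<>(byStartThenEnd.reversed());

        // Pretend these are (interval, size) pairs read off a datasource's segments.
        Object[][] segments = {
                {new Interval("2015-01-01/2015-01-02"), 100L},
                {new Interval("2015-01-01/2015-01-02"), 250L},
                {new Interval("2015-01-02/2015-01-03"), 400L},
        };
        for (Object[] segment : segments) {
            SegmentStats stats = statsByInterval.computeIfAbsent(
                    (Interval) segment[0], k -> new SegmentStats());
            stats.size += (Long) segment[1];
            stats.count += 1;
        }
        // Prints the 2015-01-02/2015-01-03 entry first because the comparator is reversed.
        statsByInterval.forEach((interval, stats) -> System.out.println(interval + " " + stats));
    }
}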

Example 18 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class IntervalsResource, method getSpecificIntervals.

@GET
@Path("/{interval}")
@Produces(MediaType.APPLICATION_JSON)
public Response getSpecificIntervals(
    @PathParam("interval") String interval,
    @QueryParam("simple") String simple,
    @QueryParam("full") String full,
    @Context final HttpServletRequest req
) {
    final Interval theInterval = new Interval(interval.replace("_", "/"));
    final Set<DruidDataSource> datasources = authConfig.isEnabled()
        ? InventoryViewUtils.getSecuredDataSources(
            serverInventoryView,
            (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN))
        : InventoryViewUtils.getDataSources(serverInventoryView);
    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
    if (full != null) {
        final Map<Interval, Map<String, Map<String, Object>>> retVal = Maps.newTreeMap(comparator);
        for (DruidDataSource dataSource : datasources) {
            for (DataSegment dataSegment : dataSource.getSegments()) {
                if (theInterval.contains(dataSegment.getInterval())) {
                    Map<String, Map<String, Object>> dataSourceInterval = retVal.get(dataSegment.getInterval());
                    if (dataSourceInterval == null) {
                        Map<String, Map<String, Object>> tmp = Maps.newHashMap();
                        retVal.put(dataSegment.getInterval(), tmp);
                    }
                    setProperties(retVal, dataSource, dataSegment);
                }
            }
        }
        return Response.ok(retVal).build();
    }
    if (simple != null) {
        final Map<Interval, Map<String, Object>> retVal = Maps.newHashMap();
        for (DruidDataSource dataSource : datasources) {
            for (DataSegment dataSegment : dataSource.getSegments()) {
                if (theInterval.contains(dataSegment.getInterval())) {
                    Map<String, Object> properties = retVal.get(dataSegment.getInterval());
                    if (properties == null) {
                        properties = Maps.newHashMap();
                        properties.put("size", dataSegment.getSize());
                        properties.put("count", 1);
                        retVal.put(dataSegment.getInterval(), properties);
                    } else {
                        properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
                        properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
                    }
                }
            }
        }
        return Response.ok(retVal).build();
    }
    final Map<String, Object> retVal = Maps.newHashMap();
    for (DruidDataSource dataSource : datasources) {
        for (DataSegment dataSegment : dataSource.getSegments()) {
            if (theInterval.contains(dataSegment.getInterval())) {
                retVal.put("size", MapUtils.getLong(retVal, "size", 0L) + dataSegment.getSize());
                retVal.put("count", MapUtils.getInt(retVal, "count", 0) + 1);
            }
        }
    }
    return Response.ok(retVal).build();
}
Also used : AuthorizationInfo(io.druid.server.security.AuthorizationInfo) DruidDataSource(io.druid.client.DruidDataSource) DataSegment(io.druid.timeline.DataSegment) Map(java.util.Map) Interval(org.joda.time.Interval) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
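
Each branch above keeps only segments whose intervals fall entirely inside theInterval. Joda-Time intervals are half-open ([start, end)), and Interval.contains(ReadableInterval) requires full containment; a quick sketch of the semantics:

import org.joda.time.Interval;

public class IntervalContainsExample {
    public static void main(String[] args) {
        Interval query = new Interval("2015-01-01/2015-01-31");

        // Fully inside the query interval: contained.
        System.out.println(query.contains(new Interval("2015-01-10/2015-01-11"))); // true
        // Overlaps but extends past the query's end: not contained.
        System.out.println(query.contains(new Interval("2015-01-30/2015-02-01"))); // false
        // Identical bounds count as contained.
        System.out.println(query.contains(new Interval("2015-01-01/2015-01-31"))); // true
    }
}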

Example 19 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class BrokerServerViewTest, method testSingleServerAddedRemovedSegment.

@Test
public void testSingleServerAddedRemovedSegment() throws Exception {
    segmentViewInitLatch = new CountDownLatch(1);
    segmentAddedLatch = new CountDownLatch(1);
    segmentRemovedLatch = new CountDownLatch(1);
    setupViews();
    final DruidServer druidServer = new DruidServer("localhost:1234", "localhost:1234", 10000000L, "historical", "default_tier", 0);
    setupZNodeForServer(druidServer, zkPathsConfig, jsonMapper);
    final DataSegment segment = dataSegmentWithIntervalAndVersion("2014-10-20T00:00:00Z/P1D", "v1");
    announceSegmentForServer(druidServer, segment, zkPathsConfig, jsonMapper);
    Assert.assertTrue(timing.forWaiting().awaitLatch(segmentViewInitLatch));
    Assert.assertTrue(timing.forWaiting().awaitLatch(segmentAddedLatch));
    TimelineLookup timeline = brokerServerView.getTimeline(new TableDataSource("test_broker_server_view"));
    List<TimelineObjectHolder> serverLookupRes = (List<TimelineObjectHolder>) timeline.lookup(new Interval("2014-10-20T00:00:00Z/P1D"));
    Assert.assertEquals(1, serverLookupRes.size());
    TimelineObjectHolder<String, ServerSelector> actualTimelineObjectHolder = serverLookupRes.get(0);
    Assert.assertEquals(new Interval("2014-10-20T00:00:00Z/P1D"), actualTimelineObjectHolder.getInterval());
    Assert.assertEquals("v1", actualTimelineObjectHolder.getVersion());
    PartitionHolder<ServerSelector> actualPartitionHolder = actualTimelineObjectHolder.getObject();
    Assert.assertTrue(actualPartitionHolder.isComplete());
    Assert.assertEquals(1, Iterables.size(actualPartitionHolder));
    ServerSelector selector = ((SingleElementPartitionChunk<ServerSelector>) actualPartitionHolder.iterator().next()).getObject();
    Assert.assertFalse(selector.isEmpty());
    Assert.assertEquals(segment, selector.getSegment());
    Assert.assertEquals(druidServer, selector.pick().getServer());
    unannounceSegmentForServer(druidServer, segment, zkPathsConfig);
    Assert.assertTrue(timing.forWaiting().awaitLatch(segmentRemovedLatch));
    Assert.assertEquals(0, ((List<TimelineObjectHolder>) timeline.lookup(new Interval("2014-10-20T00:00:00Z/P1D"))).size());
    Assert.assertNull(timeline.findEntry(new Interval("2014-10-20T00:00:00Z/P1D"), "v1"));
}
Also used : CountDownLatch(java.util.concurrent.CountDownLatch) DataSegment(io.druid.timeline.DataSegment) ServerSelector(io.druid.client.selector.ServerSelector) TimelineObjectHolder(io.druid.timeline.TimelineObjectHolder) TableDataSource(io.druid.query.TableDataSource) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) TimelineLookup(io.druid.timeline.TimelineLookup) SingleElementPartitionChunk(io.druid.timeline.partition.SingleElementPartitionChunk) Interval(org.joda.time.Interval) Test(org.junit.Test)
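
The test builds intervals from ISO-8601 start/period strings such as "2014-10-20T00:00:00Z/P1D". A quick sketch of how Joda-Time resolves that notation:

import org.joda.time.Interval;

public class IsoPeriodIntervalExample {
    public static void main(String[] args) {
        // ISO-8601 start/period form: the end instant is the start plus P1D (one day).
        Interval interval = new Interval("2014-10-20T00:00:00Z/P1D");
        System.out.println(interval.toDuration().getStandardDays()); // 1

        // The same span written with two explicit instants covers identical millis.
        Interval explicit = new Interval("2014-10-20T00:00:00Z/2014-10-21T00:00:00Z");
        System.out.println(interval.getStartMillis() == explicit.getStartMillis()); // true
        System.out.println(interval.getEndMillis() == explicit.getEndMillis());     // true
    }
}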

Example 20 with Interval

Use of org.joda.time.Interval in project druid by druid-io.

The class CachingClusteredClientFunctionalityTest, method testUncoveredInterval.

@Test
public void testUncoveredInterval() throws Exception {
    addToTimeline(new Interval("2015-01-02/2015-01-03"), "1");
    addToTimeline(new Interval("2015-01-04/2015-01-05"), "1");
    addToTimeline(new Interval("2015-02-04/2015-02-05"), "1");
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
        .dataSource("test")
        .intervals("2015-01-02/2015-01-03")
        .granularity("day")
        .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("rows")))
        .context(ImmutableMap.<String, Object>of("uncoveredIntervalsLimit", 3));
    Map<String, Object> responseContext = new HashMap<>();
    client.run(builder.build(), responseContext);
    Assert.assertNull(responseContext.get("uncoveredIntervals"));
    builder.intervals("2015-01-01/2015-01-03");
    responseContext = new HashMap<>();
    client.run(builder.build(), responseContext);
    assertUncovered(responseContext, false, "2015-01-01/2015-01-02");
    builder.intervals("2015-01-01/2015-01-04");
    responseContext = new HashMap<>();
    client.run(builder.build(), responseContext);
    assertUncovered(responseContext, false, "2015-01-01/2015-01-02", "2015-01-03/2015-01-04");
    builder.intervals("2015-01-02/2015-01-04");
    responseContext = new HashMap<>();
    client.run(builder.build(), responseContext);
    assertUncovered(responseContext, false, "2015-01-03/2015-01-04");
    builder.intervals("2015-01-01/2015-01-30");
    responseContext = new HashMap<>();
    client.run(builder.build(), responseContext);
    assertUncovered(responseContext, false, "2015-01-01/2015-01-02", "2015-01-03/2015-01-04", "2015-01-05/2015-01-30");
    builder.intervals("2015-01-02/2015-01-30");
    responseContext = new HashMap<>();
    client.run(builder.build(), responseContext);
    assertUncovered(responseContext, false, "2015-01-03/2015-01-04", "2015-01-05/2015-01-30");
    builder.intervals("2015-01-04/2015-01-30");
    responseContext = new HashMap<>();
    client.run(builder.build(), responseContext);
    assertUncovered(responseContext, false, "2015-01-05/2015-01-30");
    builder.intervals("2015-01-10/2015-01-30");
    responseContext = new HashMap<>();
    client.run(builder.build(), responseContext);
    assertUncovered(responseContext, false, "2015-01-10/2015-01-30");
    builder.intervals("2015-01-01/2015-02-25");
    responseContext = new HashMap<>();
    client.run(builder.build(), responseContext);
    assertUncovered(responseContext, true, "2015-01-01/2015-01-02", "2015-01-03/2015-01-04", "2015-01-05/2015-02-04");
}
Also used : CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) HashMap(java.util.HashMap) Druids(io.druid.query.Druids) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) Interval(org.joda.time.Interval) Test(org.junit.Test)
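
assertUncovered above verifies which portions of the requested interval have no covering segments (reported in the response context under "uncoveredIntervals", capped by "uncoveredIntervalsLimit"). Below is a minimal gap-finding sketch over sorted, non-overlapping covered intervals; it is not CachingClusteredClient's actual implementation, just the technique the test exercises.

import java.util.ArrayList;
import java.util.List;
import org.joda.time.DateTime;
import org.joda.time.Interval;

public class UncoveredIntervalsExample {
    // Returns the sub-intervals of `query` not covered by any interval in `covered`,
    // which must be sorted by start and non-overlapping.
    static List<Interval> uncovered(Interval query, List<Interval> covered) {
        List<Interval> gaps = new ArrayList<>();
        DateTime cursor = query.getStart();
        for (Interval c : covered) {
            if (!c.getEnd().isAfter(cursor)) {
                continue; // lies entirely before the uncovered cursor
            }
            if (c.getStart().isAfter(query.getEnd())) {
                break; // lies entirely after the query interval
            }
            if (c.getStart().isAfter(cursor)) {
                gaps.add(new Interval(cursor, min(c.getStart(), query.getEnd())));
            }
            cursor = c.getEnd();
        }
        if (cursor.isBefore(query.getEnd())) {
            gaps.add(new Interval(cursor, query.getEnd()));
        }
        return gaps;
    }

    static DateTime min(DateTime a, DateTime b) {
        return a.isBefore(b) ? a : b;
    }

    public static void main(String[] args) {
        List<Interval> covered = new ArrayList<>();
        covered.add(new Interval("2015-01-02/2015-01-03"));
        covered.add(new Interval("2015-01-04/2015-01-05"));
        covered.add(new Interval("2015-02-04/2015-02-05"));
        // Mirrors the "2015-01-01/2015-01-30" case above; prints the gaps
        // 2015-01-01/2015-01-02, 2015-01-03/2015-01-04, and 2015-01-05/2015-01-30.
        System.out.println(uncovered(new Interval("2015-01-01/2015-01-30"), covered));
    }
}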

Aggregations

Interval (org.joda.time.Interval): 555 uses
Test (org.junit.Test): 340 uses
DateTime (org.joda.time.DateTime): 200 uses
DataSegment (io.druid.timeline.DataSegment): 138 uses
ArrayList (java.util.ArrayList): 59 uses
Map (java.util.Map): 54 uses
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 49 uses
QueryRunner (io.druid.query.QueryRunner): 47 uses
List (java.util.List): 47 uses
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 45 uses
Result (io.druid.query.Result): 44 uses
File (java.io.File): 42 uses
FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner): 41 uses
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 40 uses
HashMap (java.util.HashMap): 37 uses
IOException (java.io.IOException): 35 uses
Period (org.joda.time.Period): 30 uses
Test (org.testng.annotations.Test): 29 uses
DruidServer (io.druid.client.DruidServer): 27 uses
Sequence (io.druid.java.util.common.guava.Sequence): 26 uses