Search in sources :

Example 6 with ImmutableSegmentLoadInfo

use of io.druid.client.ImmutableSegmentLoadInfo in project druid by druid-io.

From the class DatasourcesResource, the method getSegmentDataSourceSpecificInterval.

/**
   * Provides serverView for a datasource and Interval which gives details about servers hosting segments for an interval
   * Used by the realtime tasks to fetch a view of the interval they are interested in.
   */
/**
 * Returns the server view for {@code dataSourceName} over the given interval: for each
 * segment overlapping the interval, the set of servers currently hosting it. Realtime
 * tasks use this endpoint to inspect handoff state for the intervals they care about.
 *
 * NOTE(review): the {@code partial} query parameter is accepted but never read in this
 * handler — confirm whether it is intentionally unused.
 */
@GET
@Path("/{dataSourceName}/intervals/{interval}/serverview")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getSegmentDataSourceSpecificInterval(@PathParam("dataSourceName") String dataSourceName, @PathParam("interval") String interval, @QueryParam("partial") final boolean partial) {
    TimelineLookup<String, SegmentLoadInfo> timeline = serverInventoryView.getTimeline(new TableDataSource(dataSourceName));
    // Path-encoded intervals use '_' in place of '/'.
    final Interval theInterval = new Interval(interval.replace("_", "/"));
    if (timeline == null) {
        log.debug("No timeline found for datasource[%s]", dataSourceName);
        return Response.ok(Lists.<ImmutableSegmentLoadInfo>newArrayList()).build();
    }
    // Include incomplete partition sets so callers can observe in-flight handoffs.
    Iterable<TimelineObjectHolder<String, SegmentLoadInfo>> lookup = timeline.lookupWithIncompletePartitions(theInterval);
    // Flatten every timeline holder's partition chunks into immutable load-info snapshots.
    FunctionalIterable<ImmutableSegmentLoadInfo> segments = FunctionalIterable
        .create(lookup)
        .transformCat(
            (TimelineObjectHolder<String, SegmentLoadInfo> holder) ->
                Iterables.transform(
                    holder.getObject(),
                    (PartitionChunk<SegmentLoadInfo> chunk) -> chunk.getObject().toImmutableSegmentLoadInfo()
                )
        );
    return Response.ok(segments).build();
}
Also used : FunctionalIterable(io.druid.java.util.common.guava.FunctionalIterable) ImmutableSegmentLoadInfo(io.druid.client.ImmutableSegmentLoadInfo) SegmentLoadInfo(io.druid.client.SegmentLoadInfo) Function(com.google.common.base.Function) TimelineObjectHolder(io.druid.timeline.TimelineObjectHolder) TableDataSource(io.druid.query.TableDataSource) ImmutableSegmentLoadInfo(io.druid.client.ImmutableSegmentLoadInfo) PartitionChunk(io.druid.timeline.partition.PartitionChunk) Interval(org.joda.time.Interval) Path(javax.ws.rs.Path) ResourceFilters(com.sun.jersey.spi.container.ResourceFilters) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)

Example 7 with ImmutableSegmentLoadInfo

use of io.druid.client.ImmutableSegmentLoadInfo in project druid by druid-io.

From the class ImmutableSegmentLoadInfoTest, the method testSerde.

/**
 * Round-trips an {@link ImmutableSegmentLoadInfo} through Jackson and verifies the
 * deserialized value equals the original.
 */
@Test
public void testSerde() throws IOException {
    final DataSegment segment = new DataSegment(
        "test_ds",
        new Interval("2011-04-01/2011-04-02"),
        "v1",
        null,
        null,
        null,
        NoneShardSpec.instance(),
        0,
        0
    );
    final DruidServerMetadata server = new DruidServerMetadata("a", "host", 10, "type", "tier", 1);
    final ImmutableSegmentLoadInfo expected = new ImmutableSegmentLoadInfo(segment, Sets.newHashSet(server));
    // Serialize then deserialize; equality proves the serde round-trip is lossless.
    final byte[] json = mapper.writeValueAsBytes(expected);
    final ImmutableSegmentLoadInfo actual = mapper.readValue(json, ImmutableSegmentLoadInfo.class);
    Assert.assertEquals(expected, actual);
}
Also used : ImmutableSegmentLoadInfo(io.druid.client.ImmutableSegmentLoadInfo) DruidServerMetadata(io.druid.server.coordination.DruidServerMetadata) DataSegment(io.druid.timeline.DataSegment) Interval(org.joda.time.Interval) Test(org.junit.Test)

Example 8 with ImmutableSegmentLoadInfo

use of io.druid.client.ImmutableSegmentLoadInfo in project druid by druid-io.

From the class CoordinatorBasedSegmentHandoffNotifierTest, the method testHandoffChecksForAssignableServer.

/**
 * Handoff counts as complete only when the segment is served by an assignable (historical)
 * server; a realtime server hosting the same segment does not complete the handoff.
 */
@Test
public void testHandoffChecksForAssignableServer() {
    final Interval interval = new Interval("2011-04-01/2011-04-02");
    final SegmentDescriptor descriptor = new SegmentDescriptor(interval, "v1", 2);
    // Segment served by a historical server -> handoff complete.
    final ImmutableSegmentLoadInfo servedByHistorical = new ImmutableSegmentLoadInfo(
        createSegment(interval, "v1", 2),
        Sets.newHashSet(createHistoricalServerMetadata("a"))
    );
    Assert.assertTrue(
        CoordinatorBasedSegmentHandoffNotifier.isHandOffComplete(Lists.newArrayList(servedByHistorical), descriptor)
    );
    // Same segment served only by a realtime server -> handoff NOT complete.
    final ImmutableSegmentLoadInfo servedByRealtime = new ImmutableSegmentLoadInfo(
        createSegment(interval, "v1", 2),
        Sets.newHashSet(createRealtimeServerMetadata("a"))
    );
    Assert.assertFalse(
        CoordinatorBasedSegmentHandoffNotifier.isHandOffComplete(Lists.newArrayList(servedByRealtime), descriptor)
    );
}
Also used : SegmentDescriptor(io.druid.query.SegmentDescriptor) ImmutableSegmentLoadInfo(io.druid.client.ImmutableSegmentLoadInfo) Interval(org.joda.time.Interval) Test(org.junit.Test)

Example 9 with ImmutableSegmentLoadInfo

use of io.druid.client.ImmutableSegmentLoadInfo in project druid by druid-io.

From the class CoordinatorBasedSegmentHandoffNotifier, the method checkForSegmentHandoffs.

/**
 * Polls the coordinator once for every segment that has a pending handoff callback.
 * When a segment's handoff is complete, its callback is executed on its registered
 * executor and the entry is removed; otherwise it stays in {@code handOffCallbacks}
 * and is re-checked on the next poll.
 */
void checkForSegmentHandoffs() {
    try {
        Iterator<Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>>> itr = handOffCallbacks.entrySet().iterator();
        while (itr.hasNext()) {
            Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry = itr.next();
            SegmentDescriptor descriptor = entry.getKey();
            try {
                List<ImmutableSegmentLoadInfo> loadedSegments = coordinatorClient.fetchServerView(dataSource, descriptor.getInterval(), true);
                if (isHandOffComplete(loadedSegments, entry.getKey())) {
                    log.info("Segment Handoff complete for dataSource[%s] Segment[%s]", dataSource, descriptor);
                    // Run the registered callback on its associated executor, then stop tracking it.
                    entry.getValue().lhs.execute(entry.getValue().rhs);
                    itr.remove();
                }
            } catch (Exception e) {
                // Per-segment failure: keep the callback registered and retry on the next poll.
                log.error(e, "Exception while checking handoff for dataSource[%s] Segment[%s], Will try again after [%d]secs", dataSource, descriptor, pollDurationMillis);
            }
        }
        if (!handOffCallbacks.isEmpty()) {
            log.info("Still waiting for Handoff for Segments : [%s]", handOffCallbacks.keySet());
        }
    } catch (Throwable t) {
        // FIX: the original format string had three placeholders ("dataSource[%s] Segment[%s]
        // ... [%d]secs") but only two arguments (dataSource, pollDurationMillis), which makes
        // the log call throw/garble instead of logging. No single segment is in scope in this
        // outer catch, so log only the datasource and the retry delay.
        log.error(t, "Exception while checking handoffs for dataSource[%s], Will try again after [%d]secs", dataSource, pollDurationMillis);
    }
}
Also used : Executor(java.util.concurrent.Executor) SegmentDescriptor(io.druid.query.SegmentDescriptor) ImmutableSegmentLoadInfo(io.druid.client.ImmutableSegmentLoadInfo) ConcurrentMap(java.util.concurrent.ConcurrentMap) Map(java.util.Map) Pair(io.druid.java.util.common.Pair)

Aggregations

ImmutableSegmentLoadInfo (io.druid.client.ImmutableSegmentLoadInfo)9 Interval (org.joda.time.Interval)8 SegmentDescriptor (io.druid.query.SegmentDescriptor)7 Test (org.junit.Test)7 DataSegment (io.druid.timeline.DataSegment)3 CoordinatorClient (io.druid.client.coordinator.CoordinatorClient)2 NumberedShardSpec (io.druid.timeline.partition.NumberedShardSpec)2 AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean)2 Function (com.google.common.base.Function)1 ResourceFilters (com.sun.jersey.spi.container.ResourceFilters)1 SegmentLoadInfo (io.druid.client.SegmentLoadInfo)1 Pair (io.druid.java.util.common.Pair)1 FunctionalIterable (io.druid.java.util.common.guava.FunctionalIterable)1 TableDataSource (io.druid.query.TableDataSource)1 DruidServerMetadata (io.druid.server.coordination.DruidServerMetadata)1 TimelineObjectHolder (io.druid.timeline.TimelineObjectHolder)1 PartitionChunk (io.druid.timeline.partition.PartitionChunk)1 Map (java.util.Map)1 ConcurrentMap (java.util.concurrent.ConcurrentMap)1 Executor (java.util.concurrent.Executor)1