Search in sources :

Example 6 with ImmutableSegmentLoadInfo

use of org.apache.druid.client.ImmutableSegmentLoadInfo in project druid by druid-io.

The class ImmutableSegmentLoadInfoTest defines the method testSerde.

@Test
public void testSerde() throws IOException {
    // Build a segment with a known interval/version and a single historical server.
    final DataSegment segment = new DataSegment(
            "test_ds",
            Intervals.of("2011-04-01/2011-04-02"),
            "v1",
            null,
            null,
            null,
            NoneShardSpec.instance(),
            0,
            0
    );
    final DruidServerMetadata server =
            new DruidServerMetadata("a", "host", null, 10, ServerType.HISTORICAL, "tier", 1);
    final ImmutableSegmentLoadInfo original =
            new ImmutableSegmentLoadInfo(segment, Sets.newHashSet(server));

    // Round-trip through JSON and verify the deserialized copy equals the original.
    final byte[] json = mapper.writeValueAsBytes(original);
    final ImmutableSegmentLoadInfo roundTripped = mapper.readValue(json, ImmutableSegmentLoadInfo.class);
    Assert.assertEquals(original, roundTripped);
}
Also used : ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) DruidServerMetadata(org.apache.druid.server.coordination.DruidServerMetadata) DataSegment(org.apache.druid.timeline.DataSegment) Test(org.junit.Test)

Example 7 with ImmutableSegmentLoadInfo

use of org.apache.druid.client.ImmutableSegmentLoadInfo in project druid by druid-io.

The class CoordinatorBasedSegmentHandoffNotifier defines the method checkForSegmentHandoffs.

/**
 * Polls the coordinator once for every segment that still has a pending handoff callback.
 * For each completed handoff, runs the registered callback on its executor and removes the
 * entry; entries that are not yet handed off (or that fail the check) remain for the next poll.
 * Per-segment failures are logged and do not abort the remaining checks.
 */
void checkForSegmentHandoffs() {
    try {
        // Iterate via the entrySet iterator so completed entries can be removed in place.
        Iterator<Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>>> itr = handOffCallbacks.entrySet().iterator();
        while (itr.hasNext()) {
            Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry = itr.next();
            SegmentDescriptor descriptor = entry.getKey();
            try {
                Boolean handOffComplete = coordinatorClient.isHandOffComplete(dataSource, descriptor);
                if (handOffComplete == null) {
                    // Older coordinators don't support the direct handoff-check API; fall back to
                    // fetching the server view and inspecting the loaded segments ourselves.
                    log.warn("Failed to call the new coordinator API for checking segment handoff. Falling back to the old API");
                    final List<ImmutableSegmentLoadInfo> loadedSegments = coordinatorClient.fetchServerView(dataSource, descriptor.getInterval(), true);
                    handOffComplete = isHandOffComplete(loadedSegments, descriptor);
                }
                if (handOffComplete) {
                    log.debug("Segment Handoff complete for dataSource[%s] Segment[%s]", dataSource, descriptor);
                    // lhs = executor, rhs = callback registered for this segment.
                    entry.getValue().lhs.execute(entry.getValue().rhs);
                    itr.remove();
                }
            } catch (Exception e) {
                // FIX: pollDurationMillis is in milliseconds; the message previously claimed "secs".
                log.error(e, "Exception while checking handoff for dataSource[%s] Segment[%s], Will try again after [%d]millis", dataSource, descriptor, pollDurationMillis);
            }
        }
        if (!handOffCallbacks.isEmpty()) {
            log.warn("Still waiting for Handoff for [%d] Segments", handOffCallbacks.size());
        }
    } catch (Throwable t) {
        // FIX: same unit correction as above — the retry interval is milliseconds.
        log.error(t, "Exception while checking handoff for dataSource[%s], Will try again after [%d]millis", dataSource, pollDurationMillis);
    }
}
Also used : Executor(java.util.concurrent.Executor) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) ConcurrentMap(java.util.concurrent.ConcurrentMap) Map(java.util.Map) Pair(org.apache.druid.java.util.common.Pair)

Example 8 with ImmutableSegmentLoadInfo

use of org.apache.druid.client.ImmutableSegmentLoadInfo in project druid by druid-io.

The class DataSourcesResource defines the method isHandOffComplete.

/**
 * Used by the realtime tasks to learn whether a segment is handed off or not.
 * It returns true when the segment will never be handed off or is already handed off. Otherwise, it returns false.
 */
@GET
@Path("/{dataSourceName}/handoffComplete")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response isHandOffComplete(@PathParam("dataSourceName") String dataSourceName, @QueryParam("interval") final String interval, @QueryParam("partitionNumber") final int partitionNumber, @QueryParam("version") final String version) {
    try {
        final Interval queryInterval = Intervals.of(interval);
        final SegmentDescriptor descriptor = new SegmentDescriptor(queryInterval, version, partitionNumber);
        final DateTime referenceTime = DateTimes.nowUtc();

        // The first rule that applies to the interval decides the segment's fate:
        // only a LoadRule means the segment can still be loaded (and thus handed off).
        boolean loadableByRules = false;
        for (Rule rule : metadataRuleManager.getRulesWithDefault(dataSourceName)) {
            if (rule.appliesTo(queryInterval, referenceTime)) {
                loadableByRules = rule instanceof LoadRule;
                break;
            }
        }
        if (!loadableByRules) {
            // Segment will be dropped, i.e. it will never be handed off — report completion.
            return Response.ok(true).build();
        }

        TimelineLookup<String, SegmentLoadInfo> timeline = serverInventoryView.getTimeline(new TableDataSource(dataSourceName));
        if (timeline == null) {
            log.debug("No timeline found for datasource[%s]", dataSourceName);
            return Response.ok(false).build();
        }

        // Handoff is complete exactly when the described segment is served in the interval.
        Iterable<ImmutableSegmentLoadInfo> servedSegmentsInInterval = prepareServedSegmentsInInterval(timeline, queryInterval);
        return Response.ok(isSegmentLoaded(servedSegmentsInInterval, descriptor)).build();
    } catch (Exception e) {
        log.error(e, "Error while handling hand off check request");
        return Response.serverError().entity(ImmutableMap.of("error", e.toString())).build();
    }
}
Also used : SegmentLoadInfo(org.apache.druid.client.SegmentLoadInfo) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) DateTime(org.joda.time.DateTime) UnknownSegmentIdsException(org.apache.druid.metadata.UnknownSegmentIdsException) TableDataSource(org.apache.druid.query.TableDataSource) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) LoadRule(org.apache.druid.server.coordinator.rules.LoadRule) Rule(org.apache.druid.server.coordinator.rules.Rule) LoadRule(org.apache.druid.server.coordinator.rules.LoadRule) Interval(org.joda.time.Interval) Path(javax.ws.rs.Path) ResourceFilters(com.sun.jersey.spi.container.ResourceFilters) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)

Example 9 with ImmutableSegmentLoadInfo

use of org.apache.druid.client.ImmutableSegmentLoadInfo in project druid by druid-io.

The class CoordinatorBasedSegmentHandoffNotifierTest defines the method testHandoffChecksForPartitionNumber.

@Test
public void testHandoffChecksForPartitionNumber() {
    // A served segment with partition 1 should satisfy a descriptor for partition 1 only.
    final Interval interval = Intervals.of("2011-04-01/2011-04-02");
    final List<ImmutableSegmentLoadInfo> served = Collections.singletonList(
            new ImmutableSegmentLoadInfo(
                    createSegment(interval, "v1", 1),
                    Sets.newHashSet(createHistoricalServerMetadata("a"))
            )
    );

    Assert.assertTrue(
            CoordinatorBasedSegmentHandoffNotifier.isHandOffComplete(served, new SegmentDescriptor(interval, "v1", 1))
    );
    // Same interval/version but a different partition number must not match.
    Assert.assertFalse(
            CoordinatorBasedSegmentHandoffNotifier.isHandOffComplete(served, new SegmentDescriptor(interval, "v1", 2))
    );
}
Also used : SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) Interval(org.joda.time.Interval) Test(org.junit.Test)

Example 10 with ImmutableSegmentLoadInfo

use of org.apache.druid.client.ImmutableSegmentLoadInfo in project druid by druid-io.

The class DataSourcesResourceTest defines the method testSegmentLoadChecksForInterval.

@Test
public void testSegmentLoadChecksForInterval() {
    // Descriptor interval extends beyond the served segment's interval — not loaded.
    final List<ImmutableSegmentLoadInfo> narrowServed = Collections.singletonList(
            new ImmutableSegmentLoadInfo(
                    createSegment(Intervals.of("2011-04-01/2011-04-02"), "v1", 1),
                    Sets.newHashSet(createHistoricalServerMetadata("a"))
            )
    );
    Assert.assertFalse(
            DataSourcesResource.isSegmentLoaded(
                    narrowServed,
                    new SegmentDescriptor(Intervals.of("2011-04-01/2011-04-03"), "v1", 1)
            )
    );

    // Descriptor interval fully contained within the served segment's interval — loaded.
    final List<ImmutableSegmentLoadInfo> wideServed = Collections.singletonList(
            new ImmutableSegmentLoadInfo(
                    createSegment(Intervals.of("2011-04-01/2011-04-04"), "v1", 1),
                    Sets.newHashSet(createHistoricalServerMetadata("a"))
            )
    );
    Assert.assertTrue(
            DataSourcesResource.isSegmentLoaded(
                    wideServed,
                    new SegmentDescriptor(Intervals.of("2011-04-02/2011-04-03"), "v1", 1)
            )
    );
}
Also used : SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) Test(org.junit.Test)

Aggregations

ImmutableSegmentLoadInfo (org.apache.druid.client.ImmutableSegmentLoadInfo)12 SegmentDescriptor (org.apache.druid.query.SegmentDescriptor)10 Test (org.junit.Test)9 Interval (org.joda.time.Interval)8 ResourceFilters (com.sun.jersey.spi.container.ResourceFilters)2 GET (javax.ws.rs.GET)2 Path (javax.ws.rs.Path)2 Produces (javax.ws.rs.Produces)2 SegmentLoadInfo (org.apache.druid.client.SegmentLoadInfo)2 TableDataSource (org.apache.druid.query.TableDataSource)2 ArrayList (java.util.ArrayList)1 Map (java.util.Map)1 ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap)1 ConcurrentMap (java.util.concurrent.ConcurrentMap)1 Executor (java.util.concurrent.Executor)1 Pair (org.apache.druid.java.util.common.Pair)1 UnknownSegmentIdsException (org.apache.druid.metadata.UnknownSegmentIdsException)1 DruidServerMetadata (org.apache.druid.server.coordination.DruidServerMetadata)1 LoadRule (org.apache.druid.server.coordinator.rules.LoadRule)1 Rule (org.apache.druid.server.coordinator.rules.Rule)1