Example 1 with SegmentLoadInfo

Use of org.apache.druid.client.SegmentLoadInfo in project druid by druid-io.

From the class DataSourcesResource, method computeSegmentLoadStatistics.

private SegmentsLoadStatistics computeSegmentLoadStatistics(Iterable<DataSegment> segments) {
    Map<SegmentId, SegmentLoadInfo> segmentLoadInfos = serverInventoryView.getSegmentLoadInfos();
    int numPublishedSegments = 0;
    int numUnavailableSegments = 0;
    int numLoadedSegments = 0;
    for (DataSegment segment : segments) {
        numPublishedSegments++;
        if (!segmentLoadInfos.containsKey(segment.getId())) {
            numUnavailableSegments++;
        } else {
            numLoadedSegments++;
        }
    }
    return new SegmentsLoadStatistics(numPublishedSegments, numUnavailableSegments, numLoadedSegments);
}
Also used : SegmentId(org.apache.druid.timeline.SegmentId) SegmentLoadInfo(org.apache.druid.client.SegmentLoadInfo) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) DataSegment(org.apache.druid.timeline.DataSegment)
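
For context, the coordinator's loadstatus endpoint turns these counters into either a loaded percentage or a remaining-segment count. A minimal sketch of that derivation, assuming SegmentsLoadStatistics (a private holder in DataSourcesResource) exposes getters for the three counters; the getter names here are illustrative:

// Illustrative helpers; the getter names on SegmentsLoadStatistics are assumptions.
private static double loadedPercent(SegmentsLoadStatistics stats) {
    int published = stats.getNumPublishedSegments();
    // Avoid division by zero for a datasource with no published segments.
    return published == 0 ? 0.0 : 100.0 * stats.getNumLoadedSegments() / published;
}

private static int segmentsLeftToLoad(SegmentsLoadStatistics stats) {
    // The "simple" loadstatus mode reports the number of segments not yet available.
    return stats.getNumUnavailableSegments();
}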

Example 2 with SegmentLoadInfo

Use of org.apache.druid.client.SegmentLoadInfo in project druid by druid-io.

From the class DataSourcesResource, method getServedSegmentsInInterval.

/**
 * Provides serverView for a datasource and Interval which gives details about servers hosting segments for an
 * interval. Used by the realtime tasks to fetch a view of the interval they are interested in.
 */
@GET
@Path("/{dataSourceName}/intervals/{interval}/serverview")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getServedSegmentsInInterval(@PathParam("dataSourceName") String dataSourceName, @PathParam("interval") String interval, @QueryParam("partial") final boolean partial) {
    TimelineLookup<String, SegmentLoadInfo> timeline = serverInventoryView.getTimeline(new TableDataSource(dataSourceName));
    final Interval theInterval = Intervals.of(interval.replace('_', '/'));
    if (timeline == null) {
        log.debug("No timeline found for datasource[%s]", dataSourceName);
        return Response.ok(new ArrayList<ImmutableSegmentLoadInfo>()).build();
    }
    return Response.ok(prepareServedSegmentsInInterval(timeline, theInterval)).build();
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) SegmentLoadInfo(org.apache.druid.client.SegmentLoadInfo) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) ArrayList(java.util.ArrayList) Interval(org.joda.time.Interval) Path(javax.ws.rs.Path) ResourceFilters(com.sun.jersey.spi.container.ResourceFilters) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
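
The interval path segment uses '_' in place of '/', which the handler reverses via interval.replace('_', '/'). A small illustrative sketch of building such a request path, assuming the resource is mounted at the coordinator's /druid/coordinator/v1/datasources root:

// Illustrative only: encode a Joda interval for the serverview endpoint.
static String serverViewPath(String dataSourceName, org.joda.time.Interval interval) {
    // Interval.toString() yields e.g. "2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z".
    String encodedInterval = interval.toString().replace('/', '_');
    return "/druid/coordinator/v1/datasources/" + dataSourceName
        + "/intervals/" + encodedInterval + "/serverview";
}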

Example 3 with SegmentLoadInfo

Use of org.apache.druid.client.SegmentLoadInfo in project druid by druid-io.

From the class DataSourcesResourceTest, method testGetDatasourceLoadstatusDefault.

@Test
public void testGetDatasourceLoadstatusDefault() {
    DataSegment datasource1Segment1 = new DataSegment("datasource1", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 10);
    DataSegment datasource1Segment2 = new DataSegment("datasource1", Intervals.of("2010-01-22/P1D"), "", null, null, null, null, 0x9, 20);
    DataSegment datasource2Segment1 = new DataSegment("datasource2", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 30);
    List<DataSegment> segments = ImmutableList.of(datasource1Segment1, datasource1Segment2);
    Map<SegmentId, SegmentLoadInfo> completedLoadInfoMap = ImmutableMap.of(datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1), datasource1Segment2.getId(), new SegmentLoadInfo(datasource1Segment2), datasource2Segment1.getId(), new SegmentLoadInfo(datasource2Segment1));
    Map<SegmentId, SegmentLoadInfo> halfLoadedInfoMap = ImmutableMap.of(datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1));
    // Test when datasource fully loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())).andReturn(Optional.of(segments)).once();
    EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(completedLoadInfoMap).once();
    EasyMock.replay(segmentsMetadataManager, inventoryView);
    DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
    Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, null, null);
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(1, ((Map) response.getEntity()).size());
    Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
    Assert.assertEquals(100.0, ((Map) response.getEntity()).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager, inventoryView);
    EasyMock.reset(segmentsMetadataManager, inventoryView);
    // Test when datasource half loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())).andReturn(Optional.of(segments)).once();
    EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(halfLoadedInfoMap).once();
    EasyMock.replay(segmentsMetadataManager, inventoryView);
    dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
    response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, null, null);
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(1, ((Map) response.getEntity()).size());
    Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
    Assert.assertEquals(50.0, ((Map) response.getEntity()).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager, inventoryView);
}
Also used : Response(javax.ws.rs.core.Response) SegmentId(org.apache.druid.timeline.SegmentId) SegmentLoadInfo(org.apache.druid.client.SegmentLoadInfo) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) DataSegment(org.apache.druid.timeline.DataSegment) Test(org.junit.Test)
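
The expected values follow from the fixtures: with completedLoadInfoMap both of datasource1's published segments are loaded (2 of 2, hence 100.0), while halfLoadedInfoMap covers only one of the two (1 of 2, hence 50.0); datasource2Segment1 is irrelevant because it belongs to a different datasource.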

Example 4 with SegmentLoadInfo

Use of org.apache.druid.client.SegmentLoadInfo in project druid by druid-io.

From the class DataSourcesResourceTest, method testGetDatasourceLoadstatusSimple.

@Test
public void testGetDatasourceLoadstatusSimple() {
    DataSegment datasource1Segment1 = new DataSegment("datasource1", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 10);
    DataSegment datasource1Segment2 = new DataSegment("datasource1", Intervals.of("2010-01-22/P1D"), "", null, null, null, null, 0x9, 20);
    DataSegment datasource2Segment1 = new DataSegment("datasource2", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 30);
    List<DataSegment> segments = ImmutableList.of(datasource1Segment1, datasource1Segment2);
    Map<SegmentId, SegmentLoadInfo> completedLoadInfoMap = ImmutableMap.of(datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1), datasource1Segment2.getId(), new SegmentLoadInfo(datasource1Segment2), datasource2Segment1.getId(), new SegmentLoadInfo(datasource2Segment1));
    Map<SegmentId, SegmentLoadInfo> halfLoadedInfoMap = ImmutableMap.of(datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1));
    // Test when datasource fully loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())).andReturn(Optional.of(segments)).once();
    EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(completedLoadInfoMap).once();
    EasyMock.replay(segmentsMetadataManager, inventoryView);
    DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
    Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null);
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(1, ((Map) response.getEntity()).size());
    Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
    Assert.assertEquals(0, ((Map) response.getEntity()).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager, inventoryView);
    EasyMock.reset(segmentsMetadataManager, inventoryView);
    // Test when datasource half loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())).andReturn(Optional.of(segments)).once();
    EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(halfLoadedInfoMap).once();
    EasyMock.replay(segmentsMetadataManager, inventoryView);
    dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
    response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null);
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(1, ((Map) response.getEntity()).size());
    Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
    Assert.assertEquals(1, ((Map) response.getEntity()).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager, inventoryView);
}
Also used : Response(javax.ws.rs.core.Response) SegmentId(org.apache.druid.timeline.SegmentId) SegmentLoadInfo(org.apache.druid.client.SegmentLoadInfo) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) DataSegment(org.apache.druid.timeline.DataSegment) Test(org.junit.Test)
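
In "simple" mode the endpoint reports the number of segments still waiting to load rather than a percentage: 0 when completedLoadInfoMap covers both published segments, and 1 when halfLoadedInfoMap covers only one of the two.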

Example 5 with SegmentLoadInfo

Use of org.apache.druid.client.SegmentLoadInfo in project druid by druid-io.

From the class DataSourcesResource, method isHandOffComplete.

/**
 * Used by the realtime tasks to learn whether a segment is handed off or not.
 * It returns true when the segment will never be handed off or is already handed off. Otherwise, it returns false.
 */
@GET
@Path("/{dataSourceName}/handoffComplete")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response isHandOffComplete(@PathParam("dataSourceName") String dataSourceName, @QueryParam("interval") final String interval, @QueryParam("partitionNumber") final int partitionNumber, @QueryParam("version") final String version) {
    try {
        final List<Rule> rules = metadataRuleManager.getRulesWithDefault(dataSourceName);
        final Interval theInterval = Intervals.of(interval);
        final SegmentDescriptor descriptor = new SegmentDescriptor(theInterval, version, partitionNumber);
        final DateTime now = DateTimes.nowUtc();
        // dropped means a segment will never be handed off, i.e. it has completed hand off
        // init to true, reset to false only if this segment can be loaded by rules
        boolean dropped = true;
        for (Rule rule : rules) {
            if (rule.appliesTo(theInterval, now)) {
                if (rule instanceof LoadRule) {
                    dropped = false;
                }
                break;
            }
        }
        if (dropped) {
            return Response.ok(true).build();
        }
        TimelineLookup<String, SegmentLoadInfo> timeline = serverInventoryView.getTimeline(new TableDataSource(dataSourceName));
        if (timeline == null) {
            log.debug("No timeline found for datasource[%s]", dataSourceName);
            return Response.ok(false).build();
        }
        Iterable<ImmutableSegmentLoadInfo> servedSegmentsInInterval = prepareServedSegmentsInInterval(timeline, theInterval);
        if (isSegmentLoaded(servedSegmentsInInterval, descriptor)) {
            return Response.ok(true).build();
        }
        return Response.ok(false).build();
    } catch (Exception e) {
        log.error(e, "Error while handling hand off check request");
        return Response.serverError().entity(ImmutableMap.of("error", e.toString())).build();
    }
}
Also used : SegmentLoadInfo(org.apache.druid.client.SegmentLoadInfo) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) DateTime(org.joda.time.DateTime) UnknownSegmentIdsException(org.apache.druid.metadata.UnknownSegmentIdsException) TableDataSource(org.apache.druid.query.TableDataSource) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) LoadRule(org.apache.druid.server.coordinator.rules.LoadRule) Rule(org.apache.druid.server.coordinator.rules.Rule) Interval(org.joda.time.Interval) Path(javax.ws.rs.Path) ResourceFilters(com.sun.jersey.spi.container.ResourceFilters) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
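
The isSegmentLoaded helper invoked above is not shown in this excerpt. A plausible sketch of the matching step, assuming ImmutableSegmentLoadInfo exposes the underlying DataSegment and its serving servers (the accessor names are assumptions, and Druid's actual matching rules may differ):

// Illustrative sketch; getSegment()/getServers() on ImmutableSegmentLoadInfo are assumed accessors.
static boolean isSegmentLoaded(Iterable<ImmutableSegmentLoadInfo> servedSegments, SegmentDescriptor descriptor) {
    for (ImmutableSegmentLoadInfo loadInfo : servedSegments) {
        DataSegment segment = loadInfo.getSegment();
        boolean matches = segment.getInterval().contains(descriptor.getInterval())
            && segment.getShardSpec().getPartitionNum() == descriptor.getPartitionNumber()
            && segment.getVersion().compareTo(descriptor.getVersion()) >= 0;
        if (matches && !loadInfo.getServers().isEmpty()) {
            // At least one server is serving a segment that covers the descriptor, so hand off is complete.
            return true;
        }
    }
    return false;
}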

Aggregations

ImmutableSegmentLoadInfo (org.apache.druid.client.ImmutableSegmentLoadInfo) 6
SegmentLoadInfo (org.apache.druid.client.SegmentLoadInfo) 6
Response (javax.ws.rs.core.Response) 3
TableDataSource (org.apache.druid.query.TableDataSource) 3
DataSegment (org.apache.druid.timeline.DataSegment) 3
SegmentId (org.apache.druid.timeline.SegmentId) 3
Interval (org.joda.time.Interval) 3
Test (org.junit.Test) 3
ResourceFilters (com.sun.jersey.spi.container.ResourceFilters) 2
ArrayList (java.util.ArrayList) 2
GET (javax.ws.rs.GET) 2
Path (javax.ws.rs.Path) 2
Produces (javax.ws.rs.Produces) 2
Rule (org.apache.druid.server.coordinator.rules.Rule) 2
MetadataRuleManager (org.apache.druid.metadata.MetadataRuleManager) 1
UnknownSegmentIdsException (org.apache.druid.metadata.UnknownSegmentIdsException) 1
SegmentDescriptor (org.apache.druid.query.SegmentDescriptor) 1
IntervalDropRule (org.apache.druid.server.coordinator.rules.IntervalDropRule) 1
IntervalLoadRule (org.apache.druid.server.coordinator.rules.IntervalLoadRule) 1
LoadRule (org.apache.druid.server.coordinator.rules.LoadRule) 1