Usage example of org.apache.druid.timeline.DataSegment from the druid project (druid-io): class SqlSegmentsMetadataManagerTest, method testIterateAllUsedSegmentsAwaitsPollOnRestart.
@Test
public void testIterateAllUsedSegmentsAwaitsPollOnRestart() throws IOException {
  // After stop/start, iterateAllUsedSegments() must wait for a fresh poll,
  // so the segment published while restarting is visible alongside the originals.
  final DataSegment segmentAddedDuringRestart = pollThenStopThenStartIntro();
  final Set<DataSegment> observedUsedSegments =
      ImmutableSet.copyOf(sqlSegmentsMetadataManager.iterateAllUsedSegments());
  Assert.assertEquals(ImmutableSet.of(segment1, segment2, segmentAddedDuringRestart), observedUsedSegments);
}
Usage example of org.apache.druid.timeline.DataSegment from the druid project (druid-io): class SqlSegmentsMetadataManagerTest, method testIterateAllUsedNonOvershadowedSegmentsForDatasourceInterval.
@Test
public void testIterateAllUsedNonOvershadowedSegmentsForDatasourceInterval() throws Exception {
  final Interval queryInterval = Intervals.of("2012-03-15T00:00:00.000/2012-03-20T00:00:00.000");

  // Initial state: only segment1 lies within the queried interval.
  Optional<Iterable<DataSegment>> result =
      sqlSegmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval("wikipedia", queryInterval, true);
  Assert.assertTrue(result.isPresent());
  Assert.assertEquals(ImmutableSet.of(segment1), ImmutableSet.copyOf(result.get()));

  final DataSegment newSegment2 = createSegment("wikipedia", "2012-03-16T00:00:00.000/2012-03-17T00:00:00.000", "2017-10-15T20:19:12.565Z", "index/y=2017/m=10/d=15/2017-10-16T20:19:12.565Z/0/index.zip", 0);
  publisher.publishSegment(newSegment2);

  // New segment is not returned since we call without force poll
  result = sqlSegmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval("wikipedia", queryInterval, false);
  Assert.assertTrue(result.isPresent());
  Assert.assertEquals(ImmutableSet.of(segment1), ImmutableSet.copyOf(result.get()));

  // New segment is returned since we call with force poll
  result = sqlSegmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval("wikipedia", queryInterval, true);
  Assert.assertTrue(result.isPresent());
  Assert.assertEquals(ImmutableSet.of(segment1, newSegment2), ImmutableSet.copyOf(result.get()));
}
Usage example of org.apache.druid.timeline.DataSegment from the druid project (druid-io): class SqlSegmentsMetadataManagerTest, method testMarkAsUsedNonOvershadowedSegmentsInIntervalWithOverlappingInterval.
@Test
public void testMarkAsUsedNonOvershadowedSegmentsInIntervalWithOverlappingInterval() throws IOException {
  sqlSegmentsMetadataManager.startPollingDatabasePeriodically();
  sqlSegmentsMetadataManager.poll();
  Assert.assertTrue(sqlSegmentsMetadataManager.isPollingDatabasePeriodically());

  final String newDataSource = "wikipedia2";
  final DataSegment newSegment1 = createSegment(newDataSource, "2017-10-15T00:00:00.000/2017-10-17T00:00:00.000", "2017-10-15T20:19:12.565Z", "index/y=2017/m=10/d=15/2017-10-16T20:19:12.565Z/0/index.zip", 0);
  final DataSegment newSegment2 = createSegment(newDataSource, "2017-10-17T00:00:00.000/2017-10-18T00:00:00.000", "2017-10-16T20:19:12.565Z", "index/y=2017/m=10/d=15/2017-10-16T20:19:12.565Z/0/index.zip", 1);
  final DataSegment newSegment3 = createSegment(newDataSource, "2017-10-19T00:00:00.000/2017-10-22T00:00:00.000", "2017-10-15T20:19:12.565Z", "index/y=2017/m=10/d=15/2017-10-16T20:19:12.565Z/0/index.zip", 0);
  // Overshadowed by newSegment2
  final DataSegment newSegment4 = createNewSegment2(newDataSource);

  // All four start out unused.
  for (DataSegment segment : new DataSegment[]{newSegment1, newSegment2, newSegment3, newSegment4}) {
    publish(segment, false);
  }

  final Interval markInterval = Intervals.of("2017-10-16T00:00:00.000/2017-10-20T00:00:00.000");
  sqlSegmentsMetadataManager.poll();
  Assert.assertEquals(ImmutableSet.of(segment1, segment2), ImmutableSet.copyOf(sqlSegmentsMetadataManager.iterateAllUsedSegments()));

  // 1 out of 3 segments match the interval, other 2 overlap, only the segment fully contained will be marked unused
  Assert.assertEquals(1, sqlSegmentsMetadataManager.markAsUsedNonOvershadowedSegmentsInInterval(newDataSource, markInterval));

  sqlSegmentsMetadataManager.poll();
  Assert.assertEquals(ImmutableSet.of(segment1, segment2, newSegment2), ImmutableSet.copyOf(sqlSegmentsMetadataManager.iterateAllUsedSegments()));
}
Usage example of org.apache.druid.timeline.DataSegment from the druid project (druid-io): class ServersResource, method getServerSegments.
@GET
@Path("/{serverName}/segments")
@Produces(MediaType.APPLICATION_JSON)
public Response getServerSegments(@PathParam("serverName") String serverName, @QueryParam("full") String full) {
  // Look up the server first; an unknown name short-circuits to 404.
  final DruidServer server = serverInventoryView.getInventoryValue(serverName);
  if (server == null) {
    return Response.status(Response.Status.NOT_FOUND).build();
  }
  // "full" (any non-null value) returns whole DataSegment objects; otherwise just their ids.
  final Object entity = (full != null)
      ? server.iterateAllSegments()
      : Iterables.transform(server.iterateAllSegments(), DataSegment::getId);
  return Response.status(Response.Status.OK).entity(entity).build();
}
Usage example of org.apache.druid.timeline.DataSegment from the druid project (druid-io): class CompactSegmentsTest, method setup.
@Before
public void setup() {
  // Build 3 datasources, each with the same set of time intervals; every interval gets
  // PARTITION_PER_TIME_INTERVAL partitions, and each partition two variants (true/false flag).
  final List<DataSegment> allSegments = new ArrayList<>();
  final int[] intervalIndexes = {0, 1, 2, 3, 7, 8};
  for (int dsIndex = 0; dsIndex < 3; dsIndex++) {
    final String dataSource = DATA_SOURCE_PREFIX + dsIndex;
    final List<DataSegment> segmentsOfDatasource =
        datasourceToSegments.computeIfAbsent(dataSource, key -> new ArrayList<>());
    for (int intervalIndex : intervalIndexes) {
      for (int partition = 0; partition < PARTITION_PER_TIME_INTERVAL; partition++) {
        // Preserve original insertion order: the 'true' variant before the 'false' one.
        for (boolean variant : new boolean[]{true, false}) {
          final DataSegment segment = createSegment(dataSource, intervalIndex, variant, partition);
          allSegments.add(segment);
          segmentsOfDatasource.add(segment);
        }
      }
    }
  }
  dataSources = DataSourcesSnapshot.fromUsedSegments(allSegments, ImmutableMap.of()).getUsedSegmentsTimelinesPerDataSource();
  Mockito.when(COORDINATOR_CONFIG.getCompactionSkipLockedIntervals()).thenReturn(true);
}
Aggregations