Use of org.apache.druid.timeline.DataSegment in the druid project (druid-io).
From class SqlSegmentsMetadataManagerTest, method pollThenStopThenStartIntro.
/**
 * Shared intro for restart tests: poll once, stop polling, publish a segment for a
 * brand-new datasource while polling is stopped, then restart polling.
 *
 * @return the segment published while polling was stopped, so callers can assert it
 *         becomes visible again after the restart
 * @throws IOException if publishing the segment fails
 */
private DataSegment pollThenStopThenStartIntro() throws IOException {
    sqlSegmentsMetadataManager.startPollingDatabasePeriodically();
    sqlSegmentsMetadataManager.poll();
    sqlSegmentsMetadataManager.stopPollingDatabasePeriodically();
    Assert.assertFalse(sqlSegmentsMetadataManager.isPollingDatabasePeriodically());
    // Only the original datasource is known before the new segment is published.
    Assert.assertEquals(
        ImmutableSet.of("wikipedia"),
        sqlSegmentsMetadataManager.retrieveAllDataSourceNames()
    );
    // Publish while polling is stopped: the manager cannot have seen this segment yet.
    final DataSegment unseenSegment = createNewSegment1("wikipedia2");
    publisher.publishSegment(unseenSegment);
    sqlSegmentsMetadataManager.startPollingDatabasePeriodically();
    return unseenSegment;
}
Use of org.apache.druid.timeline.DataSegment in the druid project (druid-io).
From class SqlSegmentsMetadataManagerTest, method testPollPeriodicallyAndOnDemandInterleave.
/**
 * Verifies that periodic polling and forced on-demand polls interleave correctly:
 * each poll type is reflected in {@code getLatestDatabasePoll()}, and newly published
 * datasources appear in the snapshot either immediately (on-demand) or eventually
 * (periodic).
 */
@Test(timeout = 60_000)
public void testPollPeriodicallyAndOnDemandInterleave() throws Exception {
    // Before any poll has run, there is no snapshot at all.
    Assert.assertNull(sqlSegmentsMetadataManager.getDataSourcesSnapshot());

    sqlSegmentsMetadataManager.startPollingDatabasePeriodically();
    Assert.assertTrue(sqlSegmentsMetadataManager.isPollingDatabasePeriodically());
    // This call makes sure that the first poll has completed.
    sqlSegmentsMetadataManager.useLatestSnapshotIfWithinDelay();
    Assert.assertTrue(
        sqlSegmentsMetadataManager.getLatestDatabasePoll()
            instanceof SqlSegmentsMetadataManager.PeriodicDatabasePoll
    );
    DataSourcesSnapshot snapshot = sqlSegmentsMetadataManager.getDataSourcesSnapshot();
    Assert.assertEquals(
        ImmutableList.of("wikipedia"),
        snapshot.getDataSourcesWithAllUsedSegments()
                .stream()
                .map(ImmutableDruidDataSource::getName)
                .collect(Collectors.toList())
    );

    // Publish a second datasource and force an on-demand poll so it is picked up immediately.
    final DataSegment secondSegment = createNewSegment1("wikipedia2");
    publisher.publishSegment(secondSegment);
    sqlSegmentsMetadataManager.forceOrWaitOngoingDatabasePoll();
    Assert.assertTrue(sqlSegmentsMetadataManager.isPollingDatabasePeriodically());
    Assert.assertTrue(
        sqlSegmentsMetadataManager.getLatestDatabasePoll()
            instanceof SqlSegmentsMetadataManager.OnDemandDatabasePoll
    );
    // The new datasource must already be in the snapshot because we just forced a poll.
    snapshot = sqlSegmentsMetadataManager.getDataSourcesSnapshot();
    Assert.assertEquals(
        ImmutableList.of("wikipedia2", "wikipedia"),
        snapshot.getDataSourcesWithAllUsedSegments()
                .stream()
                .map(ImmutableDruidDataSource::getName)
                .collect(Collectors.toList())
    );

    // Publish a third datasource; this time wait for the periodic poll to notice it
    // (no on-demand poll, so we busy-wait with a short sleep).
    final String thirdDataSource = "wikipedia3";
    final DataSegment thirdSegment = createNewSegment1(thirdDataSource);
    publisher.publishSegment(thirdSegment);
    while (sqlSegmentsMetadataManager.getDataSourcesSnapshot().getDataSource(thirdDataSource) == null) {
        Thread.sleep(1000);
    }
    Assert.assertTrue(sqlSegmentsMetadataManager.isPollingDatabasePeriodically());
    Assert.assertTrue(
        sqlSegmentsMetadataManager.getLatestDatabasePoll()
            instanceof SqlSegmentsMetadataManager.PeriodicDatabasePoll
    );
    snapshot = sqlSegmentsMetadataManager.getDataSourcesSnapshot();
    Assert.assertEquals(
        ImmutableList.of("wikipedia2", "wikipedia3", "wikipedia"),
        snapshot.getDataSourcesWithAllUsedSegments()
                .stream()
                .map(ImmutableDruidDataSource::getName)
                .collect(Collectors.toList())
    );
}
Use of org.apache.druid.timeline.DataSegment in the druid project (druid-io).
From class SqlSegmentsMetadataManagerTest, method testMarkAsUsedNonOvershadowedSegmentsWithInvalidSegmentIds.
/**
 * Marking segment ids that were never published (and thus are unknown to the metadata
 * store) must raise {@link UnknownSegmentIdsException} rather than silently marking
 * nothing.
 */
@Test(expected = UnknownSegmentIdsException.class)
public void testMarkAsUsedNonOvershadowedSegmentsWithInvalidSegmentIds() throws UnknownSegmentIdsException {
    sqlSegmentsMetadataManager.startPollingDatabasePeriodically();
    sqlSegmentsMetadataManager.poll();
    Assert.assertTrue(sqlSegmentsMetadataManager.isPollingDatabasePeriodically());

    // These segments are created but deliberately never published, so their ids are unknown.
    // NOTE(review): both come from createNewSegment1 with the same datasource, so their ids
    // may be equal and the set may dedupe to a single id — possibly createNewSegment2 was
    // intended for the second one; confirm against the factory methods.
    final String unknownDataSource = "wikipedia2";
    final DataSegment unpublished1 = createNewSegment1(unknownDataSource);
    final DataSegment unpublished2 = createNewSegment1(unknownDataSource);
    final ImmutableSet<String> unknownIds = ImmutableSet.of(
        unpublished1.getId().toString(),
        unpublished2.getId().toString()
    );

    sqlSegmentsMetadataManager.poll();
    // Only the pre-existing segments are used; the unpublished ones are absent.
    Assert.assertEquals(
        ImmutableSet.of(segment1, segment2),
        ImmutableSet.copyOf(sqlSegmentsMetadataManager.iterateAllUsedSegments())
    );
    // None of the ids exist in the datasource, so this call is expected to throw.
    Assert.assertEquals(
        0,
        sqlSegmentsMetadataManager.markAsUsedNonOvershadowedSegments(unknownDataSource, unknownIds)
    );
}
Use of org.apache.druid.timeline.DataSegment in the druid project (druid-io).
From class SqlSegmentsMetadataManagerTest, method testMarkSegmentsAsUnused.
/**
 * Publishes two segments for a new datasource, marks them unused by id, and verifies
 * that a subsequent poll no longer reports them among the used segments.
 *
 * <p>Fix: the id set was previously built from {@code newSegment1.getId()} twice.
 * Guava's {@code ImmutableSet.of} silently dedupes equal elements, so the set collapsed
 * to a single id — the size assertion and the final used-segments assertion then tested
 * less than intended (and would fail if the two segments ever had distinct ids). The set
 * now contains both segments' ids.
 */
@Test
public void testMarkSegmentsAsUnused() throws IOException {
    sqlSegmentsMetadataManager.startPollingDatabasePeriodically();
    sqlSegmentsMetadataManager.poll();
    Assert.assertTrue(sqlSegmentsMetadataManager.isPollingDatabasePeriodically());

    // NOTE(review): both segments come from createNewSegment1 with the same datasource;
    // if that factory is deterministic they share an id — possibly createNewSegment2 was
    // intended for the second one. Confirm against the factory methods.
    final String newDataSource = "wikipedia2";
    final DataSegment newSegment1 = createNewSegment1(newDataSource);
    final DataSegment newSegment2 = createNewSegment1(newDataSource);
    publisher.publishSegment(newSegment1);
    publisher.publishSegment(newSegment2);

    // Build the set from BOTH segments' ids (was: newSegment1.getId() twice).
    final ImmutableSet<SegmentId> segmentIds =
        ImmutableSet.of(newSegment1.getId(), newSegment2.getId());
    Assert.assertEquals(segmentIds.size(), sqlSegmentsMetadataManager.markSegmentsAsUnused(segmentIds));

    sqlSegmentsMetadataManager.poll();
    // After marking both new segments unused, only the original segments remain used.
    Assert.assertEquals(
        ImmutableSet.of(segment1, segment2),
        ImmutableSet.copyOf(sqlSegmentsMetadataManager.iterateAllUsedSegments())
    );
}
Use of org.apache.druid.timeline.DataSegment in the druid project (druid-io).
From class SqlSegmentsMetadataManagerTest, method testPrepareImmutableDataSourcesWithAllUsedSegmentsAwaitsPollOnRestart.
/**
 * After stop/restart of periodic polling, the immutable datasources view must wait for
 * the fresh poll and therefore include the segment published while polling was stopped.
 */
@Test
public void testPrepareImmutableDataSourcesWithAllUsedSegmentsAwaitsPollOnRestart() throws IOException {
    final DataSegment publishedWhileStopped = pollThenStopThenStartIntro();
    // Flatten every used segment across all datasources and compare as a set.
    final ImmutableSet<DataSegment> allUsedSegments = ImmutableSet.copyOf(
        sqlSegmentsMetadataManager
            .getImmutableDataSourcesWithAllUsedSegments()
            .stream()
            .flatMap((ImmutableDruidDataSource dataSource) -> dataSource.getSegments().stream())
            .iterator()
    );
    Assert.assertEquals(ImmutableSet.of(segment1, segment2, publishedWhileStopped), allUsedSegments);
}
Aggregations