Use of org.apache.druid.timeline.DataSegment in project druid by druid-io.
In class LoadRuleTest, method createLoadingPeon:
private static LoadQueuePeon createLoadingPeon(List<DataSegment> segments, boolean slowLoading) {
  final Set<DataSegment> segs = ImmutableSet.copyOf(segments);
  final long loadingSize = segs.stream().mapToLong(DataSegment::getSize).sum();
  // Stub out the peon so it always reports the given segments as queued for loading.
  final LoadQueuePeon mockPeon = EasyMock.createMock(LoadQueuePeon.class);
  EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(segs).anyTimes();
  EasyMock.expect(mockPeon.getSegmentsMarkedToDrop()).andReturn(new HashSet<>()).anyTimes();
  EasyMock.expect(mockPeon.getLoadQueueSize()).andReturn(loadingSize).anyTimes();
  EasyMock.expect(mockPeon.getNumberOfSegmentsInQueue()).andReturn(segs.size()).anyTimes();
  // A "slow" peon reports every segment as timed out; a healthy one reports none.
  if (slowLoading) {
    EasyMock.expect(mockPeon.getTimedOutSegments()).andReturn(new HashSet<>(segments)).anyTimes();
  } else {
    EasyMock.expect(mockPeon.getTimedOutSegments()).andReturn(new HashSet<>()).anyTimes();
  }
  // Note: the mock is returned unreplayed so callers can add further expectations.
  return mockPeon;
}
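A minimal sketch of how the helper might be exercised (hypothetical usage, not taken from LoadRuleTest itself): build a segment with the same builder used in the ServersResourceTest snippet below, replay the mock, read back the stubbed getters, then verify.

DataSegment segment = DataSegment.builder()
    .dataSource("dataSource")
    .interval(Intervals.of("2016-03-22T14Z/2016-03-22T15Z"))
    .version("v0")
    .size(1L)
    .build();
LoadQueuePeon peon = createLoadingPeon(ImmutableList.of(segment), false);
// The helper leaves the mock unreplayed; replay it before use.
EasyMock.replay(peon);
// A non-slow peon queues the one segment but reports nothing as timed out.
Assert.assertEquals(1, peon.getNumberOfSegmentsInQueue());
Assert.assertTrue(peon.getTimedOutSegments().isEmpty());
EasyMock.verify(peon);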
Use of org.apache.druid.timeline.DataSegment in project druid by druid-io.
In class DataSourcesResourceTest, method testMarkSegmentAsUsed:
@Test
public void testMarkSegmentAsUsed() {
  DataSegment segment = dataSegmentList.get(0);
  // Expect exactly one call marking the segment as used, reporting success.
  EasyMock.expect(segmentsMetadataManager.markSegmentAsUsed(segment.getId().toString())).andReturn(true).once();
  EasyMock.replay(segmentsMetadataManager);
  DataSourcesResource dataSourcesResource =
      new DataSourcesResource(null, segmentsMetadataManager, null, null, null, null);
  Response response = dataSourcesResource.markSegmentAsUsed(segment.getDataSource(), segment.getId().toString());
  Assert.assertEquals(200, response.getStatus());
  EasyMock.verify(segmentsMetadataManager);
}
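A hedged companion sketch for the failure path (hypothetical, not part of the original test class; assume the same segment and resource setup as above): the metadata manager reports that no state changed, for example because the segment was already marked used. The exact response body varies across Druid versions, so only the status code is asserted.

// Assumption: the endpoint still answers 200 when nothing changed; adjust if your
// Druid version returns a different status or body in this case.
EasyMock.expect(segmentsMetadataManager.markSegmentAsUsed(segment.getId().toString())).andReturn(false).once();
EasyMock.replay(segmentsMetadataManager);
Response response = dataSourcesResource.markSegmentAsUsed(segment.getDataSource(), segment.getId().toString());
Assert.assertEquals(200, response.getStatus());
EasyMock.verify(segmentsMetadataManager);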
Use of org.apache.druid.timeline.DataSegment in project druid by druid-io.
In class DataSourcesResourceTest, method testGetDatasourceLoadstatusSimple:
@Test
public void testGetDatasourceLoadstatusSimple() {
  DataSegment datasource1Segment1 = new DataSegment(
      "datasource1", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 10
  );
  DataSegment datasource1Segment2 = new DataSegment(
      "datasource1", Intervals.of("2010-01-22/P1D"), "", null, null, null, null, 0x9, 20
  );
  DataSegment datasource2Segment1 = new DataSegment(
      "datasource2", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 30
  );
  List<DataSegment> segments = ImmutableList.of(datasource1Segment1, datasource1Segment2);
  Map<SegmentId, SegmentLoadInfo> completedLoadInfoMap = ImmutableMap.of(
      datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1),
      datasource1Segment2.getId(), new SegmentLoadInfo(datasource1Segment2),
      datasource2Segment1.getId(), new SegmentLoadInfo(datasource2Segment1)
  );
  Map<SegmentId, SegmentLoadInfo> halfLoadedInfoMap = ImmutableMap.of(
      datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1)
  );

  // Test when the datasource is fully loaded: the "simple" view reports 0 segments left to load.
  EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(
      EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean()
  )).andReturn(Optional.of(segments)).once();
  EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(completedLoadInfoMap).once();
  EasyMock.replay(segmentsMetadataManager, inventoryView);
  DataSourcesResource dataSourcesResource =
      new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
  Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null);
  Assert.assertEquals(200, response.getStatus());
  Assert.assertNotNull(response.getEntity());
  Assert.assertEquals(1, ((Map) response.getEntity()).size());
  Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
  Assert.assertEquals(0, ((Map) response.getEntity()).get("datasource1"));
  EasyMock.verify(segmentsMetadataManager, inventoryView);
  EasyMock.reset(segmentsMetadataManager, inventoryView);

  // Test when the datasource is half loaded: one of the two segments is still waiting, so the count is 1.
  EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(
      EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean()
  )).andReturn(Optional.of(segments)).once();
  EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(halfLoadedInfoMap).once();
  EasyMock.replay(segmentsMetadataManager, inventoryView);
  dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
  response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null);
  Assert.assertEquals(200, response.getStatus());
  Assert.assertNotNull(response.getEntity());
  Assert.assertEquals(1, ((Map) response.getEntity()).size());
  Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
  Assert.assertEquals(1, ((Map) response.getEntity()).get("datasource1"));
  EasyMock.verify(segmentsMetadataManager, inventoryView);
}
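The two ImmutableMap.of(...) literals above repeat the same id-to-SegmentLoadInfo pairing; a small hypothetical helper (not in the Druid source, built only from calls already used in this test) could construct such maps from any set of segments:

private static Map<SegmentId, SegmentLoadInfo> loadInfoMap(DataSegment... segments) {
  ImmutableMap.Builder<SegmentId, SegmentLoadInfo> builder = ImmutableMap.builder();
  for (DataSegment segment : segments) {
    // Key each SegmentLoadInfo by its segment id, exactly as the literals above do.
    builder.put(segment.getId(), new SegmentLoadInfo(segment));
  }
  return builder.build();
}

With this helper, completedLoadInfoMap becomes loadInfoMap(datasource1Segment1, datasource1Segment2, datasource2Segment1) and halfLoadedInfoMap becomes loadInfoMap(datasource1Segment1).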
Use of org.apache.druid.timeline.DataSegment in project druid by druid-io.
In class ServersResourceTest, method setUp:
@Before
public void setUp() {
  DruidServer dummyServer = new DruidServer("dummy", "host", null, 1234L, ServerType.HISTORICAL, "tier", 0);
  DataSegment segment = DataSegment.builder()
      .dataSource("dataSource")
      .interval(Intervals.of("2016-03-22T14Z/2016-03-22T15Z"))
      .version("v0")
      .size(1L)
      .build();
  dummyServer.addDataSegment(segment);
  // The mocked inventory view serves the single dummy server for any number of lookups.
  CoordinatorServerView inventoryView = EasyMock.createMock(CoordinatorServerView.class);
  EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(dummyServer)).anyTimes();
  EasyMock.expect(inventoryView.getInventoryValue(dummyServer.getName())).andReturn(dummyServer).anyTimes();
  EasyMock.replay(inventoryView);
  server = dummyServer;
  serversResource = new ServersResource(inventoryView);
}
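A quick hypothetical sanity test (not in the original class) showing what the fixture guarantees: the dummy server advertises the 1234-byte capacity passed to its constructor and the 1-byte segment added in setUp.

@Test
public void testFixtureState() {
  // Both getters are plain DruidServer accessors; no mock interaction is needed here.
  Assert.assertEquals(1234L, server.getMaxSize());
  Assert.assertEquals(1L, server.getCurrSize());
}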
Use of org.apache.druid.timeline.DataSegment in project druid by druid-io.
In class NewestSegmentFirstPolicyTest, method testIteratorDoesNotReturnCompactedInterval:
@Test
public void testIteratorDoesNotReturnCompactedInterval() {
  final VersionedIntervalTimeline<String, DataSegment> timeline = createTimeline(
      new SegmentGenerateSpec(Intervals.of("2017-12-01T00:00:00/2017-12-02T00:00:00"), new Period("P1D"))
  );
  final CompactionSegmentIterator iterator = policy.reset(
      ImmutableMap.of(
          DATA_SOURCE,
          createCompactionConfig(40000, new Period("P0D"), new UserCompactionTaskGranularityConfig(Granularities.MINUTE, null, null))
      ),
      ImmutableMap.of(DATA_SOURCE, timeline),
      Collections.emptyMap()
  );
  final List<DataSegment> expectedSegmentsToCompact = new ArrayList<>(
      timeline.findNonOvershadowedObjectsInInterval(Intervals.of("2017-12-01T00:00:00/2017-12-02T00:00:00"), Partitions.ONLY_COMPLETE)
  );
  Assert.assertTrue(iterator.hasNext());
  Assert.assertEquals(ImmutableSet.copyOf(expectedSegmentsToCompact), ImmutableSet.copyOf(iterator.next()));
  // Only one batch is expected: every MINUTE-granularity bucket falls inside the segment interval already returned.
  Assert.assertFalse(iterator.hasNext());
}
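For context, a minimal sketch of the consumption pattern the assertions above exercise (assuming, as the assertEquals call implies, that next() yields a List<DataSegment> batch): CompactionSegmentIterator follows the standard java.util.Iterator contract, so callers drain it batch by batch until hasNext() goes false.

while (iterator.hasNext()) {
  List<DataSegment> batch = iterator.next();
  // Hand `batch` off to a compaction task here (omitted); in this test the loop body runs exactly once.
}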