Example 16 with DataSegment

Use of org.apache.druid.timeline.DataSegment in project druid by druid-io.

From the class LoadRuleTest, the method createLoadingPeon:

private static LoadQueuePeon createLoadingPeon(List<DataSegment> segments, boolean slowLoading) {
    final Set<DataSegment> segs = ImmutableSet.copyOf(segments);
    final long loadingSize = segs.stream().mapToLong(DataSegment::getSize).sum();
    final LoadQueuePeon mockPeon = EasyMock.createMock(LoadQueuePeon.class);
    EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(segs).anyTimes();
    EasyMock.expect(mockPeon.getSegmentsMarkedToDrop()).andReturn(new HashSet<>()).anyTimes();
    EasyMock.expect(mockPeon.getLoadQueueSize()).andReturn(loadingSize).anyTimes();
    EasyMock.expect(mockPeon.getNumberOfSegmentsInQueue()).andReturn(segs.size()).anyTimes();
    if (slowLoading) {
        EasyMock.expect(mockPeon.getTimedOutSegments()).andReturn(new HashSet<>(segments)).anyTimes();
    } else {
        EasyMock.expect(mockPeon.getTimedOutSegments()).andReturn(new HashSet<>()).anyTimes();
    }
    return mockPeon;
}
Also used : LoadQueuePeon(org.apache.druid.server.coordinator.LoadQueuePeon) DataSegment(org.apache.druid.timeline.DataSegment) HashSet(java.util.HashSet)
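
A minimal usage sketch for the helper above, written as a hypothetical test method: someSegment is a placeholder for a DataSegment fixture built elsewhere in the class, and only the calls stubbed by createLoadingPeon are exercised.

@Test
public void exampleUsageOfCreateLoadingPeon() {
    // Hypothetical test; someSegment stands in for a DataSegment fixture built elsewhere.
    List<DataSegment> segments = ImmutableList.of(someSegment);
    LoadQueuePeon peon = createLoadingPeon(segments, /* slowLoading */ false);
    // Switch the EasyMock mock from record mode to replay mode before using it.
    EasyMock.replay(peon);
    Assert.assertEquals(segments.size(), peon.getNumberOfSegmentsInQueue());
    Assert.assertTrue(peon.getTimedOutSegments().isEmpty());
    EasyMock.verify(peon);
}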

Example 17 with DataSegment

Use of org.apache.druid.timeline.DataSegment in project druid by druid-io.

From the class DataSourcesResourceTest, the method testMarkSegmentAsUsed:

@Test
public void testMarkSegmentAsUsed() {
    DataSegment segment = dataSegmentList.get(0);
    EasyMock.expect(segmentsMetadataManager.markSegmentAsUsed(segment.getId().toString())).andReturn(true).once();
    EasyMock.replay(segmentsMetadataManager);
    DataSourcesResource dataSourcesResource = new DataSourcesResource(null, segmentsMetadataManager, null, null, null, null);
    Response response = dataSourcesResource.markSegmentAsUsed(segment.getDataSource(), segment.getId().toString());
    Assert.assertEquals(200, response.getStatus());
    EasyMock.verify(segmentsMetadataManager);
}
Also used : Response(javax.ws.rs.core.Response) DataSegment(org.apache.druid.timeline.DataSegment) Test(org.junit.Test)
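
The test reads its segment from a dataSegmentList fixture defined elsewhere in DataSourcesResourceTest. Below is a sketch of how such a fixture could be built, reusing the nine-argument DataSegment constructor shown in the next example; the values are illustrative, not the project's actual test data.

// Illustrative fixture; the datasource names, intervals, and sizes are placeholders.
private final List<DataSegment> dataSegmentList = ImmutableList.of(
    new DataSegment("datasource1", Intervals.of("2010-01-01/P1D"), "v1", null, null, null, null, 0x9, 10),
    new DataSegment("datasource1", Intervals.of("2010-01-02/P1D"), "v1", null, null, null, null, 0x9, 20),
    new DataSegment("datasource2", Intervals.of("2010-01-01/P1D"), "v1", null, null, null, null, 0x9, 30)
);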

Example 18 with DataSegment

Use of org.apache.druid.timeline.DataSegment in project druid by druid-io.

From the class DataSourcesResourceTest, the method testGetDatasourceLoadstatusSimple:

@Test
public void testGetDatasourceLoadstatusSimple() {
    DataSegment datasource1Segment1 = new DataSegment("datasource1", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 10);
    DataSegment datasource1Segment2 = new DataSegment("datasource1", Intervals.of("2010-01-22/P1D"), "", null, null, null, null, 0x9, 20);
    DataSegment datasource2Segment1 = new DataSegment("datasource2", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 30);
    List<DataSegment> segments = ImmutableList.of(datasource1Segment1, datasource1Segment2);
    Map<SegmentId, SegmentLoadInfo> completedLoadInfoMap = ImmutableMap.of(
        datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1),
        datasource1Segment2.getId(), new SegmentLoadInfo(datasource1Segment2),
        datasource2Segment1.getId(), new SegmentLoadInfo(datasource2Segment1)
    );
    Map<SegmentId, SegmentLoadInfo> halfLoadedInfoMap = ImmutableMap.of(datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1));
    // Test when datasource fully loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())).andReturn(Optional.of(segments)).once();
    EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(completedLoadInfoMap).once();
    EasyMock.replay(segmentsMetadataManager, inventoryView);
    DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
    Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null);
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(1, ((Map) response.getEntity()).size());
    Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
    Assert.assertEquals(0, ((Map) response.getEntity()).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager, inventoryView);
    EasyMock.reset(segmentsMetadataManager, inventoryView);
    // Test when datasource half loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())).andReturn(Optional.of(segments)).once();
    EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(halfLoadedInfoMap).once();
    EasyMock.replay(segmentsMetadataManager, inventoryView);
    dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
    response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null);
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(1, ((Map) response.getEntity()).size());
    Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
    Assert.assertEquals(1, ((Map) response.getEntity()).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager, inventoryView);
}
Also used : Response(javax.ws.rs.core.Response) SegmentId(org.apache.druid.timeline.SegmentId) SegmentLoadInfo(org.apache.druid.client.SegmentLoadInfo) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) DataSegment(org.apache.druid.timeline.DataSegment) Test(org.junit.Test)
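
In the "simple" view the response entity maps each datasource name to the number of used segments not yet loaded, which is why the fully loaded case asserts 0 and the half-loaded case asserts 1. A small sketch of how a caller might read that count back out of the entity; the raw Map cast mirrors the assertions above.

// Extract the number of segments still waiting to be loaded for datasource1.
Map<?, ?> loadStatus = (Map<?, ?>) response.getEntity();
int segmentsLeftToLoad = ((Number) loadStatus.get("datasource1")).intValue();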

Example 19 with DataSegment

Use of org.apache.druid.timeline.DataSegment in project druid by druid-io.

From the class ServersResourceTest, the method setUp:

@Before
public void setUp() {
    DruidServer dummyServer = new DruidServer("dummy", "host", null, 1234L, ServerType.HISTORICAL, "tier", 0);
    DataSegment segment = DataSegment.builder().dataSource("dataSource").interval(Intervals.of("2016-03-22T14Z/2016-03-22T15Z")).version("v0").size(1L).build();
    dummyServer.addDataSegment(segment);
    CoordinatorServerView inventoryView = EasyMock.createMock(CoordinatorServerView.class);
    EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(dummyServer)).anyTimes();
    EasyMock.expect(inventoryView.getInventoryValue(dummyServer.getName())).andReturn(dummyServer).anyTimes();
    EasyMock.replay(inventoryView);
    server = dummyServer;
    serversResource = new ServersResource(inventoryView);
}
Also used : DruidServer(org.apache.druid.client.DruidServer) DataSegment(org.apache.druid.timeline.DataSegment) CoordinatorServerView(org.apache.druid.client.CoordinatorServerView) Before(org.junit.Before)
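
A minimal follow-up sketch, assuming the mocked view were also kept in a field named inventoryView (in the snippet it is a local variable); it exercises only the two calls stubbed above.

@Test
public void exampleMockedInventoryView() {
    // Assumes the mocked CoordinatorServerView is stored in an inventoryView field.
    Assert.assertSame(server, Iterables.getOnlyElement(inventoryView.getInventory()));
    Assert.assertSame(server, inventoryView.getInventoryValue(server.getName()));
}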

Example 20 with DataSegment

Use of org.apache.druid.timeline.DataSegment in project druid by druid-io.

From the class NewestSegmentFirstPolicyTest, the method testIteratorDoesNotReturnCompactedInterval:

@Test
public void testIteratorDoesNotReturnCompactedInterval() {
    final VersionedIntervalTimeline<String, DataSegment> timeline = createTimeline(new SegmentGenerateSpec(Intervals.of("2017-12-01T00:00:00/2017-12-02T00:00:00"), new Period("P1D")));
    final CompactionSegmentIterator iterator = policy.reset(
        ImmutableMap.of(DATA_SOURCE, createCompactionConfig(40000, new Period("P0D"), new UserCompactionTaskGranularityConfig(Granularities.MINUTE, null, null))),
        ImmutableMap.of(DATA_SOURCE, timeline),
        Collections.emptyMap()
    );
    final List<DataSegment> expectedSegmentsToCompact = new ArrayList<>(timeline.findNonOvershadowedObjectsInInterval(Intervals.of("2017-12-01T00:00:00/2017-12-02T00:00:00"), Partitions.ONLY_COMPLETE));
    Assert.assertTrue(iterator.hasNext());
    Assert.assertEquals(ImmutableSet.copyOf(expectedSegmentsToCompact), ImmutableSet.copyOf(iterator.next()));
    // The iterator should return only once, since all of the "minute" granularity intervals fall within the same day interval
    Assert.assertFalse(iterator.hasNext());
}
Also used : ArrayList(java.util.ArrayList) Period(org.joda.time.Period) UserCompactionTaskGranularityConfig(org.apache.druid.server.coordinator.UserCompactionTaskGranularityConfig) DataSegment(org.apache.druid.timeline.DataSegment) Test(org.junit.Test)
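
The iterator produced by policy.reset(...) yields one batch of segments per call to next(). A short sketch of draining such an iterator into a single set, shown separately from the assertions above (which already consume it):

// Drain a CompactionSegmentIterator; each next() returns one batch of segments to compact.
Set<DataSegment> allCandidates = new HashSet<>();
while (iterator.hasNext()) {
    allCandidates.addAll(iterator.next());
}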

Aggregations

DataSegment (org.apache.druid.timeline.DataSegment): 612
Test (org.junit.Test): 386
ArrayList (java.util.ArrayList): 161
Interval (org.joda.time.Interval): 158
File (java.io.File): 138
Map (java.util.Map): 110
List (java.util.List): 108
ImmutableList (com.google.common.collect.ImmutableList): 77
IOException (java.io.IOException): 77
HashMap (java.util.HashMap): 74
ImmutableMap (com.google.common.collect.ImmutableMap): 72
NumberedShardSpec (org.apache.druid.timeline.partition.NumberedShardSpec): 68
HashSet (java.util.HashSet): 58
TaskStatus (org.apache.druid.indexer.TaskStatus): 53
Collectors (java.util.stream.Collectors): 52
Set (java.util.Set): 50
CountDownLatch (java.util.concurrent.CountDownLatch): 50
ISE (org.apache.druid.java.util.common.ISE): 50
SegmentId (org.apache.druid.timeline.SegmentId): 47
LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec): 45