use of org.apache.druid.timeline.SegmentId in project druid by druid-io.
the class DataSourcesResourceTest method testGetDatasourceLoadstatusSimple.
@Test
public void testGetDatasourceLoadstatusSimple() {
DataSegment datasource1Segment1 =
    new DataSegment("datasource1", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 10);
DataSegment datasource1Segment2 =
    new DataSegment("datasource1", Intervals.of("2010-01-22/P1D"), "", null, null, null, null, 0x9, 20);
DataSegment datasource2Segment1 =
    new DataSegment("datasource2", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 30);
List<DataSegment> segments = ImmutableList.of(datasource1Segment1, datasource1Segment2);
Map<SegmentId, SegmentLoadInfo> completedLoadInfoMap = ImmutableMap.of(
    datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1),
    datasource1Segment2.getId(), new SegmentLoadInfo(datasource1Segment2),
    datasource2Segment1.getId(), new SegmentLoadInfo(datasource2Segment1)
);
Map<SegmentId, SegmentLoadInfo> halfLoadedInfoMap =
    ImmutableMap.of(datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1));
// Case 1: every used segment of the datasource is already loaded
EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(
    EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean()
)).andReturn(Optional.of(segments)).once();
EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(completedLoadInfoMap).once();
EasyMock.replay(segmentsMetadataManager, inventoryView);
DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null);
Assert.assertEquals(200, response.getStatus());
Assert.assertNotNull(response.getEntity());
Assert.assertEquals(1, ((Map) response.getEntity()).size());
Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
Assert.assertEquals(0, ((Map) response.getEntity()).get("datasource1"));
EasyMock.verify(segmentsMetadataManager, inventoryView);
EasyMock.reset(segmentsMetadataManager, inventoryView);
// Case 2: only one of the two used segments is loaded
EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(
    EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean()
)).andReturn(Optional.of(segments)).once();
EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(halfLoadedInfoMap).once();
EasyMock.replay(segmentsMetadataManager, inventoryView);
dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null);
Assert.assertEquals(200, response.getStatus());
Assert.assertNotNull(response.getEntity());
Assert.assertEquals(1, ((Map) response.getEntity()).size());
Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
Assert.assertEquals(1, ((Map) response.getEntity()).get("datasource1"));
EasyMock.verify(segmentsMetadataManager, inventoryView);
}
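The "simple" status above reports, per datasource, how many segments are still waiting to load, and each SegmentId key is fully determined by a segment's data source, interval, version, and partition number. A minimal sketch of that identity, assuming the SegmentId.of(dataSource, interval, version, partitionNum) factory and that a null shardSpec leaves a segment at partition 0; the values are illustrative:

DataSegment segment = new DataSegment(
    "datasource1", Intervals.of("2010-01-01/P1D"), "",
    null, null, null, null, 0x9, 10
);
// Assumption: SegmentId.of rebuilds the same identity that getId() reports.
SegmentId rebuilt = SegmentId.of("datasource1", Intervals.of("2010-01-01/P1D"), "", 0);
Assert.assertEquals(segment.getId(), rebuilt);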
use of org.apache.druid.timeline.SegmentId in project druid by druid-io.
the class DataSourcesResourceTest method testMarkSegmentsAsUnusedNoChanges.
@Test
public void testMarkSegmentsAsUnusedNoChanges() {
final DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap<>());
final Set<SegmentId> segmentIds = dataSegmentList.stream()
    .filter(segment -> segment.getDataSource().equals(dataSource1.getName()))
    .map(DataSegment::getId)
    .collect(Collectors.toSet());
EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).once();
EasyMock.expect(server.getDataSource("datasource1")).andReturn(dataSource1).once();
EasyMock.expect(segmentsMetadataManager.markSegmentsAsUnused(segmentIds)).andReturn(0).once();
EasyMock.replay(segmentsMetadataManager, inventoryView, server);
final DataSourcesResource.MarkDataSourceSegmentsPayload payload =
    new DataSourcesResource.MarkDataSourceSegmentsPayload(
        null,
        segmentIds.stream().map(SegmentId::toString).collect(Collectors.toSet())
    );
DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload);
Assert.assertEquals(200, response.getStatus());
Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity());
EasyMock.verify(segmentsMetadataManager, inventoryView, server);
}
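The payload above carries explicit segment-id strings and leaves the interval null; the same payload type also has an interval form, and the endpoint is expected to accept exactly one of the two fields. A hedged sketch of the interval variant, with an illustrative interval:

// Interval form of MarkDataSourceSegmentsPayload; the segment-id set is left
// null, mirroring how the test above leaves the interval null.
DataSourcesResource.MarkDataSourceSegmentsPayload intervalPayload =
    new DataSourcesResource.MarkDataSourceSegmentsPayload(
        Intervals.of("2010-01-01/P1M"), // illustrative interval, not from the test
        null
    );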
use of org.apache.druid.timeline.SegmentId in project druid by druid-io.
the class SqlSegmentsMetadataManagerTest method testMarkSegmentsAsUnused.
@Test
public void testMarkSegmentsAsUnused() throws IOException {
sqlSegmentsMetadataManager.startPollingDatabasePeriodically();
sqlSegmentsMetadataManager.poll();
Assert.assertTrue(sqlSegmentsMetadataManager.isPollingDatabasePeriodically());
final String newDataSource = "wikipedia2";
final DataSegment newSegment1 = createNewSegment1(newDataSource);
final DataSegment newSegment2 = createNewSegment2(newDataSource);
publisher.publishSegment(newSegment1);
publisher.publishSegment(newSegment2);
final ImmutableSet<SegmentId> segmentIds = ImmutableSet.of(newSegment1.getId(), newSegment2.getId());
Assert.assertEquals(segmentIds.size(), sqlSegmentsMetadataManager.markSegmentsAsUnused(segmentIds));
sqlSegmentsMetadataManager.poll();
Assert.assertEquals(
    ImmutableSet.of(segment1, segment2),
    ImmutableSet.copyOf(sqlSegmentsMetadataManager.iterateAllUsedSegments())
);
}
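Marking the two new segments unused is why the final poll sees only the original segment1 and segment2 as used. A small follow-up sketch, assuming a single-id variant markSegmentAsUnused(SegmentId) exists alongside the bulk call used above; repeating the operation on an already-unused id should then report no change:

// Assumption: boolean markSegmentAsUnused(SegmentId) returns whether the used
// flag actually changed, so marking an already-unused segment is a no-op.
Assert.assertFalse(sqlSegmentsMetadataManager.markSegmentAsUnused(newSegment1.getId()));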
use of org.apache.druid.timeline.SegmentId in project druid by druid-io.
the class BroadcastSegmentIndexedTable method computeCacheKey.
@Override
public byte[] computeCacheKey() {
SegmentId segmentId = segment.getId();
CacheKeyBuilder keyBuilder = new CacheKeyBuilder(CACHE_PREFIX);
return keyBuilder
    .appendLong(segmentId.getInterval().getStartMillis())
    .appendLong(segmentId.getInterval().getEndMillis())
    .appendString(segmentId.getVersion())
    .appendString(segmentId.getDataSource())
    .appendInt(segmentId.getPartitionNum())
    .build();
}
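The key concatenates five identity fields in a fixed order (interval start, interval end, version, data source, partition number), so changing any one of them changes the key bytes. A standalone sketch of the same construction, substituting a hypothetical prefix byte for the class's private CACHE_PREFIX constant:

// 0x01 is a stand-in for the private CACHE_PREFIX; the field order mirrors
// computeCacheKey() above.
SegmentId segmentId = SegmentId.of("ds", Intervals.of("2010-01-01/P1D"), "v1", 0);
byte[] key = new CacheKeyBuilder((byte) 0x01)
    .appendLong(segmentId.getInterval().getStartMillis())
    .appendLong(segmentId.getInterval().getEndMillis())
    .appendString(segmentId.getVersion())
    .appendString(segmentId.getDataSource())
    .appendInt(segmentId.getPartitionNum())
    .build();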
use of org.apache.druid.timeline.SegmentId in project druid by druid-io.
the class CachingClusteredClientTest method testQueryCachingWithFilter.
@SuppressWarnings("unchecked")
public void testQueryCachingWithFilter(
    final QueryRunner runner,
    final int numTimesToQuery,
    final Query query,
    final List<Iterable<Result<TimeseriesResultValue>>> filteredExpected,
    // does this assume query intervals must be ordered?
    Object... args
) {
final List<Interval> queryIntervals = Lists.newArrayListWithCapacity(args.length / 2);
final List<List<Iterable<Result<Object>>>> expectedResults = Lists.newArrayListWithCapacity(queryIntervals.size());
parseResults(queryIntervals, expectedResults, args);
for (int i = 0; i < queryIntervals.size(); ++i) {
List<Object> mocks = new ArrayList<>();
mocks.add(serverView);
final Interval actualQueryInterval =
    new Interval(queryIntervals.get(0).getStart(), queryIntervals.get(i).getEnd());
final List<Map<DruidServer, ServerExpectations>> serverExpectationList =
    populateTimeline(queryIntervals, expectedResults, i, mocks);
final Map<DruidServer, ServerExpectations> finalExpectation =
    serverExpectationList.get(serverExpectationList.size() - 1);
for (Map.Entry<DruidServer, ServerExpectations> entry : finalExpectation.entrySet()) {
DruidServer server = entry.getKey();
ServerExpectations expectations = entry.getValue();
EasyMock.expect(serverView.getQueryRunner(server)).andReturn(expectations.getQueryRunner()).times(0, 1);
final Capture<? extends QueryPlus> capture = Capture.newInstance();
final Capture<? extends ResponseContext> context = Capture.newInstance();
QueryRunner queryable = expectations.getQueryRunner();
if (query instanceof TimeseriesQuery) {
final List<SegmentId> segmentIds = new ArrayList<>();
final List<Iterable<Result<TimeseriesResultValue>>> results = new ArrayList<>();
for (ServerExpectation expectation : expectations) {
segmentIds.add(expectation.getSegmentId());
results.add(expectation.getResults());
}
EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
    .andAnswer(new IAnswer<Sequence>() {
      @Override
      public Sequence answer() {
        return toFilteredQueryableTimeseriesResults(
            (TimeseriesQuery) capture.getValue().getQuery(), segmentIds, queryIntervals, results
        );
      }
    })
    .times(0, 1);
} else {
throw new ISE("Unknown query type[%s]", query.getClass());
}
}
final Iterable<Result<Object>> expected = new ArrayList<>();
for (int intervalNo = 0; intervalNo < i + 1; intervalNo++) {
Iterables.addAll((List) expected, filteredExpected.get(intervalNo));
}
runWithMocks(new Runnable() {
@Override
public void run() {
for (int i = 0; i < numTimesToQuery; ++i) {
TestHelper.assertExpectedResults(
    expected,
    runner.run(QueryPlus.wrap(
        query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(ImmutableList.of(actualQueryInterval)))
    ))
);
if (queryCompletedCallback != null) {
queryCompletedCallback.run();
}
}
}
}, mocks.toArray());
}
}
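Each pass through the outer loop widens the query so it spans from the first interval's start to the i-th interval's end, and filteredExpected is accumulated to match. A small standalone illustration of that cumulative-interval construction, with illustrative dates:

// Iteration i effectively queries [interval 0 start, interval i end].
Interval first = Intervals.of("2011-01-01/2011-01-02");
Interval third = Intervals.of("2011-01-05/2011-01-06");
Interval cumulative = new Interval(first.getStart(), third.getEnd());
// cumulative spans 2011-01-01/2011-01-06, covering the intervals in between.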