
Example 6 with SegmentId

Use of org.apache.druid.timeline.SegmentId in project druid by druid-io.

From the class DataSourcesResourceTest, method testGetDatasourceLoadstatusSimple.

@Test
public void testGetDatasourceLoadstatusSimple() {
    // DataSegment(dataSource, interval, version, loadSpec, dimensions, metrics, shardSpec, binaryVersion, size)
    DataSegment datasource1Segment1 = new DataSegment("datasource1", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 10);
    DataSegment datasource1Segment2 = new DataSegment("datasource1", Intervals.of("2010-01-22/P1D"), "", null, null, null, null, 0x9, 20);
    DataSegment datasource2Segment1 = new DataSegment("datasource2", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 30);
    List<DataSegment> segments = ImmutableList.of(datasource1Segment1, datasource1Segment2);
    Map<SegmentId, SegmentLoadInfo> completedLoadInfoMap = ImmutableMap.of(
        datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1),
        datasource1Segment2.getId(), new SegmentLoadInfo(datasource1Segment2),
        datasource2Segment1.getId(), new SegmentLoadInfo(datasource2Segment1));
    Map<SegmentId, SegmentLoadInfo> halfLoadedInfoMap = ImmutableMap.of(
        datasource1Segment1.getId(), new SegmentLoadInfo(datasource1Segment1));
    // Test when datasource fully loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(
            EasyMock.eq("datasource1"),
            EasyMock.anyObject(Interval.class),
            EasyMock.anyBoolean()))
            .andReturn(Optional.of(segments))
            .once();
    EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(completedLoadInfoMap).once();
    EasyMock.replay(segmentsMetadataManager, inventoryView);
    DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
    Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null);
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(1, ((Map) response.getEntity()).size());
    Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
    Assert.assertEquals(0, ((Map) response.getEntity()).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager, inventoryView);
    EasyMock.reset(segmentsMetadataManager, inventoryView);
    // Test when datasource half loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(
            EasyMock.eq("datasource1"),
            EasyMock.anyObject(Interval.class),
            EasyMock.anyBoolean()))
            .andReturn(Optional.of(segments))
            .once();
    EasyMock.expect(inventoryView.getSegmentLoadInfos()).andReturn(halfLoadedInfoMap).once();
    EasyMock.replay(segmentsMetadataManager, inventoryView);
    dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
    response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null);
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(1, ((Map) response.getEntity()).size());
    Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1"));
    Assert.assertEquals(1, ((Map) response.getEntity()).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager, inventoryView);
}
Also used : Response(javax.ws.rs.core.Response) SegmentId(org.apache.druid.timeline.SegmentId) SegmentLoadInfo(org.apache.druid.client.SegmentLoadInfo) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) DataSegment(org.apache.druid.timeline.DataSegment) Test(org.junit.Test)
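
The assertions above read the "simple" loadstatus as a map from datasource name to the number of used segments not yet loaded: 0 once every segment appears in the load-info map, 1 when only one of the two used segments does. A minimal standalone sketch of that count, assuming nothing beyond the imports shown (the class and helper names are illustrative, not Druid API):

import java.util.Collection;
import java.util.Map;

import org.apache.druid.client.SegmentLoadInfo;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.SegmentId;

final class LoadstatusCountSketch {
    // Illustrative helper: how many used segments have no load info yet.
    static int numSegmentsLeftToLoad(Collection<DataSegment> usedSegments, Map<SegmentId, SegmentLoadInfo> loadInfos) {
        int remaining = 0;
        for (DataSegment segment : usedSegments) {
            if (!loadInfos.containsKey(segment.getId())) {
                remaining++;
            }
        }
        return remaining;
    }
}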

Example 7 with SegmentId

Use of org.apache.druid.timeline.SegmentId in project druid by druid-io.

From the class DataSourcesResourceTest, method testMarkSegmentsAsUnusedNoChanges.

@Test
public void testMarkSegmentsAsUnusedNoChanges() {
    final DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap<>());
    final Set<SegmentId> segmentIds = dataSegmentList.stream()
        .filter(segment -> segment.getDataSource().equals(dataSource1.getName()))
        .map(DataSegment::getId)
        .collect(Collectors.toSet());
    EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).once();
    EasyMock.expect(server.getDataSource("datasource1")).andReturn(dataSource1).once();
    EasyMock.expect(segmentsMetadataManager.markSegmentsAsUnused(segmentIds)).andReturn(0).once();
    EasyMock.replay(segmentsMetadataManager, inventoryView, server);
    final DataSourcesResource.MarkDataSourceSegmentsPayload payload = new DataSourcesResource.MarkDataSourceSegmentsPayload(
        null,
        segmentIds.stream().map(SegmentId::toString).collect(Collectors.toSet()));
    DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, null);
    Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload);
    Assert.assertEquals(200, response.getStatus());
    Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity());
    EasyMock.verify(segmentsMetadataManager, inventoryView, server);
}
Also used : Response(javax.ws.rs.core.Response) SegmentId(org.apache.druid.timeline.SegmentId) DruidDataSource(org.apache.druid.client.DruidDataSource) ImmutableDruidDataSource(org.apache.druid.client.ImmutableDruidDataSource) Test(org.junit.Test)
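
The payload above carries segment ids in their string form (via SegmentId::toString). A minimal round-trip sketch, assuming the SegmentId.of and SegmentId.tryParse factories behave as in recent Druid versions (all values are illustrative):

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.timeline.SegmentId;

final class SegmentIdStringFormSketch {
    public static void main(String[] args) {
        // Illustrative id; the string form is what MarkDataSourceSegmentsPayload transports.
        SegmentId id = SegmentId.of("datasource1", Intervals.of("2010-01-01/P1D"), "v1", 0);
        String asString = id.toString();

        // Parsing needs the datasource name because it may itself contain underscores.
        SegmentId parsed = SegmentId.tryParse("datasource1", asString);
        System.out.println(asString + " round-trips: " + id.equals(parsed));
    }
}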

Example 8 with SegmentId

Use of org.apache.druid.timeline.SegmentId in project druid by druid-io.

From the class SqlSegmentsMetadataManagerTest, method testMarkSegmentsAsUnused.

@Test
public void testMarkSegmentsAsUnused() throws IOException {
    sqlSegmentsMetadataManager.startPollingDatabasePeriodically();
    sqlSegmentsMetadataManager.poll();
    Assert.assertTrue(sqlSegmentsMetadataManager.isPollingDatabasePeriodically());
    final String newDataSource = "wikipedia2";
    final DataSegment newSegment1 = createNewSegment1(newDataSource);
    final DataSegment newSegment2 = createNewSegment1(newDataSource);
    publisher.publishSegment(newSegment1);
    publisher.publishSegment(newSegment2);
    final ImmutableSet<SegmentId> segmentIds = ImmutableSet.of(newSegment1.getId(), newSegment1.getId());
    Assert.assertEquals(segmentIds.size(), sqlSegmentsMetadataManager.markSegmentsAsUnused(segmentIds));
    sqlSegmentsMetadataManager.poll();
    Assert.assertEquals(ImmutableSet.of(segment1, segment2), ImmutableSet.copyOf(sqlSegmentsMetadataManager.iterateAllUsedSegments()));
}
Also used : SegmentId(org.apache.druid.timeline.SegmentId) DataSegment(org.apache.druid.timeline.DataSegment) Test(org.junit.Test)
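
Note that the set above is built from newSegment1.getId() twice; Guava's ImmutableSet.of does not reject duplicates, it deduplicates them, so segmentIds ends up with a single element and the assertion expects markSegmentsAsUnused to report one changed segment. A standalone sketch of that behaviour (values illustrative):

import com.google.common.collect.ImmutableSet;

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.timeline.SegmentId;

final class DuplicateSegmentIdSketch {
    public static void main(String[] args) {
        // ImmutableSet.of silently collapses equal elements, unlike ImmutableMap.of,
        // which throws on duplicate keys.
        SegmentId id = SegmentId.of("wikipedia2", Intervals.of("2017-10-15/P1D"), "v1", 0);
        ImmutableSet<SegmentId> ids = ImmutableSet.of(id, id);
        System.out.println(ids.size()); // prints 1
    }
}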

Example 9 with SegmentId

Use of org.apache.druid.timeline.SegmentId in project druid by druid-io.

From the class BroadcastSegmentIndexedTable, method computeCacheKey.

@Override
public byte[] computeCacheKey() {
    SegmentId segmentId = segment.getId();
    CacheKeyBuilder keyBuilder = new CacheKeyBuilder(CACHE_PREFIX);
    return keyBuilder
        .appendLong(segmentId.getInterval().getStartMillis())
        .appendLong(segmentId.getInterval().getEndMillis())
        .appendString(segmentId.getVersion())
        .appendString(segmentId.getDataSource())
        .appendInt(segmentId.getPartitionNum())
        .build();
}
Also used : SegmentId(org.apache.druid.timeline.SegmentId) CacheKeyBuilder(org.apache.druid.query.cache.CacheKeyBuilder)
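
The builder chain above decomposes the SegmentId into its stable parts (interval, version, data source, partition number) rather than hashing its string form. A self-contained sketch of the same pattern; the CACHE_PREFIX value and class name here are illustrative, not the ones BroadcastSegmentIndexedTable defines:

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.timeline.SegmentId;

final class SegmentCacheKeySketch {
    private static final byte CACHE_PREFIX = 0x01; // illustrative prefix byte

    static byte[] cacheKeyFor(SegmentId segmentId) {
        return new CacheKeyBuilder(CACHE_PREFIX)
            .appendLong(segmentId.getInterval().getStartMillis())
            .appendLong(segmentId.getInterval().getEndMillis())
            .appendString(segmentId.getVersion())
            .appendString(segmentId.getDataSource())
            .appendInt(segmentId.getPartitionNum())
            .build();
    }

    public static void main(String[] args) {
        SegmentId id = SegmentId.of("wikipedia", Intervals.of("2010-01-01/P1D"), "v1", 2);
        System.out.println(cacheKeyFor(id).length + " bytes");
    }
}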

Example 10 with SegmentId

Use of org.apache.druid.timeline.SegmentId in project druid by druid-io.

From the class CachingClusteredClientTest, method testQueryCachingWithFilter.

@SuppressWarnings("unchecked")
public void testQueryCachingWithFilter(
        final QueryRunner runner,
        final int numTimesToQuery,
        final Query query,
        final List<Iterable<Result<TimeseriesResultValue>>> filteredExpected,
        // does this assume query intervals must be ordered?
        Object... args) {
    final List<Interval> queryIntervals = Lists.newArrayListWithCapacity(args.length / 2);
    final List<List<Iterable<Result<Object>>>> expectedResults = Lists.newArrayListWithCapacity(queryIntervals.size());
    parseResults(queryIntervals, expectedResults, args);
    for (int i = 0; i < queryIntervals.size(); ++i) {
        List<Object> mocks = new ArrayList<>();
        mocks.add(serverView);
        final Interval actualQueryInterval = new Interval(queryIntervals.get(0).getStart(), queryIntervals.get(i).getEnd());
        final List<Map<DruidServer, ServerExpectations>> serverExpectationList = populateTimeline(queryIntervals, expectedResults, i, mocks);
        final Map<DruidServer, ServerExpectations> finalExpectation = serverExpectationList.get(serverExpectationList.size() - 1);
        for (Map.Entry<DruidServer, ServerExpectations> entry : finalExpectation.entrySet()) {
            DruidServer server = entry.getKey();
            ServerExpectations expectations = entry.getValue();
            EasyMock.expect(serverView.getQueryRunner(server)).andReturn(expectations.getQueryRunner()).times(0, 1);
            final Capture<? extends QueryPlus> capture = Capture.newInstance();
            final Capture<? extends ResponseContext> context = Capture.newInstance();
            QueryRunner queryable = expectations.getQueryRunner();
            if (query instanceof TimeseriesQuery) {
                final List<SegmentId> segmentIds = new ArrayList<>();
                final List<Iterable<Result<TimeseriesResultValue>>> results = new ArrayList<>();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
                        .andAnswer(new IAnswer<Sequence>() {
                            @Override
                            public Sequence answer() {
                                return toFilteredQueryableTimeseriesResults(
                                        (TimeseriesQuery) capture.getValue().getQuery(),
                                        segmentIds,
                                        queryIntervals,
                                        results);
                            }
                        })
                        .times(0, 1);
            } else {
                throw new ISE("Unknown query type[%s]", query.getClass());
            }
        }
        final Iterable<Result<Object>> expected = new ArrayList<>();
        for (int intervalNo = 0; intervalNo < i + 1; intervalNo++) {
            Iterables.addAll((List) expected, filteredExpected.get(intervalNo));
        }
        runWithMocks(new Runnable() {
            @Override
            public void run() {
                for (int i = 0; i < numTimesToQuery; ++i) {
                    TestHelper.assertExpectedResults(
                            expected,
                            runner.run(QueryPlus.wrap(query.withQuerySegmentSpec(
                                    new MultipleIntervalSegmentSpec(ImmutableList.of(actualQueryInterval))))));
                    if (queryCompletedCallback != null) {
                        queryCompletedCallback.run();
                    }
                }
            }
        }, mocks.toArray());
    }
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue), MergeIterable(org.apache.druid.java.util.common.guava.MergeIterable), FunctionalIterable(org.apache.druid.java.util.common.guava.FunctionalIterable), ArrayList(java.util.ArrayList), MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec), Result(org.apache.druid.query.Result), List(java.util.List), ImmutableList(com.google.common.collect.ImmutableList), ISE(org.apache.druid.java.util.common.ISE), TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery), SegmentId(org.apache.druid.timeline.SegmentId), QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer), FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner), QueryRunner(org.apache.druid.query.QueryRunner), IAnswer(org.easymock.IAnswer), Map(java.util.Map), TreeMap(java.util.TreeMap), ImmutableMap(com.google.common.collect.ImmutableMap), HashMap(java.util.HashMap), Interval(org.joda.time.Interval)
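
The only query manipulation the test relies on is re-scoping an existing query to the interval under test via withQuerySegmentSpec. A minimal sketch of that call, assuming the usual Druids timeseries builder; the query details and class name are illustrative:

import com.google.common.collect.ImmutableList;

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.timeseries.TimeseriesQuery;

final class QueryRescopeSketch {
    public static void main(String[] args) {
        // An illustrative timeseries query over two days.
        TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
            .dataSource("wikipedia")
            .intervals("2010-01-01/P2D")
            .granularity(Granularities.ALL)
            .aggregators(ImmutableList.of(new CountAggregatorFactory("rows")))
            .build();

        // Re-scope it to a single day, as the test does with actualQueryInterval.
        TimeseriesQuery rescoped = query.withQuerySegmentSpec(
            new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2010-01-01/P1D"))));
        System.out.println(rescoped.getIntervals());
    }
}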

Aggregations

SegmentId (org.apache.druid.timeline.SegmentId): 63
DataSegment (org.apache.druid.timeline.DataSegment): 32
Test (org.junit.Test): 21
Interval (org.joda.time.Interval): 14
ISE (org.apache.druid.java.util.common.ISE): 13
ArrayList (java.util.ArrayList): 12
Map (java.util.Map): 12
Set (java.util.Set): 12
ImmutableDruidDataSource (org.apache.druid.client.ImmutableDruidDataSource): 12
List (java.util.List): 11
ImmutableMap (com.google.common.collect.ImmutableMap): 10
IOException (java.io.IOException): 9
TreeMap (java.util.TreeMap): 9
CountDownLatch (java.util.concurrent.CountDownLatch): 9
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 8
Collectors (java.util.stream.Collectors): 8
Optional (java.util.Optional): 7
Sets (com.google.common.collect.Sets): 6
Nullable (javax.annotation.Nullable): 6
Response (javax.ws.rs.core.Response): 6