Use of org.apache.druid.timeline.SegmentId in project druid (druid-io), from the class HttpLoadQueuePeonTest, method testLoadDropAfterStop.
/**
 * Verifies that load/drop requests queued while the peon is running complete their
 * callbacks, and that requests submitted after {@code stop()} also get their
 * callbacks invoked — the test would time out otherwise.
 */
@Test(timeout = 60_000L)
public void testLoadDropAfterStop() throws Exception
{
  final HttpLoadQueuePeon peon = new HttpLoadQueuePeon(
      "http://dummy:4000",
      ServerTestHelper.MAPPER,
      new TestHttpClient(),
      config,
      Executors.newScheduledThreadPool(2, Execs.makeThreadFactory("HttpLoadQueuePeonTest-%s")),
      Execs.singleThreaded("HttpLoadQueuePeonTest")
  );
  peon.start();

  // One latch per segment; each callback counts its own latch down exactly once.
  final Map<SegmentId, CountDownLatch> doneSignals = ImmutableMap.of(
      segment1.getId(), new CountDownLatch(1),
      segment2.getId(), new CountDownLatch(1),
      segment3.getId(), new CountDownLatch(1),
      segment4.getId(), new CountDownLatch(1)
  );

  // Requests issued while the peon is running must complete.
  peon.dropSegment(segment1, () -> doneSignals.get(segment1.getId()).countDown());
  peon.loadSegment(segment2, () -> doneSignals.get(segment2.getId()).countDown());
  doneSignals.get(segment1.getId()).await();
  doneSignals.get(segment2.getId()).await();

  peon.stop();

  // Requests issued after stop() must still trigger their callbacks.
  peon.dropSegment(segment3, () -> doneSignals.get(segment3.getId()).countDown());
  peon.loadSegment(segment4, () -> doneSignals.get(segment4.getId()).countDown());
  doneSignals.get(segment3.getId()).await();
  doneSignals.get(segment4.getId()).await();
}
Use of org.apache.druid.timeline.SegmentId in project druid (druid-io), from the class DruidSchema, method runSegmentMetadataQuery.
/**
 * Execute a SegmentMetadata query and return a {@link Sequence} of {@link SegmentAnalysis}.
 *
 * @param segments Iterable of {@link SegmentId} objects that are subject of the SegmentMetadata
 *                 query; all of them must belong to the same dataSource
 * @return {@link Sequence} of {@link SegmentAnalysis} objects
 */
@VisibleForTesting
protected Sequence<SegmentAnalysis> runSegmentMetadataQuery(final Iterable<SegmentId> segments)
{
  // Sanity check: collect the distinct dataSource names and demand exactly one;
  // Iterables.getOnlyElement throws if the segments span multiple dataSources.
  final Set<String> dataSourceNames = StreamSupport.stream(segments.spliterator(), false)
                                                   .map(SegmentId::getDataSource)
                                                   .collect(Collectors.toSet());
  final String dataSource = Iterables.getOnlyElement(dataSourceNames);

  // Target exactly the given segments, via their descriptors.
  final MultipleSpecificSegmentSpec querySegmentSpec = new MultipleSpecificSegmentSpec(
      StreamSupport.stream(segments.spliterator(), false)
                   .map(SegmentId::toDescriptor)
                   .collect(Collectors.toList())
  );

  final SegmentMetadataQuery segmentMetadataQuery = new SegmentMetadataQuery(
      new TableDataSource(dataSource),
      querySegmentSpec,
      new AllColumnIncluderator(),
      false,
      brokerInternalQueryConfig.getContext(),
      EnumSet.noneOf(SegmentMetadataQuery.AnalysisType.class),
      false,
      false
  );

  return queryLifecycleFactory
      .factorize()
      .runSimple(segmentMetadataQuery, escalator.createEscalatedAuthenticationResult(), Access.OK);
}
Use of org.apache.druid.timeline.SegmentId in project druid (druid-io), from the class DruidSchema, method refresh.
/**
 * Refresh the given segments, then rebuild the tables of every dataSource that
 * needs it. Segments that could not be refreshed are re-queued for a later pass.
 * NOTE(review): refreshSegments appears to return the successfully refreshed
 * subset — confirm against its definition.
 */
@VisibleForTesting
void refresh(final Set<SegmentId> segmentsToRefresh, final Set<String> dataSourcesToRebuild) throws IOException
{
  // Refresh the segments (done outside the lock).
  final Set<SegmentId> refreshed = refreshSegments(segmentsToRefresh);

  synchronized (lock) {
    // Whatever did not get refreshed goes back on the refresh queue.
    segmentsNeedingRefresh.addAll(Sets.difference(segmentsToRefresh, refreshed));

    // Tables to rebuild: those already flagged, plus the owner of each
    // freshly refreshed segment.
    dataSourcesToRebuild.addAll(dataSourcesNeedingRebuild);
    for (SegmentId segmentId : refreshed) {
      dataSourcesToRebuild.add(segmentId.getDataSource());
    }
    dataSourcesNeedingRebuild.clear();
  }

  // Rebuild the dataSources (again outside the lock).
  for (String dataSource : dataSourcesToRebuild) {
    final DruidTable freshTable = buildDruidTable(dataSource);
    final DruidTable previousTable = tables.put(dataSource, freshTable);
    final String description = freshTable.getDataSource().isGlobal() ? "global dataSource" : "dataSource";
    if (previousTable != null && previousTable.getRowSignature().equals(freshTable.getRowSignature())) {
      log.debug("%s [%s] signature is unchanged.", description, dataSource);
    } else {
      // Either the table is brand new or its row signature changed.
      log.info("%s [%s] has new signature: %s.", description, dataSource, freshTable.getRowSignature());
    }
  }
}
Aggregations