Use of io.druid.segment.ReferenceCountingSegment in project druid by druid-io.
The class ServerManager, method getQueryRunnerForSegments: each requested SegmentDescriptor is resolved against the data source's versioned timeline, and the matching ReferenceCountingSegment is handed to buildAndDecorateQueryRunner; missing entries produce a ReportTimelineMissingSegmentQueryRunner.
@Override
public <T> QueryRunner<T> getQueryRunnerForSegments(Query<T> query, Iterable<SegmentDescriptor> specs) {
  final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
  if (factory == null) {
    log.makeAlert("Unknown query type, [%s]", query.getClass())
       .addData("dataSource", query.getDataSource())
       .emit();
    return new NoopQueryRunner<T>();
  }
  final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
  String dataSourceName = getDataSourceName(query.getDataSource());
  final VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline = dataSources.get(dataSourceName);
  if (timeline == null) {
    return new NoopQueryRunner<T>();
  }
  final Function<Query<T>, ServiceMetricEvent.Builder> builderFn = getBuilderFn(toolChest);
  final AtomicLong cpuTimeAccumulator = new AtomicLong(0L);
  FunctionalIterable<QueryRunner<T>> queryRunners = FunctionalIterable
      .create(specs)
      .transformCat(
          new Function<SegmentDescriptor, Iterable<QueryRunner<T>>>() {
            @Override
            @SuppressWarnings("unchecked")
            public Iterable<QueryRunner<T>> apply(SegmentDescriptor input) {
              final PartitionHolder<ReferenceCountingSegment> entry =
                  timeline.findEntry(input.getInterval(), input.getVersion());
              if (entry == null) {
                return Arrays.<QueryRunner<T>>asList(new ReportTimelineMissingSegmentQueryRunner<T>(input));
              }
              final PartitionChunk<ReferenceCountingSegment> chunk = entry.getChunk(input.getPartitionNumber());
              if (chunk == null) {
                return Arrays.<QueryRunner<T>>asList(new ReportTimelineMissingSegmentQueryRunner<T>(input));
              }
              final ReferenceCountingSegment adapter = chunk.getObject();
              return Arrays.asList(
                  buildAndDecorateQueryRunner(factory, toolChest, adapter, input, builderFn, cpuTimeAccumulator)
              );
            }
          }
      );
  return CPUTimeMetricQueryRunner.safeBuild(
      new FinalizeResultsQueryRunner<>(
          toolChest.mergeResults(factory.mergeRunners(exec, queryRunners)),
          toolChest
      ),
      builderFn,
      emitter,
      cpuTimeAccumulator,
      true
  );
}
Use of io.druid.segment.ReferenceCountingSegment in project druid by druid-io.
The class ServerManager, method loadSegment: a freshly loaded Segment is wrapped in a ReferenceCountingSegment before being registered in the data source's timeline.
/**
* Load a single segment.
*
* @param segment segment to load
*
* @return true if the segment was newly loaded, false if it was already loaded
*
* @throws SegmentLoadingException if the segment cannot be loaded
*/
public boolean loadSegment(final DataSegment segment) throws SegmentLoadingException {
  final Segment adapter;
  try {
    adapter = segmentLoader.getSegment(segment);
  } catch (SegmentLoadingException e) {
    try {
      segmentLoader.cleanup(segment);
    } catch (SegmentLoadingException e1) {
      // ignore
    }
    throw e;
  }
  if (adapter == null) {
    throw new SegmentLoadingException("Null adapter from loadSpec[%s]", segment.getLoadSpec());
  }
  synchronized (lock) {
    String dataSource = segment.getDataSource();
    VersionedIntervalTimeline<String, ReferenceCountingSegment> loadedIntervals = dataSources.get(dataSource);
    if (loadedIntervals == null) {
      loadedIntervals = new VersionedIntervalTimeline<>(Ordering.natural());
      dataSources.put(dataSource, loadedIntervals);
    }
    PartitionHolder<ReferenceCountingSegment> entry =
        loadedIntervals.findEntry(segment.getInterval(), segment.getVersion());
    if ((entry != null) && (entry.getChunk(segment.getShardSpec().getPartitionNum()) != null)) {
      log.warn("Told to load a adapter for a segment[%s] that already exists", segment.getIdentifier());
      return false;
    }
    loadedIntervals.add(
        segment.getInterval(),
        segment.getVersion(),
        segment.getShardSpec().createChunk(new ReferenceCountingSegment(adapter))
    );
    synchronized (dataSourceSizes) {
      dataSourceSizes.add(dataSource, segment.getSize());
    }
    synchronized (dataSourceCounts) {
      dataSourceCounts.add(dataSource, 1L);
    }
    return true;
  }
}
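The two ServerManager methods above share the same timeline bookkeeping: loadSegment registers a ReferenceCountingSegment under (interval, version, partition), and getQueryRunnerForSegments resolves a SegmentDescriptor along the same path. The following is a minimal sketch of that pairing, not code from the Druid sources; the class and method names are illustrative, and the io.druid package locations of the imports are assumed.

// Minimal sketch (not from the Druid sources) of the timeline bookkeeping shared by
// loadSegment() and getQueryRunnerForSegments(). Names are illustrative only.
import com.google.common.collect.Ordering;
import io.druid.query.SegmentDescriptor;
import io.druid.segment.ReferenceCountingSegment;
import io.druid.segment.Segment;
import io.druid.timeline.DataSegment;
import io.druid.timeline.VersionedIntervalTimeline;
import io.druid.timeline.partition.PartitionChunk;
import io.druid.timeline.partition.PartitionHolder;

class TimelineSketch {
  private final VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline =
      new VersionedIntervalTimeline<>(Ordering.natural());

  // Registration, as in loadSegment(): wrap the loaded Segment and let the shard spec
  // place it in the correct partition chunk for its (interval, version).
  void register(DataSegment segment, Segment adapter) {
    timeline.add(
        segment.getInterval(),
        segment.getVersion(),
        segment.getShardSpec().createChunk(new ReferenceCountingSegment(adapter))
    );
  }

  // Lookup, as in getQueryRunnerForSegments(): a missing entry or chunk means the
  // requested segment is not served here (the real code reports a missing segment).
  ReferenceCountingSegment find(SegmentDescriptor descriptor) {
    PartitionHolder<ReferenceCountingSegment> entry =
        timeline.findEntry(descriptor.getInterval(), descriptor.getVersion());
    if (entry == null) {
      return null;
    }
    PartitionChunk<ReferenceCountingSegment> chunk = entry.getChunk(descriptor.getPartitionNumber());
    return chunk == null ? null : chunk.getObject();
  }
}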
Use of io.druid.segment.ReferenceCountingSegment in project druid by druid-io.
The class FireHydrant, method swapSegment: the previous adapter is closed and the incoming Segment is wrapped in a new ReferenceCountingSegment.
public void swapSegment(Segment newAdapter) {
  synchronized (swapLock) {
    if (adapter != null && newAdapter != null && !newAdapter.getIdentifier().equals(adapter.getIdentifier())) {
      // Sanity check: identifier should not change
      throw new ISE("WTF?! Cannot swap identifier[%s] -> [%s]!", adapter.getIdentifier(), newAdapter.getIdentifier());
    }
    if (this.adapter != null) {
      try {
        this.adapter.close();
      } catch (IOException e) {
        throw Throwables.propagate(e);
      }
    }
    this.adapter = new ReferenceCountingSegment(newAdapter);
    this.index = null;
  }
}
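For a self-contained look at the wrapper these examples construct, here is a minimal sketch, not from the Druid sources. StubSegment is a hypothetical implementation of the io.druid-era Segment interface (getIdentifier, getDataInterval, asQueryableIndex, asStorageAdapter, close, as assumed here), wrapped the same way loadSegment and swapSegment wrap their adapters; the test that follows exercises what the wrapping buys, namely that the base segment is only released once no query holds a reference.

// Minimal sketch, not from the Druid sources. StubSegment is a hypothetical Segment
// implementation used only to show the wrapping; real code wraps segments produced by
// the segment loader or the indexer, as in the examples above.
import io.druid.segment.QueryableIndex;
import io.druid.segment.ReferenceCountingSegment;
import io.druid.segment.Segment;
import io.druid.segment.StorageAdapter;
import org.joda.time.Interval;

class StubSegment implements Segment {
  private final String identifier;

  StubSegment(String identifier) {
    this.identifier = identifier;
  }

  @Override
  public String getIdentifier() {
    return identifier;
  }

  @Override
  public Interval getDataInterval() {
    return new Interval("2011-04-04/2011-04-05");
  }

  @Override
  public QueryableIndex asQueryableIndex() {
    return null; // this stub is not backed by a queryable index
  }

  @Override
  public StorageAdapter asStorageAdapter() {
    return null; // nor by a storage adapter
  }

  @Override
  public void close() {
    // nothing to release in the stub
  }
}

class WrapperSketch {
  public static void main(String[] args) throws Exception {
    ReferenceCountingSegment counted = new ReferenceCountingSegment(new StubSegment("seg-1"));
    // The count reflects how many query runners currently hold the segment (one per
    // in-flight query in the test below); the base segment is only closed once the
    // wrapper is closed and that count has dropped back.
    System.out.println(counted.getNumReferences());
    counted.close();
  }
}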
Use of io.druid.segment.ReferenceCountingSegment in project druid by druid-io.
The class ServerManagerTest, method testReferenceCountingWhileQueryExecuting: verifies that a segment dropped while a query is still running is not closed until the query releases its reference.
@Test
public void testReferenceCountingWhileQueryExecuting() throws Exception {
  loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05"));
  Future future = assertQueryable(
      Granularities.DAY,
      "test",
      new Interval("2011-04-04/2011-04-06"),
      ImmutableList.<Pair<String, Interval>>of(
          new Pair<String, Interval>("3", new Interval("2011-04-04/2011-04-05"))
      )
  );
  queryNotifyLatch.await(1000, TimeUnit.MILLISECONDS);
  Assert.assertEquals(1, factory.getSegmentReferences().size());
  for (ReferenceCountingSegment referenceCountingSegment : factory.getSegmentReferences()) {
    Assert.assertEquals(1, referenceCountingSegment.getNumReferences());
  }
  queryWaitYieldLatch.countDown();
  Assert.assertEquals(1, factory.getAdapters().size());
  for (SegmentForTesting segmentForTesting : factory.getAdapters()) {
    Assert.assertFalse(segmentForTesting.isClosed());
  }
  dropQueryable("test", "3", new Interval("2011-04-04/2011-04-05"));
  for (SegmentForTesting segmentForTesting : factory.getAdapters()) {
    Assert.assertFalse(segmentForTesting.isClosed());
  }
  queryWaitLatch.countDown();
  future.get();
  for (SegmentForTesting segmentForTesting : factory.getAdapters()) {
    Assert.assertTrue(segmentForTesting.isClosed());
  }
}
Use of io.druid.segment.ReferenceCountingSegment in project druid by druid-io.
The class ServerManager, method getQueryRunnerForIntervals: the timeline is consulted for every query interval, and a query runner is built for each ReferenceCountingSegment partition chunk it returns.
@Override
public <T> QueryRunner<T> getQueryRunnerForIntervals(Query<T> query, Iterable<Interval> intervals) {
  final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
  if (factory == null) {
    throw new ISE("Unknown query type[%s].", query.getClass());
  }
  final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
  final Function<Query<T>, ServiceMetricEvent.Builder> builderFn = getBuilderFn(toolChest);
  final AtomicLong cpuTimeAccumulator = new AtomicLong(0L);
  DataSource dataSource = query.getDataSource();
  if (!(dataSource instanceof TableDataSource)) {
    throw new UnsupportedOperationException("data source type '" + dataSource.getClass().getName() + "' unsupported");
  }
  String dataSourceName = getDataSourceName(dataSource);
  final VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline = dataSources.get(dataSourceName);
  if (timeline == null) {
    return new NoopQueryRunner<T>();
  }
  FunctionalIterable<QueryRunner<T>> queryRunners = FunctionalIterable
      .create(intervals)
      .transformCat(
          new Function<Interval, Iterable<TimelineObjectHolder<String, ReferenceCountingSegment>>>() {
            @Override
            public Iterable<TimelineObjectHolder<String, ReferenceCountingSegment>> apply(Interval input) {
              return timeline.lookup(input);
            }
          }
      )
      .transformCat(
          new Function<TimelineObjectHolder<String, ReferenceCountingSegment>, Iterable<QueryRunner<T>>>() {
            @Override
            public Iterable<QueryRunner<T>> apply(@Nullable final TimelineObjectHolder<String, ReferenceCountingSegment> holder) {
              if (holder == null) {
                return null;
              }
              return FunctionalIterable
                  .create(holder.getObject())
                  .transform(
                      new Function<PartitionChunk<ReferenceCountingSegment>, QueryRunner<T>>() {
                        @Override
                        public QueryRunner<T> apply(PartitionChunk<ReferenceCountingSegment> input) {
                          return buildAndDecorateQueryRunner(
                              factory,
                              toolChest,
                              input.getObject(),
                              new SegmentDescriptor(holder.getInterval(), holder.getVersion(), input.getChunkNumber()),
                              builderFn,
                              cpuTimeAccumulator
                          );
                        }
                      }
                  );
            }
          }
      );
  return CPUTimeMetricQueryRunner.safeBuild(
      new FinalizeResultsQueryRunner<T>(
          toolChest.mergeResults(factory.mergeRunners(exec, queryRunners)),
          toolChest
      ),
      builderFn,
      emitter,
      cpuTimeAccumulator,
      true
  );
}
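Stripped of the Guava Function plumbing, the traversal in getQueryRunnerForIntervals amounts to three nested loops. The sketch below is an illustrative rewrite, not code from the project: it only collects each (SegmentDescriptor, ReferenceCountingSegment) pair that the real method hands to buildAndDecorateQueryRunner, and the class name and import locations are assumptions.

// Illustrative rewrite (not from the Druid sources) of the FunctionalIterable chain
// above as plain loops, collecting each descriptor/segment pair found in the timeline.
import io.druid.query.SegmentDescriptor;
import io.druid.segment.ReferenceCountingSegment;
import io.druid.timeline.TimelineObjectHolder;
import io.druid.timeline.VersionedIntervalTimeline;
import io.druid.timeline.partition.PartitionChunk;
import org.joda.time.Interval;

import java.util.LinkedHashMap;
import java.util.Map;

class IntervalLookupSketch {
  // Missing timeline entries simply yield nothing; the real code builds and decorates
  // a query runner for every pair returned here.
  static Map<SegmentDescriptor, ReferenceCountingSegment> collectSegments(
      Iterable<Interval> intervals,
      VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline
  ) {
    Map<SegmentDescriptor, ReferenceCountingSegment> found = new LinkedHashMap<>();
    for (Interval interval : intervals) {
      for (TimelineObjectHolder<String, ReferenceCountingSegment> holder : timeline.lookup(interval)) {
        for (PartitionChunk<ReferenceCountingSegment> chunk : holder.getObject()) {
          found.put(
              new SegmentDescriptor(holder.getInterval(), holder.getVersion(), chunk.getChunkNumber()),
              chunk.getObject()
          );
        }
      }
    }
    return found;
  }
}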