Use of org.apache.druid.segment.SegmentMissingException in project druid by druid-io.
The class TimeseriesQueryEngine, method process().
/**
* Run a single-segment, single-interval timeseries query on a particular adapter. The query must have been
* scoped down to a single interval before calling this method.
*/
public Sequence<Result<TimeseriesResultValue>> process(final TimeseriesQuery query, final StorageAdapter adapter)
{
  if (adapter == null) {
    throw new SegmentMissingException(
        "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
    );
  }
  final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getFilter()));
  final Interval interval = Iterables.getOnlyElement(query.getIntervals());
  final Granularity gran = query.getGranularity();
  final boolean descending = query.isDescending();
  final ColumnInspector inspector = query.getVirtualColumns().wrapInspector(adapter);
  // Vectorize only when the query context allows it and the adapter, the virtual
  // columns, and every aggregator are all capable of running vectorized.
  final boolean doVectorize = QueryContexts.getVectorize(query).shouldVectorize(
      adapter.canVectorize(filter, query.getVirtualColumns(), descending)
      && VirtualColumns.shouldVectorize(query, query.getVirtualColumns(), adapter)
      && query.getAggregatorSpecs().stream().allMatch(aggregatorFactory -> aggregatorFactory.canVectorize(inspector))
  );
  final Sequence<Result<TimeseriesResultValue>> result;
  if (doVectorize) {
    result = processVectorized(query, adapter, filter, interval, gran, descending);
  } else {
    result = processNonVectorized(query, adapter, filter, interval, gran, descending);
  }
  final int limit = query.getLimit();
  if (limit < Integer.MAX_VALUE) {
    return result.limit(limit);
  } else {
    return result;
  }
}
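The engines shown in this listing share the same guard: a null StorageAdapter means the segment was unmapped between scheduling and execution, and the engine signals this with SegmentMissingException rather than letting a NullPointerException escape. A minimal sketch of that pattern, extracted into a hypothetical helper (AdapterChecks is not part of Druid; it just isolates the idiom):

import org.apache.druid.segment.SegmentMissingException;
import org.apache.druid.segment.StorageAdapter;

public final class AdapterChecks
{
  private AdapterChecks() {}

  // Throws SegmentMissingException when the segment backing this adapter has
  // been unmapped; runners upstream translate the exception into a retry.
  public static StorageAdapter checkNotUnmapped(final StorageAdapter adapter)
  {
    if (adapter == null) {
      throw new SegmentMissingException(
          "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
      );
    }
    return adapter;
  }
}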
Use of org.apache.druid.segment.SegmentMissingException in project druid by druid-io.
The class TopNQueryEngine, method query().
/**
 * Do the thing - process a {@link StorageAdapter} into a {@link Sequence} of {@link TopNResultValue}, with one of the
 * fine {@link TopNAlgorithm} implementations chosen based on the type of column being aggregated. The algorithm
 * provides a mapping function to process rows from the adapter's {@link org.apache.druid.segment.Cursor}, apply
 * {@link AggregatorFactory}, and create or update {@link TopNResultValue}.
 */
public Sequence<Result<TopNResultValue>> query(
    final TopNQuery query,
    final StorageAdapter adapter,
    @Nullable final TopNQueryMetrics queryMetrics
)
{
  if (adapter == null) {
    throw new SegmentMissingException(
        "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
    );
  }
  final List<Interval> queryIntervals = query.getQuerySegmentSpec().getIntervals();
  final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
  final Granularity granularity = query.getGranularity();
  final TopNMapFn mapFn = getMapFn(query, adapter, queryMetrics);
  Preconditions.checkArgument(queryIntervals.size() == 1, "Can only handle a single interval, got[%s]", queryIntervals);
  return Sequences.filter(
      Sequences.map(
          adapter.makeCursors(filter, queryIntervals.get(0), query.getVirtualColumns(), granularity, query.isDescending(), queryMetrics),
          input -> {
            if (queryMetrics != null) {
              queryMetrics.cursor(input);
            }
            return mapFn.apply(input, queryMetrics);
          }
      ),
      Predicates.notNull()
  );
}
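The tail of query() uses an idiom that recurs throughout Druid's engines: map each Cursor through the algorithm's function, then drop null results (cursors that produced no output). A self-contained sketch of that map-then-filter-nulls pattern, with plain strings standing in for cursors (the data is made up for illustration):

import java.util.Arrays;
import com.google.common.base.Predicates;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;

Sequence<String> raw = Sequences.simple(Arrays.asList("a", "skip", "b"));
Sequence<String> mapped = Sequences.filter(
    Sequences.map(raw, s -> "skip".equals(s) ? null : s.toUpperCase()),
    Predicates.notNull()
);
// mapped.toList() -> ["A", "B"]; nulls produced by the mapping step never reach the caller.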
Use of org.apache.druid.segment.SegmentMissingException in project druid by druid-io.
The class SpecificSegmentQueryRunner, method run().
@Override
public Sequence<T> run(final QueryPlus<T> input, final ResponseContext responseContext)
{
  final QueryPlus<T> queryPlus = input.withQuery(
      Queries.withSpecificSegments(input.getQuery(), Collections.singletonList(specificSpec.getDescriptor()))
  );
  final Query<T> query = queryPlus.getQuery();
  final Thread currThread = Thread.currentThread();
  final String currThreadName = currThread.getName();
  final String newName = query.getType() + "_" + query.getDataSource() + "_" + query.getIntervals();
  final Sequence<T> baseSequence = doNamed(currThread, currThreadName, newName, () -> base.run(queryPlus, responseContext));
  // Wrap the base sequence so a SegmentMissingException thrown at any point of
  // consumption records a missing segment in the response context instead of
  // failing the whole query; results produced before the failure are kept.
  Sequence<T> segmentMissingCatchingSequence = new Sequence<T>()
  {
    @Override
    public <OutType> OutType accumulate(final OutType initValue, final Accumulator<OutType, T> accumulator)
    {
      try {
        return baseSequence.accumulate(initValue, accumulator);
      }
      catch (SegmentMissingException e) {
        appendMissingSegment(responseContext);
        return initValue;
      }
    }

    @Override
    public <OutType> Yielder<OutType> toYielder(final OutType initValue, final YieldingAccumulator<OutType, T> accumulator)
    {
      try {
        return makeYielder(baseSequence.toYielder(initValue, accumulator));
      }
      catch (SegmentMissingException e) {
        appendMissingSegment(responseContext);
        return Yielders.done(initValue, null);
      }
    }

    private <OutType> Yielder<OutType> makeYielder(final Yielder<OutType> yielder)
    {
      return new Yielder<OutType>()
      {
        @Override
        public OutType get()
        {
          return yielder.get();
        }

        @Override
        public Yielder<OutType> next(final OutType initValue)
        {
          try {
            return yielder.next(initValue);
          }
          catch (SegmentMissingException e) {
            appendMissingSegment(responseContext);
            return Yielders.done(initValue, null);
          }
        }

        @Override
        public boolean isDone()
        {
          return yielder.isDone();
        }

        @Override
        public void close() throws IOException
        {
          yielder.close();
        }
      };
    }
  };
  return Sequences.wrap(segmentMissingCatchingSequence, new SequenceWrapper()
  {
    @Override
    public <RetType> RetType wrap(Supplier<RetType> sequenceProcessing)
    {
      return doNamed(currThread, currThreadName, newName, sequenceProcessing);
    }
  });
}
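On the other side of this contract, whoever issued the query can inspect the ResponseContext for the segments that were skipped and retry them against other replicas. A hedged sketch of that read-back; the context key and accessor vary across Druid versions, so ResponseContext.Key.MISSING_SEGMENTS is an assumption to verify against your version:

// Assumption: ResponseContext.Key.MISSING_SEGMENTS is the key under which
// appendMissingSegment records descriptors.
@SuppressWarnings("unchecked")
List<SegmentDescriptor> missing =
    (List<SegmentDescriptor>) responseContext.get(ResponseContext.Key.MISSING_SEGMENTS);
if (missing != null && !missing.isEmpty()) {
  // Re-plan the query for just these descriptors, e.g. via
  // Queries.withSpecificSegments(query, missing), and run it on another replica.
}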
Use of org.apache.druid.segment.SegmentMissingException in project druid by druid-io.
The class SpecificSegmentQueryRunnerTest, method testRetry.
@Test
public void testRetry() throws Exception
{
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(Intervals.of("2012-01-01T00:00:00Z/P1D"), "version", 0);
  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(
      new QueryRunner()
      {
        @Override
        public Sequence run(QueryPlus queryPlus, ResponseContext responseContext)
        {
          return new Sequence()
          {
            @Override
            public Object accumulate(Object initValue, Accumulator accumulator)
            {
              throw new SegmentMissingException("FAILSAUCE");
            }

            @Override
            public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator)
            {
              throw new SegmentMissingException("FAILSAUCE");
            }
          };
        }
      },
      new SpecificSegmentSpec(descriptor)
  );
  // from accumulate
  ResponseContext responseContext = ResponseContext.createEmpty();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("foo")
                                .granularity(Granularities.ALL)
                                .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D")))
                                .aggregators(ImmutableList.of(new CountAggregatorFactory("rows")))
                                .build();
  Sequence results = queryRunner.run(QueryPlus.wrap(query), responseContext);
  results.toList();
  validate(mapper, descriptor, responseContext);
  // from toYielder
  responseContext = ResponseContext.createEmpty();
  results = queryRunner.run(QueryPlus.wrap(query), responseContext);
  results.toYielder(null, new YieldingAccumulator()
  {
    final List lists = new ArrayList<>();

    @Override
    public Object accumulate(Object accumulated, Object in)
    {
      lists.add(in);
      return in;
    }
  });
  validate(mapper, descriptor, responseContext);
}
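The validate helper is not shown in this excerpt. A plausible shape for it, based on what the test needs to prove; the context key and the JSON round-trip check are assumptions, not the verbatim Druid helper:

// Hypothetical reconstruction of validate(): assert that exactly the expected
// descriptor was recorded as missing, and that it survives serialization with
// the given mapper (brokers ship this information over the wire).
private void validate(ObjectMapper mapper, SegmentDescriptor descriptor, ResponseContext responseContext) throws IOException
{
  List<SegmentDescriptor> missingSegments =
      (List<SegmentDescriptor>) responseContext.get(ResponseContext.Key.MISSING_SEGMENTS);
  Assert.assertEquals(Collections.singletonList(descriptor), missingSegments);
  String serialized = mapper.writeValueAsString(missingSegments);
  List<SegmentDescriptor> deserialized = mapper.readValue(
      serialized,
      new TypeReference<List<SegmentDescriptor>>() {}
  );
  Assert.assertEquals(missingSegments, deserialized);
}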
Use of org.apache.druid.segment.SegmentMissingException in project druid by druid-io.
The class SpecificSegmentQueryRunnerTest, method testRetry2.
@SuppressWarnings("unchecked")
@Test
public void testRetry2() throws Exception
{
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(Intervals.of("2012-01-01T00:00:00Z/P1D"), "version", 0);
  TimeseriesResultBuilder builder = new TimeseriesResultBuilder(DateTimes.of("2012-01-01T00:00:00Z"));
  CountAggregator rows = new CountAggregator();
  rows.aggregate();
  builder.addMetric("rows", rows.get());
  final Result<TimeseriesResultValue> value = builder.build();
  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(
      new QueryRunner()
      {
        @Override
        public Sequence run(QueryPlus queryPlus, ResponseContext responseContext)
        {
          return Sequences.withEffect(
              Sequences.simple(Collections.singletonList(value)),
              new Runnable()
              {
                @Override
                public void run()
                {
                  throw new SegmentMissingException("FAILSAUCE");
                }
              },
              Execs.directExecutor()
          );
        }
      },
      new SpecificSegmentSpec(descriptor)
  );
  final ResponseContext responseContext = ResponseContext.createEmpty();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("foo")
                                .granularity(Granularities.ALL)
                                .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D")))
                                .aggregators(ImmutableList.of(new CountAggregatorFactory("rows")))
                                .build();
  Sequence results = queryRunner.run(QueryPlus.wrap(query), responseContext);
  List<Result<TimeseriesResultValue>> res = results.toList();
  Assert.assertEquals(1, res.size());
  Result<TimeseriesResultValue> theVal = res.get(0);
  Assert.assertTrue(1L == theVal.getValue().getLongMetric("rows"));
  validate(mapper, descriptor, responseContext);
}
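testRetry2 relies on the semantics of Sequences.withEffect: the side effect fires only after the wrapped sequence has been fully consumed, so when it throws SegmentMissingException the result already yielded is kept and the missing segment is still recorded. A minimal sketch of that behavior in isolation (the integer data is made up for illustration):

import java.util.Arrays;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;

Sequence<Integer> seq = Sequences.withEffect(
    Sequences.simple(Arrays.asList(1, 2, 3)),
    () -> System.out.println("ran after full consumption"),  // runs once, after 3 is yielded
    Execs.directExecutor()
);
seq.toList();  // yields [1, 2, 3], then prints the message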