Use of org.apache.druid.java.util.common.guava.YieldingAccumulator in project druid by druid-io.
From the class AggregationTestHelper, method makeStringSerdeQueryRunner:
public QueryRunner<ResultRow> makeStringSerdeQueryRunner(
    final ObjectMapper mapper,
    final QueryToolChest toolChest,
    final QueryRunner<ResultRow> baseRunner
)
{
  return new QueryRunner<ResultRow>()
  {
    @Override
    public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext map)
    {
      try {
        Sequence<ResultRow> resultSeq = baseRunner.run(queryPlus, ResponseContext.createEmpty());
        // Yield on every row so the Yielder walks the results one element at a time.
        final Yielder yielder = resultSeq.toYielder(
            null,
            new YieldingAccumulator()
            {
              @Override
              public Object accumulate(Object accumulated, Object in)
              {
                yield();
                return in;
              }
            }
        );
        // Serialize the results to a JSON string, read them back, and re-apply the
        // deserializing metric manipulator, exercising the full serde round trip.
        String resultStr = mapper.writer().writeValueAsString(yielder);
        List resultRows = Lists.transform(
            readQueryResultArrayFromString(resultStr),
            toolChest.makePreComputeManipulatorFn(queryPlus.getQuery(), MetricManipulatorFns.deserializing())
        );
        return Sequences.simple(resultRows);
      }
      catch (Exception ex) {
        throw new RuntimeException(ex);
      }
    }
  };
}
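The runner above relies on the basic YieldingAccumulator contract: calling yield() inside accumulate() makes toYielder() hand back control after that element, so results can be pulled one at a time through the Yielder. A minimal, self-contained sketch of that pattern, assuming the same Druid guava package; the class name YielderDrainSketch and the integer data are illustrative, not from the Druid source:

import java.io.IOException;
import java.util.Arrays;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.YieldingAccumulator;

// Illustrative example, not part of the Druid source quoted above.
public class YielderDrainSketch
{
  public static void main(String[] args) throws IOException
  {
    final Sequence<Integer> seq = Sequences.simple(Arrays.asList(1, 2, 3));

    // Yield after every element so the Yielder pauses on each value.
    Yielder<Integer> yielder = seq.toYielder(
        null,
        new YieldingAccumulator<Integer, Integer>()
        {
          @Override
          public Integer accumulate(Integer accumulated, Integer in)
          {
            this.yield(); // qualified call, so it is not read as the switch-expression 'yield' on newer JDKs
            return in;
          }
        }
    );

    try {
      while (!yielder.isDone()) {
        System.out.println(yielder.get());
        // next() resumes accumulation until the accumulator yields again or the sequence ends.
        yielder = yielder.next(null);
      }
    }
    finally {
      yielder.close(); // releases any resources attached to the underlying sequence
    }
  }
}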
Use of org.apache.druid.java.util.common.guava.YieldingAccumulator in project druid by druid-io.
From the class CombiningSequenceTest, method testCombining:
private void testCombining(List<Pair<Integer, Integer>> pairs, List<Pair<Integer, Integer>> expected, int limit)
    throws Exception
{
  // Test that closing works too
  final CountDownLatch closed = new CountDownLatch(1);
  final Closeable closeable = closed::countDown;

  Sequence<Pair<Integer, Integer>> seq = CombiningSequence.create(
      Sequences.simple(pairs).withBaggage(closeable),
      Ordering.natural().onResultOf(p -> p.lhs),
      (lhs, rhs) -> {
        if (lhs == null) {
          return rhs;
        }
        if (rhs == null) {
          return lhs;
        }
        return Pair.of(lhs.lhs, lhs.rhs + rhs.rhs);
      }
  ).limit(limit);

  List<Pair<Integer, Integer>> merged = seq.toList();
  Assert.assertEquals(expected, merged);

  // yieldEvery is an int field of the enclosing (parameterized) test class: yield on every n-th combined pair.
  Yielder<Pair<Integer, Integer>> yielder = seq.toYielder(
      null,
      new YieldingAccumulator<Pair<Integer, Integer>, Pair<Integer, Integer>>()
      {
        int count = 0;

        @Override
        public Pair<Integer, Integer> accumulate(Pair<Integer, Integer> lhs, Pair<Integer, Integer> rhs)
        {
          count++;
          if (count % yieldEvery == 0) {
            yield();
          }
          return rhs;
        }
      }
  );

  Iterator<Pair<Integer, Integer>> expectedVals = Iterators.filter(
      expected.iterator(),
      new Predicate<Pair<Integer, Integer>>()
      {
        int count = 0;

        @Override
        public boolean apply(@Nullable Pair<Integer, Integer> input)
        {
          count++;
          if (count % yieldEvery == 0) {
            return true;
          }
          return false;
        }
      }
  );

  if (expectedVals.hasNext()) {
    while (!yielder.isDone()) {
      final Pair<Integer, Integer> expectedVal = expectedVals.next();
      final Pair<Integer, Integer> actual = yielder.get();
      Assert.assertEquals(expectedVal, actual);
      yielder = yielder.next(actual);
    }
  }
  Assert.assertTrue(yielder.isDone());
  Assert.assertFalse(expectedVals.hasNext());
  yielder.close();
  Assert.assertTrue("resource closed", closed.await(10000, TimeUnit.MILLISECONDS));
}
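The closed latch in this test checks that the Closeable attached with withBaggage() is released when the yielder is closed. A small sketch of just that behavior, assuming the same guava package; the class name BaggageCloseSketch and the sample data are illustrative, and Yielders.each stands in for the explicit yield-on-every-element accumulator used in the test:

import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.Yielders;

// Illustrative example, not part of the Druid source quoted above.
public class BaggageCloseSketch
{
  public static void main(String[] args) throws IOException, InterruptedException
  {
    final CountDownLatch closed = new CountDownLatch(1);
    final Closeable baggage = closed::countDown;

    // Baggage is tied to the sequence and released once consumption finishes and close() is called.
    final Sequence<String> seq = Sequences.simple(Arrays.asList("a", "b")).withBaggage(baggage);

    Yielder<String> yielder = Yielders.each(seq);
    while (!yielder.isDone()) {
      System.out.println(yielder.get());
      yielder = yielder.next(null);
    }
    yielder.close();

    // Prints true: close() propagated to the baggage Closeable.
    System.out.println(closed.await(1, TimeUnit.SECONDS));
  }
}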
Use of org.apache.druid.java.util.common.guava.YieldingAccumulator in project druid by druid-io.
From the class SpecificSegmentQueryRunner, method run:
@Override
public Sequence<T> run(final QueryPlus<T> input, final ResponseContext responseContext)
{
  final QueryPlus<T> queryPlus = input.withQuery(
      Queries.withSpecificSegments(input.getQuery(), Collections.singletonList(specificSpec.getDescriptor()))
  );
  final Query<T> query = queryPlus.getQuery();
  final Thread currThread = Thread.currentThread();
  final String currThreadName = currThread.getName();
  final String newName = query.getType() + "_" + query.getDataSource() + "_" + query.getIntervals();

  // Produce the base sequence under a descriptive thread name.
  final Sequence<T> baseSequence = doNamed(currThread, currThreadName, newName, () -> base.run(queryPlus, responseContext));

  // Catch SegmentMissingException on both consumption paths and report the missing segment
  // in the response context instead of failing the whole query.
  Sequence<T> segmentMissingCatchingSequence = new Sequence<T>()
  {
    @Override
    public <OutType> OutType accumulate(final OutType initValue, final Accumulator<OutType, T> accumulator)
    {
      try {
        return baseSequence.accumulate(initValue, accumulator);
      }
      catch (SegmentMissingException e) {
        appendMissingSegment(responseContext);
        return initValue;
      }
    }

    @Override
    public <OutType> Yielder<OutType> toYielder(final OutType initValue, final YieldingAccumulator<OutType, T> accumulator)
    {
      try {
        return makeYielder(baseSequence.toYielder(initValue, accumulator));
      }
      catch (SegmentMissingException e) {
        appendMissingSegment(responseContext);
        return Yielders.done(initValue, null);
      }
    }

    private <OutType> Yielder<OutType> makeYielder(final Yielder<OutType> yielder)
    {
      return new Yielder<OutType>()
      {
        @Override
        public OutType get()
        {
          return yielder.get();
        }

        @Override
        public Yielder<OutType> next(final OutType initValue)
        {
          try {
            return yielder.next(initValue);
          }
          catch (SegmentMissingException e) {
            appendMissingSegment(responseContext);
            return Yielders.done(initValue, null);
          }
        }

        @Override
        public boolean isDone()
        {
          return yielder.isDone();
        }

        @Override
        public void close() throws IOException
        {
          yielder.close();
        }
      };
    }
  };

  return Sequences.wrap(
      segmentMissingCatchingSequence,
      new SequenceWrapper()
      {
        @Override
        public <RetType> RetType wrap(Supplier<RetType> sequenceProcessing)
        {
          return doNamed(currThread, currThreadName, newName, sequenceProcessing);
        }
      }
  );
}
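The makeYielder helper above is an instance of a general pattern: wrap a Yielder so that a particular exception thrown from next() ends iteration gracefully instead of propagating. A generic sketch of that pattern under illustrative names (CatchingYielder and the onCaught callback are not Druid classes); unlike the Druid code above, it re-wraps the delegate on every step so each subsequent next() call stays guarded as well:

import java.io.IOException;
import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.Yielders;

// Illustrative helper, not part of Druid: ends iteration gracefully when the chosen
// exception type escapes next(), invoking a callback such as "record the missing segment".
public class CatchingYielder<T, X extends RuntimeException> implements Yielder<T>
{
  private final Yielder<T> delegate;
  private final Class<X> exceptionClass;
  private final Runnable onCaught;

  public CatchingYielder(Yielder<T> delegate, Class<X> exceptionClass, Runnable onCaught)
  {
    this.delegate = delegate;
    this.exceptionClass = exceptionClass;
    this.onCaught = onCaught;
  }

  @Override
  public T get()
  {
    return delegate.get();
  }

  @Override
  public Yielder<T> next(T initValue)
  {
    try {
      return new CatchingYielder<>(delegate.next(initValue), exceptionClass, onCaught);
    }
    catch (RuntimeException e) {
      if (exceptionClass.isInstance(e)) {
        onCaught.run();
        return Yielders.done(initValue, null);
      }
      throw e;
    }
  }

  @Override
  public boolean isDone()
  {
    return delegate.isDone();
  }

  @Override
  public void close() throws IOException
  {
    delegate.close();
  }
}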
Use of org.apache.druid.java.util.common.guava.YieldingAccumulator in project druid by druid-io.
From the class SpecificSegmentQueryRunnerTest, method testRetry:
@Test
public void testRetry() throws Exception
{
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(Intervals.of("2012-01-01T00:00:00Z/P1D"), "version", 0);

  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(
      new QueryRunner()
      {
        @Override
        public Sequence run(QueryPlus queryPlus, ResponseContext responseContext)
        {
          return new Sequence()
          {
            @Override
            public Object accumulate(Object initValue, Accumulator accumulator)
            {
              throw new SegmentMissingException("FAILSAUCE");
            }

            @Override
            public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator)
            {
              throw new SegmentMissingException("FAILSAUCE");
            }
          };
        }
      },
      new SpecificSegmentSpec(descriptor)
  );

  // from accumulate
  ResponseContext responseContext = ResponseContext.createEmpty();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("foo")
                                .granularity(Granularities.ALL)
                                .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D")))
                                .aggregators(ImmutableList.of(new CountAggregatorFactory("rows")))
                                .build();
  Sequence results = queryRunner.run(QueryPlus.wrap(query), responseContext);
  results.toList();
  validate(mapper, descriptor, responseContext);

  // from toYielder
  responseContext = ResponseContext.createEmpty();
  results = queryRunner.run(QueryPlus.wrap(query), responseContext);
  results.toYielder(
      null,
      new YieldingAccumulator()
      {
        final List lists = new ArrayList<>();

        @Override
        public Object accumulate(Object accumulated, Object in)
        {
          lists.add(in);
          return in;
        }
      }
  );
  validate(mapper, descriptor, responseContext);
}
Use of org.apache.druid.java.util.common.guava.YieldingAccumulator in project druid by druid-io.
From the class ServerManagerForQueryErrorTest, method buildQueryRunnerForSegment:
@Override
protected <T> QueryRunner<T> buildQueryRunnerForSegment(
    Query<T> query,
    SegmentDescriptor descriptor,
    QueryRunnerFactory<T, Query<T>> factory,
    QueryToolChest<T, Query<T>> toolChest,
    VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline,
    Function<SegmentReference, SegmentReference> segmentMapFn,
    AtomicLong cpuTimeAccumulator,
    Optional<byte[]> cacheKeyPrefix
)
{
  if (query.getContextBoolean(QUERY_RETRY_TEST_CONTEXT_KEY, false)) {
    final MutableBoolean isIgnoreSegment = new MutableBoolean(false);
    queryToIgnoredSegments.compute(query.getMostSpecificId(), (queryId, ignoredSegments) -> {
      if (ignoredSegments == null) {
        ignoredSegments = new HashSet<>();
      }
      if (ignoredSegments.size() < MAX_NUM_FALSE_MISSING_SEGMENTS_REPORTS) {
        ignoredSegments.add(descriptor);
        isIgnoreSegment.setTrue();
      }
      return ignoredSegments;
    });

    if (isIgnoreSegment.isTrue()) {
      LOG.info("Pretending I don't have segment[%s]", descriptor);
      return new ReportTimelineMissingSegmentQueryRunner<>(descriptor);
    }
  } else if (query.getContextBoolean(QUERY_TIMEOUT_TEST_CONTEXT_KEY, false)) {
    return (queryPlus, responseContext) -> new Sequence<T>()
    {
      @Override
      public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator)
      {
        throw new QueryTimeoutException("query timeout test");
      }

      @Override
      public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator)
      {
        throw new QueryTimeoutException("query timeout test");
      }
    };
  } else if (query.getContextBoolean(QUERY_CAPACITY_EXCEEDED_TEST_CONTEXT_KEY, false)) {
    return (queryPlus, responseContext) -> new Sequence<T>()
    {
      @Override
      public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator)
      {
        throw QueryCapacityExceededException.withErrorMessageAndResolvedHost("query capacity exceeded test");
      }

      @Override
      public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator)
      {
        throw QueryCapacityExceededException.withErrorMessageAndResolvedHost("query capacity exceeded test");
      }
    };
  } else if (query.getContextBoolean(QUERY_UNSUPPORTED_TEST_CONTEXT_KEY, false)) {
    return (queryPlus, responseContext) -> new Sequence<T>()
    {
      @Override
      public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator)
      {
        throw new QueryUnsupportedException("query unsupported test");
      }

      @Override
      public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator)
      {
        throw new QueryUnsupportedException("query unsupported test");
      }
    };
  } else if (query.getContextBoolean(RESOURCE_LIMIT_EXCEEDED_TEST_CONTEXT_KEY, false)) {
    return (queryPlus, responseContext) -> new Sequence<T>()
    {
      @Override
      public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator)
      {
        throw new ResourceLimitExceededException("resource limit exceeded test");
      }

      @Override
      public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator)
      {
        throw new ResourceLimitExceededException("resource limit exceeded test");
      }
    };
  } else if (query.getContextBoolean(QUERY_FAILURE_TEST_CONTEXT_KEY, false)) {
    return (queryPlus, responseContext) -> new Sequence<T>()
    {
      @Override
      public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator)
      {
        throw new RuntimeException("query failure test");
      }

      @Override
      public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator)
      {
        throw new RuntimeException("query failure test");
      }
    };
  }

  return super.buildQueryRunnerForSegment(query, descriptor, factory, toolChest, timeline, segmentMapFn, cpuTimeAccumulator, cacheKeyPrefix);
}
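Each branch above returns a Sequence that throws from both accumulate() and toYielder(), so the injected error surfaces no matter how the caller drains the results. A hedged sketch of the same idea as a small reusable factory; FailingSequences is an illustrative name, not a Druid class:

import java.util.function.Supplier;
import org.apache.druid.java.util.common.guava.Accumulator;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.YieldingAccumulator;

// Illustrative helper, not part of Druid: a Sequence that fails with the supplied
// exception on either consumption path.
public class FailingSequences
{
  public static <T> Sequence<T> failingWith(Supplier<RuntimeException> error)
  {
    return new Sequence<T>()
    {
      @Override
      public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator)
      {
        throw error.get();
      }

      @Override
      public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator)
      {
        throw error.get();
      }
    };
  }
}

With such a helper, the timeout branch above could be written as (queryPlus, responseContext) -> FailingSequences.failingWith(() -> new QueryTimeoutException("query timeout test")).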