Use of io.druid.java.util.common.guava.YieldingAccumulator in project druid by druid-io: class RetryQueryRunner, method run().
@Override
public Sequence<T> run(final Query<T> query, final Map<String, Object> context)
{
  final List<Sequence<T>> listOfSequences = Lists.newArrayList();
  listOfSequences.add(baseRunner.run(query, context));

  return new YieldingSequenceBase<T>()
  {
    @Override
    public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator)
    {
      List<SegmentDescriptor> missingSegments = getMissingSegments(context);

      if (!missingSegments.isEmpty()) {
        for (int i = 0; i < config.getNumTries(); i++) {
          log.info("[%,d] missing segments found. Retry attempt [%,d]", missingSegments.size(), i);

          // Clear the missing-segment list, then re-run the query against only the segments that were missing.
          context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
          final Query<T> retryQuery = query.withQuerySegmentSpec(new MultipleSpecificSegmentSpec(missingSegments));
          Sequence<T> retrySequence = baseRunner.run(retryQuery, context);
          listOfSequences.add(retrySequence);

          missingSegments = getMissingSegments(context);
          if (missingSegments.isEmpty()) {
            break;
          }
        }

        // If segments are still missing after all retries, either fail or return partial results per config.
        final List<SegmentDescriptor> finalMissingSegs = getMissingSegments(context);
        if (!config.isReturnPartialResults() && !finalMissingSegs.isEmpty()) {
          throw new SegmentMissingException("No results found for segments[%s]", finalMissingSegs);
        }

        return new MergeSequence<>(query.getResultOrdering(), Sequences.simple(listOfSequences))
            .toYielder(initValue, accumulator);
      } else {
        return Iterables.getOnlyElement(listOfSequences).toYielder(initValue, accumulator);
      }
    }
  };
}
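The retry loop above is driven entirely by getMissingSegments(context), which is not shown in this excerpt. Below is a minimal sketch of how such a helper could pull the descriptors back out of the response context; the jsonMapper field is an assumption about the surrounding RetryQueryRunner class, so treat this as illustrative rather than the canonical source.

  // Sketch only: reads the missing-segment descriptors recorded under
  // Result.MISSING_SEGMENTS_KEY by downstream runners. The jsonMapper field is
  // assumed to be held by RetryQueryRunner; adapt to the actual class layout.
  private List<SegmentDescriptor> getMissingSegments(final Map<String, Object> context)
  {
    final Object maybeMissing = context.get(Result.MISSING_SEGMENTS_KEY);
    if (maybeMissing == null) {
      return Lists.newArrayList();
    }
    // Entries may have been deserialized as generic maps, so convert them back to SegmentDescriptor.
    return jsonMapper.convertValue(maybeMissing, new TypeReference<List<SegmentDescriptor>>() {});
  }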
Use of io.druid.java.util.common.guava.YieldingAccumulator in project druid by druid-io: class SpecificSegmentQueryRunnerTest, method testRetry().
@Test
public void testRetry() throws Exception
{
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(new Interval("2012-01-01T00:00:00Z/P1D"), "version", 0);

  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(
      new QueryRunner()
      {
        @Override
        public Sequence run(Query query, Map responseContext)
        {
          return new Sequence()
          {
            @Override
            public Object accumulate(Object initValue, Accumulator accumulator)
            {
              throw new SegmentMissingException("FAILSAUCE");
            }

            @Override
            public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator)
            {
              throw new SegmentMissingException("FAILSAUCE");
            }
          };
        }
      },
      new SpecificSegmentSpec(descriptor)
  );

  // from accumulate
  Map<String, Object> responseContext = Maps.newHashMap();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("foo")
      .granularity(Granularities.ALL)
      .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
      .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
      .build();
  Sequence results = queryRunner.run(query, responseContext);
  Sequences.toList(results, Lists.newArrayList());
  validate(mapper, descriptor, responseContext);

  // from toYielder
  responseContext = Maps.newHashMap();
  results = queryRunner.run(query, responseContext);
  results.toYielder(null, new YieldingAccumulator()
  {
    final List lists = Lists.newArrayList();

    @Override
    public Object accumulate(Object accumulated, Object in)
    {
      lists.add(in);
      return in;
    }
  });
  validate(mapper, descriptor, responseContext);
}
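Both halves of the test end with validate(mapper, descriptor, responseContext), which is defined elsewhere in the test class. A plausible sketch, assuming the helper simply asserts that the failing segment was recorded in the response context, is below; the exact assertions in the real test may differ.

  // Sketch only (assumed shape of the test's validate helper): the runner should have
  // recorded exactly one missing segment, matching the descriptor the test queried,
  // and that descriptor should survive a JSON round trip through the given mapper.
  private void validate(ObjectMapper mapper, SegmentDescriptor descriptor, Map<String, Object> responseContext) throws Exception
  {
    List<SegmentDescriptor> missingSegments =
        (List<SegmentDescriptor>) responseContext.get(Result.MISSING_SEGMENTS_KEY);
    Assert.assertNotNull(missingSegments);
    Assert.assertEquals(1, missingSegments.size());
    Assert.assertEquals(descriptor, missingSegments.get(0));

    SegmentDescriptor roundTripped =
        mapper.readValue(mapper.writeValueAsString(missingSegments.get(0)), SegmentDescriptor.class);
    Assert.assertEquals(descriptor, roundTripped);
  }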
Use of io.druid.java.util.common.guava.YieldingAccumulator in project druid by druid-io: class SpecificSegmentQueryRunner, method run().
@Override
public Sequence<T> run(final Query<T> input, final Map<String, Object> responseContext)
{
  final Query<T> query = input.withQuerySegmentSpec(specificSpec);
  final Thread currThread = Thread.currentThread();
  final String currThreadName = currThread.getName();
  final String newName = String.format("%s_%s_%s", query.getType(), query.getDataSource(), query.getIntervals());

  // Run the base runner under a descriptive thread name, restoring the original name afterwards.
  final Sequence<T> baseSequence = doNamed(currThread, currThreadName, newName, new Supplier<Sequence<T>>()
  {
    @Override
    public Sequence<T> get()
    {
      return base.run(query, responseContext);
    }
  });

  // Catch SegmentMissingException on both accumulation paths and record the missing segment
  // in the response context instead of failing the whole query.
  Sequence<T> segmentMissingCatchingSequence = new Sequence<T>()
  {
    @Override
    public <OutType> OutType accumulate(final OutType initValue, final Accumulator<OutType, T> accumulator)
    {
      try {
        return baseSequence.accumulate(initValue, accumulator);
      } catch (SegmentMissingException e) {
        appendMissingSegment(responseContext);
        return initValue;
      }
    }

    @Override
    public <OutType> Yielder<OutType> toYielder(final OutType initValue, final YieldingAccumulator<OutType, T> accumulator)
    {
      try {
        return makeYielder(baseSequence.toYielder(initValue, accumulator));
      } catch (SegmentMissingException e) {
        appendMissingSegment(responseContext);
        return Yielders.done(initValue, null);
      }
    }

    private <OutType> Yielder<OutType> makeYielder(final Yielder<OutType> yielder)
    {
      return new Yielder<OutType>()
      {
        @Override
        public OutType get()
        {
          return yielder.get();
        }

        @Override
        public Yielder<OutType> next(final OutType initValue)
        {
          try {
            return yielder.next(initValue);
          } catch (SegmentMissingException e) {
            appendMissingSegment(responseContext);
            return Yielders.done(initValue, null);
          }
        }

        @Override
        public boolean isDone()
        {
          return yielder.isDone();
        }

        @Override
        public void close() throws IOException
        {
          yielder.close();
        }
      };
    }
  };

  return Sequences.wrap(segmentMissingCatchingSequence, new SequenceWrapper()
  {
    @Override
    public <RetType> RetType wrap(Supplier<RetType> sequenceProcessing)
    {
      return doNamed(currThread, currThreadName, newName, sequenceProcessing);
    }
  });
}
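Two helpers referenced above, appendMissingSegment and doNamed, are omitted from this excerpt. A minimal sketch of what they are expected to do, inferred from how they are called here, follows; treat the exact bodies as assumptions rather than the canonical Druid source.

  // Sketch only: record this runner's segment descriptor in the response context so that
  // RetryQueryRunner can later re-issue the query for just that segment.
  private void appendMissingSegment(Map<String, Object> responseContext)
  {
    List<SegmentDescriptor> missingSegments =
        (List<SegmentDescriptor>) responseContext.get(Result.MISSING_SEGMENTS_KEY);
    if (missingSegments == null) {
      missingSegments = Lists.newArrayList();
      responseContext.put(Result.MISSING_SEGMENTS_KEY, missingSegments);
    }
    missingSegments.add(specificSpec.getDescriptor());
  }

  // Sketch only: run the supplier under a temporary, more descriptive thread name and always
  // restore the original name, even if the supplier throws.
  private <RetType> RetType doNamed(Thread thread, String currName, String newName, Supplier<RetType> toRun)
  {
    try {
      thread.setName(newName);
      return toRun.get();
    } finally {
      thread.setName(currName);
    }
  }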
Use of io.druid.java.util.common.guava.YieldingAccumulator in project druid by druid-io: class AggregationTestHelper, method makeStringSerdeQueryRunner().
public QueryRunner<Row> makeStringSerdeQueryRunner(
    final ObjectMapper mapper,
    final QueryToolChest toolChest,
    final Query<Row> query,
    final QueryRunner<Row> baseRunner
)
{
  return new QueryRunner<Row>()
  {
    @Override
    public Sequence<Row> run(Query<Row> query, Map<String, Object> map)
    {
      try {
        Sequence<Row> resultSeq = baseRunner.run(query, Maps.<String, Object>newHashMap());

        // Yield after every element so the Yielder exposes the results one at a time for serialization.
        final Yielder yielder = resultSeq.toYielder(null, new YieldingAccumulator()
        {
          @Override
          public Object accumulate(Object accumulated, Object in)
          {
            yield();
            return in;
          }
        });

        // Round-trip the results through their JSON string form, then deserialize them back into rows.
        String resultStr = mapper.writer().writeValueAsString(yielder);
        TypeFactory typeFactory = mapper.getTypeFactory();
        JavaType baseType = typeFactory.constructType(toolChest.getResultTypeReference());
        List resultRows = Lists.transform(
            readQueryResultArrayFromString(resultStr),
            toolChest.makePreComputeManipulatorFn(query, MetricManipulatorFns.deserializing())
        );
        return Sequences.simple(resultRows);
      } catch (Exception ex) {
        throw Throwables.propagate(ex);
      }
    }
  };
}
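The accumulator above calls yield() after every element, which makes the resulting Yielder hand results back one at a time. A self-contained sketch of that pulling pattern, using a simple Sequence of strings instead of Druid query results, is shown below; the class name YielderPullExample is purely illustrative.

import com.google.common.collect.ImmutableList;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.guava.Yielder;
import io.druid.java.util.common.guava.YieldingAccumulator;
import java.io.IOException;

public class YielderPullExample
{
  public static void main(String[] args) throws IOException
  {
    Sequence<String> seq = Sequences.simple(ImmutableList.of("a", "b", "c"));

    // Yield after every element so each call to next() surfaces exactly one value.
    Yielder<String> yielder = seq.toYielder(null, new YieldingAccumulator<String, String>()
    {
      @Override
      public String accumulate(String accumulated, String in)
      {
        yield();
        return in;
      }
    });

    try {
      while (!yielder.isDone()) {
        System.out.println(yielder.get()); // current element
        yielder = yielder.next(null);      // resume accumulation for the next one
      }
    } finally {
      yielder.close();
    }
  }
}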