Usage of org.apache.druid.java.util.common.guava.Yielder in project druid (druid-io).
Example 1 — class AggregationTestHelper, method makeStringSerdeQueryRunner:
/**
 * Wraps {@code baseRunner} in a runner that round-trips its results through JSON:
 * the base results are serialized with {@code mapper}, parsed back, and run through the
 * toolchest's deserializing manipulator. Used to exercise the serde path in tests.
 *
 * @param mapper     mapper used to serialize the yielder and parse the result string
 * @param toolChest  supplies the pre-compute manipulator applied to each parsed row
 * @param baseRunner runner that produces the rows to round-trip
 * @return a runner producing the re-parsed rows; any failure is rethrown as RuntimeException
 */
public QueryRunner<ResultRow> makeStringSerdeQueryRunner(final ObjectMapper mapper, final QueryToolChest toolChest, final QueryRunner<ResultRow> baseRunner) {
  return new QueryRunner<ResultRow>() {
    @Override
    public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext map) {
      try {
        Sequence<ResultRow> resultSeq = baseRunner.run(queryPlus, ResponseContext.createEmpty());
        // Yield after every row so the serializer sees each element individually.
        final Yielder<ResultRow> yielder = resultSeq.toYielder(null, new YieldingAccumulator<ResultRow, ResultRow>() {
          @Override
          public ResultRow accumulate(ResultRow accumulated, ResultRow in) {
            yield();
            return in;
          }
        });
        final String resultStr;
        try {
          // Serializing the yielder consumes the underlying sequence.
          resultStr = mapper.writer().writeValueAsString(yielder);
        } finally {
          // BUG FIX: the yielder was never closed before, leaking any baggage
          // (e.g. Closeables) attached to the base sequence.
          yielder.close();
        }
        List resultRows = Lists.transform(readQueryResultArrayFromString(resultStr), toolChest.makePreComputeManipulatorFn(queryPlus.getQuery(), MetricManipulatorFns.deserializing()));
        return Sequences.simple(resultRows);
      } catch (Exception ex) {
        throw new RuntimeException(ex);
      }
    }
  };
}
Usage of org.apache.druid.java.util.common.guava.Yielder in project druid (druid-io).
Example 2 — class DruidSchema, method refreshSegmentsForDataSource:
/**
 * Attempt to refresh "segmentSignatures" for a set of segments for a particular dataSource. Returns the set of
 * segments actually refreshed, which may be a subset of the asked-for set.
 *
 * @param dataSource dataSource that every id in {@code segments} must belong to
 * @param segments   candidate segments; only up to MAX_SEGMENTS_PER_QUERY are queried per call
 * @return ids of segments whose cached metadata was actually updated
 * @throws IOException if closing the metadata-query yielder fails
 */
private Set<SegmentId> refreshSegmentsForDataSource(final String dataSource, final Set<SegmentId> segments) throws IOException {
  if (!segments.stream().allMatch(segmentId -> segmentId.getDataSource().equals(dataSource))) {
    // Sanity check. We definitely expect this to pass.
    throw new ISE("'segments' must all match 'dataSource'!");
  }
  log.debug("Refreshing metadata for dataSource[%s].", dataSource);
  final long startTime = System.currentTimeMillis();
  // Segment id string -> SegmentId object.
  final Map<String, SegmentId> segmentIdMap = Maps.uniqueIndex(segments, SegmentId::toString);
  final Set<SegmentId> retVal = new HashSet<>();
  // Issue one segment-metadata query covering (at most MAX_SEGMENTS_PER_QUERY of) the requested segments.
  final Sequence<SegmentAnalysis> sequence = runSegmentMetadataQuery(Iterables.limit(segments, MAX_SEGMENTS_PER_QUERY));
  Yielder<SegmentAnalysis> yielder = Yielders.each(sequence);
  try {
    // Consume one SegmentAnalysis per iteration; yielder.next(null) advances the stream.
    while (!yielder.isDone()) {
      final SegmentAnalysis analysis = yielder.get();
      final SegmentId segmentId = segmentIdMap.get(analysis.getId());
      if (segmentId == null) {
        log.warn("Got analysis for segment[%s] we didn't ask for, ignoring.", analysis.getId());
      } else {
        final RowSignature rowSignature = analysisToRowSignature(analysis);
        log.debug("Segment[%s] has signature[%s].", segmentId, rowSignature);
        // Atomically update this dataSource's per-segment metadata map; returning null
        // from the compute lambda removes the dataSource entry entirely.
        segmentMetadataInfo.compute(dataSource, (datasourceKey, dataSourceSegments) -> {
          if (dataSourceSegments == null) {
            // Datasource may have been removed or become unavailable while this refresh was ongoing.
            log.warn("No segment map found with datasource[%s], skipping refresh of segment[%s]", datasourceKey, segmentId);
            return null;
          } else {
            dataSourceSegments.compute(segmentId, (segmentIdKey, segmentMetadata) -> {
              if (segmentMetadata == null) {
                // Segment entry vanished during the refresh; don't resurrect it.
                log.warn("No segment[%s] found, skipping refresh", segmentId);
                return null;
              } else {
                // Merge the freshly-computed signature and row count into the existing entry.
                final AvailableSegmentMetadata updatedSegmentMetadata = AvailableSegmentMetadata.from(segmentMetadata).withRowSignature(rowSignature).withNumRows(analysis.getNumRows()).build();
                retVal.add(segmentId);
                return updatedSegmentMetadata;
              }
            });
            // If the inner compute emptied the map, drop the dataSource entry too.
            if (dataSourceSegments.isEmpty()) {
              return null;
            } else {
              return dataSourceSegments;
            }
          }
        });
      }
      yielder = yielder.next(null);
    }
  } finally {
    // Always release the yielder's resources, even if processing throws mid-loop.
    yielder.close();
  }
  log.debug("Refreshed metadata for dataSource[%s] in %,d ms (%d segments queried, %d segments left).", dataSource, System.currentTimeMillis() - startTime, retVal.size(), segments.size() - retVal.size());
  return retVal;
}
Usage of org.apache.druid.java.util.common.guava.Yielder in project druid (druid-io).
Example 3 — class CombiningSequenceTest, method testCombining:
/**
 * Runs a CombiningSequence over {@code pairs} (combining adjacent pairs with equal left
 * values by summing the right values, then applying {@code limit}) and checks the result
 * against {@code expected} via two consumption paths: full materialization, and a yielder
 * that yields every {@code yieldEvery} rows. Also verifies the sequence's baggage is closed.
 */
private void testCombining(List<Pair<Integer, Integer>> pairs, List<Pair<Integer, Integer>> expected, int limit) throws Exception {
  // Test that closing works too
  final CountDownLatch closed = new CountDownLatch(1);
  final Closeable closeable = closed::countDown;
  final Sequence<Pair<Integer, Integer>> combined = CombiningSequence.create(
      Sequences.simple(pairs).withBaggage(closeable),
      Ordering.natural().onResultOf(p -> p.lhs),
      (a, b) -> {
        if (a == null) {
          return b;
        }
        if (b == null) {
          return a;
        }
        return Pair.of(a.lhs, a.rhs + b.rhs);
      }
  ).limit(limit);

  // First pass: full materialization must match the expected rows exactly.
  Assert.assertEquals(expected, combined.toList());

  // Second pass: consume through a yielder that yields on every 'yieldEvery'-th row.
  Yielder<Pair<Integer, Integer>> yielder = combined.toYielder(null, new YieldingAccumulator<Pair<Integer, Integer>, Pair<Integer, Integer>>() {
    private int seen = 0;

    @Override
    public Pair<Integer, Integer> accumulate(Pair<Integer, Integer> accumulated, Pair<Integer, Integer> in) {
      if (++seen % yieldEvery == 0) {
        yield();
      }
      return in;
    }
  });

  // Keep only every 'yieldEvery'-th expected row, mirroring the yield cadence above.
  final Iterator<Pair<Integer, Integer>> expectedVals = Iterators.filter(expected.iterator(), new Predicate<Pair<Integer, Integer>>() {
    private int seen = 0;

    @Override
    public boolean apply(@Nullable Pair<Integer, Integer> input) {
      return ++seen % yieldEvery == 0;
    }
  });

  if (expectedVals.hasNext()) {
    while (!yielder.isDone()) {
      final Pair<Integer, Integer> expectedVal = expectedVals.next();
      final Pair<Integer, Integer> actual = yielder.get();
      Assert.assertEquals(expectedVal, actual);
      yielder = yielder.next(actual);
    }
  }
  Assert.assertTrue(yielder.isDone());
  Assert.assertFalse(expectedVals.hasNext());
  yielder.close();
  // Closing the yielder must propagate to the baggage Closeable.
  Assert.assertTrue("resource closed", closed.await(10000, TimeUnit.MILLISECONDS));
}
Usage of org.apache.druid.java.util.common.guava.Yielder in project druid (druid-io).
Example 4 — class SpecificSegmentQueryRunner, method run:
/**
 * Runs the wrapped query against the single segment described by {@code specificSpec}.
 * The worker thread is renamed (via doNamed) to carry the query type/dataSource/intervals
 * while the query executes, and any SegmentMissingException thrown during consumption is
 * recorded in the response context instead of propagating to the caller.
 */
@Override
public Sequence<T> run(final QueryPlus<T> input, final ResponseContext responseContext) {
  // Narrow the query down to exactly the one segment this runner is responsible for.
  final QueryPlus<T> queryPlus = input.withQuery(Queries.withSpecificSegments(input.getQuery(), Collections.singletonList(specificSpec.getDescriptor())));
  final Query<T> query = queryPlus.getQuery();
  final Thread currThread = Thread.currentThread();
  final String currThreadName = currThread.getName();
  // Thread name carries the query's type, dataSource, and intervals while it runs.
  final String newName = query.getType() + "_" + query.getDataSource() + "_" + query.getIntervals();
  final Sequence<T> baseSequence = doNamed(currThread, currThreadName, newName, () -> base.run(queryPlus, responseContext));
  // Wrap the base sequence so SegmentMissingException is converted into a
  // "missing segment" entry in the response context on all three consumption paths
  // (accumulate, toYielder, and yielder.next).
  Sequence<T> segmentMissingCatchingSequence = new Sequence<T>() {
    @Override
    public <OutType> OutType accumulate(final OutType initValue, final Accumulator<OutType, T> accumulator) {
      try {
        return baseSequence.accumulate(initValue, accumulator);
      } catch (SegmentMissingException e) {
        // Record the missing segment and return whatever was accumulated so far.
        appendMissingSegment(responseContext);
        return initValue;
      }
    }
    @Override
    public <OutType> Yielder<OutType> toYielder(final OutType initValue, final YieldingAccumulator<OutType, T> accumulator) {
      try {
        return makeYielder(baseSequence.toYielder(initValue, accumulator));
      } catch (SegmentMissingException e) {
        appendMissingSegment(responseContext);
        // Hand back an already-finished yielder so the consumer sees a clean end of stream.
        return Yielders.done(initValue, null);
      }
    }
    // Wraps a yielder so that SegmentMissingException from next() is handled the same way.
    private <OutType> Yielder<OutType> makeYielder(final Yielder<OutType> yielder) {
      return new Yielder<OutType>() {
        @Override
        public OutType get() {
          return yielder.get();
        }
        @Override
        public Yielder<OutType> next(final OutType initValue) {
          try {
            return yielder.next(initValue);
          } catch (SegmentMissingException e) {
            appendMissingSegment(responseContext);
            return Yielders.done(initValue, null);
          }
        }
        @Override
        public boolean isDone() {
          return yielder.isDone();
        }
        @Override
        public void close() throws IOException {
          yielder.close();
        }
      };
    }
  };
  // Re-apply the thread rename around all downstream sequence processing as well.
  return Sequences.wrap(segmentMissingCatchingSequence, new SequenceWrapper() {
    @Override
    public <RetType> RetType wrap(Supplier<RetType> sequenceProcessing) {
      return doNamed(currThread, currThreadName, newName, sequenceProcessing);
    }
  });
}
Usage of org.apache.druid.java.util.common.guava.Yielder in project druid (druid-io).
Example 5 — class SpecificSegmentQueryRunnerTest, method testRetry:
/**
 * Verifies that when the underlying sequence throws SegmentMissingException — from either
 * accumulate() or toYielder() — SpecificSegmentQueryRunner swallows the exception and
 * records the missing segment descriptor in the response context.
 */
@Test
public void testRetry() throws Exception {
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(Intervals.of("2012-01-01T00:00:00Z/P1D"), "version", 0);
  // A runner whose sequence fails with SegmentMissingException on both consumption paths.
  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(new QueryRunner() {
    @Override
    public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) {
      return new Sequence() {
        @Override
        public Object accumulate(Object initValue, Accumulator accumulator) {
          throw new SegmentMissingException("FAILSAUCE");
        }

        @Override
        public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator) {
          throw new SegmentMissingException("FAILSAUCE");
        }
      };
    }
  }, new SpecificSegmentSpec(descriptor));

  final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("foo")
      .granularity(Granularities.ALL)
      .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D")))
      .aggregators(ImmutableList.of(new CountAggregatorFactory("rows")))
      .build();

  // Failure surfaced through accumulate(): missing segment must land in the response context.
  ResponseContext responseContext = ResponseContext.createEmpty();
  queryRunner.run(QueryPlus.wrap(query), responseContext).toList();
  validate(mapper, descriptor, responseContext);

  // Failure surfaced through toYielder(): same expectation.
  responseContext = ResponseContext.createEmpty();
  queryRunner.run(QueryPlus.wrap(query), responseContext).toYielder(null, new YieldingAccumulator() {
    final List lists = new ArrayList<>();

    @Override
    public Object accumulate(Object accumulated, Object in) {
      lists.add(in);
      return in;
    }
  });
  validate(mapper, descriptor, responseContext);
}
Aggregations