Search in sources:

Example 1 with Yielder

Use of org.apache.druid.java.util.common.guava.Yielder in project druid by druid-io.

From the class AggregationTestHelper, method makeStringSerdeQueryRunner.

public QueryRunner<ResultRow> makeStringSerdeQueryRunner(final ObjectMapper mapper, final QueryToolChest toolChest, final QueryRunner<ResultRow> baseRunner) {
    return new QueryRunner<ResultRow>() {

        @Override
        public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext map) {
            try {
                Sequence<ResultRow> resultSeq = baseRunner.run(queryPlus, ResponseContext.createEmpty());
                // Yield on every row so the Yielder exposes the results one element at a time.
                final Yielder yielder = resultSeq.toYielder(null, new YieldingAccumulator() {

                    @Override
                    public Object accumulate(Object accumulated, Object in) {
                        yield();
                        return in;
                    }
                });
                // Round-trip the results through JSON to exercise the serde path, then rebuild
                // the rows with the toolchest's deserializing manipulator function.
                String resultStr = mapper.writer().writeValueAsString(yielder);
                List resultRows = Lists.transform(readQueryResultArrayFromString(resultStr), toolChest.makePreComputeManipulatorFn(queryPlus.getQuery(), MetricManipulatorFns.deserializing()));
                return Sequences.simple(resultRows);
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }
        }
    };
}
Also used : ResultRow(org.apache.druid.query.groupby.ResultRow) Yielder(org.apache.druid.java.util.common.guava.Yielder) YieldingAccumulator(org.apache.druid.java.util.common.guava.YieldingAccumulator) QueryRunner(org.apache.druid.query.QueryRunner) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) IOException(java.io.IOException) ResponseContext(org.apache.druid.query.context.ResponseContext) List(java.util.List) ArrayList(java.util.ArrayList) QueryPlus(org.apache.druid.query.QueryPlus)
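For readers new to the interface, the following is a minimal, self-contained sketch of the Sequence-to-Yielder idiom the example above relies on. The class name YielderSketch and the sample data are illustrative only and are not part of the Druid codebase.

import java.io.IOException;
import java.util.Arrays;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.YieldingAccumulator;

public class YielderSketch {

    public static void main(String[] args) throws IOException {
        // A trivial in-memory Sequence standing in for real query results (illustrative data only).
        final Sequence<String> sequence = Sequences.simple(Arrays.asList("a", "b", "c"));
        // Yielding on every input makes the Yielder pause at each element, the same
        // idiom makeStringSerdeQueryRunner uses before serializing the results.
        Yielder<String> yielder = sequence.toYielder(null, new YieldingAccumulator<String, String>() {

            @Override
            public String accumulate(String accumulated, String in) {
                this.yield();
                return in;
            }
        });
        try {
            while (!yielder.isDone()) {
                System.out.println(yielder.get());
                // next() resumes the underlying Sequence until the accumulator yields again.
                yielder = yielder.next(null);
            }
        } finally {
            // Yielders carry the Sequence's resources (baggage), so they must always be closed.
            yielder.close();
        }
    }
}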

Example 2 with Yielder

Use of org.apache.druid.java.util.common.guava.Yielder in project druid by druid-io.

From the class DruidSchema, method refreshSegmentsForDataSource.

/**
 * Attempt to refresh "segmentSignatures" for a set of segments for a particular dataSource. Returns the set of
 * segments actually refreshed, which may be a subset of the asked-for set.
 */
private Set<SegmentId> refreshSegmentsForDataSource(final String dataSource, final Set<SegmentId> segments) throws IOException {
    if (!segments.stream().allMatch(segmentId -> segmentId.getDataSource().equals(dataSource))) {
        // Sanity check. We definitely expect this to pass.
        throw new ISE("'segments' must all match 'dataSource'!");
    }
    log.debug("Refreshing metadata for dataSource[%s].", dataSource);
    final long startTime = System.currentTimeMillis();
    // Segment id string -> SegmentId object.
    final Map<String, SegmentId> segmentIdMap = Maps.uniqueIndex(segments, SegmentId::toString);
    final Set<SegmentId> retVal = new HashSet<>();
    final Sequence<SegmentAnalysis> sequence = runSegmentMetadataQuery(Iterables.limit(segments, MAX_SEGMENTS_PER_QUERY));
    Yielder<SegmentAnalysis> yielder = Yielders.each(sequence);
    try {
        while (!yielder.isDone()) {
            final SegmentAnalysis analysis = yielder.get();
            final SegmentId segmentId = segmentIdMap.get(analysis.getId());
            if (segmentId == null) {
                log.warn("Got analysis for segment[%s] we didn't ask for, ignoring.", analysis.getId());
            } else {
                final RowSignature rowSignature = analysisToRowSignature(analysis);
                log.debug("Segment[%s] has signature[%s].", segmentId, rowSignature);
                segmentMetadataInfo.compute(dataSource, (datasourceKey, dataSourceSegments) -> {
                    if (dataSourceSegments == null) {
                        // Datasource may have been removed or become unavailable while this refresh was ongoing.
                        log.warn("No segment map found with datasource[%s], skipping refresh of segment[%s]", datasourceKey, segmentId);
                        return null;
                    } else {
                        dataSourceSegments.compute(segmentId, (segmentIdKey, segmentMetadata) -> {
                            if (segmentMetadata == null) {
                                log.warn("No segment[%s] found, skipping refresh", segmentId);
                                return null;
                            } else {
                                final AvailableSegmentMetadata updatedSegmentMetadata = AvailableSegmentMetadata.from(segmentMetadata).withRowSignature(rowSignature).withNumRows(analysis.getNumRows()).build();
                                retVal.add(segmentId);
                                return updatedSegmentMetadata;
                            }
                        });
                        if (dataSourceSegments.isEmpty()) {
                            return null;
                        } else {
                            return dataSourceSegments;
                        }
                    }
                });
            }
            yielder = yielder.next(null);
        }
    } finally {
        yielder.close();
    }
    log.debug("Refreshed metadata for dataSource[%s] in %,d ms (%d segments queried, %d segments left).", dataSource, System.currentTimeMillis() - startTime, retVal.size(), segments.size() - retVal.size());
    return retVal;
}
Also used : SegmentManager(org.apache.druid.server.SegmentManager) Inject(com.google.inject.Inject) LifecycleStart(org.apache.druid.java.util.common.lifecycle.LifecycleStart) AllColumnIncluderator(org.apache.druid.query.metadata.metadata.AllColumnIncluderator) FluentIterable(com.google.common.collect.FluentIterable) Map(java.util.Map) ServerType(org.apache.druid.server.coordination.ServerType) ManageLifecycle(org.apache.druid.guice.ManageLifecycle) TimelineServerView(org.apache.druid.client.TimelineServerView) EnumSet(java.util.EnumSet) DateTimes(org.apache.druid.java.util.common.DateTimes) Sequence(org.apache.druid.java.util.common.guava.Sequence) ImmutableSet(com.google.common.collect.ImmutableSet) Execs(org.apache.druid.java.util.common.concurrent.Execs) ImmutableMap(com.google.common.collect.ImmutableMap) GuardedBy(com.google.errorprone.annotations.concurrent.GuardedBy) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Table(org.apache.calcite.schema.Table) Set(java.util.Set) ISE(org.apache.druid.java.util.common.ISE) Interner(com.google.common.collect.Interner) PlannerConfig(org.apache.druid.sql.calcite.planner.PlannerConfig) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) CountDownLatch(java.util.concurrent.CountDownLatch) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) BrokerInternalQueryConfig(org.apache.druid.client.BrokerInternalQueryConfig) DataSegment(org.apache.druid.timeline.DataSegment) Optional(java.util.Optional) SegmentId(org.apache.druid.timeline.SegmentId) QueryLifecycleFactory(org.apache.druid.server.QueryLifecycleFactory) Iterables(com.google.common.collect.Iterables) GlobalTableDataSource(org.apache.druid.query.GlobalTableDataSource) JoinableFactory(org.apache.druid.segment.join.JoinableFactory) Function(java.util.function.Function) TreeSet(java.util.TreeSet) ConcurrentMap(java.util.concurrent.ConcurrentMap) ServerView(org.apache.druid.client.ServerView) Yielders(org.apache.druid.java.util.common.guava.Yielders) MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) HashSet(java.util.HashSet) LifecycleStop(org.apache.druid.java.util.common.lifecycle.LifecycleStop) DruidServerMetadata(org.apache.druid.server.coordination.DruidServerMetadata) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) Predicates(com.google.common.base.Predicates) AbstractSchema(org.apache.calcite.schema.impl.AbstractSchema) StreamSupport(java.util.stream.StreamSupport) Yielder(org.apache.druid.java.util.common.guava.Yielder) ExecutorService(java.util.concurrent.ExecutorService) Access(org.apache.druid.server.security.Access) EmittingLogger(org.apache.druid.java.util.emitter.EmittingLogger) Interners(com.google.common.collect.Interners) IOException(java.io.IOException) Maps(com.google.common.collect.Maps) TableDataSource(org.apache.druid.query.TableDataSource) ColumnAnalysis(org.apache.druid.query.metadata.metadata.ColumnAnalysis) ConcurrentSkipListMap(java.util.concurrent.ConcurrentSkipListMap) TreeMap(java.util.TreeMap) DruidTable(org.apache.druid.sql.calcite.table.DruidTable) RowSignature(org.apache.druid.segment.column.RowSignature) ColumnType(org.apache.druid.segment.column.ColumnType) Preconditions(com.google.common.base.Preconditions) VisibleForTesting(com.google.common.annotations.VisibleForTesting) Escalator(org.apache.druid.server.security.Escalator) Comparator(java.util.Comparator) SegmentId(org.apache.druid.timeline.SegmentId) 
ISE(org.apache.druid.java.util.common.ISE) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) RowSignature(org.apache.druid.segment.column.RowSignature) HashSet(java.util.HashSet)
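Yielders.each(sequence), used above, builds a Yielder that pauses at every element without writing an explicit accumulator. The sketch below shows the resulting consume-and-close loop in isolation; the class name and sample data are assumptions for illustration.

import java.io.IOException;
import java.util.Arrays;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.Yielders;

public class YieldersEachSketch {

    public static void main(String[] args) throws IOException {
        final Sequence<Integer> sequence = Sequences.simple(Arrays.asList(1, 2, 3));
        // Yielders.each returns a Yielder positioned at the first element
        // (or an already-done Yielder for an empty Sequence).
        Yielder<Integer> yielder = Yielders.each(sequence);
        try {
            while (!yielder.isDone()) {
                System.out.println(yielder.get());
                // Passing null as the next initial value mirrors refreshSegmentsForDataSource above.
                yielder = yielder.next(null);
            }
        } finally {
            // close() releases the underlying Sequence's resources even on early exit.
            yielder.close();
        }
    }
}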

Example 3 with Yielder

Use of org.apache.druid.java.util.common.guava.Yielder in project druid by druid-io.

From the class CombiningSequenceTest, method testCombining.

private void testCombining(List<Pair<Integer, Integer>> pairs, List<Pair<Integer, Integer>> expected, int limit) throws Exception {
    // Test that closing works too
    final CountDownLatch closed = new CountDownLatch(1);
    final Closeable closeable = closed::countDown;
    Sequence<Pair<Integer, Integer>> seq = CombiningSequence.create(Sequences.simple(pairs).withBaggage(closeable), Ordering.natural().onResultOf(p -> p.lhs), (lhs, rhs) -> {
        if (lhs == null) {
            return rhs;
        }
        if (rhs == null) {
            return lhs;
        }
        return Pair.of(lhs.lhs, lhs.rhs + rhs.rhs);
    }).limit(limit);
    List<Pair<Integer, Integer>> merged = seq.toList();
    Assert.assertEquals(expected, merged);
    Yielder<Pair<Integer, Integer>> yielder = seq.toYielder(null, new YieldingAccumulator<Pair<Integer, Integer>, Pair<Integer, Integer>>() {

        int count = 0;

        @Override
        public Pair<Integer, Integer> accumulate(Pair<Integer, Integer> lhs, Pair<Integer, Integer> rhs) {
            count++;
            if (count % yieldEvery == 0) {
                yield();
            }
            return rhs;
        }
    });
    Iterator<Pair<Integer, Integer>> expectedVals = Iterators.filter(expected.iterator(), new Predicate<Pair<Integer, Integer>>() {

        int count = 0;

        @Override
        public boolean apply(@Nullable Pair<Integer, Integer> input) {
            count++;
            if (count % yieldEvery == 0) {
                return true;
            }
            return false;
        }
    });
    if (expectedVals.hasNext()) {
        while (!yielder.isDone()) {
            final Pair<Integer, Integer> expectedVal = expectedVals.next();
            final Pair<Integer, Integer> actual = yielder.get();
            Assert.assertEquals(expectedVal, actual);
            yielder = yielder.next(actual);
        }
    }
    Assert.assertTrue(yielder.isDone());
    Assert.assertFalse(expectedVals.hasNext());
    yielder.close();
    Assert.assertTrue("resource closed", closed.await(10000, TimeUnit.MILLISECONDS));
}
Also used : CoreMatchers(org.hamcrest.CoreMatchers) Iterables(com.google.common.collect.Iterables) Arrays(java.util.Arrays) RunWith(org.junit.runner.RunWith) ThrowableMessageMatcher(org.junit.internal.matchers.ThrowableMessageMatcher) Iterators(com.google.common.collect.Iterators) Pair(org.apache.druid.java.util.common.Pair) Lists(com.google.common.collect.Lists) ImmutableList(com.google.common.collect.ImmutableList) ExplodingSequence(org.apache.druid.java.util.common.guava.ExplodingSequence) Yielder(org.apache.druid.java.util.common.guava.Yielder) Sequences(org.apache.druid.java.util.common.guava.Sequences) Parameterized(org.junit.runners.Parameterized) Nullable(javax.annotation.Nullable) Sequence(org.apache.druid.java.util.common.guava.Sequence) YieldingAccumulator(org.apache.druid.java.util.common.guava.YieldingAccumulator) Iterator(java.util.Iterator) Collection(java.util.Collection) Test(org.junit.Test) TimeUnit(java.util.concurrent.TimeUnit) CountDownLatch(java.util.concurrent.CountDownLatch) List(java.util.List) Ordering(com.google.common.collect.Ordering) Predicate(com.google.common.base.Predicate) Closeable(java.io.Closeable) Assert(org.junit.Assert) Comparator(java.util.Comparator) Collections(java.util.Collections) Closeable(java.io.Closeable) CountDownLatch(java.util.concurrent.CountDownLatch) Pair(org.apache.druid.java.util.common.Pair)
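The test above drives yielding through the parameterized yieldEvery counter. As a simplified illustration of that partial-yield behavior, the following sketch (with an assumed interval of 2 and made-up data) yields only on every second element, folding the values in between into the accumulated result.

import java.io.IOException;
import java.util.Arrays;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.YieldingAccumulator;

public class PartialYieldSketch {

    public static void main(String[] args) throws IOException {
        final Sequence<Integer> sequence = Sequences.simple(Arrays.asList(1, 2, 3, 4, 5));
        // Yield only on every second element; values in between are folded into the
        // accumulated result, so each get() returns a partial sum of two inputs.
        Yielder<Integer> yielder = sequence.toYielder(0, new YieldingAccumulator<Integer, Integer>() {

            private int count = 0;

            @Override
            public Integer accumulate(Integer accumulated, Integer in) {
                count++;
                if (count % 2 == 0) {
                    this.yield();
                }
                return accumulated + in;
            }
        });
        try {
            while (!yielder.isDone()) {
                // Prints 3 (1 + 2), then 7 (3 + 4).
                System.out.println(yielder.get());
                // Resetting the accumulated value to 0 starts a fresh partial sum.
                yielder = yielder.next(0);
            }
            // The final, done Yielder still holds whatever accumulated after the last yield (here 5).
            System.out.println(yielder.get());
        } finally {
            yielder.close();
        }
    }
}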

Example 4 with Yielder

Use of org.apache.druid.java.util.common.guava.Yielder in project druid by druid-io.

From the class SpecificSegmentQueryRunner, method run.

@Override
public Sequence<T> run(final QueryPlus<T> input, final ResponseContext responseContext) {
    final QueryPlus<T> queryPlus = input.withQuery(Queries.withSpecificSegments(input.getQuery(), Collections.singletonList(specificSpec.getDescriptor())));
    final Query<T> query = queryPlus.getQuery();
    final Thread currThread = Thread.currentThread();
    final String currThreadName = currThread.getName();
    final String newName = query.getType() + "_" + query.getDataSource() + "_" + query.getIntervals();
    final Sequence<T> baseSequence = doNamed(currThread, currThreadName, newName, () -> base.run(queryPlus, responseContext));
    Sequence<T> segmentMissingCatchingSequence = new Sequence<T>() {

        @Override
        public <OutType> OutType accumulate(final OutType initValue, final Accumulator<OutType, T> accumulator) {
            try {
                return baseSequence.accumulate(initValue, accumulator);
            } catch (SegmentMissingException e) {
                appendMissingSegment(responseContext);
                return initValue;
            }
        }

        @Override
        public <OutType> Yielder<OutType> toYielder(final OutType initValue, final YieldingAccumulator<OutType, T> accumulator) {
            try {
                return makeYielder(baseSequence.toYielder(initValue, accumulator));
            } catch (SegmentMissingException e) {
                appendMissingSegment(responseContext);
                return Yielders.done(initValue, null);
            }
        }

        private <OutType> Yielder<OutType> makeYielder(final Yielder<OutType> yielder) {
            return new Yielder<OutType>() {

                @Override
                public OutType get() {
                    return yielder.get();
                }

                @Override
                public Yielder<OutType> next(final OutType initValue) {
                    try {
                        return yielder.next(initValue);
                    } catch (SegmentMissingException e) {
                        appendMissingSegment(responseContext);
                        return Yielders.done(initValue, null);
                    }
                }

                @Override
                public boolean isDone() {
                    return yielder.isDone();
                }

                @Override
                public void close() throws IOException {
                    yielder.close();
                }
            };
        }
    };
    return Sequences.wrap(segmentMissingCatchingSequence, new SequenceWrapper() {

        @Override
        public <RetType> RetType wrap(Supplier<RetType> sequenceProcessing) {
            return doNamed(currThread, currThreadName, newName, sequenceProcessing);
        }
    });
}
Also used : YieldingAccumulator(org.apache.druid.java.util.common.guava.YieldingAccumulator) Accumulator(org.apache.druid.java.util.common.guava.Accumulator) SequenceWrapper(org.apache.druid.java.util.common.guava.SequenceWrapper) Yielder(org.apache.druid.java.util.common.guava.Yielder) SegmentMissingException(org.apache.druid.segment.SegmentMissingException) Sequence(org.apache.druid.java.util.common.guava.Sequence) YieldingAccumulator(org.apache.druid.java.util.common.guava.YieldingAccumulator)

Example 5 with Yielder

Use of org.apache.druid.java.util.common.guava.Yielder in project druid by druid-io.

From the class SpecificSegmentQueryRunnerTest, method testRetry.

@Test
public void testRetry() throws Exception {
    final ObjectMapper mapper = new DefaultObjectMapper();
    SegmentDescriptor descriptor = new SegmentDescriptor(Intervals.of("2012-01-01T00:00:00Z/P1D"), "version", 0);
    final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(new QueryRunner() {

        @Override
        public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) {
            return new Sequence() {

                @Override
                public Object accumulate(Object initValue, Accumulator accumulator) {
                    throw new SegmentMissingException("FAILSAUCE");
                }

                @Override
                public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator) {
                    throw new SegmentMissingException("FAILSAUCE");
                }
            };
        }
    }, new SpecificSegmentSpec(descriptor));
    // from accumulate
    ResponseContext responseContext = ResponseContext.createEmpty();
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("foo").granularity(Granularities.ALL).intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D"))).aggregators(ImmutableList.of(new CountAggregatorFactory("rows"))).build();
    Sequence results = queryRunner.run(QueryPlus.wrap(query), responseContext);
    results.toList();
    validate(mapper, descriptor, responseContext);
    // from toYielder
    responseContext = ResponseContext.createEmpty();
    results = queryRunner.run(QueryPlus.wrap(query), responseContext);
    results.toYielder(null, new YieldingAccumulator() {

        final List lists = new ArrayList<>();

        @Override
        public Object accumulate(Object accumulated, Object in) {
            lists.add(in);
            return in;
        }
    });
    validate(mapper, descriptor, responseContext);
}
Also used : YieldingAccumulator(org.apache.druid.java.util.common.guava.YieldingAccumulator) Accumulator(org.apache.druid.java.util.common.guava.Accumulator) Yielder(org.apache.druid.java.util.common.guava.Yielder) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) ArrayList(java.util.ArrayList) SegmentMissingException(org.apache.druid.segment.SegmentMissingException) Sequence(org.apache.druid.java.util.common.guava.Sequence) YieldingAccumulator(org.apache.druid.java.util.common.guava.YieldingAccumulator) QueryRunner(org.apache.druid.query.QueryRunner) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) ResponseContext(org.apache.druid.query.context.ResponseContext) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) QueryPlus(org.apache.druid.query.QueryPlus) Test(org.junit.Test)

Aggregations

Yielder (org.apache.druid.java.util.common.guava.Yielder): 6
Sequence (org.apache.druid.java.util.common.guava.Sequence): 5
YieldingAccumulator (org.apache.druid.java.util.common.guava.YieldingAccumulator): 5
List (java.util.List): 3
Accumulator (org.apache.druid.java.util.common.guava.Accumulator): 3
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 2
ImmutableList (com.google.common.collect.ImmutableList): 2
Iterables (com.google.common.collect.Iterables): 2
Inject (com.google.inject.Inject): 2
IOException (java.io.IOException): 2
ArrayList (java.util.ArrayList): 2
Comparator (java.util.Comparator): 2
HashSet (java.util.HashSet): 2
Optional (java.util.Optional): 2
Set (java.util.Set): 2
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 2
CountDownLatch (java.util.concurrent.CountDownLatch): 2
QueryRunner (org.apache.druid.query.QueryRunner): 2
SegmentDescriptor (org.apache.druid.query.SegmentDescriptor): 2
SegmentMissingException (org.apache.druid.segment.SegmentMissingException): 2