
Example 1 with Yielder

Use of io.druid.java.util.common.guava.Yielder in project druid by druid-io.

The class SpecificSegmentQueryRunnerTest, method testRetry:

@Test
public void testRetry() throws Exception {
    final ObjectMapper mapper = new DefaultObjectMapper();
    SegmentDescriptor descriptor = new SegmentDescriptor(new Interval("2012-01-01T00:00:00Z/P1D"), "version", 0);
    final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return new Sequence() {

                @Override
                public Object accumulate(Object initValue, Accumulator accumulator) {
                    throw new SegmentMissingException("FAILSAUCE");
                }

                @Override
                public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator) {
                    throw new SegmentMissingException("FAILSAUCE");
                }
            };
        }
    }, new SpecificSegmentSpec(descriptor));
    // from accumulate
    Map<String, Object> responseContext = Maps.newHashMap();
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("foo").granularity(Granularities.ALL).intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D"))).aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows"))).build();
    Sequence results = queryRunner.run(query, responseContext);
    Sequences.toList(results, Lists.newArrayList());
    validate(mapper, descriptor, responseContext);
    // from toYielder
    responseContext = Maps.newHashMap();
    results = queryRunner.run(query, responseContext);
    results.toYielder(null, new YieldingAccumulator() {

        final List lists = Lists.newArrayList();

        @Override
        public Object accumulate(Object accumulated, Object in) {
            lists.add(in);
            return in;
        }
    });
    validate(mapper, descriptor, responseContext);
}
Also used : Accumulator(io.druid.java.util.common.guava.Accumulator) YieldingAccumulator(io.druid.java.util.common.guava.YieldingAccumulator) Yielder(io.druid.java.util.common.guava.Yielder) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) Query(io.druid.query.Query) SegmentMissingException(io.druid.segment.SegmentMissingException) Sequence(io.druid.java.util.common.guava.Sequence) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) QueryRunner(io.druid.query.QueryRunner) SegmentDescriptor(io.druid.query.SegmentDescriptor) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) Map(java.util.Map) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Interval(org.joda.time.Interval) Test(org.junit.Test)
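
The test drives both Sequence entry points, accumulate and toYielder. For reference, a Yielder returned by toYielder is normally consumed with a simple loop. The following is a minimal sketch, assuming only the get()/next()/isDone()/close() methods visible in these examples; the class name, the drain helper, and the Consumer parameter are illustrative, not Druid API.

import io.druid.java.util.common.guava.Yielder;

import java.io.IOException;
import java.util.function.Consumer;

class YielderDrainSketch {

    // Drain a Yielder by visiting the value at each yield point, advancing with
    // next(), and closing the final yielder whether iteration ends or fails.
    static <T> void drain(Yielder<T> yielder, Consumer<? super T> handler) throws IOException {
        try {
            while (!yielder.isDone()) {
                handler.accept(yielder.get());   // value produced at this yield point
                yielder = yielder.next(null);    // advance; null becomes the next initValue
            }
        } finally {
            yielder.close();                     // release any underlying resources
        }
    }
}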

Example 2 with Yielder

Use of io.druid.java.util.common.guava.Yielder in project druid by druid-io.

The class SpecificSegmentQueryRunner, method run:

@Override
public Sequence<T> run(final Query<T> input, final Map<String, Object> responseContext) {
    final Query<T> query = input.withQuerySegmentSpec(specificSpec);
    final Thread currThread = Thread.currentThread();
    final String currThreadName = currThread.getName();
    final String newName = String.format("%s_%s_%s", query.getType(), query.getDataSource(), query.getIntervals());
    final Sequence<T> baseSequence = doNamed(currThread, currThreadName, newName, new Supplier<Sequence<T>>() {

        @Override
        public Sequence<T> get() {
            return base.run(query, responseContext);
        }
    });
    Sequence<T> segmentMissingCatchingSequence = new Sequence<T>() {

        @Override
        public <OutType> OutType accumulate(final OutType initValue, final Accumulator<OutType, T> accumulator) {
            try {
                return baseSequence.accumulate(initValue, accumulator);
            } catch (SegmentMissingException e) {
                appendMissingSegment(responseContext);
                return initValue;
            }
        }

        @Override
        public <OutType> Yielder<OutType> toYielder(final OutType initValue, final YieldingAccumulator<OutType, T> accumulator) {
            try {
                return makeYielder(baseSequence.toYielder(initValue, accumulator));
            } catch (SegmentMissingException e) {
                appendMissingSegment(responseContext);
                return Yielders.done(initValue, null);
            }
        }

        private <OutType> Yielder<OutType> makeYielder(final Yielder<OutType> yielder) {
            return new Yielder<OutType>() {

                @Override
                public OutType get() {
                    return yielder.get();
                }

                @Override
                public Yielder<OutType> next(final OutType initValue) {
                    try {
                        return yielder.next(initValue);
                    } catch (SegmentMissingException e) {
                        appendMissingSegment(responseContext);
                        return Yielders.done(initValue, null);
                    }
                }

                @Override
                public boolean isDone() {
                    return yielder.isDone();
                }

                @Override
                public void close() throws IOException {
                    yielder.close();
                }
            };
        }
    };
    return Sequences.wrap(segmentMissingCatchingSequence, new SequenceWrapper() {

        @Override
        public <RetType> RetType wrap(Supplier<RetType> sequenceProcessing) {
            return doNamed(currThread, currThreadName, newName, sequenceProcessing);
        }
    });
}
Also used : YieldingAccumulator(io.druid.java.util.common.guava.YieldingAccumulator) Accumulator(io.druid.java.util.common.guava.Accumulator) SequenceWrapper(io.druid.java.util.common.guava.SequenceWrapper) Yielder(io.druid.java.util.common.guava.Yielder) SegmentMissingException(io.druid.segment.SegmentMissingException) Sequence(io.druid.java.util.common.guava.Sequence)
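
The run method above wraps both the Sequence and the Yielder it produces, so a SegmentMissingException raised anywhere during iteration is recorded in the response context and the sequence simply ends early. The sketch below isolates that decorator idea; it is our own generalization rather than a claim about the Druid class above, and the recursive re-wrapping inside next() is a design choice of this sketch.

import io.druid.java.util.common.guava.Yielder;
import io.druid.java.util.common.guava.Yielders;
import io.druid.segment.SegmentMissingException;

import java.io.IOException;

class CatchingYielderSketch {

    // Wrap a Yielder so that a SegmentMissingException thrown while advancing
    // runs a callback (e.g. record the missing segment) and terminates the
    // iteration with a "done" yielder instead of propagating the exception.
    static <T> Yielder<T> catching(final Yielder<T> delegate, final Runnable onMissingSegment) {
        return new Yielder<T>() {
            @Override
            public T get() {
                return delegate.get();
            }

            @Override
            public Yielder<T> next(T initValue) {
                try {
                    return catching(delegate.next(initValue), onMissingSegment);
                } catch (SegmentMissingException e) {
                    onMissingSegment.run();
                    return Yielders.done(initValue, null);
                }
            }

            @Override
            public boolean isDone() {
                return delegate.isDone();
            }

            @Override
            public void close() throws IOException {
                delegate.close();
            }
        };
    }
}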

Example 3 with Yielder

Use of io.druid.java.util.common.guava.Yielder in project druid by druid-io.

The class AggregationTestHelper, method makeStringSerdeQueryRunner:

public QueryRunner<Row> makeStringSerdeQueryRunner(final ObjectMapper mapper, final QueryToolChest toolChest, final Query<Row> query, final QueryRunner<Row> baseRunner) {
    return new QueryRunner<Row>() {

        @Override
        public Sequence<Row> run(Query<Row> query, Map<String, Object> map) {
            try {
                Sequence<Row> resultSeq = baseRunner.run(query, Maps.<String, Object>newHashMap());
                final Yielder yielder = resultSeq.toYielder(null, new YieldingAccumulator() {

                    @Override
                    public Object accumulate(Object accumulated, Object in) {
                        yield();
                        return in;
                    }
                });
                String resultStr = mapper.writer().writeValueAsString(yielder);
                TypeFactory typeFactory = mapper.getTypeFactory();
                JavaType baseType = typeFactory.constructType(toolChest.getResultTypeReference());
                List resultRows = Lists.transform(readQueryResultArrayFromString(resultStr), toolChest.makePreComputeManipulatorFn(query, MetricManipulatorFns.deserializing()));
                return Sequences.simple(resultRows);
            } catch (Exception ex) {
                throw Throwables.propagate(ex);
            }
        }
    };
}
Also used : Yielder(io.druid.java.util.common.guava.Yielder) Query(io.druid.query.Query) YieldingAccumulator(io.druid.java.util.common.guava.YieldingAccumulator) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) QueryRunner(io.druid.query.QueryRunner) IOException(java.io.IOException) JavaType(com.fasterxml.jackson.databind.JavaType) List(java.util.List) ArrayList(java.util.ArrayList) Row(io.druid.data.input.Row) TypeFactory(com.fasterxml.jackson.databind.type.TypeFactory) Map(java.util.Map)
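
The trick in this runner is the YieldingAccumulator that calls yield() for every input row: it turns Sequence.toYielder(...) into a pull-style cursor whose get() exposes one row at a time, which is what lets Jackson walk and serialize the results incrementally. A typed sketch of that accumulator follows; the class name is hypothetical, while the YieldingAccumulator contract (accumulate plus yield()) is exactly what the example above uses in raw form.

import io.druid.java.util.common.guava.YieldingAccumulator;

class EachElementAccumulator<T> extends YieldingAccumulator<T, T> {

    // Yield after every element so each call to Yielder.next() surfaces exactly
    // one input value via Yielder.get().
    @Override
    public T accumulate(T accumulated, T in) {
        yield();
        return in;
    }
}

With such an accumulator, a sequence can be walked row by row via resultSeq.toYielder(null, new EachElementAccumulator<>()); the runner above does the same with a raw-typed anonymous class and then hands the resulting Yielder straight to the ObjectMapper.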

Example 4 with Yielder

Use of io.druid.java.util.common.guava.Yielder in project druid by druid-io.

The class QueryResource, method doPost:

@POST
@Produces({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE })
@Consumes({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE })
public Response doPost(InputStream in, @QueryParam("pretty") String pretty, // used to get request content-type, remote address and AuthorizationInfo
@Context final HttpServletRequest req) throws IOException {
    final long start = System.currentTimeMillis();
    Query query = null;
    QueryToolChest toolChest = null;
    String queryId = null;
    final ResponseContext context = createContext(req.getContentType(), pretty != null);
    final String currThreadName = Thread.currentThread().getName();
    try {
        query = context.getObjectMapper().readValue(in, Query.class);
        queryId = query.getId();
        if (queryId == null) {
            queryId = UUID.randomUUID().toString();
            query = query.withId(queryId);
        }
        if (query.getContextValue(QueryContextKeys.TIMEOUT) == null) {
            query = query.withOverriddenContext(ImmutableMap.of(QueryContextKeys.TIMEOUT, config.getMaxIdleTime().toStandardDuration().getMillis()));
        }
        toolChest = warehouse.getToolChest(query);
        Thread.currentThread().setName(String.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource().getNames(), queryId));
        if (log.isDebugEnabled()) {
            log.debug("Got query [%s]", query);
        }
        if (authConfig.isEnabled()) {
            // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424
            AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN);
            if (authorizationInfo != null) {
                for (String dataSource : query.getDataSource().getNames()) {
                    Access authResult = authorizationInfo.isAuthorized(new Resource(dataSource, ResourceType.DATASOURCE), Action.READ);
                    if (!authResult.isAllowed()) {
                        return Response.status(Response.Status.FORBIDDEN).header("Access-Check-Result", authResult).build();
                    }
                }
            } else {
                throw new ISE("WTF?! Security is enabled but no authorization info found in the request");
            }
        }
        String prevEtag = req.getHeader(HDR_IF_NONE_MATCH);
        if (prevEtag != null) {
            query = query.withOverriddenContext(ImmutableMap.of(HDR_IF_NONE_MATCH, prevEtag));
        }
        final Map<String, Object> responseContext = new MapMaker().makeMap();
        final Sequence res = query.run(texasRanger, responseContext);
        if (prevEtag != null && prevEtag.equals(responseContext.get(HDR_ETAG))) {
            return Response.notModified().build();
        }
        final Sequence results;
        if (res == null) {
            results = Sequences.empty();
        } else {
            results = res;
        }
        final Yielder yielder = Yielders.each(results);
        try {
            final Query theQuery = query;
            final QueryToolChest theToolChest = toolChest;
            final ObjectWriter jsonWriter = context.newOutputWriter();
            Response.ResponseBuilder builder = Response.ok(new StreamingOutput() {

                @Override
                public void write(OutputStream outputStream) throws IOException, WebApplicationException {
                    try {
                        // json serializer will always close the yielder
                        CountingOutputStream os = new CountingOutputStream(outputStream);
                        jsonWriter.writeValue(os, yielder);
                        // Some types of OutputStream suppress flush errors in the .close() method.
                        os.flush();
                        os.close();
                        successfulQueryCount.incrementAndGet();
                        final long queryTime = System.currentTimeMillis() - start;
                        emitter.emit(DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr()).setDimension("success", "true").build("query/time", queryTime));
                        emitter.emit(DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr()).build("query/bytes", os.getCount()));
                        requestLogger.log(new RequestLogLine(new DateTime(start), req.getRemoteAddr(), theQuery, new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "query/bytes", os.getCount(), "success", true))));
                    } finally {
                        Thread.currentThread().setName(currThreadName);
                    }
                }
            }, context.getContentType()).header("X-Druid-Query-Id", queryId);
            if (responseContext.get(HDR_ETAG) != null) {
                builder.header(HDR_ETAG, responseContext.get(HDR_ETAG));
                responseContext.remove(HDR_ETAG);
            }
            //Limit the response-context header, see https://github.com/druid-io/druid/issues/2331
            //Note that Response.ResponseBuilder.header(String key,Object value).build() calls value.toString()
            //and encodes the string using ASCII, so 1 char is = 1 byte
            String responseCtxString = jsonMapper.writeValueAsString(responseContext);
            if (responseCtxString.length() > RESPONSE_CTX_HEADER_LEN_LIMIT) {
                log.warn("Response Context truncated for id [%s] . Full context is [%s].", queryId, responseCtxString);
                responseCtxString = responseCtxString.substring(0, RESPONSE_CTX_HEADER_LEN_LIMIT);
            }
            return builder.header("X-Druid-Response-Context", responseCtxString).build();
        } catch (Exception e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder.close();
            throw Throwables.propagate(e);
        } finally {
        // do not close yielder here, since we do not want to close the yielder prior to
        // StreamingOutput having iterated over all the results
        }
    } catch (QueryInterruptedException e) {
        try {
            log.warn(e, "Exception while processing queryId [%s]", queryId);
            interruptedQueryCount.incrementAndGet();
            final long queryTime = System.currentTimeMillis() - start;
            emitter.emit(DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr()).setDimension("success", "false").build("query/time", queryTime));
            requestLogger.log(new RequestLogLine(new DateTime(start), req.getRemoteAddr(), query, new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "success", false, "interrupted", true, "reason", e.toString()))));
        } catch (Exception e2) {
            log.error(e2, "Unable to log query [%s]!", query);
        }
        return context.gotError(e);
    } catch (Exception e) {
        // Input stream has already been consumed by the json object mapper if query == null
        final String queryString = query == null ? "unparsable query" : query.toString();
        log.warn(e, "Exception occurred on request [%s]", queryString);
        failedQueryCount.incrementAndGet();
        try {
            final long queryTime = System.currentTimeMillis() - start;
            emitter.emit(DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr()).setDimension("success", "false").build("query/time", queryTime));
            requestLogger.log(new RequestLogLine(new DateTime(start), req.getRemoteAddr(), query, new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "success", false, "exception", e.toString()))));
        } catch (Exception e2) {
            log.error(e2, "Unable to log query [%s]!", queryString);
        }
        log.makeAlert(e, "Exception handling request").addData("exception", e.toString()).addData("query", queryString).addData("peer", req.getRemoteAddr()).emit();
        return context.gotError(e);
    } finally {
        Thread.currentThread().setName(currThreadName);
    }
}
Also used : Query(io.druid.query.Query) CountingOutputStream(com.google.common.io.CountingOutputStream) OutputStream(java.io.OutputStream) Access(io.druid.server.security.Access) StreamingOutput(javax.ws.rs.core.StreamingOutput) QueryToolChest(io.druid.query.QueryToolChest) DateTime(org.joda.time.DateTime) ISE(io.druid.java.util.common.ISE) QueryInterruptedException(io.druid.query.QueryInterruptedException) Yielder(io.druid.java.util.common.guava.Yielder) Resource(io.druid.server.security.Resource) MapMaker(com.google.common.collect.MapMaker) ObjectWriter(com.fasterxml.jackson.databind.ObjectWriter) Sequence(io.druid.java.util.common.guava.Sequence) AuthorizationInfo(io.druid.server.security.AuthorizationInfo) WebApplicationException(javax.ws.rs.WebApplicationException) IOException(java.io.IOException) Response(javax.ws.rs.core.Response) POST(javax.ws.rs.POST) Produces(javax.ws.rs.Produces) Consumes(javax.ws.rs.Consumes)
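
One subtle point in doPost is the yielder lifecycle: Yielders.each(results) converts the result Sequence into a Yielder, the streaming JSON serializer is expected to close it after writing (per the comment in the code), and the handler closes it directly only if something fails before streaming begins. Below is a minimal sketch of that ownership rule; Streamer and streamOrClose are hypothetical stand-ins for the StreamingOutput/jsonWriter path, not Druid API.

import com.google.common.base.Throwables;

import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Yielder;
import io.druid.java.util.common.guava.Yielders;

class YielderOwnershipSketch {

    // Hypothetical stand-in for handing the yielder to a streaming serializer;
    // on success it takes ownership of the yielder and closes it.
    interface Streamer<R> {
        R stream(Yielder<?> yielder) throws Exception;
    }

    // Close the yielder ourselves only if streaming never got a chance to start.
    static <R> R streamOrClose(Sequence<?> results, Streamer<R> streamer) {
        final Yielder<?> yielder = Yielders.each(results);
        try {
            return streamer.stream(yielder);     // on success the streamer closes the yielder
        } catch (Exception e) {
            try {
                yielder.close();                 // streaming never started; release resources here
            } catch (Exception suppressed) {
                // keep the original failure; the close failure is secondary
            }
            throw Throwables.propagate(e);
        }
    }
}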

Example 5 with Yielder

Use of io.druid.java.util.common.guava.Yielder in project druid by druid-io.

The class OrderedMergeSequence, method makeYielder:

private <OutType> Yielder<OutType> makeYielder(final PriorityQueue<Yielder<T>> pQueue, Yielder<Yielder<T>> oldDudeAtCrosswalk, OutType initVal, final YieldingAccumulator<OutType, T> accumulator) {
    OutType retVal = initVal;
    while (!accumulator.yielded() && (!pQueue.isEmpty() || !oldDudeAtCrosswalk.isDone())) {
        Yielder<T> yielder;
        if (oldDudeAtCrosswalk.isDone()) {
            yielder = pQueue.remove();
        } else if (pQueue.isEmpty()) {
            yielder = oldDudeAtCrosswalk.get();
            oldDudeAtCrosswalk = oldDudeAtCrosswalk.next(null);
        } else {
            Yielder<T> queueYielder = pQueue.peek();
            Yielder<T> iterYielder = oldDudeAtCrosswalk.get();
            if (ordering.compare(queueYielder.get(), iterYielder.get()) <= 0) {
                yielder = pQueue.remove();
            } else {
                yielder = oldDudeAtCrosswalk.get();
                oldDudeAtCrosswalk = oldDudeAtCrosswalk.next(null);
            }
        }
        retVal = accumulator.accumulate(retVal, yielder.get());
        yielder = yielder.next(null);
        if (yielder.isDone()) {
            try {
                yielder.close();
            } catch (IOException e) {
                throw Throwables.propagate(e);
            }
        } else {
            pQueue.add(yielder);
        }
    }
    if (!accumulator.yielded()) {
        return Yielders.done(retVal, oldDudeAtCrosswalk);
    }
    final OutType yieldVal = retVal;
    final Yielder<Yielder<T>> finalOldDudeAtCrosswalk = oldDudeAtCrosswalk;
    return new Yielder<OutType>() {

        @Override
        public OutType get() {
            return yieldVal;
        }

        @Override
        public Yielder<OutType> next(OutType initValue) {
            accumulator.reset();
            return makeYielder(pQueue, finalOldDudeAtCrosswalk, initValue, accumulator);
        }

        @Override
        public boolean isDone() {
            return false;
        }

        @Override
        public void close() throws IOException {
            Closer closer = Closer.create();
            while (!pQueue.isEmpty()) {
                closer.register(pQueue.remove());
            }
            closer.close();
        }
    };
}
Also used : Closer(com.google.common.io.Closer) Yielder(io.druid.java.util.common.guava.Yielder) IOException(java.io.IOException)
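
Note how close() drains the priority queue into a Guava Closer so that every remaining yielder is closed even if one of them throws. The same pattern applies to any collection of yielders. A small sketch, assuming only that Yielder is Closeable, which its close() signature and the closer.register call above imply:

import com.google.common.io.Closer;

import io.druid.java.util.common.guava.Yielder;

import java.io.IOException;
import java.util.Collection;

class CloseAllYieldersSketch {

    // Register every yielder with a Closer, then close them all in one call;
    // Closer attempts each close and rethrows the first failure it encounters.
    static void closeAll(Collection<? extends Yielder<?>> yielders) throws IOException {
        final Closer closer = Closer.create();
        for (Yielder<?> yielder : yielders) {
            closer.register(yielder);
        }
        closer.close();
    }
}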

Aggregations

Yielder (io.druid.java.util.common.guava.Yielder): 5
Sequence (io.druid.java.util.common.guava.Sequence): 3
YieldingAccumulator (io.druid.java.util.common.guava.YieldingAccumulator): 3
Query (io.druid.query.Query): 3
IOException (java.io.IOException): 3
Accumulator (io.druid.java.util.common.guava.Accumulator): 2
QueryRunner (io.druid.query.QueryRunner): 2
SegmentMissingException (io.druid.segment.SegmentMissingException): 2
List (java.util.List): 2
Map (java.util.Map): 2
JavaType (com.fasterxml.jackson.databind.JavaType): 1
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1
ObjectWriter (com.fasterxml.jackson.databind.ObjectWriter): 1
TypeFactory (com.fasterxml.jackson.databind.type.TypeFactory): 1
ImmutableList (com.google.common.collect.ImmutableList): 1
MapMaker (com.google.common.collect.MapMaker): 1
Closer (com.google.common.io.Closer): 1
CountingOutputStream (com.google.common.io.CountingOutputStream): 1
Row (io.druid.data.input.Row): 1
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 1