
Example 1 with HttpResponseHandler

Use of org.apache.druid.java.util.http.client.response.HttpResponseHandler in project druid by druid-io.

From class DirectDruidClientTest, method testRun.

@Test
public void testRun() throws Exception {
    final URL url = new URL(StringUtils.format("http://%s/druid/v2/", hostName));
    SettableFuture<InputStream> futureResult = SettableFuture.create();
    Capture<Request> capturedRequest = EasyMock.newCapture();
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class))).andReturn(futureResult).times(1);
    SettableFuture futureException = SettableFuture.create();
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class))).andReturn(futureException).times(1);
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class))).andReturn(SettableFuture.create()).atLeastOnce();
    EasyMock.replay(httpClient);
    DirectDruidClient client2 = new DirectDruidClient(new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, "http", "foo2", new NoopServiceEmitter());
    QueryableDruidServer queryableDruidServer2 = new QueryableDruidServer(new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0), client2);
    serverSelector.addServerAndUpdateSegment(queryableDruidServer2, serverSelector.getSegment());
    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
    query = query.withOverriddenContext(ImmutableMap.of(DirectDruidClient.QUERY_FAIL_TIME, Long.MAX_VALUE));
    Sequence s1 = client.run(QueryPlus.wrap(query));
    Assert.assertTrue(capturedRequest.hasCaptured());
    Assert.assertEquals(url, capturedRequest.getValue().getUrl());
    Assert.assertEquals(HttpMethod.POST, capturedRequest.getValue().getMethod());
    Assert.assertEquals(1, client.getNumOpenConnections());
    // simulate read timeout
    client.run(QueryPlus.wrap(query));
    Assert.assertEquals(2, client.getNumOpenConnections());
    futureException.setException(new ReadTimeoutException());
    Assert.assertEquals(1, client.getNumOpenConnections());
    // subsequent connections should work
    client.run(QueryPlus.wrap(query));
    client.run(QueryPlus.wrap(query));
    client.run(QueryPlus.wrap(query));
    Assert.assertTrue(client.getNumOpenConnections() == 4);
    // produce result for first connection
    futureResult.set(new ByteArrayInputStream(StringUtils.toUtf8("[{\"timestamp\":\"2014-01-01T01:02:03Z\", \"result\": 42.0}]")));
    List<Result> results = s1.toList();
    Assert.assertEquals(1, results.size());
    Assert.assertEquals(DateTimes.of("2014-01-01T01:02:03Z"), results.get(0).getTimestamp());
    Assert.assertEquals(3, client.getNumOpenConnections());
    client2.run(QueryPlus.wrap(query));
    client2.run(QueryPlus.wrap(query));
    Assert.assertEquals(2, client2.getNumOpenConnections());
    Assert.assertEquals(serverSelector.pick(null), queryableDruidServer2);
    EasyMock.verify(httpClient);
}
Also used : SettableFuture(com.google.common.util.concurrent.SettableFuture) ByteArrayInputStream(java.io.ByteArrayInputStream) PipedInputStream(java.io.PipedInputStream) InputStream(java.io.InputStream) ReadTimeoutException(org.jboss.netty.handler.timeout.ReadTimeoutException) Request(org.apache.druid.java.util.http.client.Request) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) NoopServiceEmitter(org.apache.druid.server.metrics.NoopServiceEmitter) TimeBoundaryQuery(org.apache.druid.query.timeboundary.TimeBoundaryQuery) Sequence(org.apache.druid.java.util.common.guava.Sequence) URL(java.net.URL) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) Result(org.apache.druid.query.Result) ByteArrayInputStream(java.io.ByteArrayInputStream) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) HttpResponseHandler(org.apache.druid.java.util.http.client.response.HttpResponseHandler) ReflectionQueryToolChestWarehouse(org.apache.druid.query.ReflectionQueryToolChestWarehouse) Test(org.junit.Test)
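The stubbing pattern above (capture the outgoing Request, hand back a SettableFuture, and complete the future later to simulate a normal response or a read timeout) can be factored into a small helper. The sketch below is illustrative only: the class and method names are invented, and it assumes httpClient is an EasyMock mock of org.apache.druid.java.util.http.client.HttpClient as in the test fixture, but the EasyMock calls mirror the ones in testRun().

import com.google.common.util.concurrent.SettableFuture;
import java.io.InputStream;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.Request;
import org.apache.druid.java.util.http.client.response.HttpResponseHandler;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.joda.time.Duration;

final class HttpClientStubbing {

    private HttpClientStubbing() {}

    // Stub the next httpClient.go(...) call: capture the outgoing Request and return the
    // given future. The test completes the future later, e.g. futureResult.set(inputStream)
    // for a normal response, or futureException.setException(new ReadTimeoutException())
    // to simulate a read timeout, as testRun() does above.
    @SuppressWarnings("unchecked")
    static Capture<Request> expectOneGo(HttpClient httpClient, SettableFuture<InputStream> futureResult) {
        Capture<Request> capturedRequest = EasyMock.newCapture();
        EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class))).andReturn(futureResult).once();
        return capturedRequest;
    }
}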

Example 2 with HttpResponseHandler

Use of org.apache.druid.java.util.http.client.response.HttpResponseHandler in project druid by druid-io.

From class DirectDruidClientTest, method testCancel.

@Test
public void testCancel() {
    Capture<Request> capturedRequest = EasyMock.newCapture();
    ListenableFuture<Object> cancelledFuture = Futures.immediateCancelledFuture();
    SettableFuture<Object> cancellationFuture = SettableFuture.create();
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class))).andReturn(cancelledFuture).once();
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class))).andReturn(cancellationFuture).anyTimes();
    EasyMock.replay(httpClient);
    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
    query = query.withOverriddenContext(ImmutableMap.of(DirectDruidClient.QUERY_FAIL_TIME, Long.MAX_VALUE));
    cancellationFuture.set(new StatusResponseHolder(HttpResponseStatus.OK, new StringBuilder("cancelled")));
    Sequence results = client.run(QueryPlus.wrap(query));
    Assert.assertEquals(HttpMethod.POST, capturedRequest.getValue().getMethod());
    Assert.assertEquals(0, client.getNumOpenConnections());
    QueryInterruptedException exception = null;
    try {
        results.toList();
    } catch (QueryInterruptedException e) {
        exception = e;
    }
    Assert.assertNotNull(exception);
    EasyMock.verify(httpClient);
}
Also used : Request(org.apache.druid.java.util.http.client.Request) StatusResponseHolder(org.apache.druid.java.util.http.client.response.StatusResponseHolder) TimeBoundaryQuery(org.apache.druid.query.timeboundary.TimeBoundaryQuery) Sequence(org.apache.druid.java.util.common.guava.Sequence) HttpResponseHandler(org.apache.druid.java.util.http.client.response.HttpResponseHandler) QueryInterruptedException(org.apache.druid.query.QueryInterruptedException) Test(org.junit.Test)
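The try/catch block that collects the QueryInterruptedException can be pulled out into a reusable assertion, sketched below. The helper name is invented; only the consume-and-expect-interruption behaviour comes from testCancel().

import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.query.QueryInterruptedException;
import org.junit.Assert;

final class SequenceAssertions {

    private SequenceAssertions() {}

    // Consume the sequence and assert that it fails with QueryInterruptedException,
    // which is what testCancel() expects after the underlying future is cancelled.
    static QueryInterruptedException assertQueryInterrupted(Sequence<?> results) {
        try {
            results.toList();
        } catch (QueryInterruptedException e) {
            return e;
        }
        Assert.fail("expected QueryInterruptedException");
        return null; // unreachable; Assert.fail() always throws
    }
}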

Example 3 with HttpResponseHandler

Use of org.apache.druid.java.util.http.client.response.HttpResponseHandler in project druid by druid-io.

From class DirectDruidClientTest, method testQueryTimeoutBeforeFuture.

@Test
public void testQueryTimeoutBeforeFuture() throws IOException, InterruptedException {
    SettableFuture<Object> timeoutFuture = SettableFuture.create();
    Capture<Request> capturedRequest = EasyMock.newCapture();
    final String queryId = "timeout-before-future";
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class))).andReturn(timeoutFuture).anyTimes();
    EasyMock.replay(httpClient);
    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
    query = query.withOverriddenContext(ImmutableMap.of(DirectDruidClient.QUERY_FAIL_TIME, System.currentTimeMillis() + 250, "queryId", queryId));
    Sequence results = client.run(QueryPlus.wrap(query));
    // incomplete result set
    PipedInputStream in = new PipedInputStream();
    final PipedOutputStream out = new PipedOutputStream(in);
    timeoutFuture.set(in);
    QueryTimeoutException actualException = null;
    try {
        out.write(StringUtils.toUtf8("[{\"timestamp\":\"2014-01-01T01:02:03Z\"}"));
        Thread.sleep(250);
        out.write(StringUtils.toUtf8("]"));
        out.close();
        results.toList();
    } catch (QueryTimeoutException e) {
        actualException = e;
    }
    Assert.assertNotNull(actualException);
    Assert.assertEquals("Query timeout", actualException.getErrorCode());
    Assert.assertEquals("url[http://localhost:8080/druid/v2/] timed out", actualException.getMessage());
    Assert.assertEquals(hostName, actualException.getHost());
    EasyMock.verify(httpClient);
}
Also used : QueryTimeoutException(org.apache.druid.query.QueryTimeoutException) Request(org.apache.druid.java.util.http.client.Request) PipedOutputStream(java.io.PipedOutputStream) TimeBoundaryQuery(org.apache.druid.query.timeboundary.TimeBoundaryQuery) Sequence(org.apache.druid.java.util.common.guava.Sequence) PipedInputStream(java.io.PipedInputStream) HttpResponseHandler(org.apache.druid.java.util.http.client.response.HttpResponseHandler) Test(org.junit.Test)
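The piped-stream trick used above, returning an InputStream whose JSON body is still being written so that the client's read overlaps the query deadline, can be isolated as follows. This is a sketch: the class and method names are invented, and only the write/sleep/write sequence is taken from testQueryTimeoutBeforeFuture().

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import org.apache.druid.java.util.common.StringUtils;

final class SlowResponses {

    private SlowResponses() {}

    // Returns an InputStream whose content arrives in two parts with a pause in between,
    // mimicking the incomplete result set written in the test above. A background thread
    // does the writing so the returned stream can be handed to the code under test immediately.
    static PipedInputStream partiallyWrittenJson(long pauseMillis) throws IOException {
        PipedInputStream in = new PipedInputStream();
        PipedOutputStream out = new PipedOutputStream(in);
        Thread writer = new Thread(() -> {
            try {
                out.write(StringUtils.toUtf8("[{\"timestamp\":\"2014-01-01T01:02:03Z\"}"));
                Thread.sleep(pauseMillis);
                out.write(StringUtils.toUtf8("]"));
                out.close();
            } catch (IOException | InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        });
        writer.setDaemon(true);
        writer.start();
        return in;
    }
}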

Example 4 with HttpResponseHandler

Use of org.apache.druid.java.util.http.client.response.HttpResponseHandler in project druid by druid-io.

From class DirectDruidClient, method run.

@Override
public Sequence<T> run(final QueryPlus<T> queryPlus, final ResponseContext context) {
    final Query<T> query = queryPlus.getQuery();
    QueryToolChest<T, Query<T>> toolChest = warehouse.getToolChest(query);
    boolean isBySegment = QueryContexts.isBySegment(query);
    final JavaType queryResultType = isBySegment ? toolChest.getBySegmentResultType() : toolChest.getBaseResultType();
    final ListenableFuture<InputStream> future;
    final String url = scheme + "://" + host + "/druid/v2/";
    final String cancelUrl = url + query.getId();
    try {
        log.debug("Querying queryId[%s] url[%s]", query.getId(), url);
        final long requestStartTimeNs = System.nanoTime();
        final long timeoutAt = query.getContextValue(QUERY_FAIL_TIME);
        final long maxScatterGatherBytes = QueryContexts.getMaxScatterGatherBytes(query);
        final AtomicLong totalBytesGathered = context.getTotalBytes();
        final long maxQueuedBytes = QueryContexts.getMaxQueuedBytes(query, 0);
        final boolean usingBackpressure = maxQueuedBytes > 0;
        final HttpResponseHandler<InputStream, InputStream> responseHandler = new HttpResponseHandler<InputStream, InputStream>() {

            private final AtomicLong totalByteCount = new AtomicLong(0);

            private final AtomicLong queuedByteCount = new AtomicLong(0);

            private final AtomicLong channelSuspendedTime = new AtomicLong(0);

            private final BlockingQueue<InputStreamHolder> queue = new LinkedBlockingQueue<>();

            private final AtomicBoolean done = new AtomicBoolean(false);

            private final AtomicReference<String> fail = new AtomicReference<>();

            private final AtomicReference<TrafficCop> trafficCopRef = new AtomicReference<>();

            private QueryMetrics<? super Query<T>> queryMetrics;

            private long responseStartTimeNs;

            private QueryMetrics<? super Query<T>> acquireResponseMetrics() {
                if (queryMetrics == null) {
                    queryMetrics = toolChest.makeMetrics(query);
                    queryMetrics.server(host);
                }
                return queryMetrics;
            }

            /**
             * Queue a buffer. Returns true if we should keep reading, false otherwise.
             */
            private boolean enqueue(ChannelBuffer buffer, long chunkNum) throws InterruptedException {
                // Increment queuedByteCount before queueing the object, so queuedByteCount is at least as high as
                // the actual number of queued bytes at any particular time.
                final InputStreamHolder holder = InputStreamHolder.fromChannelBuffer(buffer, chunkNum);
                final long currentQueuedByteCount = queuedByteCount.addAndGet(holder.getLength());
                queue.put(holder);
                // True if we should keep reading.
                return !usingBackpressure || currentQueuedByteCount < maxQueuedBytes;
            }

            private InputStream dequeue() throws InterruptedException {
                final InputStreamHolder holder = queue.poll(checkQueryTimeout(), TimeUnit.MILLISECONDS);
                if (holder == null) {
                    throw new QueryTimeoutException(StringUtils.nonStrictFormat("Query[%s] url[%s] timed out.", query.getId(), url));
                }
                final long currentQueuedByteCount = queuedByteCount.addAndGet(-holder.getLength());
                if (usingBackpressure && currentQueuedByteCount < maxQueuedBytes) {
                    long backPressureTime = Preconditions.checkNotNull(trafficCopRef.get(), "No TrafficCop, how can this be?").resume(holder.getChunkNum());
                    channelSuspendedTime.addAndGet(backPressureTime);
                }
                return holder.getStream();
            }

            @Override
            public ClientResponse<InputStream> handleResponse(HttpResponse response, TrafficCop trafficCop) {
                trafficCopRef.set(trafficCop);
                checkQueryTimeout();
                checkTotalBytesLimit(response.getContent().readableBytes());
                log.debug("Initial response from url[%s] for queryId[%s]", url, query.getId());
                responseStartTimeNs = System.nanoTime();
                acquireResponseMetrics().reportNodeTimeToFirstByte(responseStartTimeNs - requestStartTimeNs).emit(emitter);
                final boolean continueReading;
                try {
                    log.trace("Got a response from [%s] for query ID[%s], subquery ID[%s]", url, query.getId(), query.getSubQueryId());
                    final String responseContext = response.headers().get(QueryResource.HEADER_RESPONSE_CONTEXT);
                    context.addRemainingResponse(query.getMostSpecificId(), VAL_TO_REDUCE_REMAINING_RESPONSES);
                    // context may be null in case of error or query timeout
                    if (responseContext != null) {
                        context.merge(ResponseContext.deserialize(responseContext, objectMapper));
                    }
                    continueReading = enqueue(response.getContent(), 0L);
                } catch (final IOException e) {
                    log.error(e, "Error parsing response context from url [%s]", url);
                    return ClientResponse.finished(new InputStream() {

                        @Override
                        public int read() throws IOException {
                            throw e;
                        }
                    });
                } catch (InterruptedException e) {
                    log.error(e, "Queue appending interrupted");
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
                totalByteCount.addAndGet(response.getContent().readableBytes());
                return ClientResponse.finished(new SequenceInputStream(new Enumeration<InputStream>() {

                    @Override
                    public boolean hasMoreElements() {
                        if (fail.get() != null) {
                            throw new RE(fail.get());
                        }
                        checkQueryTimeout();
                        // Then the stream should be spouting good InputStreams.
                        synchronized (done) {
                            return !done.get() || !queue.isEmpty();
                        }
                    }

                    @Override
                    public InputStream nextElement() {
                        if (fail.get() != null) {
                            throw new RE(fail.get());
                        }
                        try {
                            return dequeue();
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                            throw new RuntimeException(e);
                        }
                    }
                }), continueReading);
            }

            @Override
            public ClientResponse<InputStream> handleChunk(ClientResponse<InputStream> clientResponse, HttpChunk chunk, long chunkNum) {
                checkQueryTimeout();
                final ChannelBuffer channelBuffer = chunk.getContent();
                final int bytes = channelBuffer.readableBytes();
                checkTotalBytesLimit(bytes);
                boolean continueReading = true;
                if (bytes > 0) {
                    try {
                        continueReading = enqueue(channelBuffer, chunkNum);
                    } catch (InterruptedException e) {
                        log.error(e, "Unable to put finalizing input stream into Sequence queue for url [%s]", url);
                        Thread.currentThread().interrupt();
                        throw new RuntimeException(e);
                    }
                    totalByteCount.addAndGet(bytes);
                }
                return ClientResponse.finished(clientResponse.getObj(), continueReading);
            }

            @Override
            public ClientResponse<InputStream> done(ClientResponse<InputStream> clientResponse) {
                long stopTimeNs = System.nanoTime();
                long nodeTimeNs = stopTimeNs - requestStartTimeNs;
                final long nodeTimeMs = TimeUnit.NANOSECONDS.toMillis(nodeTimeNs);
                log.debug("Completed queryId[%s] request to url[%s] with %,d bytes returned in %,d millis [%,f b/s].", query.getId(), url, totalByteCount.get(), nodeTimeMs, // Floating math; division by zero will yield Inf, not exception
                totalByteCount.get() / (0.001 * nodeTimeMs));
                QueryMetrics<? super Query<T>> responseMetrics = acquireResponseMetrics();
                responseMetrics.reportNodeTime(nodeTimeNs);
                responseMetrics.reportNodeBytes(totalByteCount.get());
                if (usingBackpressure) {
                    responseMetrics.reportBackPressureTime(channelSuspendedTime.get());
                }
                responseMetrics.emit(emitter);
                synchronized (done) {
                    try {
                        // An empty byte array is put at the end to give the SequenceInputStream.close() as something to close out
                        // after done is set to true, regardless of the rest of the stream's state.
                        queue.put(InputStreamHolder.fromChannelBuffer(ChannelBuffers.EMPTY_BUFFER, Long.MAX_VALUE));
                    } catch (InterruptedException e) {
                        log.error(e, "Unable to put finalizing input stream into Sequence queue for url [%s]", url);
                        Thread.currentThread().interrupt();
                        throw new RuntimeException(e);
                    } finally {
                        done.set(true);
                    }
                }
                return ClientResponse.finished(clientResponse.getObj());
            }

            @Override
            public void exceptionCaught(final ClientResponse<InputStream> clientResponse, final Throwable e) {
                String msg = StringUtils.format("Query[%s] url[%s] failed with exception msg [%s]", query.getId(), url, e.getMessage());
                setupResponseReadFailure(msg, e);
            }

            private void setupResponseReadFailure(String msg, Throwable th) {
                fail.set(msg);
                queue.clear();
                queue.offer(InputStreamHolder.fromStream(new InputStream() {

                    @Override
                    public int read() throws IOException {
                        if (th != null) {
                            throw new IOException(msg, th);
                        } else {
                            throw new IOException(msg);
                        }
                    }
                }, -1, 0));
            }

            // Returns remaining timeout or throws exception if timeout already elapsed.
            private long checkQueryTimeout() {
                long timeLeft = timeoutAt - System.currentTimeMillis();
                if (timeLeft <= 0) {
                    String msg = StringUtils.format("Query[%s] url[%s] timed out.", query.getId(), url);
                    setupResponseReadFailure(msg, null);
                    throw new QueryTimeoutException(msg);
                } else {
                    return timeLeft;
                }
            }

            private void checkTotalBytesLimit(long bytes) {
                if (maxScatterGatherBytes < Long.MAX_VALUE && totalBytesGathered.addAndGet(bytes) > maxScatterGatherBytes) {
                    String msg = StringUtils.format("Query[%s] url[%s] max scatter-gather bytes limit reached.", query.getId(), url);
                    setupResponseReadFailure(msg, null);
                    throw new ResourceLimitExceededException(msg);
                }
            }
        };
        long timeLeft = timeoutAt - System.currentTimeMillis();
        if (timeLeft <= 0) {
            throw new QueryTimeoutException(StringUtils.nonStrictFormat("Query[%s] url[%s] timed out.", query.getId(), url));
        }
        future = httpClient.go(new Request(HttpMethod.POST, new URL(url)).setContent(objectMapper.writeValueAsBytes(QueryContexts.withTimeout(query, timeLeft))).setHeader(HttpHeaders.Names.CONTENT_TYPE, isSmile ? SmileMediaTypes.APPLICATION_JACKSON_SMILE : MediaType.APPLICATION_JSON), responseHandler, Duration.millis(timeLeft));
        queryWatcher.registerQueryFuture(query, future);
        openConnections.getAndIncrement();
        Futures.addCallback(future, new FutureCallback<InputStream>() {

            @Override
            public void onSuccess(InputStream result) {
                openConnections.getAndDecrement();
            }

            @Override
            public void onFailure(Throwable t) {
                openConnections.getAndDecrement();
                if (future.isCancelled()) {
                    cancelQuery(query, cancelUrl);
                }
            }
        }, // The callback is non-blocking and quick, so it's OK to schedule it using directExecutor()
        Execs.directExecutor());
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    Sequence<T> retVal = new BaseSequence<>(new BaseSequence.IteratorMaker<T, JsonParserIterator<T>>() {

        @Override
        public JsonParserIterator<T> make() {
            return new JsonParserIterator<T>(queryResultType, future, url, query, host, toolChest.decorateObjectMapper(objectMapper, query));
        }

        @Override
        public void cleanup(JsonParserIterator<T> iterFromMake) {
            CloseableUtils.closeAndWrapExceptions(iterFromMake);
        }
    });
    // avoid the cost of de-serializing and then re-serializing again when adding to cache
    if (!isBySegment) {
        retVal = Sequences.map(retVal, toolChest.makePreComputeManipulatorFn(query, MetricManipulatorFns.deserializing()));
    }
    return retVal;
}
Also used : ClientResponse(org.apache.druid.java.util.http.client.response.ClientResponse) Query(org.apache.druid.query.Query) URL(java.net.URL) ChannelBuffer(org.jboss.netty.buffer.ChannelBuffer) QueryTimeoutException(org.apache.druid.query.QueryTimeoutException) BlockingQueue(java.util.concurrent.BlockingQueue) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) Enumeration(java.util.Enumeration) SequenceInputStream(java.io.SequenceInputStream) InputStream(java.io.InputStream) Request(org.apache.druid.java.util.http.client.Request) HttpResponse(org.jboss.netty.handler.codec.http.HttpResponse) AtomicReference(java.util.concurrent.atomic.AtomicReference) IOException(java.io.IOException) BaseSequence(org.apache.druid.java.util.common.guava.BaseSequence) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) JavaType(com.fasterxml.jackson.databind.JavaType) AtomicLong(java.util.concurrent.atomic.AtomicLong) RE(org.apache.druid.java.util.common.RE) ResourceLimitExceededException(org.apache.druid.query.ResourceLimitExceededException) SequenceInputStream(java.io.SequenceInputStream) QueryMetrics(org.apache.druid.query.QueryMetrics) HttpResponseHandler(org.apache.druid.java.util.http.client.response.HttpResponseHandler) HttpChunk(org.jboss.netty.handler.codec.http.HttpChunk)
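The handler in run() streams chunks into a queue so results can be consumed while the response is still arriving. For contrast, a minimal non-streaming HttpResponseHandler that simply accumulates the body into a String is sketched below. It is illustrative only: the class is invented, and ClientResponse.unfinished is assumed to be available alongside the ClientResponse.finished calls seen above (verify against your Druid version). Druid already ships handlers of this kind, such as the one behind the StatusResponseHolder used in other examples, so this exists only to show the handleResponse, handleChunk, done callback flow in its simplest form.

import java.nio.charset.StandardCharsets;
import org.apache.druid.java.util.http.client.response.ClientResponse;
import org.apache.druid.java.util.http.client.response.HttpResponseHandler;
import org.jboss.netty.handler.codec.http.HttpChunk;
import org.jboss.netty.handler.codec.http.HttpResponse;

class StringCollectingResponseHandler implements HttpResponseHandler<StringBuilder, String> {

    @Override
    public ClientResponse<StringBuilder> handleResponse(HttpResponse response, TrafficCop trafficCop) {
        // The first piece of the body arrives together with the response headers.
        StringBuilder body = new StringBuilder(response.getContent().toString(StandardCharsets.UTF_8));
        return ClientResponse.unfinished(body);
    }

    @Override
    public ClientResponse<StringBuilder> handleChunk(ClientResponse<StringBuilder> clientResponse, HttpChunk chunk, long chunkNum) {
        // Append each subsequent chunk to the accumulating intermediate object.
        clientResponse.getObj().append(chunk.getContent().toString(StandardCharsets.UTF_8));
        return ClientResponse.unfinished(clientResponse.getObj());
    }

    @Override
    public ClientResponse<String> done(ClientResponse<StringBuilder> clientResponse) {
        // Convert the intermediate StringBuilder into the final String result.
        return ClientResponse.finished(clientResponse.getObj().toString());
    }

    @Override
    public void exceptionCaught(ClientResponse<StringBuilder> clientResponse, Throwable e) {
        // Nothing is buffered outside clientResponse, so there is nothing to clean up here;
        // a production handler would record the failure for the caller.
    }
}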

Example 5 with HttpResponseHandler

Use of org.apache.druid.java.util.http.client.response.HttpResponseHandler in project druid by druid-io.

From class WorkerTaskRunnerQueryAdpaterTest, method getHttpClientRequestCapture.

private Capture<Request> getHttpClientRequestCapture(HttpResponseStatus httpStatus, String responseContent) {
    SettableFuture<StatusResponseHolder> futureResult = SettableFuture.create();
    futureResult.set(new StatusResponseHolder(httpStatus, new StringBuilder(responseContent)));
    Capture<Request> capturedRequest = EasyMock.newCapture();
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject())).andReturn(futureResult).once();
    return capturedRequest;
}
Also used : Request(org.apache.druid.java.util.http.client.Request) StatusResponseHolder(org.apache.druid.java.util.http.client.response.StatusResponseHolder) HttpResponseHandler(org.apache.druid.java.util.http.client.response.HttpResponseHandler)
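A hypothetical caller of this helper follows the same capture-then-assert pattern as Example 1. The sketch below assumes it sits inside the same test class (so the httpClient mock and the helper are in scope); the response body, method name, and assertions are placeholders, not lines from WorkerTaskRunnerQueryAdpaterTest.

@Test
public void exampleUsageOfRequestCapture() {
    // Placeholder body and assertions, shown only to illustrate how the captured Request
    // is inspected after the code under test has issued its HTTP call through the mock.
    Capture<Request> capturedRequest = getHttpClientRequestCapture(HttpResponseStatus.OK, "{}");
    EasyMock.replay(httpClient);
    // ... invoke the code under test here; it sends its request through the mocked httpClient ...
    Assert.assertTrue(capturedRequest.hasCaptured());
    Assert.assertEquals(HttpMethod.POST, capturedRequest.getValue().getMethod());
    EasyMock.verify(httpClient);
}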

Aggregations

Request (org.apache.druid.java.util.http.client.Request): 7 usages
HttpResponseHandler (org.apache.druid.java.util.http.client.response.HttpResponseHandler): 7 usages
Sequence (org.apache.druid.java.util.common.guava.Sequence): 5 usages
TimeBoundaryQuery (org.apache.druid.query.timeboundary.TimeBoundaryQuery): 5 usages
Test (org.junit.Test): 5 usages
QueryTimeoutException (org.apache.druid.query.QueryTimeoutException): 3 usages
ByteArrayInputStream (java.io.ByteArrayInputStream): 2 usages
InputStream (java.io.InputStream): 2 usages
PipedInputStream (java.io.PipedInputStream): 2 usages
URL (java.net.URL): 2 usages
StatusResponseHolder (org.apache.druid.java.util.http.client.response.StatusResponseHolder): 2 usages
QueryInterruptedException (org.apache.druid.query.QueryInterruptedException): 2 usages
JavaType (com.fasterxml.jackson.databind.JavaType): 1 usage
SettableFuture (com.google.common.util.concurrent.SettableFuture): 1 usage
IOException (java.io.IOException): 1 usage
PipedOutputStream (java.io.PipedOutputStream): 1 usage
SequenceInputStream (java.io.SequenceInputStream): 1 usage
Enumeration (java.util.Enumeration): 1 usage
BlockingQueue (java.util.concurrent.BlockingQueue): 1 usage
LinkedBlockingQueue (java.util.concurrent.LinkedBlockingQueue): 1 usage