use of org.jboss.netty.handler.codec.http.HttpChunk in project databus by linkedin.
the class DummySuccessfulErrorCountingConsumer method captureAndReplySourcesRequest.
private void captureAndReplySourcesRequest(SimpleObjectCaptureHandler objCapture, SimpleTestServerConnection relay, SocketAddress clientAddr, final DatabusSourcesConnection clientConn, final Logger log) {
NettyTestUtils.waitForHttpRequest(objCapture, SOURCES_REQUEST_REGEX, 1000);
objCapture.clear();
// send back the /sources response
HttpResponse sourcesResp = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
sourcesResp.setHeader(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
sourcesResp.setHeader(HttpHeaders.Names.TRANSFER_ENCODING, HttpHeaders.Values.CHUNKED);
HttpChunk body = new DefaultHttpChunk(ChannelBuffers.wrappedBuffer(("[{\"id\":1,\"name\":\"" + SOURCE1_NAME + "\"}]").getBytes(Charset.defaultCharset())));
NettyTestUtils.sendServerResponses(relay, clientAddr, sourcesResp, body);
// make sure the client processes the response correctly
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
String idListString = clientConn.getRelayPullThread()._currentState.getSourcesIdListString();
return "1".equals(idListString);
}
}, "client processes /sources response", 100, log);
}
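For reference, the snippet above is the standard Netty 3.x chunked-transfer pattern: a header-only HttpResponse marked Transfer-Encoding: chunked, followed by one or more HttpChunk messages and, finally, the terminating last chunk. A minimal, self-contained sketch of that pattern (hypothetical class and channel, not part of the databus test harness):

import java.nio.charset.StandardCharsets;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.handler.codec.http.DefaultHttpChunk;
import org.jboss.netty.handler.codec.http.DefaultHttpResponse;
import org.jboss.netty.handler.codec.http.HttpChunk;
import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.jboss.netty.handler.codec.http.HttpResponse;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.jboss.netty.handler.codec.http.HttpVersion;

public final class ChunkedResponseSketch {
    /** Writes a chunked HTTP/1.1 response whose body is a single JSON chunk. */
    public static void writeChunked(Channel channel, String jsonBody) {
        // Header-only response; the body follows as separate HttpChunk messages.
        HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
        response.setHeader(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
        response.setHeader(HttpHeaders.Names.TRANSFER_ENCODING, HttpHeaders.Values.CHUNKED);
        channel.write(response);
        // One data chunk carrying the payload, then the zero-length terminating chunk.
        HttpChunk body = new DefaultHttpChunk(ChannelBuffers.wrappedBuffer(jsonBody.getBytes(StandardCharsets.UTF_8)));
        channel.write(body);
        channel.write(HttpChunk.LAST_CHUNK);
    }
}

In the test above, NettyTestUtils.sendServerResponses appears to perform the equivalent writes on the captured server-side channel for the given client address.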
use of org.jboss.netty.handler.codec.http.HttpChunk in project databus by linkedin.
the class DummySuccessfulErrorCountingConsumer method testInStreamTimeOut3.
/**
* Same as above, but the server doesn't send any data, and WriteComplete arrives between WriteTimeout
* and the channel close
* @throws Exception
*/
@Test
public void testInStreamTimeOut3() throws Exception {
final Logger log = Logger.getLogger("TestDatabusHttpClient.testInStreamTimeout3");
Level debugLevel = Level.DEBUG;
log.setLevel(debugLevel);
// Logger.getRootLogger().setLevel(Level.DEBUG);
MockServerChannelHandler.LOG.setLevel(debugLevel);
final int eventsNum = 20;
DbusEventInfo[] eventInfos = createSampleSchema1Events(eventsNum);
// simulate relay buffers
DbusEventBuffer relayBuffer = new DbusEventBuffer(_bufCfg);
relayBuffer.start(0);
writeEventsToBuffer(relayBuffer, eventInfos, 4);
// prepare stream response
Checkpoint cp = Checkpoint.createFlexibleCheckpoint();
final DbusEventsStatisticsCollector stats = new DbusEventsStatisticsCollector(1, "test1", true, false, null);
// create a ChannelBuffer and fill it with events from relayBuffer
ChannelBuffer streamResPrefix = NettyTestUtils.streamToChannelBuffer(relayBuffer, cp, 20000, stats);
// create client
_stdClientCfgBuilder.getContainer().setReadTimeoutMs(DEFAULT_READ_TIMEOUT_MS);
final DatabusHttpClientImpl client = new DatabusHttpClientImpl(_stdClientCfgBuilder.build());
final TestConsumer consumer = new TestConsumer();
client.registerDatabusStreamListener(consumer, null, SOURCE1_NAME);
// connect to a relay created in SetupClass (one out of three)
client.start();
// wait until a connection is made
try {
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return client._relayConnections.size() == 1;
}
}, "sources connection present", 100, log);
// get the connection
final DatabusSourcesConnection clientConn = client._relayConnections.get(0);
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return null != clientConn.getRelayPullThread().getLastOpenConnection();
}
}, "relay connection present", 100, log);
// figure out connection details
final NettyHttpDatabusRelayConnection relayConn = (NettyHttpDatabusRelayConnection) clientConn.getRelayPullThread().getLastOpenConnection();
final NettyHttpDatabusRelayConnectionInspector relayConnInsp = new NettyHttpDatabusRelayConnectionInspector(relayConn);
relayConnInsp.getHandler().getLog().setLevel(debugLevel);
// wait until client is connected
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return null != relayConnInsp.getChannel() && relayConnInsp.getChannel().isConnected();
}
}, "client connected", 200, log);
// figure out which port we got connected to on the server side
Channel clientChannel = relayConnInsp.getChannel();
InetSocketAddress relayAddr = (InetSocketAddress) clientChannel.getRemoteAddress();
int relayPort = relayAddr.getPort();
log.info("relay selected: " + relayPort);
// add our handler, which will generate the timeout, to the client's pipeline
MockServerChannelHandler mock = new MockServerChannelHandler();
clientChannel.getPipeline().addBefore("inflater", "mockServer", mock);
// verify it is there
Map<String, ChannelHandler> map = clientChannel.getPipeline().toMap();
boolean handlerFound = false;
for (Map.Entry<String, ChannelHandler> m : map.entrySet()) {
if (LOG.isDebugEnabled())
LOG.debug(m.getKey() + "=>" + m.getValue());
if (m.getKey().equals("mockServer"))
handlerFound = true;
}
Assert.assertTrue(handlerFound, "handler added");
SimpleTestServerConnection relay = null;
// Find the relay's object
for (int i = 0; i < RELAY_PORT.length; ++i) {
if (relayPort == RELAY_PORT[i])
relay = _dummyServer[i];
}
assertTrue(null != relay);
SocketAddress clientAddr = clientChannel.getLocalAddress();
final SocketAddress testClientAddr = clientAddr;
final SimpleTestServerConnection testRelay = relay;
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return null != testRelay.getChildChannel(testClientAddr);
}
}, "relay detects new connection", 1000, log);
Channel serverChannel = relay.getChildChannel(clientAddr);
assertTrue(null != serverChannel);
ChannelPipeline serverPipeline = serverChannel.getPipeline();
SimpleObjectCaptureHandler objCapture = (SimpleObjectCaptureHandler) serverPipeline.get("3");
// process the /sources request
NettyTestUtils.waitForHttpRequest(objCapture, SOURCES_REQUEST_REGEX, 1000);
objCapture.clear();
// send back the /sources response
HttpResponse httpResp = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
httpResp.setHeader(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
httpResp.setHeader(HttpHeaders.Names.TRANSFER_ENCODING, HttpHeaders.Values.CHUNKED);
HttpChunk body = new DefaultHttpChunk(ChannelBuffers.wrappedBuffer(("[{\"id\":1,\"name\":\"" + SOURCE1_NAME + "\"}]").getBytes(Charset.defaultCharset())));
NettyTestUtils.sendServerResponses(relay, clientAddr, httpResp, body);
// make sure the client processes the response correctly
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
String idListString = clientConn.getRelayPullThread()._currentState.getSourcesIdListString();
return "1".equals(idListString);
}
}, "client processes /sources response", 100, log);
log.debug("process the /register request");
NettyTestUtils.waitForHttpRequest(objCapture, "/register.*", 1000);
objCapture.clear();
String msgHistory = clientConn.getRelayPullThread().getMessageHistoryLog();
log.debug("MSG HISTORY before: " + msgHistory);
// make sure our handler will save the 'future' of the next write operation - 'stream'
mock.enableSaveTheFuture(true);
// delay write-complete; insert a timeout exception before it
mock.delayWriteComplete(true);
log.debug("send back the /register response");
RegisterResponseEntry entry = new RegisterResponseEntry(1L, (short) 1, SOURCE1_SCHEMA_STR);
String responseStr = NettyTestUtils.generateRegisterResponse(entry);
body = new DefaultHttpChunk(ChannelBuffers.wrappedBuffer(responseStr.getBytes(Charset.defaultCharset())));
NettyTestUtils.sendServerResponses(relay, clientAddr, httpResp, body);
log.debug("make sure the client processes the response /register correctly");
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
DispatcherState dispState = clientConn.getRelayDispatcher().getDispatcherState();
return null != dispState.getSchemaMap() && 1 == dispState.getSchemaMap().size();
}
}, "client processes /register response", 100, log);
LOG.info("*************>Message state after write complete is " + relayConnInsp.getResponseHandlerMessageState().toString());
msgHistory = clientConn.getRelayPullThread().getMessageHistoryLog();
log.debug("MSG HISTORY after: " + msgHistory);
// should be one error only
Assert.assertEquals(countOccurencesOfWord(msgHistory, "_ERROR"), 1);
// ////////////////////////////////////////////////////////////////////////////////////////////////
/*
TestUtil.assertWithBackoff(new ConditionCheck()
{
@Override
public boolean check()
{
return client._relayConnections.size() == 1;
}
}, "sources connection present", 100, log);
//get the connection
final DatabusSourcesConnection clientConn1 = client._relayConnections.get(0);
TestUtil.assertWithBackoff(new ConditionCheck()
{
@Override
public boolean check()
{
return null != clientConn1.getRelayPullThread().getLastOpenConnection();
}
}, "relay connection1 present", 100, log);
// figure out connection details
final NettyHttpDatabusRelayConnection relayConn1 =
(NettyHttpDatabusRelayConnection)clientConn1.getRelayPullThread().getLastOpenConnection();
final NettyHttpDatabusRelayConnectionInspector relayConnInsp1 =
new NettyHttpDatabusRelayConnectionInspector(relayConn1);
relayConnInsp1.getHandler().getLog().setLevel(debugLevel);
// wait until client is connected
TestUtil.assertWithBackoff(new ConditionCheck()
{
@Override
public boolean check()
{
return null != relayConnInsp1.getChannel() && relayConnInsp1.getChannel().isConnected();
}
}, "client connected", 200, log);
//figure out which port we got connected to on the server side
Channel clientChannel1 = relayConnInsp1.getChannel();
InetSocketAddress relayAddr1 = (InetSocketAddress)clientChannel1.getRemoteAddress();
relayPort = relayAddr1.getPort();
log.info("relay selected: " + relayPort);
// do it again - no errors
// process the /sources request
captureAndReplySourcesRequest(objCapture, relay, clientAddr, clientConn1, log);
captureAndReplyRegisterRequest(objCapture, relay, clientAddr, clientConn1, log);
log.debug("process /stream call and return a response");
captureAndReplyStreamRequest(objCapture, relay, clientAddr, clientConn1, streamResPrefix, log);
LOG.info("*************>Message state after write complete is " + relayConnInsp1.getResponseHandlerMessageState().toString());
msgHistory = clientConn1.getRelayPullThread().getMessageHistoryLog();
log.debug("MSG HISTORY after: " + msgHistory);
Assert.assertEquals(countOccurencesOfWord(msgHistory, "_ERROR"), 1); //should be one error only
// make sure close channel event and future failure are propagated
TestUtil.sleep(3000);
// get the history and validate it
String expectedHistory = "[START, PICK_SERVER, REQUEST_SOURCES, SOURCES_RESPONSE_SUCCESS, REQUEST_REGISTER, REGISTER_RESPONSE_SUCCESS, REQUEST_STREAM, STREAM_REQUEST_SUCCESS, STREAM_RESPONSE_DONE, REQUEST_STREAM, STREAM_REQUEST_ERROR, PICK_SERVER, REQUEST_SOURCES]".trim();
msgHistory = clientConn.getRelayPullThread().getMessageHistoryLog().trim();
LOG.info("MSG HISTORY: " + msgHistory);
Assert.assertEquals(msgHistory, expectedHistory, "Puller thread message history doesn't match");
*/
} finally {
client.shutdown();
}
}
use of org.jboss.netty.handler.codec.http.HttpChunk in project databus by linkedin.
the class DummySuccessfulErrorCountingConsumer method testInStreamTimeOut.
/**
* Tests the client's handling of a timeout that occurs while it is processing a /stream request.
* The script:
* set up the client and connect to one of the servers
* wait for the /sources and /register calls and reply to them
* save the 'future' of the write operation for the /stream call, and replace that future downstream with a fake one,
* so the notification of write completion will never come
* make the server send only the header info first
* make the server send data, but intercept the message before it reaches the client; at that moment fire a WriteTimeout
* exception from a separate thread
* Make sure the PullerThread doesn't get two error messages (and as a result try to set up two new connections)
*/
@Test
public void testInStreamTimeOut() throws Exception {
final Logger log = Logger.getLogger("TestDatabusHttpClient.testInStreamTimeout");
// log.setLevel(Level.DEBUG);
final int eventsNum = 20;
DbusEventInfo[] eventInfos = createSampleSchema1Events(eventsNum);
// simulate relay buffers
DbusEventBuffer relayBuffer = new DbusEventBuffer(_bufCfg);
relayBuffer.start(0);
writeEventsToBuffer(relayBuffer, eventInfos, 4);
// prepare stream response
Checkpoint cp = Checkpoint.createFlexibleCheckpoint();
final DbusEventsStatisticsCollector stats = new DbusEventsStatisticsCollector(1, "test1", true, false, null);
// create a ChannelBuffer and fill it with events from relayBuffer
ChannelBuffer streamResPrefix = NettyTestUtils.streamToChannelBuffer(relayBuffer, cp, 20000, stats);
// create client
_stdClientCfgBuilder.getContainer().setReadTimeoutMs(DEFAULT_READ_TIMEOUT_MS);
final DatabusHttpClientImpl client = new DatabusHttpClientImpl(_stdClientCfgBuilder.build());
final TestConsumer consumer = new TestConsumer();
client.registerDatabusStreamListener(consumer, null, SOURCE1_NAME);
// connect to a relay created in SetupClass (one out of three)
client.start();
// wait until a connection is made
try {
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return client._relayConnections.size() == 1;
}
}, "sources connection present", 100, log);
// get the connection
final DatabusSourcesConnection clientConn = client._relayConnections.get(0);
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return null != clientConn.getRelayPullThread().getLastOpenConnection();
}
}, "relay connection present", 100, log);
// figure out connection details
final NettyHttpDatabusRelayConnection relayConn = (NettyHttpDatabusRelayConnection) clientConn.getRelayPullThread().getLastOpenConnection();
final NettyHttpDatabusRelayConnectionInspector relayConnInsp = new NettyHttpDatabusRelayConnectionInspector(relayConn);
// wait until client is connected
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return null != relayConnInsp.getChannel() && relayConnInsp.getChannel().isConnected();
}
}, "client connected", 200, log);
// figure out which port we got connected to on the server side
Channel clientChannel = relayConnInsp.getChannel();
InetSocketAddress relayAddr = (InetSocketAddress) clientChannel.getRemoteAddress();
int relayPort = relayAddr.getPort();
log.info("relay selected: " + relayPort);
// add our handler, which will generate the timeout, to the client's pipeline
MockServerChannelHandler mock = new MockServerChannelHandler();
clientChannel.getPipeline().addBefore("inflater", "mockServer", mock);
Map<String, ChannelHandler> map = clientChannel.getPipeline().toMap();
boolean handlerFound = false;
for (Map.Entry<String, ChannelHandler> m : map.entrySet()) {
if (LOG.isDebugEnabled())
LOG.debug(m.getKey() + "=>" + m.getValue());
if (m.getKey().equals("mockServer"))
handlerFound = true;
}
Assert.assertTrue(handlerFound, "handler added");
SimpleTestServerConnection relay = null;
// Find the relay's object
for (int i = 0; i < RELAY_PORT.length; ++i) {
if (relayPort == RELAY_PORT[i])
relay = _dummyServer[i];
}
assertTrue(null != relay);
SocketAddress clientAddr = clientChannel.getLocalAddress();
final SocketAddress testClientAddr = clientAddr;
final SimpleTestServerConnection testRelay = relay;
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return null != testRelay.getChildChannel(testClientAddr);
}
}, "relay detects new connection", 1000, log);
Channel serverChannel = relay.getChildChannel(clientAddr);
assertTrue(null != serverChannel);
ChannelPipeline serverPipeline = serverChannel.getPipeline();
SimpleObjectCaptureHandler objCapture = (SimpleObjectCaptureHandler) serverPipeline.get("3");
// process the /sources request
NettyTestUtils.waitForHttpRequest(objCapture, SOURCES_REQUEST_REGEX, 1000);
objCapture.clear();
// send back the /sources response
HttpResponse sourcesResp = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
sourcesResp.setHeader(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
sourcesResp.setHeader(HttpHeaders.Names.TRANSFER_ENCODING, HttpHeaders.Values.CHUNKED);
HttpChunk body = new DefaultHttpChunk(ChannelBuffers.wrappedBuffer(("[{\"id\":1,\"name\":\"" + SOURCE1_NAME + "\"}]").getBytes(Charset.defaultCharset())));
NettyTestUtils.sendServerResponses(relay, clientAddr, sourcesResp, body);
// make sure the client processes the response correctly
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
String idListString = clientConn.getRelayPullThread()._currentState.getSourcesIdListString();
return "1".equals(idListString);
}
}, "client processes /sources response", 100, log);
log.debug("process the /register request");
NettyTestUtils.waitForHttpRequest(objCapture, "/register.*", 1000);
objCapture.clear();
String msgHistory = clientConn.getRelayPullThread().getMessageHistoryLog();
log.debug("MSG HISTORY before: " + msgHistory);
// make sure our handler will save the 'future' of the next write operation - 'stream'
mock.enableSaveTheFuture(true);
log.debug("send back the /register response");
RegisterResponseEntry entry = new RegisterResponseEntry(1L, (short) 1, SOURCE1_SCHEMA_STR);
String responseStr = NettyTestUtils.generateRegisterResponse(entry);
body = new DefaultHttpChunk(ChannelBuffers.wrappedBuffer(responseStr.getBytes(Charset.defaultCharset())));
NettyTestUtils.sendServerResponses(relay, clientAddr, sourcesResp, body);
log.debug("make sure the client processes the response /register correctly");
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
DispatcherState dispState = clientConn.getRelayDispatcher().getDispatcherState();
return null != dispState.getSchemaMap() && 1 == dispState.getSchemaMap().size();
}
}, "client processes /register response", 100, log);
log.debug("process /stream call and return a response");
NettyTestUtils.waitForHttpRequest(objCapture, "/stream.*", 1000);
objCapture.clear();
// disable save future as it should be saved by now
mock.enableSaveTheFuture(false);
final HttpResponse streamResp = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
streamResp.setHeader(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
streamResp.setHeader(HttpHeaders.Names.TRANSFER_ENCODING, HttpHeaders.Values.CHUNKED);
// timeout for local netty calls (in test only)
int timeout = 1000;
// send header info
relay.sendServerResponse(clientAddr, sourcesResp, timeout);
TestUtil.sleep(1000);
// when write data arrives from the server - we want to simulate/throw WriteTimeoutException
mock.enableThrowWTOException(true);
// send data
relay.sendServerResponse(clientAddr, new DefaultHttpChunk(streamResPrefix), timeout);
relay.sendServerResponse(clientAddr, HttpChunk.LAST_CHUNK, timeout);
// make sure close channel event and future failure are propagated
TestUtil.sleep(3000);
// get the history and validate it
String expectedHistory = "[START, PICK_SERVER, REQUEST_SOURCES, SOURCES_RESPONSE_SUCCESS, REQUEST_REGISTER, REGISTER_RESPONSE_SUCCESS, REQUEST_STREAM, STREAM_REQUEST_SUCCESS, STREAM_RESPONSE_DONE, REQUEST_STREAM, STREAM_REQUEST_ERROR, PICK_SERVER, REQUEST_SOURCES]".trim();
msgHistory = clientConn.getRelayPullThread().getMessageHistoryLog().trim();
LOG.info("MSG HISTORY: " + msgHistory);
Assert.assertEquals(msgHistory, expectedHistory, "Puller thread message history doesn't match");
} finally {
client.shutdown();
}
}
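MockServerChannelHandler itself is not reproduced on this page. As a rough sketch of the idea only (hypothetical class, assuming Netty 3.x; not the project's actual implementation), a handler placed in the client pipeline that fires a simulated WriteTimeoutException when server data passes through could look like this:

import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.handler.codec.http.HttpChunk;
import org.jboss.netty.handler.timeout.WriteTimeoutException;

/** Hypothetical sketch: fires a simulated WriteTimeoutException when an incoming HttpChunk passes through. */
public class WriteTimeoutSimulatingHandler extends SimpleChannelUpstreamHandler {
    private volatile boolean throwWriteTimeout = false;

    public void enableThrowWTOException(boolean enable) {
        this.throwWriteTimeout = enable;
    }

    @Override
    public void messageReceived(final ChannelHandlerContext ctx, MessageEvent e) throws Exception {
        if (throwWriteTimeout && e.getMessage() instanceof HttpChunk) {
            // Fire the timeout from a separate thread before the chunk reaches the client code,
            // mirroring the scenario described in the javadoc of the test below.
            new Thread(new Runnable() {
                @Override
                public void run() {
                    Channels.fireExceptionCaught(ctx.getChannel(), new WriteTimeoutException());
                }
            }).start();
        }
        super.messageReceived(ctx, e);
    }
}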
use of org.jboss.netty.handler.codec.http.HttpChunk in project druid by druid-io.
the class DirectDruidClient method run.
@Override
public Sequence<T> run(final QueryPlus<T> queryPlus, final ResponseContext context) {
final Query<T> query = queryPlus.getQuery();
QueryToolChest<T, Query<T>> toolChest = warehouse.getToolChest(query);
boolean isBySegment = QueryContexts.isBySegment(query);
final JavaType queryResultType = isBySegment ? toolChest.getBySegmentResultType() : toolChest.getBaseResultType();
final ListenableFuture<InputStream> future;
final String url = scheme + "://" + host + "/druid/v2/";
final String cancelUrl = url + query.getId();
try {
log.debug("Querying queryId[%s] url[%s]", query.getId(), url);
final long requestStartTimeNs = System.nanoTime();
final long timeoutAt = query.getContextValue(QUERY_FAIL_TIME);
final long maxScatterGatherBytes = QueryContexts.getMaxScatterGatherBytes(query);
final AtomicLong totalBytesGathered = context.getTotalBytes();
final long maxQueuedBytes = QueryContexts.getMaxQueuedBytes(query, 0);
final boolean usingBackpressure = maxQueuedBytes > 0;
final HttpResponseHandler<InputStream, InputStream> responseHandler = new HttpResponseHandler<InputStream, InputStream>() {
private final AtomicLong totalByteCount = new AtomicLong(0);
private final AtomicLong queuedByteCount = new AtomicLong(0);
private final AtomicLong channelSuspendedTime = new AtomicLong(0);
private final BlockingQueue<InputStreamHolder> queue = new LinkedBlockingQueue<>();
private final AtomicBoolean done = new AtomicBoolean(false);
private final AtomicReference<String> fail = new AtomicReference<>();
private final AtomicReference<TrafficCop> trafficCopRef = new AtomicReference<>();
private QueryMetrics<? super Query<T>> queryMetrics;
private long responseStartTimeNs;
private QueryMetrics<? super Query<T>> acquireResponseMetrics() {
if (queryMetrics == null) {
queryMetrics = toolChest.makeMetrics(query);
queryMetrics.server(host);
}
return queryMetrics;
}
/**
* Queue a buffer. Returns true if we should keep reading, false otherwise.
*/
private boolean enqueue(ChannelBuffer buffer, long chunkNum) throws InterruptedException {
// Increment queuedByteCount before queueing the object, so queuedByteCount is at least as high as
// the actual number of queued bytes at any particular time.
final InputStreamHolder holder = InputStreamHolder.fromChannelBuffer(buffer, chunkNum);
final long currentQueuedByteCount = queuedByteCount.addAndGet(holder.getLength());
queue.put(holder);
// True if we should keep reading.
return !usingBackpressure || currentQueuedByteCount < maxQueuedBytes;
}
private InputStream dequeue() throws InterruptedException {
final InputStreamHolder holder = queue.poll(checkQueryTimeout(), TimeUnit.MILLISECONDS);
if (holder == null) {
throw new QueryTimeoutException(StringUtils.nonStrictFormat("Query[%s] url[%s] timed out.", query.getId(), url));
}
final long currentQueuedByteCount = queuedByteCount.addAndGet(-holder.getLength());
if (usingBackpressure && currentQueuedByteCount < maxQueuedBytes) {
long backPressureTime = Preconditions.checkNotNull(trafficCopRef.get(), "No TrafficCop, how can this be?").resume(holder.getChunkNum());
channelSuspendedTime.addAndGet(backPressureTime);
}
return holder.getStream();
}
@Override
public ClientResponse<InputStream> handleResponse(HttpResponse response, TrafficCop trafficCop) {
trafficCopRef.set(trafficCop);
checkQueryTimeout();
checkTotalBytesLimit(response.getContent().readableBytes());
log.debug("Initial response from url[%s] for queryId[%s]", url, query.getId());
responseStartTimeNs = System.nanoTime();
acquireResponseMetrics().reportNodeTimeToFirstByte(responseStartTimeNs - requestStartTimeNs).emit(emitter);
final boolean continueReading;
try {
log.trace("Got a response from [%s] for query ID[%s], subquery ID[%s]", url, query.getId(), query.getSubQueryId());
final String responseContext = response.headers().get(QueryResource.HEADER_RESPONSE_CONTEXT);
context.addRemainingResponse(query.getMostSpecificId(), VAL_TO_REDUCE_REMAINING_RESPONSES);
// context may be null in case of error or query timeout
if (responseContext != null) {
context.merge(ResponseContext.deserialize(responseContext, objectMapper));
}
continueReading = enqueue(response.getContent(), 0L);
} catch (final IOException e) {
log.error(e, "Error parsing response context from url [%s]", url);
return ClientResponse.finished(new InputStream() {
@Override
public int read() throws IOException {
throw e;
}
});
} catch (InterruptedException e) {
log.error(e, "Queue appending interrupted");
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
totalByteCount.addAndGet(response.getContent().readableBytes());
return ClientResponse.finished(new SequenceInputStream(new Enumeration<InputStream>() {
@Override
public boolean hasMoreElements() {
if (fail.get() != null) {
throw new RE(fail.get());
}
checkQueryTimeout();
// Then the stream should be spouting good InputStreams.
synchronized (done) {
return !done.get() || !queue.isEmpty();
}
}
@Override
public InputStream nextElement() {
if (fail.get() != null) {
throw new RE(fail.get());
}
try {
return dequeue();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
}), continueReading);
}
@Override
public ClientResponse<InputStream> handleChunk(ClientResponse<InputStream> clientResponse, HttpChunk chunk, long chunkNum) {
checkQueryTimeout();
final ChannelBuffer channelBuffer = chunk.getContent();
final int bytes = channelBuffer.readableBytes();
checkTotalBytesLimit(bytes);
boolean continueReading = true;
if (bytes > 0) {
try {
continueReading = enqueue(channelBuffer, chunkNum);
} catch (InterruptedException e) {
log.error(e, "Unable to put finalizing input stream into Sequence queue for url [%s]", url);
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
totalByteCount.addAndGet(bytes);
}
return ClientResponse.finished(clientResponse.getObj(), continueReading);
}
@Override
public ClientResponse<InputStream> done(ClientResponse<InputStream> clientResponse) {
long stopTimeNs = System.nanoTime();
long nodeTimeNs = stopTimeNs - requestStartTimeNs;
final long nodeTimeMs = TimeUnit.NANOSECONDS.toMillis(nodeTimeNs);
log.debug("Completed queryId[%s] request to url[%s] with %,d bytes returned in %,d millis [%,f b/s].", query.getId(), url, totalByteCount.get(), nodeTimeMs, // Floating math; division by zero will yield Inf, not exception
totalByteCount.get() / (0.001 * nodeTimeMs));
QueryMetrics<? super Query<T>> responseMetrics = acquireResponseMetrics();
responseMetrics.reportNodeTime(nodeTimeNs);
responseMetrics.reportNodeBytes(totalByteCount.get());
if (usingBackpressure) {
responseMetrics.reportBackPressureTime(channelSuspendedTime.get());
}
responseMetrics.emit(emitter);
synchronized (done) {
try {
// An empty byte array is put at the end to give SequenceInputStream.close() something to close out
// after done is set to true, regardless of the rest of the stream's state.
queue.put(InputStreamHolder.fromChannelBuffer(ChannelBuffers.EMPTY_BUFFER, Long.MAX_VALUE));
} catch (InterruptedException e) {
log.error(e, "Unable to put finalizing input stream into Sequence queue for url [%s]", url);
Thread.currentThread().interrupt();
throw new RuntimeException(e);
} finally {
done.set(true);
}
}
return ClientResponse.finished(clientResponse.getObj());
}
@Override
public void exceptionCaught(final ClientResponse<InputStream> clientResponse, final Throwable e) {
String msg = StringUtils.format("Query[%s] url[%s] failed with exception msg [%s]", query.getId(), url, e.getMessage());
setupResponseReadFailure(msg, e);
}
private void setupResponseReadFailure(String msg, Throwable th) {
fail.set(msg);
queue.clear();
queue.offer(InputStreamHolder.fromStream(new InputStream() {
@Override
public int read() throws IOException {
if (th != null) {
throw new IOException(msg, th);
} else {
throw new IOException(msg);
}
}
}, -1, 0));
}
// Returns remaining timeout or throws exception if timeout already elapsed.
private long checkQueryTimeout() {
long timeLeft = timeoutAt - System.currentTimeMillis();
if (timeLeft <= 0) {
String msg = StringUtils.format("Query[%s] url[%s] timed out.", query.getId(), url);
setupResponseReadFailure(msg, null);
throw new QueryTimeoutException(msg);
} else {
return timeLeft;
}
}
private void checkTotalBytesLimit(long bytes) {
if (maxScatterGatherBytes < Long.MAX_VALUE && totalBytesGathered.addAndGet(bytes) > maxScatterGatherBytes) {
String msg = StringUtils.format("Query[%s] url[%s] max scatter-gather bytes limit reached.", query.getId(), url);
setupResponseReadFailure(msg, null);
throw new ResourceLimitExceededException(msg);
}
}
};
long timeLeft = timeoutAt - System.currentTimeMillis();
if (timeLeft <= 0) {
throw new QueryTimeoutException(StringUtils.nonStrictFormat("Query[%s] url[%s] timed out.", query.getId(), url));
}
future = httpClient.go(new Request(HttpMethod.POST, new URL(url)).setContent(objectMapper.writeValueAsBytes(QueryContexts.withTimeout(query, timeLeft))).setHeader(HttpHeaders.Names.CONTENT_TYPE, isSmile ? SmileMediaTypes.APPLICATION_JACKSON_SMILE : MediaType.APPLICATION_JSON), responseHandler, Duration.millis(timeLeft));
queryWatcher.registerQueryFuture(query, future);
openConnections.getAndIncrement();
Futures.addCallback(future, new FutureCallback<InputStream>() {
@Override
public void onSuccess(InputStream result) {
openConnections.getAndDecrement();
}
@Override
public void onFailure(Throwable t) {
openConnections.getAndDecrement();
if (future.isCancelled()) {
cancelQuery(query, cancelUrl);
}
}
}, // The callback is non-blocking and quick, so it's OK to schedule it using directExecutor()
Execs.directExecutor());
} catch (IOException e) {
throw new RuntimeException(e);
}
Sequence<T> retVal = new BaseSequence<>(new BaseSequence.IteratorMaker<T, JsonParserIterator<T>>() {
@Override
public JsonParserIterator<T> make() {
return new JsonParserIterator<T>(queryResultType, future, url, query, host, toolChest.decorateObjectMapper(objectMapper, query));
}
@Override
public void cleanup(JsonParserIterator<T> iterFromMake) {
CloseableUtils.closeAndWrapExceptions(iterFromMake);
}
});
// avoid the cost of de-serializing and then re-serializing again when adding to cache
if (!isBySegment) {
retVal = Sequences.map(retVal, toolChest.makePreComputeManipulatorFn(query, MetricManipulatorFns.deserializing()));
}
return retVal;
}
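The HttpChunk-specific work in this handler is the queue-and-count bookkeeping: each chunk's ChannelBuffer is wrapped as an InputStream and queued, queued bytes are added on enqueue and subtracted on dequeue, and reading is paused whenever the count exceeds maxQueuedBytes. A stripped-down sketch of just that accounting (hypothetical class, not Druid's HttpResponseHandler interface):

import java.io.InputStream;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicLong;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBufferInputStream;

/** Hypothetical sketch of the queue-and-count backpressure used by the response handler above. */
class ChunkBackpressureQueue {
    /** Pairs a chunk's InputStream with its byte length so dequeue() can decrement the counter. */
    private static final class Holder {
        final InputStream stream;
        final int length;
        Holder(InputStream stream, int length) { this.stream = stream; this.length = length; }
    }

    private final long maxQueuedBytes;
    private final AtomicLong queuedByteCount = new AtomicLong(0);
    private final BlockingQueue<Holder> queue = new LinkedBlockingQueue<>();

    ChunkBackpressureQueue(long maxQueuedBytes) {
        this.maxQueuedBytes = maxQueuedBytes;
    }

    /** Called when a chunk arrives: returns true if the channel should keep reading. */
    boolean enqueue(ChannelBuffer chunkContent) throws InterruptedException {
        int length = chunkContent.readableBytes();
        // Increment before queueing so the counter never under-reports the queued bytes.
        long queued = queuedByteCount.addAndGet(length);
        queue.put(new Holder(new ChannelBufferInputStream(chunkContent), length));
        return queued < maxQueuedBytes;
    }

    /** Called by the consuming side: hands back the next stream and frees its bytes from the count. */
    InputStream dequeue() throws InterruptedException {
        Holder holder = queue.take();
        queuedByteCount.addAndGet(-holder.length);
        return holder.stream;
    }
}

In DirectDruidClient, dequeue() additionally asks the TrafficCop to resume the suspended channel once the queued byte count drops back under the limit, as seen in the snippet above.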
use of org.jboss.netty.handler.codec.http.HttpChunk in project voldemort by voldemort.
the class AbstractRestRequestHandler method messageReceived.
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent messageEvent) throws Exception {
RestRequestValidator requestValidator;
if (!readingChunks) {
// Construct the Request from messageEvent
HttpRequest request = this.request = (HttpRequest) messageEvent.getMessage();
String requestURI = this.request.getUri();
if (logger.isDebugEnabled()) {
logger.debug("Request URI: " + requestURI);
}
if (request.isChunked()) {
readingChunks = true;
} else {
// Instantiate the appropriate error handler
HttpMethod httpMethod = request.getMethod();
if (httpMethod.equals(HttpMethod.GET)) {
if (logger.isDebugEnabled()) {
logger.debug("Received a Http GET request at " + System.currentTimeMillis() + " ms");
}
requestValidator = new RestGetRequestValidator(request, messageEvent);
} else if (httpMethod.equals(HttpMethod.POST)) {
if (logger.isDebugEnabled()) {
logger.debug("Received a Http POST request at " + System.currentTimeMillis() + " ms");
}
requestValidator = new RestPutRequestValidator(request, messageEvent, this.isVectorClockOptional);
} else if (httpMethod.equals(HttpMethod.DELETE)) {
if (logger.isDebugEnabled()) {
logger.debug("Received a Http DELETE request at " + System.currentTimeMillis() + " ms");
}
requestValidator = new RestDeleteRequestValidator(request, messageEvent, this.isVectorClockOptional);
} else {
String errorMessage = "Illegal Http request received at " + System.currentTimeMillis() + " ms";
logger.error(errorMessage);
RestErrorHandler.writeErrorResponse(messageEvent, BAD_REQUEST, errorMessage);
return;
}
registerRequest(requestValidator, ctx, messageEvent);
}
} else {
HttpChunk chunk = (HttpChunk) messageEvent.getMessage();
if (chunk.isLast()) {
readingChunks = false;
}
}
}
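The readingChunks/isLast() bookkeeping above is the manual way of consuming a chunked request. When per-chunk processing is not needed, Netty 3.x also ships HttpChunkAggregator, which reassembles the chunks into a single complete HttpRequest before the handler sees it. A minimal pipeline sketch (hypothetical factory class, standard Netty 3.x codec classes):

import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.handler.codec.http.HttpChunkAggregator;
import org.jboss.netty.handler.codec.http.HttpRequestDecoder;
import org.jboss.netty.handler.codec.http.HttpResponseEncoder;

/** Hypothetical pipeline: HttpChunkAggregator collapses chunked requests into one HttpRequest. */
public class AggregatingPipelineFactory implements ChannelPipelineFactory {
    @Override
    public ChannelPipeline getPipeline() throws Exception {
        ChannelPipeline pipeline = Channels.pipeline();
        pipeline.addLast("decoder", new HttpRequestDecoder());
        // Aggregates HttpChunks into a single request (up to 1 MiB of body here), so a
        // downstream handler never has to track readingChunks or chunk.isLast() itself.
        pipeline.addLast("aggregator", new HttpChunkAggregator(1024 * 1024));
        pipeline.addLast("encoder", new HttpResponseEncoder());
        return pipeline;
    }
}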