Use of com.nike.riposte.server.http.ProxyRouterEndpoint in project riposte by Nike-Inc.
The doChannelRead method of the ProxyRouterEndpointExecutionHandler class:
@Override
public PipelineContinuationBehavior doChannelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
    HttpProcessingState state = ChannelAttributes.getHttpProcessingStateForChannel(ctx).get();
    Endpoint<?> endpoint = state.getEndpointForExecution();

    if (shouldHandleDoChannelReadMessage(msg, endpoint)) {
        ProxyRouterProcessingState proxyRouterState = getOrCreateProxyRouterProcessingState(ctx);
        ProxyRouterEndpoint endpointProxyRouter = ((ProxyRouterEndpoint) endpoint);
        RequestInfo<?> requestInfo = state.getRequestInfo();

        if (msg instanceof HttpRequest) {
            if (requestInfo instanceof RiposteInternalRequestInfo) {
                // Tell this RequestInfo that we'll be managing the release of content chunks, so that when
                // RequestInfo.releaseAllResources() is called we don't have extra reference count removals.
                ((RiposteInternalRequestInfo) requestInfo).contentChunksWillBeReleasedExternally();
            }

            // We're supposed to start streaming. There may be pre-endpoint-execution validation logic or other work
            // that needs to happen before the endpoint is executed, so set up the CompletableFuture for the
            // endpoint call to only execute if the pre-endpoint-execution validation/work chain is successful.
            CompletableFuture<DownstreamRequestFirstChunkInfo> firstChunkFuture =
                state.getPreEndpointExecutionWorkChain()
                     .thenCompose(functionWithTracingAndMdc(
                         aVoid -> endpointProxyRouter.getDownstreamRequestFirstChunkInfo(
                             requestInfo, longRunningTaskExecutor, ctx
                         ),
                         ctx
                     ));

            Long endpointTimeoutOverride = endpointProxyRouter.completableFutureTimeoutOverrideMillis();
            long callTimeoutValueToUse = (endpointTimeoutOverride == null)
                                         ? defaultCompletableFutureTimeoutMillis
                                         : endpointTimeoutOverride;

            // When the first chunk is ready, stream it downstream and set up what happens afterward.
            firstChunkFuture.whenComplete((downstreamRequestFirstChunkInfo, throwable) -> {
                Optional<ManualModeTask<HttpResponse>> circuitBreakerManualTask =
                    getCircuitBreaker(downstreamRequestFirstChunkInfo, ctx).map(CircuitBreaker::newManualModeTask);
                StreamingCallback callback = new StreamingCallbackForCtx(
                    ctx, circuitBreakerManualTask, endpointProxyRouter, requestInfo, proxyRouterState
                );

                if (throwable != null) {
                    // Something blew up trying to determine the first chunk info.
                    callback.unrecoverableErrorOccurred(throwable);
                } else if (!ctx.channel().isOpen()) {
                    // The channel was closed for some reason before we were able to start streaming.
                    String errorMsg = "The channel from the original caller was closed before we could begin the "
                                      + "downstream call.";
                    Exception channelClosedException = new RuntimeException(errorMsg);
                    runnableWithTracingAndMdc(() -> logger.warn(errorMsg), ctx).run();
                    callback.unrecoverableErrorOccurred(channelClosedException);
                } else {
                    try {
                        // Ok we have the first chunk info. Start by setting the downstream call info in the request
                        // info (i.e. for access logs if desired)
                        requestInfo.addRequestAttribute(
                            DOWNSTREAM_CALL_PATH_REQUEST_ATTR_KEY,
                            HttpUtils.extractPath(downstreamRequestFirstChunkInfo.firstChunk.getUri())
                        );

                        // Try our circuit breaker (if we have one).
                        Throwable circuitBreakerException = null;
                        try {
                            circuitBreakerManualTask.ifPresent(ManualModeTask::throwExceptionIfCircuitBreakerIsOpen);
                        } catch (Throwable t) {
                            circuitBreakerException = t;
                        }

                        if (circuitBreakerException == null) {
                            // No circuit breaker, or the breaker is closed. We can now stream the first chunk info.
                            String downstreamHost = downstreamRequestFirstChunkInfo.host;
                            int downstreamPort = downstreamRequestFirstChunkInfo.port;
                            HttpRequest downstreamRequestFirstChunk = downstreamRequestFirstChunkInfo.firstChunk;
                            boolean isSecureHttpsCall = downstreamRequestFirstChunkInfo.isHttps;
                            boolean relaxedHttpsValidation = downstreamRequestFirstChunkInfo.relaxedHttpsValidation;

                            // Tell the proxyRouterState about the streaming callback so that
                            // callback.unrecoverableErrorOccurred(...) can be called in the case of an error
                            // on subsequent chunks.
                            proxyRouterState.setStreamingCallback(callback);

                            // Setup the streaming channel future with everything it needs to kick off the
                            // downstream request.
                            proxyRouterState.setStreamingStartTimeNanos(System.nanoTime());
                            CompletableFuture<StreamingChannel> streamingChannel =
                                streamingAsyncHttpClient.streamDownstreamCall(
                                    downstreamHost, downstreamPort, downstreamRequestFirstChunk, isSecureHttpsCall,
                                    relaxedHttpsValidation, callback, callTimeoutValueToUse, ctx
                                );

                            // Tell the streaming channel future what to do when it completes.
                            streamingChannel = streamingChannel.whenComplete((sc, cause) -> {
                                if (cause == null) {
                                    // Successfully connected and sent the first chunk. We can now safely let
                                    // the remaining content chunks through for streaming.
                                    proxyRouterState.triggerChunkProcessing(sc);
                                } else {
                                    // Something blew up while connecting to the downstream server.
                                    callback.unrecoverableErrorOccurred(cause);
                                }
                            });

                            // Set the streaming channel future on the state so it can be connected to.
                            proxyRouterState.setStreamingChannelCompletableFuture(streamingChannel);
                        } else {
                            // Circuit breaker is tripped (or otherwise threw an unexpected exception). Immediately
                            // short circuit the error back to the client.
                            callback.unrecoverableErrorOccurred(circuitBreakerException);
                        }
                    } catch (Throwable t) {
                        callback.unrecoverableErrorOccurred(t);
                    }
                }
            });
        } else if (msg instanceof HttpContent) {
            HttpContent msgContent = (HttpContent) msg;
            // If the proxy stream has already failed, just release this content chunk; otherwise register the
            // chunk-streaming behavior and subsequent cleanup for the given HttpContent.
            if (!releaseContentChunkIfStreamAlreadyFailed(msgContent, proxyRouterState)) {
                registerChunkStreamingAction(proxyRouterState, msgContent, ctx);
            }
        }

        return PipelineContinuationBehavior.DO_NOT_FIRE_CONTINUE_EVENT;
    }

    return PipelineContinuationBehavior.CONTINUE;
}
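
The doChannelRead logic above consumes whatever DownstreamRequestFirstChunkInfo the ProxyRouterEndpoint produces from getDownstreamRequestFirstChunkInfo, and it honors completableFutureTimeoutOverrideMillis() when computing the call timeout. For context, here is a minimal, illustrative sketch of such an endpoint (not taken from the riposte project): the downstream host, port, path, and timeout are placeholder values, and it assumes the DownstreamRequestFirstChunkInfo(host, port, isHttps, firstChunk) constructor and the Matcher.match(...) factory.

import com.nike.riposte.server.http.ProxyRouterEndpoint;
import com.nike.riposte.server.http.RequestInfo;
import com.nike.riposte.util.Matcher;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.DefaultHttpRequest;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpVersion;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;

/**
 * Minimal sketch of a ProxyRouterEndpoint. Host, port, path, and timeout below are illustrative placeholders.
 */
public class SimplePassthroughProxyEndpoint extends ProxyRouterEndpoint {

    private static final String DOWNSTREAM_HOST = "downstream.example.com"; // placeholder
    private static final int DOWNSTREAM_PORT = 8080;                        // placeholder

    @Override
    public CompletableFuture<DownstreamRequestFirstChunkInfo> getDownstreamRequestFirstChunkInfo(
        RequestInfo<?> request, Executor longRunningTaskExecutor, ChannelHandlerContext ctx
    ) {
        // Build only the first chunk of the downstream request (no body); content chunks from the original
        // caller are streamed separately by ProxyRouterEndpointExecutionHandler as shown above.
        HttpRequest downstreamFirstChunk = new DefaultHttpRequest(
            HttpVersion.HTTP_1_1, request.getMethod(), request.getUri()
        );
        downstreamFirstChunk.headers().set(request.getHeaders());

        // Assumed constructor: DownstreamRequestFirstChunkInfo(host, port, isHttps, firstChunk).
        return CompletableFuture.completedFuture(
            new DownstreamRequestFirstChunkInfo(DOWNSTREAM_HOST, DOWNSTREAM_PORT, false, downstreamFirstChunk)
        );
    }

    @Override
    public Matcher requestMatcher() {
        // Route calls to /proxy to this endpoint (illustrative path).
        return Matcher.match("/proxy");
    }

    @Override
    public Long completableFutureTimeoutOverrideMillis() {
        // Picked up by doChannelRead(...) above as callTimeoutValueToUse instead of the server default.
        return 5000L;
    }
}

Once the handler receives the completed first-chunk future from an endpoint like this, it opens the streaming channel and forwards the caller's remaining content chunks downstream, as shown in doChannelRead above.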