Use of com.linkedin.data.codec.entitystream.StreamDataCodec in project rest.li by linkedin.
The class StreamRestLiServer, method handleStructuredDataResourceRequest: the server selects streaming codecs for the request and response content types, decodes the request entity stream into a DataMap, and falls back to fully-buffered processing when no streaming codec is available.
private void handleStructuredDataResourceRequest(StreamRequest request,
    RoutingResult routingResult,
    Callback<?> callback,
    Function<ContentType, Callback<RestLiResponse>> restLiResponseCallbackConstructor,
    Consumer<RestRequest> fallbackRequestProcessor)
{
  ContentType reqContentType, respContentType;
  try
  {
    // TODO: We should throw exception instead of defaulting to JSON when the request content type is non-null and
    // unrecognized. This behavior was inadvertently changed in commit d149605e4181349b64180bdfe0b4d24a294dc6f6
    // when this logic was moved from DataMapUtils.readMapWithExceptions() to DataMapConverter.dataMapToByteString().
    reqContentType = ContentType.getContentType(request.getHeader(RestConstants.HEADER_CONTENT_TYPE))
        .orElse(ContentType.JSON);

    String respMimeType = routingResult.getContext().getResponseMimeType();
    respContentType = ContentType.getResponseContentType(respMimeType, request.getURI(), request.getHeaders())
        .orElseThrow(() -> new RestLiServiceException(HttpStatus.S_406_NOT_ACCEPTABLE,
            "Requested mime type for encoding is not supported. Mimetype: " + respMimeType));
  }
  catch (MimeTypeParseException e)
  {
    callback.onError(e);
    return;
  }

  StreamDataCodec reqCodec = reqContentType.getStreamCodec();
  StreamDataCodec respCodec = respContentType.getStreamCodec();

  if (_useStreamCodec && reqCodec != null && respCodec != null)
  {
    final RequestContext requestContext = routingResult.getContext().getRawRequestContext();
    TimingContextUtil.beginTiming(requestContext, FrameworkTimingKeys.SERVER_REQUEST_RESTLI_DESERIALIZATION.key());

    reqCodec.decodeMap(EntityStreamAdapters.toGenericEntityStream(request.getEntityStream())).handle((dataMap, e) ->
    {
      TimingContextUtil.endTiming(requestContext, FrameworkTimingKeys.SERVER_REQUEST_RESTLI_DESERIALIZATION.key());

      Throwable error = null;
      if (e == null)
      {
        try
        {
          handleResourceRequest(request, routingResult, dataMap,
              restLiResponseCallbackConstructor.apply(respContentType));
        }
        catch (Throwable throwable)
        {
          error = throwable;
        }
      }
      else
      {
        error = buildPreRoutingStreamException(
            new RoutingException("Cannot parse request entity", HttpStatus.S_400_BAD_REQUEST.getCode(), e), request);
      }

      if (error != null)
      {
        log.error("Fail to handle structured stream request", error);
        callback.onError(error);
      }

      // handle function requires a return statement although there is no more completion stage.
      return null;
    });
  }
  else
  {
    // Fallback to fully-buffered request and response processing.
    Messages.toRestRequest(request).handle((restRequest, e) ->
    {
      if (e == null)
      {
        try
        {
          fallbackRequestProcessor.accept(restRequest);
        }
        catch (Throwable throwable)
        {
          e = throwable;
        }
      }

      if (e != null)
      {
        log.error("Fail to handle structured toRest request", e);
        callback.onError(e);
      }

      // handle function requires a return statement although there is no more completion stage.
      return null;
    });
  }
}
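The same streaming-decode pattern can be reduced to a short standalone helper. The sketch below is not rest.li code: the class name StreamDecodeSketch, the method decodeRequestDataMap, and the IllegalStateException fallback are illustrative assumptions, and the import locations reflect the usual rest.li package layout but may differ between versions. It relies only on the calls already visible above: ContentType.getContentType, getStreamCodec, EntityStreamAdapters.toGenericEntityStream, and StreamDataCodec.decodeMap.

import java.util.concurrent.CompletionStage;

import javax.activation.MimeTypeParseException;

import com.linkedin.common.callback.Callback;
import com.linkedin.data.DataMap;
import com.linkedin.data.codec.entitystream.StreamDataCodec;
import com.linkedin.r2.message.stream.StreamRequest;
import com.linkedin.r2.message.stream.entitystream.adapter.EntityStreamAdapters;
import com.linkedin.restli.common.ContentType;
import com.linkedin.restli.common.RestConstants;

/**
 * Hypothetical helper (not part of rest.li) showing the streaming-decode pattern used by
 * StreamRestLiServer: resolve a codec from the Content-Type header and decode the request
 * entity stream into a DataMap without buffering the whole payload first.
 */
public class StreamDecodeSketch
{
  public static void decodeRequestDataMap(StreamRequest request, Callback<DataMap> callback)
  {
    final ContentType contentType;
    try
    {
      // Missing or unrecognized content types default to JSON, mirroring the server code above.
      contentType = ContentType.getContentType(request.getHeader(RestConstants.HEADER_CONTENT_TYPE))
          .orElse(ContentType.JSON);
    }
    catch (MimeTypeParseException e)
    {
      callback.onError(e);
      return;
    }

    StreamDataCodec codec = contentType.getStreamCodec();
    if (codec == null)
    {
      // Not every content type has a streaming codec; the real server falls back to fully-buffered processing.
      callback.onError(new IllegalStateException("No streaming codec for content type: " + contentType));
      return;
    }

    // decodeMap consumes the generic entity stream asynchronously and completes with the parsed DataMap.
    CompletionStage<DataMap> stage =
        codec.decodeMap(EntityStreamAdapters.toGenericEntityStream(request.getEntityStream()));
    stage.handle((dataMap, error) ->
    {
      if (error != null)
      {
        callback.onError(error);
      }
      else
      {
        callback.onSuccess(dataMap);
      }
      // handle requires a return value even though nothing consumes this stage further.
      return null;
    });
  }
}

The null check on getStreamCodec matters because it is exactly what drives the _useStreamCodec branch above: when either side lacks a streaming codec, the server converts the StreamRequest into a fully-buffered RestRequest instead.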
Use of com.linkedin.data.codec.entitystream.StreamDataCodec in project rest.li by linkedin.
The class RestResponseDecoder, method decodeResponse: multipart/related responses are handed to a MultiPartMIMEReader, JSON/PSON bodies are decoded with a StreamDataCodec when one is available, and otherwise the full entity is buffered with a FullEntityReader.
public void decodeResponse(final StreamResponse streamResponse, final Callback<Response<T>> responseCallback)
    throws RestLiDecodingException
{
  // Determine content type and take appropriate action.
  // If 'multipart/related', then use MultiPartMIMEReader to read first part (which can be json or pson).
  final String contentTypeString = streamResponse.getHeader(RestConstants.HEADER_CONTENT_TYPE);
  if (contentTypeString != null)
  {
    ContentType contentType = null;
    try
    {
      contentType = new ContentType(contentTypeString);
    }
    catch (ParseException parseException)
    {
      responseCallback.onError(new RestLiDecodingException("Could not decode Content-Type header in response",
          parseException));
      return;
    }
    if (contentType.getBaseType().equalsIgnoreCase(RestConstants.HEADER_VALUE_MULTIPART_RELATED))
    {
      final MultiPartMIMEReader multiPartMIMEReader = MultiPartMIMEReader.createAndAcquireStream(streamResponse);
      final TopLevelReaderCallback topLevelReaderCallback =
          new TopLevelReaderCallback(responseCallback, streamResponse, multiPartMIMEReader);
      multiPartMIMEReader.registerReaderCallback(topLevelReaderCallback);
      return;
    }
  }

  // Otherwise if the whole body is json/pson then read everything in.
  StreamDataCodec streamDataCodec = null;
  try
  {
    streamDataCodec =
        getContentType(streamResponse.getHeaders().get(RestConstants.HEADER_CONTENT_TYPE)).orElse(JSON).getStreamCodec();
  }
  catch (MimeTypeParseException e)
  {
    responseCallback.onError(e);
    return;
  }

  if (streamDataCodec != null)
  {
    CompletionStage<DataMap> dataMapCompletionStage =
        streamDataCodec.decodeMap(EntityStreamAdapters.toGenericEntityStream(streamResponse.getEntityStream()));
    dataMapCompletionStage.handle((dataMap, e) ->
    {
      if (e != null)
      {
        responseCallback.onError(new RestLiDecodingException("Could not decode REST response", e));
        return null;
      }
      try
      {
        responseCallback.onSuccess(createResponse(streamResponse.getHeaders(), streamResponse.getStatus(), dataMap,
            streamResponse.getCookies()));
      }
      catch (Throwable throwable)
      {
        responseCallback.onError(throwable);
      }
      // handle function requires a return statement although there is no more completion stage.
      return null;
    });
  }
  else
  {
    final FullEntityReader fullEntityReader = new FullEntityReader(new Callback<ByteString>()
    {
      @Override
      public void onError(Throwable e)
      {
        responseCallback.onError(e);
      }

      @Override
      public void onSuccess(ByteString result)
      {
        try
        {
          responseCallback.onSuccess(createResponse(streamResponse.getHeaders(), streamResponse.getStatus(), result,
              streamResponse.getCookies()));
        }
        catch (Exception exception)
        {
          onError(exception);
        }
      }
    });
    streamResponse.getEntityStream().setReader(fullEntityReader);
  }
}
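On the client side the same codec can be driven directly against a StreamResponse, for example from a test. The sketch below is again hypothetical: ResponseDecodeSketch and decodeBody are illustrative names, the blocking toCompletableFuture().get() is for demonstration only, and the import packages follow the usual rest.li layout but may vary by version. The only rest.li calls used are those already shown above (getContentType, getStreamCodec, EntityStreamAdapters.toGenericEntityStream, decodeMap).

import java.util.concurrent.CompletionStage;

import com.linkedin.data.DataMap;
import com.linkedin.data.codec.entitystream.StreamDataCodec;
import com.linkedin.r2.message.stream.StreamResponse;
import com.linkedin.r2.message.stream.entitystream.adapter.EntityStreamAdapters;
import com.linkedin.restli.common.ContentType;
import com.linkedin.restli.common.RestConstants;

/**
 * Hypothetical test-style helper (not part of rest.li): decode a StreamResponse body into a
 * DataMap using the streaming codec advertised by its Content-Type header.
 */
public final class ResponseDecodeSketch
{
  public static DataMap decodeBody(StreamResponse response) throws Exception
  {
    ContentType contentType = ContentType.getContentType(response.getHeader(RestConstants.HEADER_CONTENT_TYPE))
        .orElse(ContentType.JSON);

    StreamDataCodec codec = contentType.getStreamCodec();
    if (codec == null)
    {
      // Mirrors the else-branch above: without a streaming codec the body must be fully buffered instead.
      throw new IllegalStateException("Content type " + contentType + " has no streaming codec");
    }

    // decodeMap is asynchronous; blocking on the CompletionStage here is purely for illustration.
    CompletionStage<DataMap> stage =
        codec.decodeMap(EntityStreamAdapters.toGenericEntityStream(response.getEntityStream()));
    return stage.toCompletableFuture().get();
  }
}

In both examples, a null result from getStreamCodec is the signal to fall back to fully-buffered handling (FullEntityReader here, Messages.toRestRequest on the server side).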