use of org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody in project data-prep by Talend.
the class CommandHelper method toStreaming.
public static ResponseEntity<StreamingResponseBody> toStreaming(final GenericCommand<InputStream> command) {
    final Observable<InputStream> stream = command.toObservable();
    return stream.map(is -> {
        // Content for the response entity
        final StreamingResponseBody body = outputStream -> {
            try {
                IOUtils.copyLarge(is, outputStream);
                outputStream.flush();
            } catch (IOException e) {
                try {
                    is.close();
                } catch (IOException closingException) {
                    LOGGER.warn("could not close command result, a http connection may be leaked !", closingException);
                }
                LOGGER.error("Unable to fully copy command result '{}'.", command.getClass(), e);
            }
        };
        // copy all headers from the command response so that the mime-type is correctly forwarded. Command has
        // the correct headers due to call to toBlocking() below.
        final MultiValueMap<String, String> headers = new HttpHeaders();
        final HttpStatus status = command.getStatus();
        for (Header header : command.getCommandResponseHeaders()) {
            headers.put(header.getName(), Collections.singletonList(header.getValue()));
        }
        return new ResponseEntity<>(body, headers, status == null ? HttpStatus.OK : status);
    }).toBlocking().first();
}
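Once toStreaming has produced the ResponseEntity, a controller can hand it straight back to Spring MVC, which invokes the StreamingResponseBody lazily on the container's async thread. A minimal sketch of such a caller, assuming the DataSetGet command seen in the aggregate() snippet further down can be looked up this way; the endpoint and controller names are illustrative, and the Talend-internal imports (GenericCommand, CommandHelper, DataSetGet) are omitted:

    import java.io.InputStream;

    import org.springframework.context.ApplicationContext;
    import org.springframework.http.ResponseEntity;
    import org.springframework.web.bind.annotation.GetMapping;
    import org.springframework.web.bind.annotation.PathVariable;
    import org.springframework.web.bind.annotation.RestController;
    import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;

    @RestController
    public class DataSetContentController {

        private final ApplicationContext context;

        public DataSetContentController(ApplicationContext context) {
            this.context = context;
        }

        @GetMapping("/datasets/{id}/content")
        public ResponseEntity<StreamingResponseBody> content(@PathVariable("id") String id) {
            // Assumed bean lookup, mirroring context.getBean(DataSetGet.class, ...) used in aggregate().
            GenericCommand<InputStream> command = context.getBean(DataSetGet.class, id, false, true);
            // Status and headers come from the command's response; the body itself is only
            // streamed when the servlet container calls writeTo on its async thread.
            return CommandHelper.toStreaming(command);
        }
    }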
use of org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody in project data-prep by Talend.
the class CommandHelperTest method testCommandToStreamingWithHeader.
@Test
public void testCommandToStreamingWithHeader() throws Exception {
    GenericCommand<InputStream> command = new CommandHelperTestCommand();
    final ResponseEntity<StreamingResponseBody> responseEntity = CommandHelper.toStreaming(command);
    final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    responseEntity.getBody().writeTo(outputStream);
    assertEquals("test", new String(outputStream.toByteArray()));
    assertEquals(HttpStatus.NO_CONTENT, responseEntity.getStatusCode());
    assertEquals("custom value", responseEntity.getHeaders().get("custom").get(0));
}
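The tests here drive the StreamingResponseBody directly by calling writeTo on a ByteArrayOutputStream. When the body is returned from a real controller instead, MockMvc needs an async round-trip before the streamed content can be asserted. A hedged sketch, assuming a hypothetical /streaming endpoint that returns the same command result:

    import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.asyncDispatch;
    import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
    import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
    import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.request;
    import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

    import org.junit.Test;
    import org.springframework.test.web.servlet.MockMvc;
    import org.springframework.test.web.servlet.MvcResult;

    public class StreamingEndpointTest {

        private MockMvc mockMvc; // assumed to be built against a controller returning StreamingResponseBody

        @Test
        public void shouldStreamBodyThroughMockMvc() throws Exception {
            // The streaming body is written asynchronously, so the first perform only starts the request...
            MvcResult asyncResult = mockMvc
                    .perform(get("/streaming"))
                    .andExpect(request().asyncStarted())
                    .andReturn();
            // ...and a second, dispatched perform exposes the fully written response.
            mockMvc.perform(asyncDispatch(asyncResult))
                    .andExpect(status().isOk())
                    .andExpect(content().string("test"));
        }
    }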
use of org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody in project data-prep by Talend.
the class CommandHelperTest method testCommandToStreamingWithNoHeader.
@Test
public void testCommandToStreamingWithNoHeader() throws Exception {
    HystrixCommand<InputStream> command = new CommandHelperTestCommand();
    final StreamingResponseBody responseBody = CommandHelper.toStreaming(command);
    final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    responseBody.writeTo(outputStream);
    assertEquals("test", new String(outputStream.toByteArray()));
}
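CommandHelperTestCommand itself is not shown in these snippets; judging from the assertions above it is a GenericCommand<InputStream> that also carries a NO_CONTENT status and a "custom" header. For the no-header overload alone, a plain Hystrix command returning a fixed stream would be enough. A minimal sketch, where the class name is illustrative and not the project's actual fixture:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    import com.netflix.hystrix.HystrixCommand;
    import com.netflix.hystrix.HystrixCommandGroupKey;

    // Illustrative stand-in for a command whose InputStream result can be streamed back to the client.
    class StaticContentCommand extends HystrixCommand<InputStream> {

        StaticContentCommand() {
            super(HystrixCommandGroupKey.Factory.asKey("unit-tests"));
        }

        @Override
        protected InputStream run() {
            return new ByteArrayInputStream("test".getBytes(StandardCharsets.UTF_8));
        }
    }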
use of org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody in project data-prep by Talend.
the class TransformationService method aggregate.
/**
 * Compute the given aggregation.
 *
 * @param rawParams the raw aggregation parameters, read from the request body as JSON.
 */
// @formatter:off
@RequestMapping(value = "/aggregate", method = POST, consumes = APPLICATION_JSON_VALUE)
@ApiOperation(value = "Compute the aggregation according to the request body rawParams", consumes = APPLICATION_JSON_VALUE)
@VolumeMetered
public AggregationResult aggregate(@ApiParam(value = "The aggregation rawParams in json") @RequestBody final String rawParams) {
    // @formatter:on
    // parse the aggregation parameters
    final AggregationParameters parameters;
    try {
        parameters = mapper.readerFor(AggregationParameters.class).readValue(rawParams);
        LOG.debug("Aggregation requested {}", parameters);
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.BAD_AGGREGATION_PARAMETERS, e);
    }

    InputStream contentToAggregate;
    // get the content of the preparation (internal call with piped streams)
    if (StringUtils.isNotBlank(parameters.getPreparationId())) {
        try {
            PipedOutputStream temp = new PipedOutputStream();
            contentToAggregate = new PipedInputStream(temp);

            // because of piped streams, processing must be asynchronous
            Runnable r = () -> {
                try {
                    final ExportParameters exportParameters = new ExportParameters();
                    exportParameters.setPreparationId(parameters.getPreparationId());
                    exportParameters.setDatasetId(parameters.getDatasetId());
                    final String filter = parameters.getFilter();
                    if (filter != null) {
                        if (filter.isEmpty()) {
                            throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE,
                                    new IllegalArgumentException("Source should not be empty"));
                        }
                        exportParameters.setFilter(mapper.readTree(filter));
                    }
                    exportParameters.setExportType(JSON);
                    exportParameters.setStepId(parameters.getStepId());

                    final StreamingResponseBody body = executeSampleExportStrategy(exportParameters);
                    body.writeTo(temp);
                } catch (IOException e) {
                    throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE, e);
                }
            };
            executor.execute(r);
        } catch (IOException e) {
            throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE, e);
        }
    } else {
        final DataSetGet dataSetGet = context.getBean(DataSetGet.class, parameters.getDatasetId(), false, true);
        contentToAggregate = dataSetGet.execute();
    }

    // apply the aggregation
    try (JsonParser parser = mapper.getFactory().createParser(new InputStreamReader(contentToAggregate, UTF_8))) {
        final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);
        return aggregationService.aggregate(parameters, dataSet);
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.UNABLE_TO_PARSE_JSON, e);
    } finally {
        // don't forget to release the connection
        if (contentToAggregate != null) {
            try {
                contentToAggregate.close();
            } catch (IOException e) {
                LOG.warn("Could not close dataset input stream while aggregating", e);
            }
        }
    }
}
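The preparation branch above wires a PipedOutputStream to a PipedInputStream so that the export written by executeSampleExportStrategy on an executor thread can be consumed as the aggregation input on the request thread. The same pattern in isolation, as a small self-contained sketch; the sample JSON payload is made up for illustration:

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.PipedInputStream;
    import java.io.PipedOutputStream;
    import java.io.UncheckedIOException;
    import java.nio.charset.StandardCharsets;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class PipedStreamSketch {

        public static void main(String[] args) throws IOException {
            ExecutorService executor = Executors.newSingleThreadExecutor();
            PipedOutputStream producerSide = new PipedOutputStream();
            InputStream consumerSide = new PipedInputStream(producerSide);

            // The producer must run on another thread: a piped stream has a small internal
            // buffer, and writing and reading from the same thread would deadlock once it
            // fills up. This is why aggregate() submits the export to 'executor' before
            // reading 'contentToAggregate'.
            executor.execute(() -> {
                try (PipedOutputStream out = producerSide) {
                    out.write("{\"metadata\":{},\"records\":[]}".getBytes(StandardCharsets.UTF_8));
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            });

            // The consumer blocks until the producer has written and closed its side of the pipe.
            try (InputStream in = consumerSide) {
                System.out.println(new String(in.readAllBytes(), StandardCharsets.UTF_8));
            } finally {
                executor.shutdown();
            }
        }
    }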
use of org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody in project data-prep by Talend.
the class CachedExportStrategy method execute.
@Override
public StreamingResponseBody execute(ExportParameters parameters) {
    final TransformationCacheKey contentKey = getCacheKey(parameters);
    //
    ExportUtils.setExportHeaders(//
            parameters.getExportName(), //
            parameters.getArguments().get(ExportFormat.PREFIX + CSVFormat.ParametersCSV.ENCODING),
            getFormat(parameters.getExportType()));
    return outputStream -> {
        try (InputStream cachedContent = contentCache.get(contentKey)) {
            IOUtils.copy(cachedContent, outputStream);
        }
    };
}
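The returned lambda is where the streaming actually happens: Spring MVC calls it with the response OutputStream after the request thread has been released, and the try-with-resources block guarantees the cached stream is closed even if the copy fails mid-transfer. A controller serving a cached file with the same shape might look like the following sketch, where the endpoint and on-disk cache location are assumptions standing in for contentCache.get(contentKey):

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    import org.springframework.http.MediaType;
    import org.springframework.web.bind.annotation.GetMapping;
    import org.springframework.web.bind.annotation.PathVariable;
    import org.springframework.web.bind.annotation.RestController;
    import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;

    @RestController
    public class CachedExportController {

        @GetMapping(value = "/exports/{id}", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE)
        public StreamingResponseBody download(@PathVariable("id") String id) {
            // Hypothetical cache directory, used here only to have something to stream.
            Path cached = Paths.get("/tmp/export-cache", id);
            // The lambda runs later, on the container's async thread; closing the input
            // stream in try-with-resources mirrors the CachedExportStrategy body above.
            return outputStream -> {
                try (InputStream in = Files.newInputStream(cached)) {
                    in.transferTo(outputStream);
                }
            };
        }
    }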