Use of org.springframework.core.io.buffer.DataBufferFactory in project spring-integration by spring-projects.
Class WebFluxDslTests, method testWebFluxFlowWithReplyPayloadToFlux:
@Test
public void testWebFluxFlowWithReplyPayloadToFlux() {
    ClientHttpConnector httpConnector = new HttpHandlerConnector((request, response) -> {
        response.setStatusCode(HttpStatus.OK);
        response.getHeaders().setContentType(MediaType.TEXT_PLAIN);
        DataBufferFactory bufferFactory = response.bufferFactory();
        return response.writeWith(Mono.just(bufferFactory.wrap("FOO\nBAR\n".getBytes())))
                .then(Mono.defer(response::setComplete));
    });

    WebClient webClient = WebClient.builder()
            .clientConnector(httpConnector)
            .build();

    new DirectFieldAccessor(this.webFluxWithReplyPayloadToFlux)
            .setPropertyValue("webClient", webClient);

    QueueChannel replyChannel = new QueueChannel();
    Message<String> testMessage = MessageBuilder.withPayload("test")
            .setReplyChannel(replyChannel)
            .build();

    this.webFluxFlowWithReplyPayloadToFluxInput.send(testMessage);

    Message<?> receive = replyChannel.receive(10_000);
    assertNotNull(receive);
    assertThat(receive.getPayload(), instanceOf(Flux.class));

    @SuppressWarnings("unchecked")
    Flux<String> response = (Flux<String>) receive.getPayload();

    StepVerifier.create(response)
            .expectNext("FOO", "BAR")
            .verifyComplete();
}
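For reference, a minimal self-contained sketch of the DataBufferFactory call the stubbed connector relies on. DefaultDataBufferFactory.wrap(byte[]) wraps the given array without copying it, which is why the test can hand the resulting buffer straight to response.writeWith. The class name and the printed value below are illustrative, not part of the test above.

import java.nio.charset.StandardCharsets;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferFactory;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;

public class WrapExample {

    public static void main(String[] args) {
        DataBufferFactory bufferFactory = new DefaultDataBufferFactory();
        // wrap(...) shares the byte array; no allocation or copy takes place here
        DataBuffer buffer = bufferFactory.wrap("FOO\nBAR\n".getBytes(StandardCharsets.UTF_8));
        System.out.println(buffer.readableByteCount()); // prints 8
    }
}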
Use of org.springframework.core.io.buffer.DataBufferFactory in project spring-data-mongodb by spring-projects.
Class ReactiveGridFsTemplateTests, method considersSkipLimitWhenQueryingFiles:
// DATAMONGO-765
@Test
public void considersSkipLimitWhenQueryingFiles() {
    DataBufferFactory bufferFactory = new DefaultDataBufferFactory();
    DataBuffer buffer = bufferFactory.allocateBuffer(0);

    Flux.just("a", "aa", "aaa", "b", "bb", "bbb", "c", "cc", "ccc", "d", "dd", "ddd")
            .flatMap(fileName -> operations.store(Mono.just(buffer), fileName))
            .as(StepVerifier::create)
            .expectNextCount(12)
            .verifyComplete();

    PageRequest pageRequest = PageRequest.of(2, 3, Sort.Direction.ASC, "filename");

    operations.find(new Query().with(pageRequest))
            .map(GridFSFile::getFilename)
            .as(StepVerifier::create)
            .expectNext("c", "cc", "ccc")
            .verifyComplete();
}
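The test above calls allocateBuffer(0) on purpose so every stored file is empty. For contrast, a minimal hedged sketch of allocating a buffer with some initial capacity and writing content into it; the class name and payload are illustrative assumptions.

import java.nio.charset.StandardCharsets;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;

public class AllocateExample {

    public static void main(String[] args) {
        DefaultDataBufferFactory bufferFactory = new DefaultDataBufferFactory();
        // allocateBuffer(int) creates a buffer with the given initial capacity;
        // the buffer grows automatically as bytes are written
        DataBuffer buffer = bufferFactory.allocateBuffer(16);
        buffer.write("file content".getBytes(StandardCharsets.UTF_8));
        System.out.println(buffer.readableByteCount()); // prints 12
    }
}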
Use of org.springframework.core.io.buffer.DataBufferFactory in project spring-framework by spring-projects.
Class ScriptUtils, method executeSqlScript:
/**
 * Execute the given SQL script.
 * <p>Statement separators and comments will be removed before executing
 * individual statements within the supplied script.
 * <p><strong>Warning</strong>: this method does <em>not</em> release the
 * provided {@link Connection}.
 * @param connection the R2DBC connection to use to execute the script; already
 * configured and ready to use
 * @param resource the resource (potentially associated with a specific encoding)
 * to load the SQL script from
 * @param dataBufferFactory the factory to create data buffers with
 * @param continueOnError whether or not to continue without throwing an exception
 * in the event of an error
 * @param ignoreFailedDrops whether or not to continue in the event of specifically
 * an error on a {@code DROP} statement
 * @param commentPrefixes the prefixes that identify single-line comments in the
 * SQL script (typically "--")
 * @param separator the script statement separator; defaults to
 * {@value #DEFAULT_STATEMENT_SEPARATOR} if not specified and falls back to
 * {@value #FALLBACK_STATEMENT_SEPARATOR} as a last resort; may be set to
 * {@value #EOF_STATEMENT_SEPARATOR} to signal that the script contains a
 * single statement without a separator
 * @param blockCommentStartDelimiter the <em>start</em> block comment delimiter
 * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter
 * @throws ScriptException if an error occurred while executing the SQL script
 * @see #DEFAULT_STATEMENT_SEPARATOR
 * @see #FALLBACK_STATEMENT_SEPARATOR
 * @see #EOF_STATEMENT_SEPARATOR
 * @see org.springframework.r2dbc.connection.ConnectionFactoryUtils#getConnection
 * @see org.springframework.r2dbc.connection.ConnectionFactoryUtils#releaseConnection
 */
public static Mono<Void> executeSqlScript(Connection connection, EncodedResource resource,
        DataBufferFactory dataBufferFactory, boolean continueOnError, boolean ignoreFailedDrops,
        String[] commentPrefixes, @Nullable String separator, String blockCommentStartDelimiter,
        String blockCommentEndDelimiter) throws ScriptException {

    if (logger.isDebugEnabled()) {
        logger.debug("Executing SQL script from " + resource);
    }
    long startTime = System.currentTimeMillis();

    Mono<String> inputScript = readScript(resource, dataBufferFactory, separator)
            .onErrorMap(IOException.class, ex -> new CannotReadScriptException(resource, ex));

    AtomicInteger statementNumber = new AtomicInteger();

    Flux<Void> executeScript = inputScript.flatMapIterable(script -> {
        String separatorToUse = separator;
        if (separatorToUse == null) {
            separatorToUse = DEFAULT_STATEMENT_SEPARATOR;
        }
        if (!EOF_STATEMENT_SEPARATOR.equals(separatorToUse) &&
                !containsStatementSeparator(resource, script, separatorToUse, commentPrefixes,
                        blockCommentStartDelimiter, blockCommentEndDelimiter)) {
            separatorToUse = FALLBACK_STATEMENT_SEPARATOR;
        }
        return splitSqlScript(resource, script, separatorToUse, commentPrefixes,
                blockCommentStartDelimiter, blockCommentEndDelimiter);
    }).concatMap(statement -> {
        statementNumber.incrementAndGet();
        return runStatement(statement, connection, resource, continueOnError, ignoreFailedDrops, statementNumber);
    });

    if (logger.isDebugEnabled()) {
        executeScript = executeScript.doOnComplete(() -> {
            long elapsedTime = System.currentTimeMillis() - startTime;
            logger.debug("Executed SQL script from " + resource + " in " + elapsedTime + " ms.");
        });
    }

    return executeScript
            .onErrorMap(ex -> !(ex instanceof ScriptException),
                    ex -> new UncategorizedScriptException(
                            "Failed to execute database script from resource [" + resource + "]", ex))
            .then();
}
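For orientation, a hedged usage sketch that calls the method above with the exact parameter list it declares. The script location (schema.sql), the UTF-8 charset, and the ScriptUtils/ConnectionFactoryUtils package imports are assumptions for the sketch, not taken from the snippet.

import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;
import org.springframework.core.io.support.EncodedResource;
import org.springframework.r2dbc.connection.ConnectionFactoryUtils;
import org.springframework.r2dbc.connection.init.ScriptUtils;

import io.r2dbc.spi.ConnectionFactory;
import reactor.core.publisher.Mono;

public class ExecuteScriptExample {

    public static Mono<Void> runSchemaScript(ConnectionFactory connectionFactory) {
        // schema.sql is a placeholder classpath resource
        EncodedResource script = new EncodedResource(new ClassPathResource("schema.sql"), "UTF-8");
        return Mono.usingWhen(
                ConnectionFactoryUtils.getConnection(connectionFactory),
                connection -> ScriptUtils.executeSqlScript(connection, script,
                        new DefaultDataBufferFactory(),
                        false,                // continueOnError
                        false,                // ignoreFailedDrops
                        new String[] {"--"},  // commentPrefixes
                        ";",                  // separator
                        "/*", "*/"),          // block comment delimiters
                connection -> ConnectionFactoryUtils.releaseConnection(connection, connectionFactory));
    }
}

Wrapping the call in Mono.usingWhen keeps the connection open for the duration of the script and releases it afterwards, matching the javadoc's note that executeSqlScript itself does not release the Connection.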
Use of org.springframework.core.io.buffer.DataBufferFactory in project spring-framework by spring-projects.
Class MultipartHttpMessageWriter, method encodePart:
@SuppressWarnings("unchecked")
private <T> Flux<DataBuffer> encodePart(byte[] boundary, String name, T value, DataBufferFactory factory) {
MultipartHttpOutputMessage message = new MultipartHttpOutputMessage(factory);
HttpHeaders headers = message.getHeaders();
T body;
ResolvableType resolvableType = null;
if (value instanceof HttpEntity) {
HttpEntity<T> httpEntity = (HttpEntity<T>) value;
headers.putAll(httpEntity.getHeaders());
body = httpEntity.getBody();
Assert.state(body != null, "MultipartHttpMessageWriter only supports HttpEntity with body");
if (httpEntity instanceof ResolvableTypeProvider) {
resolvableType = ((ResolvableTypeProvider) httpEntity).getResolvableType();
}
} else {
body = value;
}
if (resolvableType == null) {
resolvableType = ResolvableType.forClass(body.getClass());
}
if (!headers.containsKey(HttpHeaders.CONTENT_DISPOSITION)) {
if (body instanceof Resource) {
headers.setContentDispositionFormData(name, ((Resource) body).getFilename());
} else if (resolvableType.resolve() == Resource.class) {
body = (T) Mono.from((Publisher<?>) body).doOnNext(o -> headers.setContentDispositionFormData(name, ((Resource) o).getFilename()));
} else {
headers.setContentDispositionFormData(name, null);
}
}
MediaType contentType = headers.getContentType();
final ResolvableType finalBodyType = resolvableType;
Optional<HttpMessageWriter<?>> writer = this.partWriters.stream().filter(partWriter -> partWriter.canWrite(finalBodyType, contentType)).findFirst();
if (!writer.isPresent()) {
return Flux.error(new CodecException("No suitable writer found for part: " + name));
}
Publisher<T> bodyPublisher = body instanceof Publisher ? (Publisher<T>) body : Mono.just(body);
// The writer will call MultipartHttpOutputMessage#write which doesn't actually write
// but only stores the body Flux and returns Mono.empty().
Mono<Void> partContentReady = ((HttpMessageWriter<T>) writer.get()).write(bodyPublisher, resolvableType, contentType, message, DEFAULT_HINTS);
// After partContentReady, we can access the part content from MultipartHttpOutputMessage
// and use it for writing to the actual request body
Flux<DataBuffer> partContent = partContentReady.thenMany(Flux.defer(message::getBody));
return Flux.concat(generateBoundaryLine(boundary, factory), partContent, generateNewLine(factory));
}
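For orientation, a hedged sketch of the client-side path that typically exercises this writer: building a multipart body with MultipartBodyBuilder and sending it through WebClient, which routes it to MultipartHttpMessageWriter and in turn to encodePart above with the request's DataBufferFactory. The URI, part names, and resource path are placeholders.

import org.springframework.core.io.ClassPathResource;
import org.springframework.http.client.MultipartBodyBuilder;
import org.springframework.web.reactive.function.BodyInserters;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;

public class MultipartUploadExample {

    public static Mono<String> upload(WebClient webClient) {
        MultipartBodyBuilder builder = new MultipartBodyBuilder();
        // a plain text part and a Resource part; the Resource's filename becomes
        // the filename attribute of the Content-Disposition header
        builder.part("meta", "some metadata");
        builder.part("file", new ClassPathResource("report.pdf"));
        return webClient.post()
                .uri("/upload")  // placeholder URI
                .body(BodyInserters.fromMultipartData(builder.build()))
                .retrieve()
                .bodyToMono(String.class);
    }
}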
Use of org.springframework.core.io.buffer.DataBufferFactory in project spring-framework by spring-projects.
Class JettyRequestUpgradeStrategy, method upgrade:
@Override
public Mono<Void> upgrade(ServerWebExchange exchange, WebSocketHandler handler,
        @Nullable String subProtocol, Supplier<HandshakeInfo> handshakeInfoFactory) {

    ServerHttpRequest request = exchange.getRequest();
    ServerHttpResponse response = exchange.getResponse();

    HttpServletRequest servletRequest = ServerHttpRequestDecorator.getNativeRequest(request);
    HttpServletResponse servletResponse = ServerHttpResponseDecorator.getNativeResponse(response);
    ServletContext servletContext = servletRequest.getServletContext();

    HandshakeInfo handshakeInfo = handshakeInfoFactory.get();
    DataBufferFactory factory = response.bufferFactory();

    // Trigger WebFlux preCommit actions before upgrade
    return exchange.getResponse().setComplete()
            .then(Mono.deferContextual(contextView -> {
                JettyWebSocketHandlerAdapter adapter = new JettyWebSocketHandlerAdapter(
                        ContextWebSocketHandler.decorate(handler, contextView),
                        session -> new JettyWebSocketSession(session, handshakeInfo, factory));

                JettyWebSocketCreator webSocketCreator = (upgradeRequest, upgradeResponse) -> {
                    if (subProtocol != null) {
                        upgradeResponse.setAcceptedSubProtocol(subProtocol);
                    }
                    return adapter;
                };

                JettyWebSocketServerContainer container = JettyWebSocketServerContainer.getContainer(servletContext);

                try {
                    container.upgrade(webSocketCreator, servletRequest, servletResponse);
                }
                catch (Exception ex) {
                    return Mono.error(ex);
                }
                return Mono.empty();
            }));
}
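The DataBufferFactory captured from response.bufferFactory() is handed to each JettyWebSocketSession, which later uses it when building outgoing frames. As a minimal hedged illustration of that downstream use, a generic echo WebSocketHandler (not code from this class): session.textMessage(...) wraps its payload through the session's buffer factory.

import org.springframework.web.reactive.socket.WebSocketHandler;
import org.springframework.web.reactive.socket.WebSocketMessage;
import org.springframework.web.reactive.socket.WebSocketSession;
import reactor.core.publisher.Mono;

public class EchoWebSocketHandler implements WebSocketHandler {

    @Override
    public Mono<Void> handle(WebSocketSession session) {
        // Echo every inbound text frame; textMessage(...) allocates the outgoing
        // payload via the session's DataBufferFactory (the one passed during upgrade).
        return session.send(session.receive()
                .map(WebSocketMessage::getPayloadAsText)
                .map(session::textMessage));
    }
}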