Use of io.netty.util.concurrent.FutureListener in project netty by netty.
Class SslHandlerTest, method testCloseNotify:
private static void testCloseNotify(SslProvider provider, final long closeNotifyReadTimeout,
                                    final boolean timeout) throws Exception {
    SelfSignedCertificate ssc = new SelfSignedCertificate();
    final SslContext sslServerCtx = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey())
            .sslProvider(provider)
            .build();
    final SslContext sslClientCtx = SslContextBuilder.forClient()
            .trustManager(InsecureTrustManagerFactory.INSTANCE)
            .sslProvider(provider)
            .build();
    EventLoopGroup group = new NioEventLoopGroup();
    Channel sc = null;
    Channel cc = null;
    try {
        final Promise<Channel> clientPromise = group.next().newPromise();
        final Promise<Channel> serverPromise = group.next().newPromise();
        sc = new ServerBootstrap().group(group)
                .channel(NioServerSocketChannel.class)
                .childHandler(new ChannelInitializer<Channel>() {
                    @Override
                    protected void initChannel(Channel ch) throws Exception {
                        SslHandler handler = sslServerCtx.newHandler(ch.alloc());
                        handler.setCloseNotifyReadTimeoutMillis(closeNotifyReadTimeout);
                        handler.sslCloseFuture().addListener(
                                new PromiseNotifier<Channel, Future<Channel>>(serverPromise));
                        handler.handshakeFuture().addListener(new FutureListener<Channel>() {
                            @Override
                            public void operationComplete(Future<Channel> future) {
                                if (!future.isSuccess()) {
                                    // Something bad happened during the handshake; fail the promise!
                                    serverPromise.tryFailure(future.cause());
                                }
                            }
                        });
                        ch.pipeline().addLast(handler);
                    }
                })
                .bind(new InetSocketAddress(0)).syncUninterruptibly().channel();
        cc = new Bootstrap().group(group)
                .channel(NioSocketChannel.class)
                .handler(new ChannelInitializer<Channel>() {
                    @Override
                    protected void initChannel(Channel ch) throws Exception {
                        final AtomicBoolean closeSent = new AtomicBoolean();
                        if (timeout) {
                            ch.pipeline().addFirst(new ChannelInboundHandlerAdapter() {
                                @Override
                                public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
                                    if (closeSent.get()) {
                                        // Drop data on the floor so we will get a timeout while
                                        // waiting for the close_notify.
                                        ReferenceCountUtil.release(msg);
                                    } else {
                                        super.channelRead(ctx, msg);
                                    }
                                }
                            });
                        }
                        SslHandler handler = sslClientCtx.newHandler(ch.alloc());
                        handler.setCloseNotifyReadTimeoutMillis(closeNotifyReadTimeout);
                        handler.sslCloseFuture().addListener(
                                new PromiseNotifier<Channel, Future<Channel>>(clientPromise));
                        handler.handshakeFuture().addListener(new FutureListener<Channel>() {
                            @Override
                            public void operationComplete(Future<Channel> future) {
                                if (future.isSuccess()) {
                                    closeSent.compareAndSet(false, true);
                                    future.getNow().close();
                                } else {
                                    // Something bad happened during the handshake; fail the promise!
                                    clientPromise.tryFailure(future.cause());
                                }
                            }
                        });
                        ch.pipeline().addLast(handler);
                    }
                })
                .connect(sc.localAddress()).syncUninterruptibly().channel();
        serverPromise.awaitUninterruptibly();
        clientPromise.awaitUninterruptibly();
        // The server always receives the close_notify, as the client triggers the close sequence.
        assertTrue(serverPromise.isSuccess());
        // Depending on whether we wait for the response or not, the client promise succeeds or fails.
        if (closeNotifyReadTimeout > 0 && !timeout) {
            assertTrue(clientPromise.isSuccess());
        } else {
            assertFalse(clientPromise.isSuccess());
        }
    } finally {
        if (cc != null) {
            cc.close().syncUninterruptibly();
        }
        if (sc != null) {
            sc.close().syncUninterruptibly();
        }
        group.shutdownGracefully();
        ReferenceCountUtil.release(sslServerCtx);
        ReferenceCountUtil.release(sslClientCtx);
    }
}
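The @Test entry points that drive this helper are not part of the excerpt. The sketch below is a hedged illustration (the method names, provider choice, and timeout values are assumptions) of how the parameters map onto the assertions above:

// Hypothetical drivers for the helper above; names and values are illustrative only.
@Test
public void testCloseNotifyReceived() throws Exception {
    // Client waits up to 5s for the server's close_notify and receives it:
    // both promises succeed.
    testCloseNotify(SslProvider.JDK, 5000, false);
}

@Test
public void testCloseNotifyReadTimesOut() throws Exception {
    // Inbound data is dropped once close has been sent, so reading the server's
    // close_notify times out and the client promise fails.
    testCloseNotify(SslProvider.JDK, 100, true);
}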
Use of io.netty.util.concurrent.FutureListener in project redisson by redisson.
Class RedisClientTest, method testConnectAsync:
@Test
public void testConnectAsync() throws InterruptedException {
    RedisClient c = new RedisClient(RedisRunner.getDefaultRedisServerBindAddressAndPort());
    RFuture<RedisConnection> f = c.connectAsync();
    final CountDownLatch l = new CountDownLatch(1);
    f.addListener((FutureListener<RedisConnection>) future -> {
        RedisConnection conn = future.get();
        assertThat(conn.sync(RedisCommands.PING)).isEqualTo("PONG");
        l.countDown();
    });
    assertThat(l.await(10, TimeUnit.SECONDS)).isTrue();
}
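One caveat with this listener-plus-latch pattern: the assertThat inside the listener runs on a Netty I/O thread, so a failed assertion there does not fail the JUnit test directly; only the latch timeout would. A minimal sketch of a variant that surfaces listener failures on the test thread (the failure reference and latch variable are our additions):

// AtomicReference captures any throwable from the I/O thread for the test thread.
final AtomicReference<Throwable> failure = new AtomicReference<>();
final CountDownLatch latch = new CountDownLatch(1);
f.addListener((FutureListener<RedisConnection>) future -> {
    try {
        RedisConnection conn = future.get();
        assertThat(conn.sync(RedisCommands.PING)).isEqualTo("PONG");
    } catch (Throwable t) {
        failure.set(t);
    } finally {
        latch.countDown();
    }
});
assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
assertThat(failure.get()).isNull();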
Use of io.netty.util.concurrent.FutureListener in project redisson by redisson.
Class ConnectionPool, method scheduleCheck:
private void scheduleCheck(final ClientConnectionsEntry entry) {
    connectionManager.getConnectionEventsHub().fireDisconnect(entry.getClient().getAddr());
    connectionManager.newTimeout(new TimerTask() {
        @Override
        public void run(Timeout timeout) throws Exception {
            if (entry.getFreezeReason() != FreezeReason.RECONNECT || !entry.isFreezed()) {
                return;
            }
            RFuture<RedisConnection> connectionFuture = entry.getClient().connectAsync();
            connectionFuture.addListener(new FutureListener<RedisConnection>() {
                @Override
                public void operationComplete(Future<RedisConnection> future) throws Exception {
                    if (entry.getFreezeReason() != FreezeReason.RECONNECT || !entry.isFreezed()) {
                        return;
                    }
                    if (!future.isSuccess()) {
                        scheduleCheck(entry);
                        return;
                    }
                    final RedisConnection c = future.getNow();
                    if (!c.isActive()) {
                        c.closeAsync();
                        scheduleCheck(entry);
                        return;
                    }
                    final FutureListener<String> pingListener = new FutureListener<String>() {
                        @Override
                        public void operationComplete(Future<String> future) throws Exception {
                            try {
                                if (entry.getFreezeReason() != FreezeReason.RECONNECT || !entry.isFreezed()) {
                                    return;
                                }
                                if (future.isSuccess() && "PONG".equals(future.getNow())) {
                                    entry.resetFailedAttempts();
                                    RPromise<Void> promise = connectionManager.newPromise();
                                    promise.addListener(new FutureListener<Void>() {
                                        @Override
                                        public void operationComplete(Future<Void> future) throws Exception {
                                            if (entry.getNodeType() == NodeType.SLAVE) {
                                                masterSlaveEntry.slaveUp(entry.getClient().getAddr().getHostName(),
                                                        entry.getClient().getAddr().getPort(), FreezeReason.RECONNECT);
                                                log.info("slave {} successfully reconnected", entry.getClient().getAddr());
                                            } else {
                                                synchronized (entry) {
                                                    if (entry.getFreezeReason() == FreezeReason.RECONNECT) {
                                                        entry.setFreezed(false);
                                                        entry.setFreezeReason(null);
                                                        log.info("host {} successfully reconnected", entry.getClient().getAddr());
                                                    }
                                                }
                                            }
                                        }
                                    });
                                    initConnections(entry, promise, false);
                                } else {
                                    scheduleCheck(entry);
                                }
                            } finally {
                                c.closeAsync();
                            }
                        }
                    };
                    if (entry.getConfig().getPassword() != null) {
                        RFuture<Void> temp = c.async(RedisCommands.AUTH, config.getPassword());
                        FutureListener<Void> listener = new FutureListener<Void>() {
                            @Override
                            public void operationComplete(Future<Void> future) throws Exception {
                                ping(c, pingListener);
                            }
                        };
                        temp.addListener(listener);
                    } else {
                        ping(c, pingListener);
                    }
                }
            });
        }
    }, config.getReconnectionTimeout(), TimeUnit.MILLISECONDS);
}
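The ping(c, pingListener) helper is not shown in this excerpt. Presumably it issues PING in the same asynchronous style as the AUTH call above and hands the result to the listener; a sketch under that assumption:

// Hypothetical shape of the ping() helper used above; not the actual Redisson code.
private void ping(RedisConnection c, FutureListener<String> pingListener) {
    RFuture<String> future = c.async(RedisCommands.PING);
    future.addListener(pingListener);
}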
Use of io.netty.util.concurrent.FutureListener in project redisson by redisson.
Class CommandAsyncService, method evalAllAsync:
public <T, R> RFuture<R> evalAllAsync(boolean readOnlyMode, RedisCommand<T> command,
        final SlotCallback<T, R> callback, String script, List<Object> keys, Object... params) {
    final RPromise<R> mainPromise = connectionManager.newPromise();
    final Set<MasterSlaveEntry> entries = connectionManager.getEntrySet();
    final AtomicInteger counter = new AtomicInteger(entries.size());
    FutureListener<T> listener = new FutureListener<T>() {
        @Override
        public void operationComplete(Future<T> future) throws Exception {
            if (!future.isSuccess()) {
                mainPromise.tryFailure(future.cause());
                return;
            }
            callback.onSlotResult(future.getNow());
            if (counter.decrementAndGet() == 0 && !mainPromise.isDone()) {
                mainPromise.trySuccess(callback.onFinish());
            }
        }
    };
    List<Object> args = new ArrayList<Object>(2 + keys.size() + params.length);
    args.add(script);
    args.add(keys.size());
    args.addAll(keys);
    args.addAll(Arrays.asList(params));
    for (MasterSlaveEntry entry : entries) {
        RPromise<T> promise = connectionManager.newPromise();
        promise.addListener(listener);
        async(readOnlyMode, new NodeSource(entry), connectionManager.getCodec(), command, args.toArray(), promise, 0);
    }
    return mainPromise;
}
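As an illustration of the SlotCallback contract implied above (onSlotResult collects each per-entry result, onFinish produces the aggregate), a hypothetical callback that sums a numeric script result across entries could look like:

// Illustrative callback only; the AtomicLong guards against onSlotResult being
// invoked from different event-loop threads.
SlotCallback<Long, Long> sumCallback = new SlotCallback<Long, Long>() {
    private final AtomicLong total = new AtomicLong();
    @Override
    public void onSlotResult(Long result) {
        total.addAndGet(result);
    }
    @Override
    public Long onFinish() {
        return total.get();
    }
};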
Use of io.netty.util.concurrent.FutureListener in project redisson by redisson.
Class CommandAsyncService, method handleBlockingOperations:
private <R, V> void handleBlockingOperations(final AsyncDetails<V, R> details,
        final RedisConnection connection, Long popTimeout) {
    final FutureListener<Boolean> listener = new FutureListener<Boolean>() {
        @Override
        public void operationComplete(Future<Boolean> future) throws Exception {
            details.getMainPromise().tryFailure(new RedissonShutdownException("Redisson is shutdown"));
        }
    };
    final AtomicBoolean canceledByScheduler = new AtomicBoolean();
    final Timeout scheduledFuture;
    if (popTimeout != 0) {
        // to handle cases when connection has been lost
        final Channel originalChannel = connection.getChannel();
        scheduledFuture = connectionManager.newTimeout(new TimerTask() {
            @Override
            public void run(Timeout timeout) throws Exception {
                // skip if no reconnection happened and the connection is still active
                if (originalChannel == connection.getChannel() && connection.isActive()) {
                    return;
                }
                canceledByScheduler.set(true);
                details.getAttemptPromise().trySuccess(null);
            }
        }, popTimeout, TimeUnit.SECONDS);
    } else {
        scheduledFuture = null;
    }
    details.getMainPromise().addListener(new FutureListener<R>() {
        @Override
        public void operationComplete(Future<R> future) throws Exception {
            if (scheduledFuture != null) {
                scheduledFuture.cancel();
            }
            synchronized (listener) {
                connectionManager.getShutdownPromise().removeListener(listener);
            }
            // handling cancel operation for commands from the skipTimeout collection
            if ((future.isCancelled() && details.getAttemptPromise().cancel(true)) || canceledByScheduler.get()) {
                connection.forceFastReconnectAsync();
                return;
            }
            if (future.cause() instanceof RedissonShutdownException) {
                details.getAttemptPromise().tryFailure(future.cause());
            }
        }
    });
    synchronized (listener) {
        if (!details.getMainPromise().isDone()) {
            connectionManager.getShutdownPromise().addListener(listener);
        }
    }
}
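The two synchronized(listener) blocks close a race between registering the shutdown listener and removing it when the main promise completes. A condensed sketch of that guard, with hypothetical names:

// Without the lock, the completion path could run removeListener() before the
// registration path runs addListener(), leaking the listener until shutdown.
static <V> void guardShutdownListener(final Promise<V> mainPromise,
        final Promise<Boolean> shutdownPromise, final FutureListener<Boolean> listener) {
    mainPromise.addListener(new FutureListener<V>() {
        @Override
        public void operationComplete(Future<V> future) {
            synchronized (listener) {
                shutdownPromise.removeListener(listener); // always unhook on completion
            }
        }
    });
    synchronized (listener) {
        if (!mainPromise.isDone()) {
            shutdownPromise.addListener(listener); // register only while still pending
        }
    }
}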