Use of com.netflix.loadbalancer.ServerStats in project ribbon by Netflix.
Class LoadBalancerCommand, method submit:
/**
 * Create an {@link Observable} that, once subscribed, executes the network call asynchronously against a server chosen by the load balancer.
 * Any errors that the {@link RetryHandler} marks as retriable are consumed internally by the
 * function and are not observed by the {@link Observer} subscribed to the returned {@link Observable}. If the number of retries
 * exceeds the maximum allowed, a final error is emitted by the returned {@link Observable}. Otherwise, the first successful
 * result obtained during execution and retries is emitted.
 */
public Observable<T> submit(final ServerOperation<T> operation) {
    final ExecutionInfoContext context = new ExecutionInfoContext();

    if (listenerInvoker != null) {
        try {
            listenerInvoker.onExecutionStart();
        } catch (AbortExecutionException e) {
            return Observable.error(e);
        }
    }

    final int maxRetrysSame = retryHandler.getMaxRetriesOnSameServer();
    final int maxRetrysNext = retryHandler.getMaxRetriesOnNextServer();

    // Use the load balancer
    Observable<T> o = (server == null ? selectServer() : Observable.just(server))
            .concatMap(new Func1<Server, Observable<T>>() {
                @Override
                // Called for each server being selected
                public Observable<T> call(Server server) {
                    context.setServer(server);
                    final ServerStats stats = loadBalancerContext.getServerStats(server);

                    // Called for each attempt and retry
                    Observable<T> o = Observable.just(server)
                            .concatMap(new Func1<Server, Observable<T>>() {
                                @Override
                                public Observable<T> call(final Server server) {
                                    context.incAttemptCount();
                                    loadBalancerContext.noteOpenConnection(stats);

                                    if (listenerInvoker != null) {
                                        try {
                                            listenerInvoker.onStartWithServer(context.toExecutionInfo());
                                        } catch (AbortExecutionException e) {
                                            return Observable.error(e);
                                        }
                                    }

                                    final Stopwatch tracer = loadBalancerContext.getExecuteTracer().start();

                                    return operation.call(server).doOnEach(new Observer<T>() {
                                        private T entity;

                                        @Override
                                        public void onCompleted() {
                                            recordStats(tracer, stats, entity, null);
                                            // TODO: What to do if onNext or onError are never called?
                                        }

                                        @Override
                                        public void onError(Throwable e) {
                                            recordStats(tracer, stats, null, e);
                                            logger.debug("Got error {} when executed on server {}", e, server);
                                            if (listenerInvoker != null) {
                                                listenerInvoker.onExceptionWithServer(e, context.toExecutionInfo());
                                            }
                                        }

                                        @Override
                                        public void onNext(T entity) {
                                            this.entity = entity;
                                            if (listenerInvoker != null) {
                                                listenerInvoker.onExecutionSuccess(entity, context.toExecutionInfo());
                                            }
                                        }

                                        private void recordStats(Stopwatch tracer, ServerStats stats, Object entity, Throwable exception) {
                                            tracer.stop();
                                            loadBalancerContext.noteRequestCompletion(stats, entity, exception,
                                                    tracer.getDuration(TimeUnit.MILLISECONDS), retryHandler);
                                        }
                                    });
                                }
                            });

                    if (maxRetrysSame > 0)
                        o = o.retry(retryPolicy(maxRetrysSame, true));
                    return o;
                }
            });

    if (maxRetrysNext > 0 && server == null)
        o = o.retry(retryPolicy(maxRetrysNext, false));

    return o.onErrorResumeNext(new Func1<Throwable, Observable<T>>() {
        @Override
        public Observable<T> call(Throwable e) {
            if (context.getAttemptCount() > 0) {
                if (maxRetrysNext > 0 && context.getServerAttemptCount() == (maxRetrysNext + 1)) {
                    e = new ClientException(ClientException.ErrorType.NUMBEROF_RETRIES_NEXTSERVER_EXCEEDED,
                            "Number of retries on next server exceeded max " + maxRetrysNext
                                    + " retries, while making a call for: " + context.getServer(), e);
                } else if (maxRetrysSame > 0 && context.getAttemptCount() == (maxRetrysSame + 1)) {
                    e = new ClientException(ClientException.ErrorType.NUMBEROF_RETRIES_EXEEDED,
                            "Number of retries exceeded max " + maxRetrysSame
                                    + " retries, while making a call for: " + context.getServer(), e);
                }
            }
            if (listenerInvoker != null) {
                listenerInvoker.onExecutionFailed(e, context.toFinalExecutionInfo());
            }
            return Observable.error(e);
        }
    });
}
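For context, a caller typically builds a LoadBalancerCommand and passes a ServerOperation whose call(Server) performs the actual request. The following is a minimal usage sketch only, assuming the builder API from ribbon's com.netflix.loadbalancer.reactive package; executeOnServer(...) is a hypothetical placeholder for the application's transport call, not a ribbon method.

// Minimal sketch (assumption, not taken from the ribbon sources shown here).
public Observable<String> submitWithLoadBalancer(LoadBalancerContext loadBalancerContext,
                                                 RetryHandler retryHandler) {
    LoadBalancerCommand<String> command = LoadBalancerCommand.<String>builder()
            .withLoadBalancerContext(loadBalancerContext)  // wraps the ILoadBalancer and its ServerStats
            .withRetryHandler(retryHandler)                // drives maxRetrysSame / maxRetrysNext above
            .build();
    return command.submit(new ServerOperation<String>() {
        @Override
        public Observable<String> call(Server server) {
            return executeOnServer(server);  // hypothetical: the real network call goes here
        }
    });
}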
Use of com.netflix.loadbalancer.ServerStats in project ribbon by Netflix.
Class RetryTest, method postReadTimeout:
@Test
public void postReadTimeout() throws Exception {
    URI localUrl = new URI("/noresponse");
    HttpRequest request = HttpRequest.newBuilder().uri(localUrl).verb(Verb.POST).build();
    try {
        client.executeWithLoadBalancer(request,
                DefaultClientConfigImpl.getEmptyConfig().set(CommonClientConfigKey.MaxAutoRetriesNextServer, 2));
        fail("Exception expected");
    } catch (ClientException e) {
        // NOPMD
    }
    ServerStats stats = lb.getLoadBalancerStats().getSingleServerStat(localServer);
    assertEquals(1, stats.getSuccessiveConnectionFailureCount());
}
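The test expects exactly one recorded connection failure even though MaxAutoRetriesNextServer is 2, which is consistent with ribbon treating a read timeout on a non-idempotent POST as non-retriable unless retries are explicitly allowed for all operations. The sketch below uses real CommonClientConfigKey entries; the behavioral notes in its comments are assumptions about defaults, not assertions.

// Sketch: retry-related client config keys consulted by ribbon's retry handlers.
IClientConfig retryConfig = DefaultClientConfigImpl.getEmptyConfig()
        .set(CommonClientConfigKey.MaxAutoRetries, 1)                 // retries on the same server
        .set(CommonClientConfigKey.MaxAutoRetriesNextServer, 2)       // retries on subsequent servers
        .set(CommonClientConfigKey.OkToRetryOnAllOperations, false);  // assumption: keeps POSTs from being retried on read timeouts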
Use of com.netflix.loadbalancer.ServerStats in project ribbon by Netflix.
Class NettyClientTest, method testLoadBalancingPostWithReadTimeout:
@Test
public void testLoadBalancingPostWithReadTimeout() throws Exception {
    MockWebServer server = new MockWebServer();
    String content = "{\"name\": \"ribbon\", \"age\": 2}";
    server.enqueue(new MockResponse().setResponseCode(200).setHeader("Content-type", "application/json").setBody(content));
    server.play();
    IClientConfig config = DefaultClientConfigImpl.getClientConfigWithDefaultValues().set(CommonClientConfigKey.ReadTimeout, 100);
    HttpClientRequest<ByteBuf> request = HttpClientRequest.createPost("/testAsync/postTimeout")
            .withContent(SerializationUtils.serializeToBytes(JacksonCodec.getInstance(), EmbeddedResources.defaultPerson, null))
            .withHeader("Content-type", "application/json");
    NettyHttpLoadBalancerErrorHandler errorHandler = new NettyHttpLoadBalancerErrorHandler(1, 3, true);
    BaseLoadBalancer lb = new BaseLoadBalancer(new DummyPing(), new AvailabilityFilteringRule());
    LoadBalancingHttpClient<ByteBuf, ByteBuf> lbObservables = RibbonTransport.newHttpClient(lb, config, errorHandler);
    Server goodServer = new Server("localhost:" + server.getPort());
    Server badServer = new Server("localhost:" + port);
    List<Server> servers = Lists.newArrayList(badServer, badServer, badServer, goodServer);
    lb.setServersList(servers);
    RetryHandler handler = new RequestSpecificRetryHandler(true, true, errorHandler, null) {
        @Override
        public boolean isRetriableException(Throwable e, boolean sameServer) {
            return true;
        }
    };
    Observable<Person> observableWithRetries = getPersonObservable(lbObservables.submit(request, handler, null));
    ObserverWithLatch<Person> observer = new ObserverWithLatch<Person>();
    observableWithRetries.subscribe(observer);
    observer.await();
    if (observer.error != null) {
        observer.error.printStackTrace();
    }
    assertEquals("ribbon", observer.obj.name);
    assertEquals(2, observer.obj.age);
    ServerStats stats = lbObservables.getServerStats(badServer);
    server.shutdown();
    // two requests to bad server because retry same server is set to 1
    assertEquals(4, stats.getTotalRequestsCount());
    assertEquals(0, stats.getActiveRequestsCount());
    assertEquals(4, stats.getSuccessiveConnectionFailureCount());
    stats = lbObservables.getServerStats(goodServer);
    assertEquals(1, stats.getTotalRequestsCount());
    assertEquals(0, stats.getActiveRequestsCount());
    assertEquals(0, stats.getSuccessiveConnectionFailureCount());
}
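As a side note, the per-server counters asserted above can also be read directly from the load balancer's LoadBalancerStats. A minimal sketch, assuming lb is the BaseLoadBalancer constructed in the test:

// Sketch: dump the ServerStats counters that the assertions above rely on.
LoadBalancerStats lbStats = lb.getLoadBalancerStats();
for (Server s : lb.getAllServers()) {
    ServerStats ss = lbStats.getSingleServerStat(s);
    System.out.println(s + " total=" + ss.getTotalRequestsCount()
            + " active=" + ss.getActiveRequestsCount()
            + " consecutiveFailures=" + ss.getSuccessiveConnectionFailureCount());
}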
Use of com.netflix.loadbalancer.ServerStats in project ribbon by Netflix.
Class NettyClientTest, method testLoadBalancingPostWithNoRetrySameServer:
@Test
public void testLoadBalancingPostWithNoRetrySameServer() throws Exception {
    MockWebServer server = new MockWebServer();
    String content = "{\"name\": \"ribbon\", \"age\": 2}";
    server.enqueue(new MockResponse().setResponseCode(200).setHeader("Content-type", "application/json").setBody(content));
    server.play();
    IClientConfig config = DefaultClientConfigImpl.getClientConfigWithDefaultValues().set(CommonClientConfigKey.ReadTimeout, 100);
    HttpClientRequest<ByteBuf> request = HttpClientRequest.createPost("/testAsync/postTimeout")
            .withContent(SerializationUtils.serializeToBytes(JacksonCodec.getInstance(), EmbeddedResources.defaultPerson, null))
            .withHeader("Content-type", "application/json");
    NettyHttpLoadBalancerErrorHandler errorHandler = new NettyHttpLoadBalancerErrorHandler(0, 3, true);
    BaseLoadBalancer lb = new BaseLoadBalancer(new DummyPing(), new AvailabilityFilteringRule());
    LoadBalancingHttpClient<ByteBuf, ByteBuf> lbObservables = RibbonTransport.newHttpClient(lb, config, errorHandler);
    Server goodServer = new Server("localhost:" + server.getPort());
    Server badServer = new Server("localhost:" + port);
    List<Server> servers = Lists.newArrayList(badServer, badServer, badServer, goodServer);
    lb.setServersList(servers);
    RetryHandler handler = new RequestSpecificRetryHandler(true, true, errorHandler, null) {
        @Override
        public boolean isRetriableException(Throwable e, boolean sameServer) {
            return true;
        }
    };
    Observable<Person> observableWithRetries = getPersonObservable(lbObservables.submit(request, handler, null));
    ObserverWithLatch<Person> observer = new ObserverWithLatch<Person>();
    observableWithRetries.subscribe(observer);
    observer.await();
    if (observer.error != null) {
        observer.error.printStackTrace();
    }
    server.shutdown();
    assertEquals("ribbon", observer.obj.name);
    assertEquals(2, observer.obj.age);
    ServerStats stats = lbObservables.getServerStats(badServer);
    assertEquals(2, stats.getTotalRequestsCount());
    assertEquals(0, stats.getActiveRequestsCount());
    assertEquals(2, stats.getSuccessiveConnectionFailureCount());
    stats = lbObservables.getServerStats(goodServer);
    assertEquals(1, stats.getTotalRequestsCount());
    assertEquals(0, stats.getActiveRequestsCount());
    assertEquals(0, stats.getSuccessiveConnectionFailureCount());
}
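The only difference from the previous test is the error handler: NettyHttpLoadBalancerErrorHandler(0, 3, true) instead of (1, 3, true), which is why the bad server records 2 requests here rather than 4. The constructor signature (int, int, boolean) is taken from the tests themselves; the parameter meaning below is an assumption that it mirrors a (retriesSameServer, retriesNextServer, retryEnabled) ordering.

// Assumed parameter order: (retriesOnSameServer, retriesOnNextServer, retryEnabled)
RetryHandler retrySameServerOnce = new NettyHttpLoadBalancerErrorHandler(1, 3, true); // one retry on the same server before failing over
RetryHandler noSameServerRetry   = new NettyHttpLoadBalancerErrorHandler(0, 3, true); // fail over to the next server immediately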
Use of com.netflix.loadbalancer.ServerStats in project ribbon by Netflix.
Class NettyClientTest, method testLoadBalancingObservablesWithReadTimeout:
@Test
public void testLoadBalancingObservablesWithReadTimeout() throws Exception {
    NettyHttpLoadBalancerErrorHandler errorHandler = new NettyHttpLoadBalancerErrorHandler(1, 3, true);
    MockWebServer server = new MockWebServer();
    String content = "{\"name\": \"ribbon\", \"age\": 2}";
    server.enqueue(new MockResponse().setResponseCode(200).setHeader("Content-type", "application/json").setBody(content));
    server.play();
    IClientConfig config = DefaultClientConfigImpl.getClientConfigWithDefaultValues().set(CommonClientConfigKey.ReadTimeout, 100);
    HttpClientRequest<ByteBuf> request = HttpClientRequest.createGet("/testAsync/readTimeout");
    BaseLoadBalancer lb = new BaseLoadBalancer(new DummyPing(), new AvailabilityFilteringRule());
    LoadBalancingHttpClient<ByteBuf, ByteBuf> lbObservables = RibbonTransport.newHttpClient(lb, config, errorHandler);
    Server goodServer = new Server("localhost:" + server.getPort());
    Server badServer = new Server("localhost:" + port);
    lb.setServersList(Lists.newArrayList(goodServer, badServer, badServer, goodServer));
    Observable<Person> observableWithRetries = getPersonObservable(lbObservables.submit(request));
    ObserverWithLatch<Person> observer = new ObserverWithLatch<Person>();
    observableWithRetries.subscribe(observer);
    observer.await();
    if (observer.error != null) {
        observer.error.printStackTrace();
    }
    assertEquals("ribbon", observer.obj.name);
    assertEquals(2, observer.obj.age);
    ServerStats stats = lbObservables.getServerStats(badServer);
    server.shutdown();
    final HttpClientListener listener = lbObservables.getListener();
    waitUntilTrueOrTimeout(1000, new Func0<Boolean>() {
        @Override
        public Boolean call() {
            return listener.getPoolReleases() == 5;
        }
    });
    assertEquals(0, listener.getPoolReuse());
    // two requests to bad server because retry same server is set to 1
    assertEquals(4, stats.getTotalRequestsCount());
    assertEquals(0, stats.getActiveRequestsCount());
    assertEquals(4, stats.getSuccessiveConnectionFailureCount());
    stats = lbObservables.getServerStats(goodServer);
    assertEquals(1, stats.getTotalRequestsCount());
    assertEquals(0, stats.getActiveRequestsCount());
    assertEquals(0, stats.getSuccessiveConnectionFailureCount());
}
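waitUntilTrueOrTimeout is a helper defined elsewhere in NettyClientTest and its body is not shown here. Purely as an illustration of the shape such a polling helper could take (an assumption, not the actual implementation), it might look like this:

// Hypothetical polling helper; the real one in NettyClientTest may differ.
private static void waitUntilTrueOrTimeout(int timeoutMillis, Func0<Boolean> condition) throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMillis;
    while (System.currentTimeMillis() < deadline) {
        if (condition.call()) {
            return;
        }
        Thread.sleep(20);
    }
    fail("Condition not met within " + timeoutMillis + " ms");
}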