Use of com.palantir.dialogue.EndpointChannel in project dialogue by palantir.
The class UserAgentEndpointChannelTest, method testServiceNameIsNotValidConjureAgent. The endpoint's serviceName ("Service_Name") is not a valid conjure agent, so only the base agent and the dialogue library agent should appear in the user-agent header.
@Test
public void testServiceNameIsNotValidConjureAgent() {
    EndpointChannel channel = UserAgentEndpointChannel.create(
            delegate,
            new Endpoint() {
                @Override
                public void renderPath(Map<String, String> _params, UrlBuilder _url) {}

                @Override
                public HttpMethod httpMethod() {
                    return HttpMethod.GET;
                }

                @Override
                public String serviceName() {
                    return "Service_Name";
                }

                @Override
                public String endpointName() {
                    return "endpoint";
                }

                @Override
                public String version() {
                    return "4.5.6";
                }
            },
            baseAgent);

    // Special case: In IDEs, tests are run against classes (not JARs) and thus don't carry versions.
    String dialogueVersion = Optional.ofNullable(Channel.class.getPackage().getImplementationVersion())
            .orElse("0.0.0");

    channel.execute(request);

    verify(delegate).execute(requestCaptor.capture());
    assertThat(requestCaptor.getValue().headerParams().get("user-agent"))
            .containsExactly("test-class/1.2.3 dialogue/" + dialogueVersion);
}
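For context, EndpointChannel exposes a single method, ListenableFuture<Response> execute(Request). Below is a minimal sketch of the decorator shape the test above exercises: a wrapper that stamps a header onto each request before delegating. The class name HeaderStampingChannel is illustrative and this is not the actual UserAgentEndpointChannel implementation; it assumes the Request.builder().from(...) and putHeaderParams(...) builder methods used elsewhere in dialogue.

import com.google.common.util.concurrent.ListenableFuture;
import com.palantir.dialogue.EndpointChannel;
import com.palantir.dialogue.Request;
import com.palantir.dialogue.Response;

// Illustrative decorator: copies the incoming request, adds a user-agent header, and forwards
// the augmented request to the delegate channel.
final class HeaderStampingChannel implements EndpointChannel {
    private final EndpointChannel delegate;
    private final String userAgent;

    HeaderStampingChannel(EndpointChannel delegate, String userAgent) {
        this.delegate = delegate;
        this.userAgent = userAgent;
    }

    @Override
    public ListenableFuture<Response> execute(Request request) {
        Request augmented = Request.builder()
                .from(request)
                .putHeaderParams("user-agent", userAgent)
                .build();
        return delegate.execute(augmented);
    }
}

In the test, the delegate is a mock whose captured request is inspected, which is why verify(delegate).execute(requestCaptor.capture()) can assert on the resulting header.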
Use of com.palantir.dialogue.EndpointChannel in project dialogue by palantir.
The class SimulationTest, method server_side_rate_limits_with_sticky_clients_steady_vs_bursty_client.
@SimulationCase
void server_side_rate_limits_with_sticky_clients_steady_vs_bursty_client(Strategy strategy) {
    // 1 server
    // 2 types of clients sharing a DialogueChannel:
    //   - a client that sends a request once a second
    //   - a client that burst-sends 10k requests instantly
    // Assuming:
    //   * server concurrency limit of 1
    //   * 5ms to serve a request
    //
    // Serving the bursty client by itself would take 50s. That is fine for that client, because it
    // is trying to do a lot. However, we should not make the slow-and-steady client wait 50s to send its request.
    int numServers = 1;
    int concurrencyLimit = 1;
    Duration responseTime = Duration.ofMillis(5);

    Duration benchmarkDuration = Duration.ofMinutes(1);

    Duration timeBetweenSlowAndSteadyRequests = Duration.ofSeconds(1);
    long numSlowAndSteady = benchmarkDuration.toNanos() / timeBetweenSlowAndSteadyRequests.toNanos();
    assertThat(numSlowAndSteady).isEqualTo(60);

    Duration timeBetweenBurstRequests = Duration.ofNanos(50);
    long numBurst = 10_000;

    long totalNumRequests = numSlowAndSteady + numBurst;
    assertThat(totalNumRequests).isEqualTo(10060);

    servers = servers(IntStream.range(0, numServers)
            .mapToObj(i -> SimulationServer.builder()
                    .serverName("node" + i)
                    .simulation(simulation)
                    .handler(h -> h.respond200UntilCapacity(429, concurrencyLimit).responseTime(responseTime))
                    .build())
            .toArray(SimulationServer[]::new));

    Supplier<Channel> stickyChannelSupplier = strategy.getSticky2NonReloading(simulation, servers.get());

    Benchmark builder = Benchmark.builder().simulation(simulation);
    EndpointChannel slowAndSteadyChannel =
            builder.addEndpointChannel("slowAndSteady", DEFAULT_ENDPOINT, stickyChannelSupplier.get());
    EndpointChannel oneShotBurstChannel =
            builder.addEndpointChannel("oneShotBurst", DEFAULT_ENDPOINT, stickyChannelSupplier.get());

    Stream<ScheduledRequest> slowAndSteadyChannelRequests = builder
            .infiniteRequests(timeBetweenSlowAndSteadyRequests, () -> slowAndSteadyChannel)
            .limit(numSlowAndSteady);
    Stream<ScheduledRequest> oneShotBurstChannelRequests = builder
            .infiniteRequests(timeBetweenBurstRequests, () -> oneShotBurstChannel)
            .limit(numBurst);

    st = strategy;
    result = builder.mergeRequestStreams(slowAndSteadyChannelRequests, oneShotBurstChannelRequests)
            .stopWhenNumReceived(totalNumRequests)
            .abortAfter(benchmarkDuration.plus(Duration.ofMinutes(1)))
            .run();
}
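As an aside, addEndpointChannel above pairs a fixed endpoint with a channel obtained from the sticky supplier. A minimal sketch of that binding idea, assuming only the public Channel and EndpointChannel interfaces (the helper name bindEndpoint is illustrative, not how the benchmark implements it):

// Illustrative helper (not from the dialogue sources): EndpointChannel is a single-method
// interface, ListenableFuture<Response> execute(Request), so a Channel plus a fixed Endpoint
// can be adapted with a lambda that forwards each request to the same endpoint.
static EndpointChannel bindEndpoint(Channel channel, Endpoint endpoint) {
    return request -> channel.execute(endpoint, request);
}

For example, bindEndpoint(stickyChannelSupplier.get(), DEFAULT_ENDPOINT) would give each client type its own per-endpoint channel over its own sticky Channel, which is the shape the test relies on to keep the bursty client from starving the slow-and-steady one.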