Use of com.linkedin.r2.filter.compression.ClientCompressionFilter in project rest.li by linkedin.
From the class TestRestCompressionEcho, method compressionEchoData:
@DataProvider
public Object[][] compressionEchoData() {
  EncodingType[] encodings = new EncodingType[] { EncodingType.GZIP, EncodingType.SNAPPY, EncodingType.IDENTITY };
  Object[][] args = new Object[4 * encodings.length * encodings.length][2];
  int cur = 0;
  // test data large enough to trigger compression
  for (EncodingType requestEncoding : encodings) {
    for (EncodingType acceptEncoding : encodings) {
      RestFilter clientCompressionFilter = new ClientCompressionFilter(requestEncoding,
          new CompressionConfig(THRESHOLD),
          new EncodingType[] { acceptEncoding },
          new CompressionConfig(THRESHOLD),
          Arrays.asList(new String[] { "*" }));
      TransportClientFactory factory = new HttpClientFactory.Builder()
          .setFilterChain(FilterChains.createRestChain(clientCompressionFilter))
          .build();
      Client http1Client = new TransportClientAdapter(factory.getClient(getHttp1ClientProperties()), REST_OVER_STREAM);
      Client http2Client = new TransportClientAdapter(factory.getClient(getHttp2ClientProperties()), REST_OVER_STREAM);
      args[cur][0] = http1Client;
      args[cur][1] = LARGE_BYTES_NUM;
      args[cur + 1][0] = http2Client;
      args[cur + 1][1] = LARGE_BYTES_NUM;
      cur += 2;
      _clientFactories.add(factory);
      _clients.add(http1Client);
      _clients.add(http2Client);
    }
  }
  // test data that won't trigger compression
  for (EncodingType requestEncoding : encodings) {
    for (EncodingType acceptEncoding : encodings) {
      RestFilter clientCompressionFilter = new ClientCompressionFilter(requestEncoding,
          new CompressionConfig(THRESHOLD),
          new EncodingType[] { acceptEncoding },
          new CompressionConfig(THRESHOLD),
          Arrays.asList(new String[] { "*" }));
      TransportClientFactory factory = new HttpClientFactory.Builder()
          .setFilterChain(FilterChains.createRestChain(clientCompressionFilter))
          .build();
      Client http1Client = new TransportClientAdapter(factory.getClient(getHttp1ClientProperties()), REST_OVER_STREAM);
      Client http2Client = new TransportClientAdapter(factory.getClient(getHttp2ClientProperties()), REST_OVER_STREAM);
      args[cur][0] = http1Client;
      args[cur][1] = SMALL_BYTES_NUM;
      args[cur + 1][0] = http2Client;
      args[cur + 1][1] = SMALL_BYTES_NUM;
      cur += 2;
      _clientFactories.add(factory);
      _clients.add(http1Client);
      _clients.add(http2Client);
    }
  }
  return args;
}
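A test method consuming this provider would send an entity of the given size through the client and assert that the server echoes it back unchanged. The following is only a minimal sketch, not the actual test code: the ECHO_URI constant and the 30-second timeout are assumptions, and the snippet relies on the same imports as the surrounding class.

@Test(dataProvider = "compressionEchoData")
public void testCompressionEcho(Client client, int bytesNum) throws Exception {
  // Fill an entity of the requested size; LARGE_BYTES_NUM exceeds THRESHOLD, SMALL_BYTES_NUM does not.
  byte[] entity = new byte[bytesNum];
  Arrays.fill(entity, (byte) 'a');
  // ECHO_URI is a placeholder for the echo server's address.
  RestRequest request = new RestRequestBuilder(URI.create(ECHO_URI))
      .setMethod(RestMethod.POST)
      .setEntity(entity)
      .build();
  RestResponse response = client.restRequest(request).get(30, TimeUnit.SECONDS);
  // Whatever encodings the filter negotiated, the decoded echo must equal the original entity.
  Assert.assertEquals(response.getEntity().copyBytes(), entity);
}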
Use of com.linkedin.r2.filter.compression.ClientCompressionFilter in project rest.li by linkedin.
From the class TestRequestCompression, method requestCompressionData:
@DataProvider
public Object[][] requestCompressionData() {
  StreamEncodingType[] encodings = new StreamEncodingType[] { StreamEncodingType.GZIP, StreamEncodingType.DEFLATE, StreamEncodingType.SNAPPY_FRAMED, StreamEncodingType.BZIP2 };
  String[] protocols = new String[] { HttpProtocolVersion.HTTP_1_1.name(), HttpProtocolVersion.HTTP_2.name() };
  Object[][] args = new Object[encodings.length * protocols.length][2];
  int cur = 0;
  for (StreamEncodingType requestEncoding : encodings) {
    for (String protocol : protocols) {
      StreamFilter clientCompressionFilter = new ClientStreamCompressionFilter(requestEncoding,
          new CompressionConfig(THRESHOLD),
          null,
          new CompressionConfig(THRESHOLD),
          Arrays.asList(new String[] { "*" }),
          _executor);
      TransportClientFactory factory = new HttpClientFactory.Builder()
          .setFilterChain(FilterChains.createStreamChain(clientCompressionFilter))
          .build();
      HashMap<String, String> properties = new HashMap<>();
      properties.put(HttpClientFactory.HTTP_PROTOCOL_VERSION, protocol);
      Client client = new TransportClientAdapter(factory.getClient(properties), true);
      args[cur][0] = client;
      args[cur][1] = URI.create("/" + requestEncoding.getHttpName());
      cur++;
      _clientFactories.add(factory);
      _clients.add(client);
    }
  }
  return args;
}
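Each row pairs a client with a URI whose path is the request encoding's HTTP name (for example /x-snappy-framed), which lets the server verify that the Content-Encoding it received matches the encoding the test intended. The following is a rough sketch of how a test might drive one row, assuming an entity above THRESHOLD so the filter actually compresses; SERVER_BASE_URI, the timeout, and the status assertion are illustrative, not taken from the real test.

@Test(dataProvider = "requestCompressionData")
public void testRequestCompression(Client client, URI uri) throws Exception {
  // SERVER_BASE_URI is a placeholder for the test server's address; the provider's URI is only a path.
  URI requestUri = URI.create(SERVER_BASE_URI).resolve(uri);
  // Make the entity larger than THRESHOLD so ClientStreamCompressionFilter compresses the request body.
  byte[] entity = new byte[THRESHOLD * 2];
  Arrays.fill(entity, (byte) 'b');
  StreamRequest request = new StreamRequestBuilder(requestUri)
      .setMethod(RestMethod.POST)
      .build(EntityStreams.newEntityStream(new ByteStringWriter(ByteString.copy(entity))));
  FutureCallback<StreamResponse> callback = new FutureCallback<>();
  client.streamRequest(request, callback);
  // A handler at the per-encoding path can check that the request arrived with the matching
  // Content-Encoding header; a 200 response then means compression was applied as expected.
  Assert.assertEquals(callback.get(30, TimeUnit.SECONDS).getStatus(), RestStatus.OK);
}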
Use of com.linkedin.r2.filter.compression.ClientCompressionFilter in project rest.li by linkedin.
From the class TestRestCompressionEcho, method compressionEchoData:
@DataProvider
public Object[][] compressionEchoData() throws Exception {
  EncodingType[] encodings = new EncodingType[] { EncodingType.GZIP, EncodingType.SNAPPY, EncodingType.IDENTITY };
  Object[][] args = new Object[2 * encodings.length * encodings.length][2];
  int cur = 0;
  // test data large enough to trigger compression
  for (EncodingType requestEncoding : encodings) {
    for (EncodingType acceptEncoding : encodings) {
      RestFilter clientCompressionFilter = new ClientCompressionFilter(requestEncoding,
          new CompressionConfig(THRESHOLD),
          new EncodingType[] { acceptEncoding },
          new CompressionConfig(THRESHOLD),
          Arrays.asList(new String[] { "*" }));
      Client client = _clientProvider.createClient(FilterChains.createRestChain(clientCompressionFilter), getHttpClientProperties());
      args[cur][0] = client;
      args[cur][1] = LARGE_BYTES_NUM;
      cur++;
      _clients.add(client);
    }
  }
  // test data that won't trigger compression
  for (EncodingType requestEncoding : encodings) {
    for (EncodingType acceptEncoding : encodings) {
      RestFilter clientCompressionFilter = new ClientCompressionFilter(requestEncoding,
          new CompressionConfig(THRESHOLD),
          new EncodingType[] { acceptEncoding },
          new CompressionConfig(THRESHOLD),
          Arrays.asList(new String[] { "*" }));
      Client client = _clientProvider.createClient(FilterChains.createRestChain(clientCompressionFilter), getHttpClientProperties());
      args[cur][0] = client;
      args[cur][1] = SMALL_BYTES_NUM;
      cur++;
      _clients.add(client);
    }
  }
  return args;
}
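Since every provider row stores its client in _clients, the class needs a matching teardown. A minimal sketch, assuming the standard asynchronous r2 shutdown; the real test class may also shut down _clientProvider or the underlying factories.

@AfterClass
public void tearDown() throws Exception {
  for (Client client : _clients) {
    // Client.shutdown is asynchronous; block on a callback so no connections leak into later suites.
    FutureCallback<None> callback = new FutureCallback<>();
    client.shutdown(callback);
    callback.get(30, TimeUnit.SECONDS);
  }
  _clients.clear();
}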
Use of com.linkedin.r2.filter.compression.ClientCompressionFilter in project rest.li by linkedin.
From the class TestCompressionEcho, method compressionEchoData:
@DataProvider
public Object[][] compressionEchoData() throws Exception {
  StreamEncodingType[] encodings = new StreamEncodingType[] { StreamEncodingType.GZIP, StreamEncodingType.DEFLATE, StreamEncodingType.SNAPPY_FRAMED, StreamEncodingType.BZIP2, StreamEncodingType.IDENTITY };
  Object[][] args = new Object[2 * encodings.length * encodings.length][2];
  int cur = 0;
  // test data large enough to trigger compression
  for (StreamEncodingType requestEncoding : encodings) {
    for (StreamEncodingType acceptEncoding : encodings) {
      StreamFilter clientCompressionFilter = new ClientStreamCompressionFilter(requestEncoding,
          new CompressionConfig(THRESHOLD),
          new StreamEncodingType[] { acceptEncoding },
          new CompressionConfig(THRESHOLD),
          Arrays.asList(new String[] { "*" }),
          _executor);
      Client client = createClient(FilterChains.createStreamChain(clientCompressionFilter));
      args[cur][0] = client;
      args[cur][1] = LARGE_BYTES_NUM;
      cur++;
      _clients.add(client);
    }
  }
  // test data that won't trigger compression
  for (StreamEncodingType requestEncoding : encodings) {
    for (StreamEncodingType acceptEncoding : encodings) {
      StreamFilter clientCompressionFilter = new ClientStreamCompressionFilter(requestEncoding,
          new CompressionConfig(THRESHOLD),
          new StreamEncodingType[] { acceptEncoding },
          new CompressionConfig(THRESHOLD),
          Arrays.asList(new String[] { "*" }),
          _executor);
      Client client = createClient(FilterChains.createStreamChain(clientCompressionFilter));
      args[cur][0] = client;
      args[cur][1] = SMALL_BYTES_NUM;
      cur++;
      _clients.add(client);
    }
  }
  return args;
}
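For reference, the six-argument ClientStreamCompressionFilter constructor used above takes a request-side and a response-side CompressionConfig plus the list of operations for which compressed responses are requested. Below is a standalone sketch with the arguments annotated; the values shown are illustrative, and THRESHOLD and _executor come from the surrounding test class.

StreamFilter filter = new ClientStreamCompressionFilter(
    StreamEncodingType.GZIP,                                       // encoding applied to outgoing request entities
    new CompressionConfig(THRESHOLD),                              // compress requests only above this byte threshold
    new StreamEncodingType[] { StreamEncodingType.SNAPPY_FRAMED }, // encodings offered to the server via Accept-Encoding
    new CompressionConfig(THRESHOLD),                              // response-side threshold configuration
    Arrays.asList("*"),                                            // operations to request compressed responses for ("*" = all)
    _executor);                                                    // executor that runs streaming (de)compression work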
Use of com.linkedin.r2.filter.compression.ClientCompressionFilter in project rest.li by linkedin.
From the class TestClientStreamCompressionFilter, method testRequestCompressionRules:
@Test(dataProvider = "requestData")
public void testRequestCompressionRules(CompressionConfig requestCompressionConfig, CompressionOption requestCompressionOverride, boolean headerShouldBePresent, String operation)
    throws CompressionException, URISyntaxException, InterruptedException, ExecutionException, TimeoutException {
  Executor executor = Executors.newCachedThreadPool();
  ClientStreamCompressionFilter clientCompressionFilter = new ClientStreamCompressionFilter(StreamEncodingType.GZIP.getHttpName(),
      requestCompressionConfig,
      ACCEPT_COMPRESSIONS,
      new CompressionConfig(Integer.MAX_VALUE),
      Arrays.asList(ClientCompressionHelper.COMPRESS_ALL_RESPONSES_INDICATOR),
      executor);
  // The entity should be compressible for this test.
  int original = 100;
  byte[] entity = new byte[original];
  Arrays.fill(entity, (byte) 'A');
  StreamRequest streamRequest = new StreamRequestBuilder(new URI(URI))
      .setMethod(RestMethod.POST)
      .build(EntityStreams.newEntityStream(new ByteStringWriter(ByteString.copy(entity))));
  int compressed = EncodingType.GZIP.getCompressor().deflate(new ByteArrayInputStream(entity)).length;
  RequestContext context = new RequestContext();
  if (operation != null) {
    context.putLocalAttr(R2Constants.OPERATION, operation);
  }
  context.putLocalAttr(R2Constants.REQUEST_COMPRESSION_OVERRIDE, requestCompressionOverride);
  int entityLength = headerShouldBePresent ? compressed : original;
  FutureCallback<ByteString> callback = new FutureCallback<>();
  FullEntityReader reader = new FullEntityReader(callback);
  HeaderCaptureFilter captureFilter = new HeaderCaptureFilter(HttpConstants.CONTENT_ENCODING, headerShouldBePresent, entityLength, reader);
  clientCompressionFilter.onStreamRequest(streamRequest, context, Collections.<String, String>emptyMap(), captureFilter);
  ByteString entityRead = callback.get(10, TimeUnit.SECONDS);
  Assert.assertEquals(entityRead.length(), entityLength);
}
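The per-request behavior this test exercises is driven by the R2Constants.REQUEST_COMPRESSION_OVERRIDE local attribute. Outside a test, a caller could override the configured threshold for a single request in the same way; a minimal sketch, assuming client, request, and callback already exist and that the CompressionOption values named here are available.

// Force compression for this one request even if the entity is below the configured threshold;
// CompressionOption.FORCE_OFF would suppress it the same way.
RequestContext context = new RequestContext();
context.putLocalAttr(R2Constants.REQUEST_COMPRESSION_OVERRIDE, CompressionOption.FORCE_ON);
client.restRequest(request, context, callback);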