Use of java.io.UncheckedIOException in project jetty.project by eclipse.
From class SlowClientsTest, method testSlowClientsWithSmallThreadPool:
@Test(timeout = 10000)
public void testSlowClientsWithSmallThreadPool() throws Exception {
    File keystore = MavenTestingUtils.getTestResourceFile("keystore");
    SslContextFactory sslContextFactory = new SslContextFactory();
    sslContextFactory.setKeyStorePath(keystore.getAbsolutePath());
    sslContextFactory.setKeyStorePassword("storepwd");
    sslContextFactory.setKeyManagerPassword("keypwd");
    int maxThreads = 6;
    int contentLength = 8 * 1024 * 1024;
    QueuedThreadPool serverThreads = new QueuedThreadPool(maxThreads);
    serverThreads.setDetailedDump(true);
    Server server = new Server(serverThreads);
    try {
        ServerConnector connector = new ServerConnector(server, 1, 1, sslContextFactory);
        connector.setPort(8888);
        server.addConnector(connector);
        server.setHandler(new AbstractHandler() {
            @Override
            public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
                baseRequest.setHandled(true);
                logger.info("SERVING {}", target);
                // Write some big content.
                response.getOutputStream().write(new byte[contentLength]);
                logger.info("SERVED {}", target);
            }
        });
        server.start();
        SSLContext sslContext = sslContextFactory.getSslContext();
        CompletableFuture[] futures = new CompletableFuture[2 * maxThreads];
        ExecutorService executor = Executors.newFixedThreadPool(futures.length);
        for (int i = 0; i < futures.length; i++) {
            int k = i;
            futures[i] = CompletableFuture.runAsync(() -> {
                try (SSLSocket socket = (SSLSocket) sslContext.getSocketFactory().createSocket("localhost", connector.getLocalPort())) {
                    socket.setSoTimeout(contentLength / 1024);
                    OutputStream output = socket.getOutputStream();
                    String target = "/" + k;
                    String request = "GET " + target + " HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: close\r\n" + "\r\n";
                    output.write(request.getBytes(StandardCharsets.UTF_8));
                    output.flush();
                    while (serverThreads.getIdleThreads() > 0)
                        Thread.sleep(50);
                    InputStream input = socket.getInputStream();
                    while (true) {
                        int read = input.read();
                        if (read < 0)
                            break;
                    }
                    logger.info("FINISHED {}", target);
                } catch (IOException x) {
                    throw new UncheckedIOException(x);
                } catch (InterruptedException x) {
                    throw new UncheckedIOException(new InterruptedIOException());
                }
            }, executor);
        }
        CompletableFuture.allOf(futures).join();
    } finally {
        server.stop();
    }
}
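The catch blocks at the end of the client lambda are the reason UncheckedIOException appears here: Runnable.run() declares no checked exceptions, so the task has to translate IOException (and InterruptedException) into an unchecked type before it can be submitted via CompletableFuture.runAsync. A minimal, self-contained sketch of that idiom, assuming a hypothetical server on localhost:8080:

import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.net.Socket;
import java.util.concurrent.CompletableFuture;

public class UncheckedIoInRunnable {
    public static void main(String[] args) {
        // Runnable.run() cannot throw checked exceptions, so IOException is
        // rethrown as UncheckedIOException to satisfy the lambda's signature.
        CompletableFuture<Void> future = CompletableFuture.runAsync(() -> {
            try (Socket socket = new Socket("localhost", 8080)) { // hypothetical endpoint
                InputStream input = socket.getInputStream();
                while (input.read() >= 0) {
                    // drain the response until EOF
                }
            } catch (IOException x) {
                throw new UncheckedIOException(x);
            }
        });
        // join() surfaces the failure as a CompletionException whose cause is
        // the UncheckedIOException thrown inside the task.
        future.join();
    }
}

Wrapping rather than swallowing the exception keeps the original stack trace reachable via getCause() once the future is joined.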
Use of java.io.UncheckedIOException in project elasticsearch by elastic.
From class AbstractSimpleTransportTestCase, method testTcpHandshakeConnectionReset:
public void testTcpHandshakeConnectionReset() throws IOException, InterruptedException {
    try (ServerSocket socket = new MockServerSocket()) {
        socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1);
        socket.setReuseAddress(true);
        DiscoveryNode dummy = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(), socket.getLocalPort()), emptyMap(), emptySet(), version0);
        Thread t = new Thread() {
            @Override
            public void run() {
                try (Socket accept = socket.accept()) {
                    if (randomBoolean()) {
                        // sometimes wait until the other side sends the message
                        accept.getInputStream().read();
                    }
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            }
        };
        t.start();
        ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
        builder.addConnections(1, TransportRequestOptions.Type.BULK, TransportRequestOptions.Type.PING, TransportRequestOptions.Type.RECOVERY, TransportRequestOptions.Type.REG, TransportRequestOptions.Type.STATE);
        builder.setHandshakeTimeout(TimeValue.timeValueHours(1));
        ConnectTransportException ex = expectThrows(ConnectTransportException.class, () -> serviceA.connectToNode(dummy, builder.build()));
        assertEquals(ex.getMessage(), "[][" + dummy.getAddress() + "] general node connection failure");
        assertThat(ex.getCause().getMessage(), startsWith("handshake failed"));
        t.join();
    }
}
Use of java.io.UncheckedIOException in project elasticsearch by elastic.
From class Mapping, method toString:
@Override
public String toString() {
    try {
        XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
        toXContent(builder, new ToXContent.MapParams(emptyMap()));
        return builder.endObject().string();
    } catch (IOException bogus) {
        throw new UncheckedIOException(bogus);
    }
}
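Mapping.toString() is a very common shape for this exception: Object.toString() cannot declare IOException, yet the XContentBuilder API throws it even when writing to an in-memory buffer, so the "bogus" exception is rethrown unchecked. A generic sketch of the same pattern, with a hypothetical JsonValue class standing in for Mapping and a plain Appendable standing in for the builder:

import java.io.IOException;
import java.io.StringWriter;
import java.io.UncheckedIOException;

public class JsonValue {
    private final String name;
    private final int count;

    public JsonValue(String name, int count) {
        this.name = name;
        this.count = count;
    }

    // Hypothetical serializer whose API, like XContentBuilder, only throws IOException.
    private void writeTo(Appendable out) throws IOException {
        out.append("{\"name\":\"").append(name)
           .append("\",\"count\":").append(Integer.toString(count)).append("}");
    }

    @Override
    public String toString() {
        // toString() cannot declare checked exceptions, so the serializer's
        // IOException is wrapped in UncheckedIOException.
        try {
            StringWriter out = new StringWriter();
            writeTo(out);
            return out.toString();
        } catch (IOException bogus) {
            throw new UncheckedIOException(bogus);
        }
    }
}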
Use of java.io.UncheckedIOException in project elasticsearch by elastic.
From class DfsQueryPhaseTests, method testFailPhaseOnException:
public void testFailPhaseOnException() throws IOException {
    AtomicArray<DfsSearchResult> results = new AtomicArray<>(2);
    AtomicReference<AtomicArray<QuerySearchResultProvider>> responseRef = new AtomicReference<>();
    results.set(0, new DfsSearchResult(1, new SearchShardTarget("node1", new Index("test", "na"), 0)));
    results.set(1, new DfsSearchResult(2, new SearchShardTarget("node2", new Index("test", "na"), 0)));
    results.get(0).termsStatistics(new Term[0], new TermStatistics[0]);
    results.get(1).termsStatistics(new Term[0], new TermStatistics[0]);
    SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null);
    SearchTransportService searchTransportService = new SearchTransportService(Settings.builder().put("search.remote.connect", false).build(), null, null) {
        @Override
        public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, ActionListener<QuerySearchResult> listener) {
            if (request.id() == 1) {
                QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0));
                queryResult.topDocs(new TopDocs(1, new ScoreDoc[] { new ScoreDoc(42, 1.0F) }, 2.0F), new DocValueFormat[0]);
                // the size of the result set
                queryResult.size(2);
                listener.onResponse(queryResult);
            } else if (request.id() == 2) {
                throw new UncheckedIOException(new MockDirectoryWrapper.FakeIOException());
            } else {
                fail("no such request ID: " + request.id());
            }
        }
    };
    MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
    mockSearchPhaseContext.searchTransport = searchTransportService;
    DfsQueryPhase phase = new DfsQueryPhase(results, controller, (response) -> new SearchPhase("test") {
        @Override
        public void run() throws IOException {
            responseRef.set(response.results);
        }
    }, mockSearchPhaseContext);
    assertEquals("dfs_query", phase.getName());
    expectThrows(UncheckedIOException.class, () -> phase.run());
    // phase execution will clean up on the contexts
    assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
}
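Here UncheckedIOException is used in the other direction: the stubbed transport throws it for request id 2, and the test asserts that DfsQueryPhase.run() lets it propagate (expectThrows is a helper inherited from the Elasticsearch/Lucene test base classes). A minimal stand-alone sketch of the same kind of assertion, written against JUnit 5's analogous assertThrows; the class and method names below are made up for illustration:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

import java.io.IOException;
import java.io.UncheckedIOException;
import org.junit.jupiter.api.Test;

class UncheckedIoPropagationTest {

    // Stand-in for a phase/collaborator that wraps a simulated I/O failure.
    static void runPhase() {
        throw new UncheckedIOException(new IOException("simulated I/O failure"));
    }

    @Test
    void failureSurfacesAsUncheckedIOException() {
        // assertThrows returns the caught exception so the wrapped cause can be inspected.
        UncheckedIOException e = assertThrows(UncheckedIOException.class, UncheckedIoPropagationTest::runPhase);
        assertEquals("simulated I/O failure", e.getCause().getMessage());
    }
}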
Use of java.io.UncheckedIOException in project elasticsearch by elastic.
From class DefaultSearchContext, method preProcess:
/**
 * Should be called before executing the main query and after all other parameters have been set.
 */
@Override
public void preProcess(boolean rewrite) {
    if (hasOnlySuggest()) {
        return;
    }
    long from = from() == -1 ? 0 : from();
    long size = size() == -1 ? 10 : size();
    long resultWindow = from + size;
    int maxResultWindow = indexService.getIndexSettings().getMaxResultWindow();
    if (resultWindow > maxResultWindow) {
        if (scrollContext == null) {
            throw new QueryPhaseExecutionException(this, "Result window is too large, from + size must be less than or equal to: [" + maxResultWindow + "] but was [" + resultWindow + "]. See the scroll api for a more efficient way to request large data sets. " + "This limit can be set by changing the [" + IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey() + "] index level setting.");
        }
        throw new QueryPhaseExecutionException(this, "Batch size is too large, size must be less than or equal to: [" + maxResultWindow + "] but was [" + resultWindow + "]. Scroll batch sizes cost as much memory as result windows so they are controlled by the [" + IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey() + "] index level setting.");
    }
    if (rescore != null) {
        int maxWindow = indexService.getIndexSettings().getMaxRescoreWindow();
        for (RescoreSearchContext rescoreContext : rescore) {
            if (rescoreContext.window() > maxWindow) {
                throw new QueryPhaseExecutionException(this, "Rescore window [" + rescoreContext.window() + "] is too large. It must " + "be less than [" + maxWindow + "]. This prevents allocating massive heaps for storing the results to be " + "rescored. This limit can be set by changing the [" + IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey() + "] index level setting.");
            }
        }
    }
    if (sliceBuilder != null) {
        int sliceLimit = indexService.getIndexSettings().getMaxSlicesPerScroll();
        int numSlices = sliceBuilder.getMax();
        if (numSlices > sliceLimit) {
            throw new QueryPhaseExecutionException(this, "The number of slices [" + numSlices + "] is too large. It must " + "be less than [" + sliceLimit + "]. This limit can be set by changing the [" + IndexSettings.MAX_SLICES_PER_SCROLL.getKey() + "] index level setting.");
        }
    }
    // initialize the filtering alias based on the provided filters
    try {
        final QueryBuilder queryBuilder = request.filteringAliases();
        aliasFilter = queryBuilder == null ? null : queryBuilder.toFilter(queryShardContext);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
    if (query() == null) {
        parsedQuery(ParsedQuery.parsedMatchAllQuery());
    }
    if (queryBoost() != AbstractQueryBuilder.DEFAULT_BOOST) {
        parsedQuery(new ParsedQuery(new FunctionScoreQuery(query(), new WeightFactorFunction(queryBoost)), parsedQuery()));
    }
    this.query = buildFilteredQuery(query);
    if (rewrite) {
        try {
            this.query = searcher.rewrite(query);
        } catch (IOException e) {
            throw new QueryPhaseExecutionException(this, "Failed to rewrite main query", e);
        }
    }
}
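preProcess wraps the IOException coming out of queryBuilder.toFilter(...) in the same way, pushing the failure up to whatever drives the search phase. Because UncheckedIOException.getCause() is declared to return IOException, a caller that wants to stay on a checked-exception API can also unwrap it again. A small, generic sketch of that unwrapping, using Files.lines (which the JDK documents as reporting late read errors via UncheckedIOException); the file path is arbitrary:

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class UnwrapUncheckedIo {
    // Files.lines() reads lazily, so I/O errors during iteration arrive as
    // UncheckedIOException; getCause() gives back the original IOException.
    static List<String> readAllLines(Path path) throws IOException {
        try (Stream<String> lines = Files.lines(path)) {
            return lines.collect(Collectors.toList());
        } catch (UncheckedIOException e) {
            throw e.getCause();
        }
    }

    public static void main(String[] args) throws IOException {
        // Arbitrary example path; replace with any readable text file.
        System.out.println(readAllLines(Paths.get("example.txt")));
    }
}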