Use of java.util.concurrent.ArrayBlockingQueue in project jetty.project by eclipse.
The class SelectChannelEndPointTest, method testRejectedExecution.
// TODO make this test reliable
@Test
@Ignore
public void testRejectedExecution() throws Exception {
    _manager.stop();
    _threadPool.stop();
    final CountDownLatch latch = new CountDownLatch(1);
    BlockingQueue<Runnable> q = new ArrayBlockingQueue<>(4);
    _threadPool = new QueuedThreadPool(4, 4, 60000, q);
    _manager = new SelectorManager(_threadPool, _scheduler, 1) {
        @Override
        protected EndPoint newEndPoint(SelectableChannel channel, ManagedSelector selector, SelectionKey selectionKey) throws IOException {
            SocketChannelEndPoint endp = new SocketChannelEndPoint(channel, selector, selectionKey, getScheduler());
            _lastEndPoint = endp;
            _lastEndPointLatch.countDown();
            return endp;
        }
        @Override
        public Connection newConnection(SelectableChannel channel, EndPoint endpoint, Object attachment) throws IOException {
            return new TestConnection(endpoint, latch);
        }
    };
    _threadPool.start();
    _manager.start();
    AtomicInteger timeout = new AtomicInteger();
    AtomicInteger rejections = new AtomicInteger();
    AtomicInteger echoed = new AtomicInteger();
    CountDownLatch closed = new CountDownLatch(20);
    for (int i = 0; i < 20; i++) {
        new Thread() {
            public void run() {
                try (Socket client = newClient()) {
                    client.setSoTimeout(5000);
                    SocketChannel server = _connector.accept();
                    server.configureBlocking(false);
                    _manager.accept(server);
                    // Write client to server
                    client.getOutputStream().write("HelloWorld".getBytes(StandardCharsets.UTF_8));
                    client.getOutputStream().flush();
                    client.shutdownOutput();
                    // Verify echo server to client
                    for (char c : "HelloWorld".toCharArray()) {
                        int b = client.getInputStream().read();
                        assertTrue(b > 0);
                        assertEquals(c, (char) b);
                    }
                    assertEquals(-1, client.getInputStream().read());
                    echoed.incrementAndGet();
                } catch (SocketTimeoutException x) {
                    x.printStackTrace();
                    timeout.incrementAndGet();
                } catch (Throwable x) {
                    rejections.incrementAndGet();
                } finally {
                    closed.countDown();
                }
            }
        }.start();
    }
    // unblock the handling
    latch.countDown();
    // wait for all clients to complete or fail
    closed.await();
    // assert some clients must have been rejected
    Assert.assertThat(rejections.get(), Matchers.greaterThan(0));
    // but not all of them
    Assert.assertThat(rejections.get(), Matchers.lessThan(20));
    // none should have timed out
    Assert.assertThat(timeout.get(), Matchers.equalTo(0));
    // and the rest should have worked
    Assert.assertThat(echoed.get(), Matchers.equalTo(20 - rejections.get()));
    // and the selector is still working for new requests
    try (Socket client = newClient()) {
        client.setSoTimeout(5000);
        SocketChannel server = _connector.accept();
        server.configureBlocking(false);
        _manager.accept(server);
        // Write client to server
        client.getOutputStream().write("HelloWorld".getBytes(StandardCharsets.UTF_8));
        client.getOutputStream().flush();
        client.shutdownOutput();
        // Verify echo server to client
        for (char c : "HelloWorld".toCharArray()) {
            int b = client.getInputStream().read();
            assertTrue(b > 0);
            assertEquals(c, (char) b);
        }
        assertEquals(-1, client.getInputStream().read());
    }
}
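In this test the QueuedThreadPool is backed by a bounded ArrayBlockingQueue of capacity 4, so once the four worker threads are busy and the queue is full, further jobs are rejected and some of the 20 client threads fail and count a rejection. Below is a minimal, self-contained sketch of the same saturation pattern using a plain ThreadPoolExecutor instead of Jetty's classes; the class and variable names are illustrative, not taken from the Jetty code.

import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;

// Minimal sketch (not the Jetty test itself): a bounded ArrayBlockingQueue in
// front of a fixed-size pool causes RejectedExecutionException once both the
// workers and the queue are saturated.
public class BoundedQueueRejectionSketch {
    public static void main(String[] args) throws Exception {
        BlockingQueue<Runnable> queue = new ArrayBlockingQueue<>(4);
        ThreadPoolExecutor pool = new ThreadPoolExecutor(4, 4, 60, TimeUnit.SECONDS, queue);
        CountDownLatch release = new CountDownLatch(1);
        AtomicInteger rejected = new AtomicInteger();
        for (int i = 0; i < 20; i++) {
            try {
                pool.execute(() -> {
                    try {
                        // block until "the handling is unblocked", as the test's latch does
                        release.await();
                    } catch (InterruptedException ignored) {
                        Thread.currentThread().interrupt();
                    }
                });
            } catch (RejectedExecutionException x) {
                // 4 running + 4 queued, so 12 of the 20 submissions are rejected
                rejected.incrementAndGet();
            }
        }
        release.countDown();
        pool.shutdown();
        pool.awaitTermination(5, TimeUnit.SECONDS);
        System.out.println("rejected=" + rejected.get()); // prints 12 with these sizes
    }
}

The test's assertions mirror this arithmetic: some, but not all, clients are rejected, and the rest are echoed successfully.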
Use of java.util.concurrent.ArrayBlockingQueue in project druid by druid-io.
The class ChainedExecutionQueryRunnerTest, method testQueryTimeout.
@Test(timeout = 60000)
public void testQueryTimeout() throws Exception {
    ExecutorService exec = PrioritizedExecutorService.create(new Lifecycle(), new DruidProcessingConfig() {
        @Override
        public String getFormatString() {
            return "test";
        }
        @Override
        public int getNumThreads() {
            return 2;
        }
    });
    final CountDownLatch queriesStarted = new CountDownLatch(2);
    final CountDownLatch queriesInterrupted = new CountDownLatch(2);
    final CountDownLatch queryIsRegistered = new CountDownLatch(1);
    Capture<ListenableFuture> capturedFuture = new Capture<>();
    QueryWatcher watcher = EasyMock.createStrictMock(QueryWatcher.class);
    watcher.registerQuery(EasyMock.<Query>anyObject(), EasyMock.and(EasyMock.<ListenableFuture>anyObject(), EasyMock.capture(capturedFuture)));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {
        @Override
        public Void answer() throws Throwable {
            queryIsRegistered.countDown();
            return null;
        }
    }).once();
    EasyMock.replay(watcher);
    ArrayBlockingQueue<DyingQueryRunner> interrupted = new ArrayBlockingQueue<>(3);
    Set<DyingQueryRunner> runners = Sets.newHashSet(new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted), new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted), new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted));
    ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(exec, watcher, Lists.<QueryRunner<Integer>>newArrayList(runners));
    HashMap<String, Object> context = new HashMap<String, Object>();
    final Sequence seq = chainedRunner.run(Druids.newTimeseriesQueryBuilder().dataSource("test").intervals("2014/2015").aggregators(Lists.<AggregatorFactory>newArrayList(new CountAggregatorFactory("count"))).context(ImmutableMap.<String, Object>of(QueryContextKeys.TIMEOUT, 100, "queryId", "test")).build(), context);
    Future resultFuture = Executors.newFixedThreadPool(1).submit(new Runnable() {
        @Override
        public void run() {
            Sequences.toList(seq, Lists.newArrayList());
        }
    });
    // wait for query to register and start
    queryIsRegistered.await();
    queriesStarted.await();
    Assert.assertTrue(capturedFuture.hasCaptured());
    ListenableFuture future = capturedFuture.getValue();
    // wait for query to time out
    QueryInterruptedException cause = null;
    try {
        resultFuture.get();
    } catch (ExecutionException e) {
        Assert.assertTrue(e.getCause() instanceof QueryInterruptedException);
        Assert.assertEquals("Query timeout", ((QueryInterruptedException) e.getCause()).getErrorCode());
        cause = (QueryInterruptedException) e.getCause();
    }
    queriesInterrupted.await();
    Assert.assertNotNull(cause);
    Assert.assertTrue(future.isCancelled());
    DyingQueryRunner interrupted1 = interrupted.poll();
    synchronized (interrupted1) {
        Assert.assertTrue("runner 1 started", interrupted1.hasStarted);
        Assert.assertTrue("runner 1 interrupted", interrupted1.interrupted);
    }
    DyingQueryRunner interrupted2 = interrupted.poll();
    synchronized (interrupted2) {
        Assert.assertTrue("runner 2 started", interrupted2.hasStarted);
        Assert.assertTrue("runner 2 interrupted", interrupted2.interrupted);
    }
    runners.remove(interrupted1);
    runners.remove(interrupted2);
    DyingQueryRunner remainingRunner = runners.iterator().next();
    synchronized (remainingRunner) {
        Assert.assertTrue("runner 3 should be interrupted or not have started", !remainingRunner.hasStarted || remainingRunner.interrupted);
    }
    Assert.assertFalse("runner 1 not completed", interrupted1.hasCompleted);
    Assert.assertFalse("runner 2 not completed", interrupted2.hasCompleted);
    Assert.assertFalse("runner 3 not completed", remainingRunner.hasCompleted);
    EasyMock.verify(watcher);
}
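Here the ArrayBlockingQueue is not a work queue at all: judging by the test's assertions, each DyingQueryRunner records itself in the interrupted queue once it is interrupted, and the test later polls entries out to check exactly which runners were cancelled. The sketch below shows that "record who was interrupted" pattern in isolation, with a hypothetical Worker class standing in for DyingQueryRunner.

import java.util.concurrent.*;

// Sketch of the collection pattern the Druid test relies on (Worker is a
// hypothetical stand-in): interrupted workers register themselves in a
// bounded, thread-safe queue that the test thread can poll and assert on.
public class InterruptedWorkersSketch {
    static class Worker implements Runnable {
        final ArrayBlockingQueue<Worker> interrupted;
        volatile boolean hasStarted;
        Worker(ArrayBlockingQueue<Worker> interrupted) { this.interrupted = interrupted; }
        @Override public void run() {
            hasStarted = true;
            try {
                Thread.sleep(60_000); // simulate long-running query work
            } catch (InterruptedException e) {
                interrupted.offer(this); // non-blocking; capacity bounds how many we record
            }
        }
    }

    public static void main(String[] args) throws Exception {
        ArrayBlockingQueue<Worker> interrupted = new ArrayBlockingQueue<>(3);
        ExecutorService exec = Executors.newFixedThreadPool(2);
        Worker w1 = new Worker(interrupted);
        Worker w2 = new Worker(interrupted);
        exec.submit(w1);
        exec.submit(w2);
        Thread.sleep(200);  // let both workers start
        exec.shutdownNow(); // interrupt them, as a query timeout would
        Worker first = interrupted.poll(5, TimeUnit.SECONDS);
        Worker second = interrupted.poll(5, TimeUnit.SECONDS);
        System.out.println(first != null && second != null
                && first.hasStarted && second.hasStarted); // true
        exec.awaitTermination(5, TimeUnit.SECONDS);
    }
}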
Use of java.util.concurrent.ArrayBlockingQueue in project elasticsearch by elastic.
The class TranslogTests, method testCloseConcurrently.
public void testCloseConcurrently() throws Throwable {
    final int opsPerThread = randomIntBetween(10, 200);
    int threadCount = 2 + randomInt(5);
    logger.info("testing with [{}] threads, each doing [{}] ops", threadCount, opsPerThread);
    final BlockingQueue<LocationOperation> writtenOperations = new ArrayBlockingQueue<>(threadCount * opsPerThread);
    Thread[] threads = new Thread[threadCount];
    final Exception[] threadExceptions = new Exception[threadCount];
    final CountDownLatch downLatch = new CountDownLatch(1);
    for (int i = 0; i < threadCount; i++) {
        final int threadId = i;
        threads[i] = new TranslogThread(translog, downLatch, opsPerThread, threadId, writtenOperations, threadExceptions);
        threads[i].setDaemon(true);
        threads[i].start();
    }
    downLatch.countDown();
    translog.close();
    for (int i = 0; i < threadCount; i++) {
        if (threadExceptions[i] != null) {
            if ((threadExceptions[i] instanceof AlreadyClosedException) == false) {
                throw threadExceptions[i];
            }
        }
        threads[i].join(60 * 1000);
    }
}
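writtenOperations is sized to threadCount * opsPerThread, the worst-case number of elements, so the writer threads can record every operation without ever blocking or overflowing while the translog is closed underneath them. The sketch below shows that sizing idea in isolation; TranslogThread and the translog itself are replaced by plain threads and strings, so the names are illustrative only.

import java.util.concurrent.*;

// Sketch (hypothetical names): sizing an ArrayBlockingQueue to the worst-case
// element count so producers can use add() without ever blocking or failing,
// mirroring how the translog test collects written operations.
public class CollectResultsSketch {
    public static void main(String[] args) throws Exception {
        int threadCount = 4;
        int opsPerThread = 100;
        BlockingQueue<String> written = new ArrayBlockingQueue<>(threadCount * opsPerThread);
        CountDownLatch start = new CountDownLatch(1);
        Thread[] threads = new Thread[threadCount];
        for (int t = 0; t < threadCount; t++) {
            final int id = t;
            threads[t] = new Thread(() -> {
                try {
                    start.await();
                    for (int op = 0; op < opsPerThread; op++) {
                        written.add("thread-" + id + "-op-" + op); // never throws: queue is big enough
                    }
                } catch (InterruptedException ignored) {
                    Thread.currentThread().interrupt();
                }
            });
            threads[t].setDaemon(true);
            threads[t].start();
        }
        start.countDown();
        for (Thread thread : threads) {
            thread.join(60_000);
        }
        System.out.println("collected " + written.size() + " operations"); // 400
    }
}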
Use of java.util.concurrent.ArrayBlockingQueue in project storm by apache.
The class SaslTransportPlugin, method getServer.
@Override
public TServer getServer(TProcessor processor) throws IOException, TTransportException {
    int port = type.getPort(storm_conf);
    Integer socketTimeout = type.getSocketTimeOut(storm_conf);
    TTransportFactory serverTransportFactory = getServerTransportFactory();
    TServerSocket serverTransport = null;
    if (socketTimeout != null) {
        serverTransport = new TServerSocket(port, socketTimeout);
    } else {
        serverTransport = new TServerSocket(port);
    }
    int numWorkerThreads = type.getNumThreads(storm_conf);
    Integer queueSize = type.getQueueSize(storm_conf);
    TThreadPoolServer.Args server_args = new TThreadPoolServer.Args(serverTransport).processor(new TUGIWrapProcessor(processor)).minWorkerThreads(numWorkerThreads).maxWorkerThreads(numWorkerThreads).protocolFactory(new TBinaryProtocol.Factory(false, true));
    if (serverTransportFactory != null) {
        server_args.transportFactory(serverTransportFactory);
    }
    BlockingQueue workQueue = new SynchronousQueue();
    if (queueSize != null) {
        workQueue = new ArrayBlockingQueue(queueSize);
    }
    ThreadPoolExecutor executorService = new ExtendedThreadPoolExecutor(numWorkerThreads, numWorkerThreads, 60, TimeUnit.SECONDS, workQueue);
    server_args.executorService(executorService);
    return new TThreadPoolServer(server_args);
}
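The configured queue size decides between a SynchronousQueue (direct hand-off to a worker thread, no buffering) and a bounded ArrayBlockingQueue that buffers up to queueSize pending requests for the Thrift server's executor. Below is a sketch of that selection logic; Storm's ExtendedThreadPoolExecutor is replaced by a plain ThreadPoolExecutor and the config values are made-up, so treat it as an illustration of the pattern rather than Storm's implementation.

import java.util.concurrent.*;

// Sketch of the work-queue selection (hypothetical config values): a null
// queue size means direct hand-off via SynchronousQueue, otherwise a bounded
// ArrayBlockingQueue buffers up to queueSize pending requests.
public class WorkQueueSelectionSketch {
    static ThreadPoolExecutor buildExecutor(int numWorkerThreads, Integer queueSize) {
        BlockingQueue<Runnable> workQueue =
                (queueSize != null) ? new ArrayBlockingQueue<>(queueSize)
                                    : new SynchronousQueue<>();
        return new ThreadPoolExecutor(numWorkerThreads, numWorkerThreads,
                60, TimeUnit.SECONDS, workQueue);
    }

    public static void main(String[] args) {
        ThreadPoolExecutor direct = buildExecutor(8, null);
        ThreadPoolExecutor buffered = buildExecutor(8, 256);
        System.out.println(direct.getQueue().getClass().getSimpleName());   // SynchronousQueue
        System.out.println(buffered.getQueue().getClass().getSimpleName()); // ArrayBlockingQueue
        direct.shutdown();
        buffered.shutdown();
    }
}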
Use of java.util.concurrent.ArrayBlockingQueue in project storm by apache.
The class SimpleTransportPlugin, method getServer.
@Override
public TServer getServer(TProcessor processor) throws IOException, TTransportException {
    int port = type.getPort(storm_conf);
    TNonblockingServerSocket serverTransport = new TNonblockingServerSocket(port);
    int numWorkerThreads = type.getNumThreads(storm_conf);
    int maxBufferSize = type.getMaxBufferSize(storm_conf);
    Integer queueSize = type.getQueueSize(storm_conf);
    THsHaServer.Args server_args = new THsHaServer.Args(serverTransport).processor(new SimpleWrapProcessor(processor)).maxWorkerThreads(numWorkerThreads).protocolFactory(new TBinaryProtocol.Factory(false, true, maxBufferSize, -1));
    server_args.maxReadBufferBytes = maxBufferSize;
    if (queueSize != null) {
        server_args.executorService(new ThreadPoolExecutor(numWorkerThreads, numWorkerThreads, 60, TimeUnit.SECONDS, new ArrayBlockingQueue(queueSize)));
    }
    // construct THsHaServer
    return new THsHaServer(server_args);
}
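As in SaslTransportPlugin, the bounded ArrayBlockingQueue caps how many requests may wait for the fixed pool of worker threads; with ThreadPoolExecutor's default AbortPolicy, overflow surfaces as a RejectedExecutionException. The sketch below is not Storm code: it contrasts that default with CallerRunsPolicy, an alternative rejection handler that throttles the submitting thread instead of failing the task outright.

import java.util.concurrent.*;

// Sketch (illustrative only): with a bounded ArrayBlockingQueue the default
// AbortPolicy throws RejectedExecutionException once the queue is full; a
// CallerRunsPolicy instead makes the submitting thread execute the overflow
// task, applying back-pressure to producers.
public class RejectionPolicySketch {
    public static void main(String[] args) throws Exception {
        BlockingQueue<Runnable> queue = new ArrayBlockingQueue<>(2);
        ThreadPoolExecutor pool = new ThreadPoolExecutor(1, 1, 60, TimeUnit.SECONDS, queue,
                new ThreadPoolExecutor.CallerRunsPolicy());
        for (int i = 0; i < 10; i++) {
            final int task = i;
            // Overflowing tasks run on the main thread instead of being rejected.
            pool.execute(() -> System.out.println(Thread.currentThread().getName() + " ran task " + task));
        }
        pool.shutdown();
        pool.awaitTermination(5, TimeUnit.SECONDS);
    }
}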