Use of java.util.concurrent.SynchronousQueue in project activemq-artemis by apache.
The class ActiveMQServerImpl, method initializeExecutorServices.
/**
 * Sets up ActiveMQ Artemis Executor Services.
 */
private void initializeExecutorServices() {
   /* We check to see if a Thread Pool is supplied in the InjectedObjectRegistry. If so we create a new Ordered
    * Executor based on the provided Thread Pool. Otherwise we create a new Thread Pool.
    */
   if (serviceRegistry.getExecutorService() == null) {
      ThreadFactory tFactory = AccessController.doPrivileged(new PrivilegedAction<ThreadFactory>() {
         @Override
         public ThreadFactory run() {
            return new ActiveMQThreadFactory("ActiveMQ-server-" + this.toString(), false, ClientSessionFactoryImpl.class.getClassLoader());
         }
      });
      if (configuration.getThreadPoolMaxSize() == -1) {
         threadPool = new ThreadPoolExecutor(0, Integer.MAX_VALUE, 60L, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(), tFactory);
      } else {
         threadPool = new ActiveMQThreadPoolExecutor(0, configuration.getThreadPoolMaxSize(), 60L, TimeUnit.SECONDS, tFactory);
      }
   } else {
      threadPool = serviceRegistry.getExecutorService();
      this.threadPoolSupplied = true;
   }
   this.executorFactory = new OrderedExecutorFactory(threadPool);

   if (serviceRegistry.getIOExecutorService() != null) {
      this.ioExecutorFactory = new OrderedExecutorFactory(serviceRegistry.getIOExecutorService());
   } else {
      ThreadFactory tFactory = AccessController.doPrivileged(new PrivilegedAction<ThreadFactory>() {
         @Override
         public ThreadFactory run() {
            return new ActiveMQThreadFactory("ActiveMQ-IO-server-" + this.toString(), false, ClientSessionFactoryImpl.class.getClassLoader());
         }
      });
      this.ioExecutorPool = new ThreadPoolExecutor(0, Integer.MAX_VALUE, 60L, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(), tFactory);
      this.ioExecutorFactory = new OrderedExecutorFactory(ioExecutorPool);
   }

   /* We check to see if a Scheduled Executor Service is provided in the InjectedObjectRegistry. If so we use this
    * Scheduled ExecutorService, otherwise we create a new one.
    */
   if (serviceRegistry.getScheduledExecutorService() == null) {
      ThreadFactory tFactory = AccessController.doPrivileged(new PrivilegedAction<ThreadFactory>() {
         @Override
         public ThreadFactory run() {
            return new ActiveMQThreadFactory("ActiveMQ-scheduled-threads", false, ClientSessionFactoryImpl.class.getClassLoader());
         }
      });
      scheduledPool = new ScheduledThreadPoolExecutor(configuration.getScheduledThreadPoolMaxSize(), tFactory);
   } else {
      this.scheduledPoolSupplied = true;
      this.scheduledPool = serviceRegistry.getScheduledExecutorService();
   }
}
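In the branch with no configured maximum, the SynchronousQueue serves as the work queue of an unbounded ThreadPoolExecutor. That is the direct-handoff configuration also used by Executors.newCachedThreadPool(): a submitted task is either handed straight to an idle worker blocked in take(), or a new thread is created for it. A minimal, self-contained sketch of that configuration follows; the class name DirectHandoffPoolDemo and the toy tasks are illustrative, not part of the Artemis code.

import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class DirectHandoffPoolDemo {
    public static void main(String[] args) throws InterruptedException {
        // Zero-capacity queue: the pool's internal offer() succeeds only if an idle
        // worker is already waiting in take(); otherwise a new thread is created
        // (up to maximumPoolSize), and idle threads are reclaimed after 60 seconds.
        ThreadPoolExecutor pool = new ThreadPoolExecutor(
                0, Integer.MAX_VALUE,
                60L, TimeUnit.SECONDS,
                new SynchronousQueue<Runnable>());

        for (int i = 0; i < 4; i++) {
            final int id = i;
            pool.execute(() -> System.out.println("task " + id + " on " + Thread.currentThread().getName()));
        }

        pool.shutdown();
        pool.awaitTermination(5, TimeUnit.SECONDS);
    }
}

Because nothing is ever buffered, the thread count tracks the number of concurrently running tasks, which is presumably why the other branch switches to ActiveMQThreadPoolExecutor when a maximum pool size is configured.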
Use of java.util.concurrent.SynchronousQueue in project cdap by caskdata.
The class AbstractProgramController, method addListener.
@Override
public final Cancellable addListener(Listener listener, final Executor listenerExecutor) {
  Preconditions.checkNotNull(listener, "Listener shouldn't be null.");
  Preconditions.checkNotNull(listenerExecutor, "Executor shouldn't be null.");
  final ListenerCaller caller = new ListenerCaller(listener, listenerExecutor);
  final Cancellable cancellable = new Cancellable() {
    @Override
    public void cancel() {
      // Simply remove the listener from the map through the executor and block on the completion
      Futures.getUnchecked(executor.submit(new Runnable() {
        @Override
        public void run() {
          listeners.remove(caller);
        }
      }));
    }
  };
  try {
    // Use a synchronous queue to communicate the Cancellable to return
    final SynchronousQueue<Cancellable> result = new SynchronousQueue<>();
    // Use the single-thread executor to add the listener and call init
    executor.submit(new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        Cancellable existing = listeners.get(caller);
        if (existing == null) {
          listeners.put(caller, cancellable);
          result.put(cancellable);
          caller.init(getState(), getFailureCause());
        } else {
          result.put(existing);
        }
        return null;
      }
    });
    return result.take();
  } catch (Exception e) {
    // There shouldn't be an InterruptedException here either; propagate the root cause of whatever was thrown.
    throw Throwables.propagate(Throwables.getRootCause(e));
  }
}
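The SynchronousQueue here is used as a rendezvous: the task running on the single-thread executor hands the Cancellable directly back to the calling thread via put()/take(), and then continues running (it still calls caller.init) after the handoff, something a plain Future result would not express as directly. A stripped-down sketch of that idiom follows; the class name RendezvousResultDemo and the String payload are illustrative, not part of the CDAP code.

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.SynchronousQueue;

public class RendezvousResultDemo {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // Zero-capacity queue: put() blocks until the caller is ready to take(),
        // so the value is handed over directly rather than buffered.
        final SynchronousQueue<String> result = new SynchronousQueue<>();

        executor.submit((Callable<Void>) () -> {
            // Work confined to the executor thread happens here.
            result.put("registered");   // blocks until the caller calls take()
            // The executor thread may keep doing follow-up work after the handoff.
            return null;
        });

        // Block until the executor thread hands the value over.
        System.out.println(result.take());
        executor.shutdown();
    }
}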
Use of java.util.concurrent.SynchronousQueue in project streamsupport by stefan-zobel.
The class RecursiveActionTest, method testJoinIgnoresInterruptsOutsideForkJoinPool.
/**
 * join/quietlyJoin of a forked task when not in ForkJoinPool
 * succeeds in the presence of interrupts
 */
public void testJoinIgnoresInterruptsOutsideForkJoinPool() {
    final SynchronousQueue<FibAction[]> sq = new SynchronousQueue<>();
    @SuppressWarnings("serial") RecursiveAction a = new CheckedRecursiveAction() {
        protected void realCompute() throws InterruptedException {
            FibAction[] fibActions = new FibAction[6];
            for (int i = 0; i < fibActions.length; i++)
                fibActions[i] = new FibAction(8);

            fibActions[1].cancel(false);
            fibActions[2].completeExceptionally(new FJException());
            fibActions[4].cancel(true);
            fibActions[5].completeExceptionally(new FJException());

            for (int i = 0; i < fibActions.length; i++)
                fibActions[i].fork();

            sq.put(fibActions);
            helpQuiesce();
        }
    };

    Runnable r = new CheckedRunnable() {
        public void realRun() throws InterruptedException {
            FibAction[] fibActions = sq.take();
            FibAction f;
            final Thread myself = Thread.currentThread();

            // test join() ------------
            f = fibActions[0];
            assertFalse(ForkJoinTask.inForkJoinPool());
            myself.interrupt();
            assertTrue(myself.isInterrupted());
            assertNull(f.join());
            assertTrue(Thread.interrupted());
            assertEquals(21, f.result);
            checkCompletedNormally(f);

            f = fibActions[1];
            myself.interrupt();
            assertTrue(myself.isInterrupted());
            try {
                f.join();
                shouldThrow();
            } catch (CancellationException success) {
                assertTrue(Thread.interrupted());
                checkCancelled(f);
            }

            f = fibActions[2];
            myself.interrupt();
            assertTrue(myself.isInterrupted());
            try {
                f.join();
                shouldThrow();
            } catch (FJException success) {
                assertTrue(Thread.interrupted());
                checkCompletedAbnormally(f, success);
            }

            // test quietlyJoin() ---------
            f = fibActions[3];
            myself.interrupt();
            assertTrue(myself.isInterrupted());
            f.quietlyJoin();
            assertTrue(Thread.interrupted());
            assertEquals(21, f.result);
            checkCompletedNormally(f);

            f = fibActions[4];
            myself.interrupt();
            assertTrue(myself.isInterrupted());
            f.quietlyJoin();
            assertTrue(Thread.interrupted());
            checkCancelled(f);

            f = fibActions[5];
            myself.interrupt();
            assertTrue(myself.isInterrupted());
            f.quietlyJoin();
            assertTrue(Thread.interrupted());
            assertTrue(f.getException() instanceof FJException);
            checkCompletedAbnormally(f, f.getException());
        }
    };

    Thread t;
    t = newStartedThread(r);
    testInvokeOnPool(mainPool(), a);
    awaitTermination(t);

    a.reinitialize();
    t = newStartedThread(r);
    testInvokeOnPool(singletonPool(), a);
    awaitTermination(t);
}
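In this test the SynchronousQueue carries an array of already-forked tasks out of the ForkJoinPool so that an ordinary thread can join them from outside the pool while its interrupt status is set. The sketch below isolates just that handoff with a toy RecursiveTask; Square stands in for FibAction, and all names are illustrative rather than taken from the streamsupport test suite.

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.SynchronousQueue;

public class ForkedTaskHandoffDemo {
    // Toy task; the test above uses a Fibonacci action for the same purpose.
    static class Square extends RecursiveTask<Integer> {
        final int n;
        Square(int n) { this.n = n; }
        @Override protected Integer compute() { return n * n; }
    }

    public static void main(String[] args) throws InterruptedException {
        final SynchronousQueue<Square> handoff = new SynchronousQueue<>();
        ForkJoinPool pool = new ForkJoinPool();

        // External thread: receives an already-forked task and joins it
        // from outside the ForkJoinPool, as the test's CheckedRunnable does.
        Thread joiner = new Thread(() -> {
            try {
                Square task = handoff.take();
                System.out.println("joined outside the pool: " + task.join());
            } catch (InterruptedException ignored) {
            }
        });
        joiner.start();

        pool.submit(() -> {
            Square task = new Square(7);
            task.fork();               // schedule the subtask inside the pool
            try {
                handoff.put(task);     // rendezvous with the external thread
            } catch (InterruptedException ignored) {
            }
        });

        joiner.join();
        pool.shutdown();
    }
}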
Use of java.util.concurrent.SynchronousQueue in project teiid by teiid.
The class ExecutorUtils, method newFixedThreadPool.
public static ExecutorService newFixedThreadPool(int nThreads, int maxQueue, String name) {
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(nThreads, nThreads, 60L, TimeUnit.SECONDS,
            maxQueue == 0 ? new SynchronousQueue<Runnable>() : new LinkedBlockingQueue<Runnable>(maxQueue),
            new NamedThreadFactory(name));
    tpe.allowCoreThreadTimeOut(true);
    return tpe;
}
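The ternary makes the queue choice explicit: with maxQueue == 0 the pool uses a SynchronousQueue and never buffers work, so once all nThreads workers are busy the handoff fails and, the pool already being at maximumPoolSize, the submission is rejected by the default AbortPolicy; with maxQueue > 0, up to maxQueue tasks wait in a bounded LinkedBlockingQueue first. The sketch below demonstrates the zero-queue case; it mirrors the factory above but omits NamedThreadFactory so it runs without the Teiid classes, and QueueChoiceDemo is an illustrative name.

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class QueueChoiceDemo {
    // Mirrors the factory above, minus the thread factory, so it runs standalone.
    static ThreadPoolExecutor newFixedThreadPool(int nThreads, int maxQueue) {
        ThreadPoolExecutor tpe = new ThreadPoolExecutor(
                nThreads, nThreads, 60L, TimeUnit.SECONDS,
                maxQueue == 0 ? new SynchronousQueue<Runnable>()
                              : new LinkedBlockingQueue<Runnable>(maxQueue));
        tpe.allowCoreThreadTimeOut(true);
        return tpe;
    }

    public static void main(String[] args) {
        // maxQueue == 0: direct handoff. Once both workers are busy the pool is
        // already at maximumPoolSize, the SynchronousQueue offer fails, and the
        // default AbortPolicy throws RejectedExecutionException.
        ThreadPoolExecutor noQueue = newFixedThreadPool(2, 0);
        try {
            for (int i = 0; i < 5; i++) {
                noQueue.execute(() -> {
                    try { Thread.sleep(500); } catch (InterruptedException ignored) { }
                });
            }
        } catch (RejectedExecutionException expected) {
            System.out.println("rejected: both workers busy and nothing may be queued");
        }
        noQueue.shutdownNow();
    }
}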
Use of java.util.concurrent.SynchronousQueue in project ksql by confluentinc.
The class StreamedQueryResourceTest, method testStreamQuery.
@Test
public void testStreamQuery() throws Throwable {
  final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
  final Thread.UncaughtExceptionHandler threadExceptionHandler =
      (thread, exception) -> threadException.compareAndSet(null, exception);
  final String queryString = "SELECT * FROM test_stream;";

  final SynchronousQueue<KeyValue<String, GenericRow>> rowQueue = new SynchronousQueue<>();
  final LinkedList<GenericRow> writtenRows = new LinkedList<>();

  final Thread rowQueuePopulatorThread = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        for (int i = 0; ; i++) {
          String key = Integer.toString(i);
          GenericRow value = new GenericRow(Collections.singletonList(i));
          synchronized (writtenRows) {
            writtenRows.add(value);
          }
          rowQueue.put(new KeyValue<>(key, value));
        }
      } catch (InterruptedException exception) {
        // This should happen during the test, so it's fine
      }
    }
  }, "Row Queue Populator");
  rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
  rowQueuePopulatorThread.start();

  final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
  mockKafkaStreams.start();
  expectLastCall();
  mockKafkaStreams.setUncaughtExceptionHandler(anyObject(Thread.UncaughtExceptionHandler.class));
  expectLastCall();
  expect(mockKafkaStreams.state()).andReturn(KafkaStreams.State.NOT_RUNNING);
  mockKafkaStreams.close();
  expectLastCall();
  mockKafkaStreams.cleanUp();
  expectLastCall();

  final OutputNode mockOutputNode = mock(OutputNode.class);
  expect(mockOutputNode.getSchema()).andReturn(SchemaBuilder.struct().field("f1", SchemaBuilder.INT32_SCHEMA));

  final Map<String, Object> requestStreamsProperties = Collections.emptyMap();

  KsqlEngine mockKsqlEngine = mock(KsqlEngine.class);
  KafkaTopicClient mockKafkaTopicClient = mock(KafkaTopicClientImpl.class);
  expect(mockKsqlEngine.getTopicClient()).andReturn(mockKafkaTopicClient);

  final QueuedQueryMetadata queuedQueryMetadata =
      new QueuedQueryMetadata(queryString, mockKafkaStreams, mockOutputNode, "", rowQueue,
          DataSource.DataSourceType.KSTREAM, "", mockKafkaTopicClient, null);
  expect(mockKsqlEngine.buildMultipleQueries(queryString, requestStreamsProperties))
      .andReturn(Collections.singletonList(queuedQueryMetadata));
  mockKsqlEngine.removeTemporaryQuery(queuedQueryMetadata);
  expectLastCall();

  StatementParser mockStatementParser = mock(StatementParser.class);
  expect(mockStatementParser.parseSingleStatement(queryString)).andReturn(mock(Query.class));

  replay(mockKsqlEngine, mockStatementParser, mockKafkaStreams, mockOutputNode);

  StreamedQueryResource testResource = new StreamedQueryResource(mockKsqlEngine, mockStatementParser, 1000);
  Response response = testResource.streamQuery(new KsqlRequest(queryString, requestStreamsProperties));
  PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
  PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
  StreamingOutput responseStream = (StreamingOutput) response.getEntity();

  final Thread queryWriterThread = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        responseStream.write(responseOutputStream);
      } catch (EOFException exception) {
        // It's fine
      } catch (IOException exception) {
        throw new RuntimeException(exception);
      }
    }
  }, "Query Writer");
  queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
  queryWriterThread.start();

  Scanner responseScanner = new Scanner(responseInputStream);
  ObjectMapper objectMapper = new ObjectMapper();
  for (int i = 0; i < 5; i++) {
    if (!responseScanner.hasNextLine()) {
      throw new Exception("Response input stream failed to have expected line available");
    }
    String responseLine = responseScanner.nextLine();
    if (responseLine.trim().isEmpty()) {
      i--;
    } else {
      GenericRow expectedRow;
      synchronized (writtenRows) {
        expectedRow = writtenRows.poll();
      }
      GenericRow testRow = objectMapper.readValue(responseLine, StreamedRow.class).getRow();
      assertEquals(expectedRow, testRow);
    }
  }

  responseOutputStream.close();
  queryWriterThread.join();
  rowQueuePopulatorThread.interrupt();
  rowQueuePopulatorThread.join();

  // Definitely want to make sure that the Kafka Streams instance has been closed and cleaned up
  verify(mockKafkaStreams);

  // If one of the other threads has somehow managed to throw an exception without breaking things
  // up until this point, we throw that exception now in the main thread and cause the test to fail
  Throwable exception = threadException.get();
  if (exception != null) {
    throw exception;
  }
}
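The row queue in this test is a SynchronousQueue, so the "Row Queue Populator" thread blocks in put() until the streaming response actually consumes each row: the producer can never run ahead of the consumer, writtenRows stays in lockstep with what the endpoint has emitted, and a plain interrupt() is enough to stop the producer at the end. A stripped-down sketch of that producer/consumer handshake follows; the class name LockstepProducerConsumerDemo and the String rows are illustrative, not part of the KSQL test.

import java.util.concurrent.SynchronousQueue;

public class LockstepProducerConsumerDemo {
    public static void main(String[] args) throws InterruptedException {
        // Zero capacity: the producer blocks in put() until the consumer is in
        // take(), so it can never run ahead of the rows actually consumed.
        final SynchronousQueue<String> rowQueue = new SynchronousQueue<>();

        Thread producer = new Thread(() -> {
            try {
                for (int i = 0; ; i++) {
                    rowQueue.put("row-" + i);
                }
            } catch (InterruptedException expected) {
                // Interrupted once the consumer is done, as in the test above.
            }
        }, "Row Queue Populator");
        producer.start();

        for (int i = 0; i < 5; i++) {
            System.out.println("consumed " + rowQueue.take());
        }

        producer.interrupt();
        producer.join();
    }
}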