Use of java.util.concurrent.ConcurrentHashMap in project jetty.project by eclipse.
The class MetaInfConfiguration, method scanForFragment.
/**
 * Scan for a META-INF/web-fragment.xml file in the given jar.
 *
 * @param context the context for the scan
 * @param jar the jar resource to scan for fragments in
 * @param cache the resource cache
 * @throws Exception if unable to scan for fragments
 */
public void scanForFragment(WebAppContext context, Resource jar, ConcurrentHashMap<Resource, Resource> cache) throws Exception {
    Resource webFrag = null;
    if (cache != null && cache.containsKey(jar)) {
        webFrag = cache.get(jar);
        if (webFrag == EmptyResource.INSTANCE) {
            if (LOG.isDebugEnabled())
                LOG.debug(jar + " cached as containing no META-INF/web-fragment.xml");
            return;
        } else if (LOG.isDebugEnabled())
            LOG.debug(jar + " META-INF/web-fragment.xml found in cache");
    } else {
        // not using caches, or not in the cache, so check for the web-fragment.xml
        if (LOG.isDebugEnabled())
            LOG.debug(jar + " META-INF/web-fragment.xml checked");
        if (jar.isDirectory()) {
            //TODO ????
            webFrag = jar.addPath("/META-INF/web-fragment.xml");
        } else {
            URI uri = jar.getURI();
            webFrag = Resource.newResource(uriJarPrefix(uri, "!/META-INF/web-fragment.xml"));
        }
        if (!webFrag.exists() || webFrag.isDirectory()) {
            webFrag.close();
            webFrag = EmptyResource.INSTANCE;
        }
        if (cache != null) {
            // cache the result; the empty token marks a jar we've scanned that has no web-fragment.xml
            Resource old = cache.putIfAbsent(jar, webFrag);
            if (old != null)
                webFrag = old;
            else if (LOG.isDebugEnabled())
                LOG.debug(jar + " META-INF/web-fragment.xml cache updated");
        }
        if (webFrag == EmptyResource.INSTANCE)
            return;
    }
    Map<Resource, Resource> fragments = (Map<Resource, Resource>) context.getAttribute(METAINF_FRAGMENTS);
    if (fragments == null) {
        fragments = new HashMap<Resource, Resource>();
        context.setAttribute(METAINF_FRAGMENTS, fragments);
    }
    fragments.put(jar, webFrag);
    if (LOG.isDebugEnabled())
        LOG.debug(webFrag + " added to context");
}
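The concurrency-sensitive part of this method is the negative-result cache: a shared EmptyResource.INSTANCE token records jars known to contain no fragment, and putIfAbsent arbitrates races when two deployment threads scan the same jar, with the loser adopting the winner's value. Below is a minimal, self-contained sketch of that idiom; the NegativeCache class, the NOT_FOUND sentinel, and the expensiveScan helper are hypothetical stand-ins, not Jetty API.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Sketch of the sentinel-plus-putIfAbsent caching idiom used above.
final class NegativeCache {
    // Shared token meaning "scanned, nothing found", so misses are cached too.
    static final Object NOT_FOUND = new Object();
    private final ConcurrentMap<String, Object> cache = new ConcurrentHashMap<>();

    Object lookup(String key) {
        Object cached = cache.get(key);
        if (cached != null) {
            return cached == NOT_FOUND ? null : cached;
        }
        Object computed = expensiveScan(key); // may legitimately return null
        Object toCache = (computed == null) ? NOT_FOUND : computed;
        // First writer wins; adopt a concurrent scanner's result if it got there first.
        Object raced = cache.putIfAbsent(key, toCache);
        Object winner = (raced != null) ? raced : toCache;
        return winner == NOT_FOUND ? null : winner;
    }

    // Placeholder for the real scan (e.g. opening the jar and probing a path).
    private Object expensiveScan(String key) {
        return key.startsWith("jar:") ? key : null;
    }
}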
Use of java.util.concurrent.ConcurrentHashMap in project vert.x by eclipse.
The class Http1xTest, method testSharedServersRoundRobin.
@Test
public void testSharedServersRoundRobin() throws Exception {
    client.close();
    server.close();
    client = vertx.createHttpClient(new HttpClientOptions().setKeepAlive(false));
    int numServers = 5;
    int numRequests = numServers * 100;
    List<HttpServer> servers = new ArrayList<>();
    Set<HttpServer> connectedServers = Collections.newSetFromMap(new ConcurrentHashMap<>());
    Map<HttpServer, Integer> requestCount = new ConcurrentHashMap<>();
    CountDownLatch latchListen = new CountDownLatch(numServers);
    CountDownLatch latchConns = new CountDownLatch(numRequests);
    Set<Context> contexts = new ConcurrentHashSet<>();
    for (int i = 0; i < numServers; i++) {
        HttpServer theServer = vertx.createHttpServer(new HttpServerOptions().setPort(DEFAULT_HTTP_PORT));
        servers.add(theServer);
        final AtomicReference<Context> context = new AtomicReference<>();
        theServer.requestHandler(req -> {
            Context ctx = Vertx.currentContext();
            if (context.get() != null) {
                assertSame(ctx, context.get());
            } else {
                context.set(ctx);
                contexts.add(ctx);
            }
            connectedServers.add(theServer);
            Integer cnt = requestCount.get(theServer);
            int icnt = cnt == null ? 0 : cnt;
            icnt++;
            requestCount.put(theServer, icnt);
            latchConns.countDown();
            req.response().end();
        }).listen(onSuccess(s -> {
            assertEquals(DEFAULT_HTTP_PORT, s.actualPort());
            latchListen.countDown();
        }));
    }
    awaitLatch(latchListen);
    // Create a bunch of connections
    CountDownLatch latchClient = new CountDownLatch(numRequests);
    for (int i = 0; i < numRequests; i++) {
        client.request(HttpMethod.GET, DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, DEFAULT_TEST_URI, res -> latchClient.countDown()).end();
    }
    assertTrue(latchClient.await(10, TimeUnit.SECONDS));
    assertTrue(latchConns.await(10, TimeUnit.SECONDS));
    assertEquals(numServers, connectedServers.size());
    for (HttpServer server : servers) {
        assertTrue(connectedServers.contains(server));
    }
    assertEquals(numServers, requestCount.size());
    for (int cnt : requestCount.values()) {
        assertEquals(numRequests / numServers, cnt);
    }
    assertEquals(numServers, contexts.size());
    CountDownLatch closeLatch = new CountDownLatch(numServers);
    for (HttpServer server : servers) {
        server.close(ar -> {
            assertTrue(ar.succeeded());
            closeLatch.countDown();
        });
    }
    assertTrue(closeLatch.await(10, TimeUnit.SECONDS));
    testComplete();
}
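Two ConcurrentHashMap idioms are at work in this test: Collections.newSetFromMap(new ConcurrentHashMap<>()) builds a thread-safe set of servers, and requestCount is bumped with a plain get-then-put. That read-modify-write is only race-free because each server's handler runs on a single event-loop context, which the test itself asserts. When a counter really is updated from multiple threads, ConcurrentHashMap's merge performs the increment atomically; a small sketch, with illustrative key names:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class AtomicCount {
    public static void main(String[] args) {
        ConcurrentMap<String, Integer> requestCount = new ConcurrentHashMap<>();
        // merge() creates the entry with value 1, then atomically adds 1 on later calls.
        requestCount.merge("server-1", 1, Integer::sum);
        requestCount.merge("server-1", 1, Integer::sum);
        System.out.println(requestCount.get("server-1")); // prints 2
    }
}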
Use of java.util.concurrent.ConcurrentHashMap in project druid by druid-io.
The class KafkaLookupExtractorFactoryTest, method testStartStop.
@Test
public void testStartStop() {
    final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
    final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
    final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
    EasyMock.expect(consumerConnector.createMessageStreamsByFilter(EasyMock.anyObject(TopicFilter.class), EasyMock.anyInt(), EasyMock.eq(DEFAULT_STRING_DECODER), EasyMock.eq(DEFAULT_STRING_DECODER))).andReturn(ImmutableList.of(kafkaStream)).once();
    EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
    EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
    EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
    EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
    cacheHandler.close();
    EasyMock.expectLastCall();
    final AtomicBoolean threadWasInterrupted = new AtomicBoolean(false);
    consumerConnector.shutdown();
    EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            threadWasInterrupted.set(Thread.currentThread().isInterrupted());
            return null;
        }
    }).times(2);
    PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
    final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(cacheManager, TOPIC, ImmutableMap.of("zookeeper.connect", "localhost"), 10_000L, false) {
        @Override
        ConsumerConnector buildConnector(Properties properties) {
            return consumerConnector;
        }
    };
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.close());
    Assert.assertTrue(factory.getFuture().isDone());
    Assert.assertFalse(threadWasInterrupted.get());
    PowerMock.verify(cacheManager, cacheHandler);
}
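In these tests the mocked cacheHandler.getCache() hands the factory a fresh ConcurrentHashMap<String, String>, standing in for the lookup cache that the Kafka consumer thread populates while query threads read from it concurrently. A minimal sketch of that writer/reader hand-off, which needs no extra locking; the class and key names are illustrative, not Druid API:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class SharedLookupCache {
    public static void main(String[] args) throws InterruptedException {
        Map<String, String> cache = new ConcurrentHashMap<>();
        // Background writer, analogous to the Kafka consumer filling the lookup.
        Thread consumer = new Thread(() -> {
            for (int i = 0; i < 1_000; i++) {
                cache.put("key-" + i, "value-" + i);
            }
        });
        consumer.start();
        // Readers may observe a partially filled map, but never a corrupt one.
        String v = cache.get("key-0");
        System.out.println(v == null ? "not loaded yet" : v);
        consumer.join();
        System.out.println(cache.size()); // prints 1000
    }
}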
Use of java.util.concurrent.ConcurrentHashMap in project druid by druid-io.
The class KafkaLookupExtractorFactoryTest, method testStartStopStart.
@Test
public void testStartStopStart() {
    final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
    final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
    final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
    EasyMock.expect(consumerConnector.createMessageStreamsByFilter(EasyMock.anyObject(TopicFilter.class), EasyMock.anyInt(), EasyMock.eq(DEFAULT_STRING_DECODER), EasyMock.eq(DEFAULT_STRING_DECODER))).andReturn(ImmutableList.of(kafkaStream)).once();
    EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
    EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
    EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
    EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
    cacheHandler.close();
    EasyMock.expectLastCall().once();
    consumerConnector.shutdown();
    EasyMock.expectLastCall().times(2);
    PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
    final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(cacheManager, TOPIC, ImmutableMap.of("zookeeper.connect", "localhost")) {
        @Override
        ConsumerConnector buildConnector(Properties properties) {
            return consumerConnector;
        }
    };
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.close());
    Assert.assertFalse(factory.start());
    PowerMock.verify(cacheManager, cacheHandler);
}
Use of java.util.concurrent.ConcurrentHashMap in project druid by druid-io.
The class KafkaLookupExtractorFactoryTest, method testStartStartStop.
@Test
public void testStartStartStop() {
    final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
    final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
    final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
    EasyMock.expect(consumerConnector.createMessageStreamsByFilter(EasyMock.anyObject(TopicFilter.class), EasyMock.anyInt(), EasyMock.eq(DEFAULT_STRING_DECODER), EasyMock.eq(DEFAULT_STRING_DECODER))).andReturn(ImmutableList.of(kafkaStream)).once();
    EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
    EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
    EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
    EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
    cacheHandler.close();
    EasyMock.expectLastCall().once();
    consumerConnector.shutdown();
    EasyMock.expectLastCall().times(3);
    PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
    final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(cacheManager, TOPIC, ImmutableMap.of("zookeeper.connect", "localhost"), 10_000L, false) {
        @Override
        ConsumerConnector buildConnector(Properties properties) {
            return consumerConnector;
        }
    };
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.close());
    Assert.assertTrue(factory.close());
    PowerMock.verify(cacheManager, cacheHandler);
}