Example 1 with ConcurrentHashMap

use of java.util.concurrent.ConcurrentHashMap in project jetty.project by eclipse.

the class MetaInfConfiguration method scanForFragment.

/**
     * Scan for the META-INF/web-fragment.xml file in the given jar.
     * 
     * @param context the context for the scan
     * @param jar the jar resource to scan for fragments in
     * @param cache the resource cache
     * @throws Exception if unable to scan for fragments
     */
public void scanForFragment(WebAppContext context, Resource jar, ConcurrentHashMap<Resource, Resource> cache) throws Exception {
    Resource webFrag = null;
    if (cache != null && cache.containsKey(jar)) {
        webFrag = cache.get(jar);
        if (webFrag == EmptyResource.INSTANCE) {
            if (LOG.isDebugEnabled())
                LOG.debug(jar + " cached as containing no META-INF/web-fragment.xml");
            return;
        } else if (LOG.isDebugEnabled())
            LOG.debug(jar + " META-INF/web-fragment.xml found in cache ");
    } else {
        //not using caches or not in the cache so check for the web-fragment.xml
        if (LOG.isDebugEnabled())
            LOG.debug(jar + " META-INF/web-fragment.xml checked");
        if (jar.isDirectory()) {
            //TODO   ????
            webFrag = jar.addPath("/META-INF/web-fragment.xml");
        } else {
            URI uri = jar.getURI();
            webFrag = Resource.newResource(uriJarPrefix(uri, "!/META-INF/web-fragment.xml"));
        }
        if (!webFrag.exists() || webFrag.isDirectory()) {
            webFrag.close();
            webFrag = EmptyResource.INSTANCE;
        }
        if (cache != null) {
            //cache the result (the fragment resource, or the EmptyResource token) so this jar isn't rescanned
            Resource old = cache.putIfAbsent(jar, webFrag);
            if (old != null)
                webFrag = old;
            else if (LOG.isDebugEnabled())
                LOG.debug(jar + " META-INF/web-fragment.xml cache updated");
        }
        if (webFrag == EmptyResource.INSTANCE)
            return;
    }
    Map<Resource, Resource> fragments = (Map<Resource, Resource>) context.getAttribute(METAINF_FRAGMENTS);
    if (fragments == null) {
        fragments = new HashMap<Resource, Resource>();
        context.setAttribute(METAINF_FRAGMENTS, fragments);
    }
    fragments.put(jar, webFrag);
    if (LOG.isDebugEnabled())
        LOG.debug(webFrag + " added to context");
}
Also used : Resource(org.eclipse.jetty.util.resource.Resource) EmptyResource(org.eclipse.jetty.util.resource.EmptyResource) URI(java.net.URI) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) Map(java.util.Map)
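
The idiom worth noting here is the sentinel value plus putIfAbsent: the scan result is cached even when no fragment exists (as EmptyResource.INSTANCE), and if several deployers race to scan the same jar, the first writer wins and the others adopt its cached value. A minimal sketch of the same pattern, using hypothetical names (FragmentCache, fragmentFor, scan) and a plain URI sentinel instead of Jetty's Resource types:

import java.net.URI;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

// Sketch only: not Jetty API. FragmentCache, fragmentFor and scan are hypothetical names.
class FragmentCache {
    // Sentinel meaning "this jar was already checked and has no web-fragment.xml".
    private static final URI NONE = URI.create("empty:none");
    private final ConcurrentHashMap<URI, URI> cache = new ConcurrentHashMap<>();

    Optional<URI> fragmentFor(URI jar) {
        URI found = cache.get(jar);
        if (found == null) {
            URI scanned = scan(jar);                        // potentially expensive, may race
            URI previous = cache.putIfAbsent(jar, scanned); // first writer wins
            found = (previous != null) ? previous : scanned;
        }
        return found == NONE ? Optional.empty() : Optional.of(found);
    }

    private URI scan(URI jar) {
        // Placeholder for the real scan; return the fragment URI or the sentinel.
        return NONE;
    }
}

Because ConcurrentHashMap rejects null values, a dedicated sentinel is needed to distinguish "checked, nothing found" from "not checked yet".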

Example 2 with ConcurrentHashMap

use of java.util.concurrent.ConcurrentHashMap in project vert.x by eclipse.

the class Http1xTest method testSharedServersRoundRobin.

@Test
public void testSharedServersRoundRobin() throws Exception {
    client.close();
    server.close();
    client = vertx.createHttpClient(new HttpClientOptions().setKeepAlive(false));
    int numServers = 5;
    int numRequests = numServers * 100;
    List<HttpServer> servers = new ArrayList<>();
    Set<HttpServer> connectedServers = Collections.newSetFromMap(new ConcurrentHashMap<>());
    Map<HttpServer, Integer> requestCount = new ConcurrentHashMap<>();
    CountDownLatch latchListen = new CountDownLatch(numServers);
    CountDownLatch latchConns = new CountDownLatch(numRequests);
    Set<Context> contexts = new ConcurrentHashSet<>();
    for (int i = 0; i < numServers; i++) {
        HttpServer theServer = vertx.createHttpServer(new HttpServerOptions().setPort(DEFAULT_HTTP_PORT));
        servers.add(theServer);
        final AtomicReference<Context> context = new AtomicReference<>();
        theServer.requestHandler(req -> {
            Context ctx = Vertx.currentContext();
            if (context.get() != null) {
                assertSame(ctx, context.get());
            } else {
                context.set(ctx);
                contexts.add(ctx);
            }
            connectedServers.add(theServer);
            Integer cnt = requestCount.get(theServer);
            int icnt = cnt == null ? 0 : cnt;
            icnt++;
            requestCount.put(theServer, icnt);
            latchConns.countDown();
            req.response().end();
        }).listen(onSuccess(s -> {
            assertEquals(DEFAULT_HTTP_PORT, s.actualPort());
            latchListen.countDown();
        }));
    }
    awaitLatch(latchListen);
    // Create a bunch of connections
    CountDownLatch latchClient = new CountDownLatch(numRequests);
    for (int i = 0; i < numRequests; i++) {
        client.request(HttpMethod.GET, DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, DEFAULT_TEST_URI, res -> latchClient.countDown()).end();
    }
    assertTrue(latchClient.await(10, TimeUnit.SECONDS));
    assertTrue(latchConns.await(10, TimeUnit.SECONDS));
    assertEquals(numServers, connectedServers.size());
    for (HttpServer server : servers) {
        assertTrue(connectedServers.contains(server));
    }
    assertEquals(numServers, requestCount.size());
    for (int cnt : requestCount.values()) {
        assertEquals(numRequests / numServers, cnt);
    }
    assertEquals(numServers, contexts.size());
    CountDownLatch closeLatch = new CountDownLatch(numServers);
    for (HttpServer server : servers) {
        server.close(ar -> {
            assertTrue(ar.succeeded());
            closeLatch.countDown();
        });
    }
    assertTrue(closeLatch.await(10, TimeUnit.SECONDS));
    testComplete();
}
Also used : IntStream(java.util.stream.IntStream) java.util(java.util) io.vertx.core(io.vertx.core) io.vertx.core.impl(io.vertx.core.impl) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) TimeoutException(java.util.concurrent.TimeoutException) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) TooLongFrameException(io.netty.handler.codec.TooLongFrameException) Test(org.junit.Test) CompletableFuture(java.util.concurrent.CompletableFuture) io.vertx.core.net(io.vertx.core.net) AtomicReference(java.util.concurrent.atomic.AtomicReference) io.vertx.core.http(io.vertx.core.http) TimeUnit(java.util.concurrent.TimeUnit) Consumer(java.util.function.Consumer) JsonArray(io.vertx.core.json.JsonArray) CountDownLatch(java.util.concurrent.CountDownLatch) HttpClientRequestImpl(io.vertx.core.http.impl.HttpClientRequestImpl) Buffer(io.vertx.core.buffer.Buffer) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) TestUtils(io.vertx.test.core.TestUtils) RecordParser(io.vertx.core.parsetools.RecordParser) Pump(io.vertx.core.streams.Pump) JsonObject(io.vertx.core.json.JsonObject)
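
Two ConcurrentHashMap uses appear in this test: Collections.newSetFromMap(new ConcurrentHashMap<>()) builds the thread-safe set of connected servers, and a ConcurrentHashMap keeps per-server request counts. The handler's get-then-put increment is safe here only because each server's requests run on a single context; below is a small sketch (plain Java, hypothetical RoundRobinStats class) of the same bookkeeping done atomically with merge, which does not depend on that:

import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

// Sketch only: RoundRobinStats is a hypothetical name, no Vert.x types involved.
public class RoundRobinStats {
    // Thread-safe Set view backed by a ConcurrentHashMap, as in the test.
    private final Set<String> connectedServers =
            Collections.newSetFromMap(new ConcurrentHashMap<>());
    private final Map<String, Integer> requestCount = new ConcurrentHashMap<>();

    // merge() reads and updates the count in one atomic step,
    // so it stays correct even if handlers run on different threads.
    void record(String serverId) {
        connectedServers.add(serverId);
        requestCount.merge(serverId, 1, Integer::sum);
    }

    int countFor(String serverId) {
        return requestCount.getOrDefault(serverId, 0);
    }
}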

Example 3 with ConcurrentHashMap

use of java.util.concurrent.ConcurrentHashMap in project druid by druid-io.

the class KafkaLookupExtractorFactoryTest method testStartStop.

@Test
public void testStartStop() {
    final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
    final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
    final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
    EasyMock.expect(consumerConnector.createMessageStreamsByFilter(EasyMock.anyObject(TopicFilter.class), EasyMock.anyInt(), EasyMock.eq(DEFAULT_STRING_DECODER), EasyMock.eq(DEFAULT_STRING_DECODER))).andReturn(ImmutableList.of(kafkaStream)).once();
    EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
    EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
    EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
    EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
    cacheHandler.close();
    EasyMock.expectLastCall();
    final AtomicBoolean threadWasInterrupted = new AtomicBoolean(false);
    consumerConnector.shutdown();
    EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            threadWasInterrupted.set(Thread.currentThread().isInterrupted());
            return null;
        }
    }).times(2);
    PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
    final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(cacheManager, TOPIC, ImmutableMap.of("zookeeper.connect", "localhost"), 10_000L, false) {

        @Override
        ConsumerConnector buildConnector(Properties properties) {
            return consumerConnector;
        }
    };
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.close());
    Assert.assertTrue(factory.getFuture().isDone());
    Assert.assertFalse(threadWasInterrupted.get());
    PowerMock.verify(cacheManager, cacheHandler);
}
Also used : AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) IAnswer(org.easymock.IAnswer) ConsumerConnector(kafka.javaapi.consumer.ConsumerConnector) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Properties(java.util.Properties) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
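
Here the mocked cacheHandler.getCache() hands the factory a ConcurrentHashMap<String, String> that the Kafka consumer thread populates while lookup callers read it concurrently, which is why a plain HashMap would not do. A minimal sketch of that writer/reader shape with plain threads (no Kafka or Druid classes; the key/value strings are made up):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;

// Sketch only: one thread fills the lookup map while another reads it.
public class LookupCacheDemo {
    public static void main(String[] args) throws InterruptedException {
        Map<String, String> cache = new ConcurrentHashMap<>();
        CountDownLatch loaded = new CountDownLatch(1);

        Thread consumer = new Thread(() -> {
            // Stands in for the consumer loop applying incoming key/value messages.
            cache.put("user-1", "alice");
            cache.put("user-2", "bob");
            loaded.countDown();
        }, "lookup-consumer");
        consumer.start();

        loaded.await();
        // Reader side: lookups see the entries without extra synchronization.
        System.out.println(cache.getOrDefault("user-1", "<missing>"));
        consumer.join();
    }
}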

Example 4 with ConcurrentHashMap

use of java.util.concurrent.ConcurrentHashMap in project druid by druid-io.

the class KafkaLookupExtractorFactoryTest method testStartStopStart.

@Test
public void testStartStopStart() {
    final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
    final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
    final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
    EasyMock.expect(consumerConnector.createMessageStreamsByFilter(EasyMock.anyObject(TopicFilter.class), EasyMock.anyInt(), EasyMock.eq(DEFAULT_STRING_DECODER), EasyMock.eq(DEFAULT_STRING_DECODER))).andReturn(ImmutableList.of(kafkaStream)).once();
    EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
    EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
    EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
    EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
    cacheHandler.close();
    EasyMock.expectLastCall().once();
    consumerConnector.shutdown();
    EasyMock.expectLastCall().times(2);
    PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
    final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(cacheManager, TOPIC, ImmutableMap.of("zookeeper.connect", "localhost")) {

        @Override
        ConsumerConnector buildConnector(Properties properties) {
            return consumerConnector;
        }
    };
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.close());
    Assert.assertFalse(factory.start());
    PowerMock.verify(cacheManager, cacheHandler);
}
Also used : ConsumerConnector(kafka.javaapi.consumer.ConsumerConnector) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Properties(java.util.Properties) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)

Example 5 with ConcurrentHashMap

use of java.util.concurrent.ConcurrentHashMap in project druid by druid-io.

the class KafkaLookupExtractorFactoryTest method testStartStartStop.

@Test
public void testStartStartStop() {
    final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
    final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
    final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
    EasyMock.expect(consumerConnector.createMessageStreamsByFilter(EasyMock.anyObject(TopicFilter.class), EasyMock.anyInt(), EasyMock.eq(DEFAULT_STRING_DECODER), EasyMock.eq(DEFAULT_STRING_DECODER))).andReturn(ImmutableList.of(kafkaStream)).once();
    EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
    EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
    EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
    EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
    cacheHandler.close();
    EasyMock.expectLastCall().once();
    consumerConnector.shutdown();
    EasyMock.expectLastCall().times(3);
    PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
    final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(cacheManager, TOPIC, ImmutableMap.of("zookeeper.connect", "localhost"), 10_000L, false) {

        @Override
        ConsumerConnector buildConnector(Properties properties) {
            return consumerConnector;
        }
    };
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.close());
    Assert.assertTrue(factory.close());
    PowerMock.verify(cacheManager, cacheHandler);
}
Also used : ConsumerConnector(kafka.javaapi.consumer.ConsumerConnector) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Properties(java.util.Properties) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
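
These start/stop permutations all reuse the same mocked ConcurrentHashMap-backed cache. A related idiom, sketched below with a hypothetical TopicCacheRegistry (not Druid's CacheManager), is computeIfAbsent, which creates at most one cache per topic even when several threads call start concurrently:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Sketch only: TopicCacheRegistry is a hypothetical name, not part of Druid.
public class TopicCacheRegistry {
    private final Map<String, ConcurrentHashMap<String, String>> cachesByTopic =
            new ConcurrentHashMap<>();

    // computeIfAbsent runs the factory function at most once per key,
    // so concurrent callers all receive the same cache instance.
    public ConcurrentHashMap<String, String> cacheFor(String topic) {
        return cachesByTopic.computeIfAbsent(topic, t -> new ConcurrentHashMap<>());
    }

    // Dropping the entry lets a later cacheFor() start with a fresh cache.
    public void close(String topic) {
        cachesByTopic.remove(topic);
    }
}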

Aggregations

ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap) 1141
Map (java.util.Map) 295
Test (org.junit.Test) 269
ArrayList (java.util.ArrayList) 240
HashMap (java.util.HashMap) 192
List (java.util.List) 177
Set (java.util.Set) 142
IOException (java.io.IOException) 132
HashSet (java.util.HashSet) 120
CountDownLatch (java.util.concurrent.CountDownLatch) 105
AtomicInteger (java.util.concurrent.atomic.AtomicInteger) 104
ConcurrentMap (java.util.concurrent.ConcurrentMap) 75
Collection (java.util.Collection) 74
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean) 70
AtomicLong (java.util.concurrent.atomic.AtomicLong) 66
Collections (java.util.Collections) 63
ExecutorService (java.util.concurrent.ExecutorService) 58
Arrays (java.util.Arrays) 53
File (java.io.File) 45
Random (java.util.Random) 45