
Example 26 with ThreadFactoryBuilder

Use of com.google.common.util.concurrent.ThreadFactoryBuilder in project weave by continuuity.

From the class TrackerService, method startUp:

@Override
protected void startUp() throws Exception {
    // Boss threads accept incoming connections; worker threads perform channel I/O.
    // Both pools use daemon threads so they cannot block JVM shutdown. Note that the
    // boss name format has no %d placeholder, so every boss thread shares the name
    // "boss-thread", while worker threads are numbered individually.
    Executor bossThreads = Executors.newFixedThreadPool(NUM_BOSS_THREADS,
        new ThreadFactoryBuilder().setDaemon(true).setNameFormat("boss-thread").build());
    Executor workerThreads = Executors.newCachedThreadPool(
        new ThreadFactoryBuilder().setDaemon(true).setNameFormat("worker-thread#%d").build());
    ChannelFactory factory = new NioServerSocketChannelFactory(bossThreads, workerThreads);
    bootstrap = new ServerBootstrap(factory);
    bootstrap.setPipelineFactory(new ChannelPipelineFactory() {

        @Override
        public ChannelPipeline getPipeline() {
            ChannelPipeline pipeline = Channels.pipeline();
            pipeline.addLast("decoder", new HttpRequestDecoder());
            pipeline.addLast("aggregator", new HttpChunkAggregator(MAX_INPUT_SIZE));
            pipeline.addLast("encoder", new HttpResponseEncoder());
            pipeline.addLast("compressor", new HttpContentCompressor());
            pipeline.addLast("handler", new ReportHandler(resourceReport));
            return pipeline;
        }
    });
    // Binding to port 0 requests an ephemeral port; the actual address is read back.
    Channel channel = bootstrap.bind(new InetSocketAddress(host, 0));
    bindAddress = (InetSocketAddress) channel.getLocalAddress();
    url = URI.create(String.format("http://%s:%d", host, bindAddress.getPort())).resolve(TrackerService.PATH).toURL();
    channelGroup.add(channel);
}
Also used: NioServerSocketChannelFactory (org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory), HttpContentCompressor (org.jboss.netty.handler.codec.http.HttpContentCompressor), InetSocketAddress (java.net.InetSocketAddress), Channel (org.jboss.netty.channel.Channel), HttpChunkAggregator (org.jboss.netty.handler.codec.http.HttpChunkAggregator), ChannelFactory (org.jboss.netty.channel.ChannelFactory), ServerBootstrap (org.jboss.netty.bootstrap.ServerBootstrap), ChannelPipeline (org.jboss.netty.channel.ChannelPipeline), HttpResponseEncoder (org.jboss.netty.handler.codec.http.HttpResponseEncoder), Executor (java.util.concurrent.Executor), HttpRequestDecoder (org.jboss.netty.handler.codec.http.HttpRequestDecoder), ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder), ChannelPipelineFactory (org.jboss.netty.channel.ChannelPipelineFactory)
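
For quick reference, a minimal, self-contained sketch of the factory pattern used above; the class name and printed output are illustrative, not part of the weave code:

import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class DaemonPoolSketch {
    public static void main(String[] args) throws InterruptedException {
        // "%d" in the name format is replaced by an incrementing counter, giving each
        // thread a unique name; a format without "%d" names every thread identically.
        Executor workers = Executors.newCachedThreadPool(
                new ThreadFactoryBuilder()
                        .setDaemon(true) // daemon threads do not keep the JVM alive
                        .setNameFormat("worker-thread#%d")
                        .build());
        workers.execute(() -> System.out.println(Thread.currentThread().getName())); // worker-thread#0
        Thread.sleep(100); // give the daemon thread time to print before the JVM exits
    }
}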

Example 27 with ThreadFactoryBuilder

Use of com.google.common.util.concurrent.ThreadFactoryBuilder in project che by eclipse.

From the class WSocketEventBusClient, method start:

@PostConstruct
void start() {
    // compareAndSet guarantees the startup logic runs at most once.
    if (start.compareAndSet(false, true)) {
        if (policy != null) {
            eventService.subscribe(new EventSubscriber<Object>() {

                @Override
                public void onEvent(Object event) {
                    propagate(event);
                }
            });
        }
        if (eventSubscriptions != null) {
            // Group subscription channels by service URI, skipping malformed entries.
            final Map<URI, Set<String>> cfg = new HashMap<>();
            for (Pair<String, String> service : eventSubscriptions) {
                try {
                    final URI key = new URI(service.first);
                    Set<String> values = cfg.get(key);
                    if (values == null) {
                        cfg.put(key, values = new LinkedHashSet<>());
                    }
                    if (service.second != null) {
                        values.add(service.second);
                    }
                } catch (URISyntaxException e) {
                    LOG.error(e.getMessage(), e);
                }
            }
            if (!cfg.isEmpty()) {
                // Daemon threads plus an uncaught-exception handler: failures inside
                // connect tasks are logged, and the pool never blocks JVM shutdown.
                executor = Executors.newCachedThreadPool(
                    new ThreadFactoryBuilder()
                        .setNameFormat("WSocketEventBusClient-%d")
                        .setUncaughtExceptionHandler(LoggingUncaughtExceptionHandler.getInstance())
                        .setDaemon(true)
                        .build());
                // One connect task per remote endpoint.
                for (Map.Entry<URI, Set<String>> entry : cfg.entrySet()) {
                    executor.execute(new ConnectTask(entry.getKey(), entry.getValue()));
                }
            }
        }
    }
}
Also used: LinkedHashSet (java.util.LinkedHashSet), HashSet (java.util.HashSet), Set (java.util.Set), HashMap (java.util.HashMap), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), URISyntaxException (java.net.URISyntaxException), URI (java.net.URI), ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder), ConcurrentMap (java.util.concurrent.ConcurrentMap), Map (java.util.Map), PostConstruct (javax.annotation.PostConstruct)
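
On Java 8 and later, the get/null-check/put grouping in the loop above collapses into a single Map.computeIfAbsent call. A minimal, self-contained sketch; the endpoint URI and channel name are made up for illustration:

import java.net.URI;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

public class GroupingSketch {
    public static void main(String[] args) {
        Map<URI, Set<String>> cfg = new HashMap<>();
        URI key = URI.create("ws://example.test/eventbus"); // illustrative endpoint
        // computeIfAbsent creates and stores the set only when the key is absent
        cfg.computeIfAbsent(key, k -> new LinkedHashSet<>()).add("workspace-events"); // illustrative channel
        System.out.println(cfg); // prints {ws://example.test/eventbus=[workspace-events]}
    }
}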

Example 28 with ThreadFactoryBuilder

Use of com.google.common.util.concurrent.ThreadFactoryBuilder in project che by eclipse.

From the class ServerContainerInitializeListener, method createExecutor:

protected ExecutorService createExecutor(final ServletContext servletContext) {
    final EverrestConfiguration everrestConfiguration = getEverrestConfiguration(servletContext);
    // Embedding the servlet context name makes per-application pools easy to tell
    // apart in thread dumps.
    final String threadNameFormat = "everrest.WSConnection." + servletContext.getServletContextName() + "-%d";
    return Executors.newFixedThreadPool(
        everrestConfiguration.getAsynchronousPoolSize(),
        new ThreadFactoryBuilder()
            .setNameFormat(threadNameFormat)
            .setUncaughtExceptionHandler(LoggingUncaughtExceptionHandler.getInstance())
            .setDaemon(true)
            .build());
}
Also used: EverrestConfiguration (org.everrest.core.impl.EverrestConfiguration), ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder)
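
LoggingUncaughtExceptionHandler is Che's own helper; the sketch below uses a plain stand-in to show what such a handler buys you. Note the handler fires only for exceptions escaping tasks started with execute(); submit() captures them in the returned Future instead:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class HandlerSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(2,
                new ThreadFactoryBuilder()
                        .setNameFormat("ws-connection-%d") // illustrative name format
                        .setUncaughtExceptionHandler((t, e) ->
                                System.err.println(t.getName() + " died: " + e)) // stand-in for a logging handler
                        .setDaemon(true)
                        .build());
        pool.execute(() -> { throw new IllegalStateException("boom"); }); // reaches the handler
        pool.shutdown();
        Thread.sleep(200); // let the daemon thread fail before the JVM exits
    }
}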

Example 29 with ThreadFactoryBuilder

Use of com.google.common.util.concurrent.ThreadFactoryBuilder in project dropwizard by dropwizard.

From the class LifecycleEnvironmentTest, method executorServiceThreadFactory:

@Test
public void executorServiceThreadFactory() throws ExecutionException, InterruptedException {
    final String expectedName = "DropWizard ThreadFactory Test";
    final String expectedNamePattern = expectedName + "-%d";
    final ThreadFactory tfactory = new ThreadFactoryBuilder().setDaemon(false).setNameFormat(expectedNamePattern).build();
    final ExecutorService executorService = environment.executorService("Dropwizard Service", tfactory).build();
    // The submitted task runs on a pool thread, so its name reveals which factory built it.
    final Future<Boolean> isFactoryInUse = executorService.submit(() -> Thread.currentThread().getName().startsWith(expectedName));
    assertThat(isFactoryInUse.get()).isTrue();
}
Also used: ThreadFactory (java.util.concurrent.ThreadFactory), ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService), ExecutorService (java.util.concurrent.ExecutorService), ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder), Test (org.junit.Test)
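
The same verification trick works without the Dropwizard environment. A minimal sketch, with an illustrative name format:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;

import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class NameCheckSketch {
    public static void main(String[] args) throws Exception {
        ThreadFactory tf = new ThreadFactoryBuilder().setNameFormat("check-%d").build();
        ExecutorService es = Executors.newSingleThreadExecutor(tf);
        // The task runs on the pool thread, so its name proves which factory built it.
        Future<Boolean> named = es.submit(() -> Thread.currentThread().getName().startsWith("check-"));
        System.out.println(named.get()); // expected: true
        es.shutdown();
    }
}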

Example 30 with ThreadFactoryBuilder

Use of com.google.common.util.concurrent.ThreadFactoryBuilder in project druid by druid-io.

From the class IncrementalIndexTest, method testConcurrentAddRead:

@Test(timeout = 60_000L)
public void testConcurrentAddRead() throws InterruptedException, ExecutionException {
    final int dimensionCount = 5;
    final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>(dimensionCount + 1);
    ingestAggregatorFactories.add(new CountAggregatorFactory("rows"));
    for (int i = 0; i < dimensionCount; ++i) {
        ingestAggregatorFactories.add(new LongSumAggregatorFactory(String.format("sumResult%s", i), String.format("Dim_%s", i)));
        ingestAggregatorFactories.add(new DoubleSumAggregatorFactory(String.format("doubleSumResult%s", i), String.format("Dim_%s", i)));
    }
    final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>(dimensionCount + 1);
    queryAggregatorFactories.add(new CountAggregatorFactory("rows"));
    for (int i = 0; i < dimensionCount; ++i) {
        queryAggregatorFactories.add(new LongSumAggregatorFactory(String.format("sumResult%s", i), String.format("sumResult%s", i)));
        queryAggregatorFactories.add(new DoubleSumAggregatorFactory(String.format("doubleSumResult%s", i), String.format("doubleSumResult%s", i)));
    }
    final IncrementalIndex index = closer.closeLater(indexCreator.createIndex(ingestAggregatorFactories.toArray(new AggregatorFactory[dimensionCount])));
    final int concurrentThreads = 2;
    final int elementsPerThread = 10_000;
    // The writer pool runs at minimum priority so the reader threads get scheduled
    // often enough to observe writes in flight; see the concurrency assertions below.
    final ListeningExecutorService indexExecutor = MoreExecutors.listeningDecorator(
        Executors.newFixedThreadPool(concurrentThreads,
            new ThreadFactoryBuilder().setDaemon(false).setNameFormat("index-executor-%d").setPriority(Thread.MIN_PRIORITY).build()));
    final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(
        Executors.newFixedThreadPool(concurrentThreads,
            new ThreadFactoryBuilder().setDaemon(false).setNameFormat("query-executor-%d").build()));
    final long timestamp = System.currentTimeMillis();
    final Interval queryInterval = new Interval("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z");
    final List<ListenableFuture<?>> indexFutures = Lists.newArrayListWithExpectedSize(concurrentThreads);
    final List<ListenableFuture<?>> queryFutures = Lists.newArrayListWithExpectedSize(concurrentThreads);
    final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null);
    final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    // Counters shared between writer and reader tasks to prove genuine concurrency.
    final AtomicInteger currentlyRunning = new AtomicInteger(0);
    final AtomicInteger concurrentlyRan = new AtomicInteger(0);
    final AtomicInteger someoneRan = new AtomicInteger(0);
    // readyLatch waits for every thread; startLatch then releases them all at once.
    final CountDownLatch startLatch = new CountDownLatch(1);
    final CountDownLatch readyLatch = new CountDownLatch(concurrentThreads * 2);
    final AtomicInteger queriesAccumulated = new AtomicInteger(0);
    for (int j = 0; j < concurrentThreads; j++) {
        indexFutures.add(indexExecutor.submit(new Runnable() {

            @Override
            public void run() {
                readyLatch.countDown();
                try {
                    startLatch.await();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    throw Throwables.propagate(e);
                }
                currentlyRunning.incrementAndGet();
                try {
                    for (int i = 0; i < elementsPerThread; i++) {
                        index.add(getLongRow(timestamp + i, i, dimensionCount));
                        someoneRan.incrementAndGet();
                    }
                } catch (IndexSizeExceededException e) {
                    throw Throwables.propagate(e);
                }
                currentlyRunning.decrementAndGet();
            }
        }));
        final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("xxx").granularity(Granularities.ALL).intervals(ImmutableList.of(queryInterval)).aggregators(queryAggregatorFactories).build();
        queryFutures.add(queryExecutor.submit(new Runnable() {

            @Override
            public void run() {
                readyLatch.countDown();
                try {
                    startLatch.await();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    throw Throwables.propagate(e);
                }
                while (concurrentlyRan.get() == 0) {
                    QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(factory.createRunner(incrementalIndexSegment), factory.getToolchest());
                    Map<String, Object> context = new HashMap<String, Object>();
                    Sequence<Result<TimeseriesResultValue>> sequence = runner.run(query, context);
                    for (Double result : sequence.accumulate(new Double[0], new Accumulator<Double[], Result<TimeseriesResultValue>>() {

                        @Override
                        public Double[] accumulate(Double[] accumulated, Result<TimeseriesResultValue> in) {
                            if (currentlyRunning.get() > 0) {
                                concurrentlyRan.incrementAndGet();
                            }
                            queriesAccumulated.incrementAndGet();
                            return Lists.asList(in.getValue().getDoubleMetric("doubleSumResult0"), accumulated).toArray(new Double[accumulated.length + 1]);
                        }
                    })) {
                        final Integer maxValueExpected = someoneRan.get() + concurrentThreads;
                        if (maxValueExpected > 0) {
                            // Eventually consistent, but should be somewhere in that range
                            // Actual result is validated after all writes are guaranteed done.
                            Assert.assertTrue(String.format("%d >= %g >= 0 violated", maxValueExpected, result), result >= 0 && result <= maxValueExpected);
                        }
                    }
                }
            }
        }));
    }
    readyLatch.await();
    startLatch.countDown();
    List<ListenableFuture<?>> allFutures = new ArrayList<>(queryFutures.size() + indexFutures.size());
    allFutures.addAll(queryFutures);
    allFutures.addAll(indexFutures);
    Futures.allAsList(allFutures).get();
    Assert.assertTrue("Queries ran too fast", queriesAccumualted.get() > 0);
    Assert.assertTrue("Did not hit concurrency, please try again", concurrentlyRan.get() > 0);
    queryExecutor.shutdown();
    indexExecutor.shutdown();
    QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(factory.createRunner(incrementalIndexSegment), factory.getToolchest());
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("xxx").granularity(Granularities.ALL).intervals(ImmutableList.of(queryInterval)).aggregators(queryAggregatorFactories).build();
    Map<String, Object> context = new HashMap<String, Object>();
    List<Result<TimeseriesResultValue>> results = Sequences.toList(runner.run(query, context), new LinkedList<Result<TimeseriesResultValue>>());
    boolean isRollup = index.isRollup();
    for (Result<TimeseriesResultValue> result : results) {
        Assert.assertEquals(elementsPerThread * (isRollup ? 1 : concurrentThreads), result.getValue().getLongMetric("rows").intValue());
        for (int i = 0; i < dimensionCount; ++i) {
            Assert.assertEquals(String.format("Failed long sum on dimension %d", i), elementsPerThread * concurrentThreads, result.getValue().getLongMetric(String.format("sumResult%s", i)).intValue());
            Assert.assertEquals(String.format("Failed double sum on dimension %d", i), elementsPerThread * concurrentThreads, result.getValue().getDoubleMetric(String.format("doubleSumResult%s", i)).intValue());
        }
    }
}
Also used: TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue), IncrementalIndexSegment (io.druid.segment.IncrementalIndexSegment), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory), TimeseriesQueryQueryToolChest (io.druid.query.timeseries.TimeseriesQueryQueryToolChest), Segment (io.druid.segment.Segment), Result (io.druid.query.Result), TimeseriesQueryEngine (io.druid.query.timeseries.TimeseriesQueryEngine), ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder), DoubleSumAggregatorFactory (io.druid.query.aggregation.DoubleSumAggregatorFactory), TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery), OffheapIncrementalIndex (io.druid.segment.incremental.OffheapIncrementalIndex), IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory), AggregatorFactory (io.druid.query.aggregation.AggregatorFactory), FilteredAggregatorFactory (io.druid.query.aggregation.FilteredAggregatorFactory), CountDownLatch (java.util.concurrent.CountDownLatch), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), TimeseriesQueryRunnerFactory (io.druid.query.timeseries.TimeseriesQueryRunnerFactory), QueryRunnerFactory (io.druid.query.QueryRunnerFactory), FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner), ListenableFuture (com.google.common.util.concurrent.ListenableFuture), ListeningExecutorService (com.google.common.util.concurrent.ListeningExecutorService), Interval (org.joda.time.Interval), IndexSizeExceededException (io.druid.segment.incremental.IndexSizeExceededException), Test (org.junit.Test)
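
One detail worth isolating from the test above: the writer pool is built with setPriority(Thread.MIN_PRIORITY), a scheduling hint rather than a guarantee. A minimal sketch of a priority-setting factory; the class name and printed output are illustrative:

import java.util.concurrent.ThreadFactory;

import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class PrioritySketch {
    public static void main(String[] args) {
        ThreadFactory lowPriority = new ThreadFactoryBuilder()
                .setDaemon(false)
                .setNameFormat("index-executor-%d")
                .setPriority(Thread.MIN_PRIORITY) // a hint to the scheduler, not a guarantee
                .build();
        lowPriority.newThread(() -> System.out.println(
                Thread.currentThread().getName() + " priority=" + Thread.currentThread().getPriority()))
            .start(); // prints index-executor-0 priority=1
    }
}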

Aggregations

ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder): 124
ThreadFactory (java.util.concurrent.ThreadFactory): 38
ExecutorService (java.util.concurrent.ExecutorService): 35
IOException (java.io.IOException): 19
ThreadPoolExecutor (java.util.concurrent.ThreadPoolExecutor): 18
Future (java.util.concurrent.Future): 16
ExecutionException (java.util.concurrent.ExecutionException): 14
ArrayList (java.util.ArrayList): 10
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 10
HashMap (java.util.HashMap): 9
HashSet (java.util.HashSet): 9
Callable (java.util.concurrent.Callable): 9
ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService): 9
Path (org.apache.hadoop.fs.Path): 9
Test (org.junit.Test): 9
LinkedList (java.util.LinkedList): 8
Map (java.util.Map): 8
Before (org.junit.Before): 8
LinkedBlockingQueue (java.util.concurrent.LinkedBlockingQueue): 7
ScheduledThreadPoolExecutor (java.util.concurrent.ScheduledThreadPoolExecutor): 7