use of com.google.common.util.concurrent.ThreadFactoryBuilder in project storm by apache.
the class HiveBolt method prepare.
@Override
public void prepare(Map conf, TopologyContext topologyContext, OutputCollector collector) {
    try {
        // Kerberos is enabled only when both the principal and the keytab are set.
        if (options.getKerberosPrincipal() == null && options.getKerberosKeytab() == null) {
            kerberosEnabled = false;
        } else if (options.getKerberosPrincipal() != null && options.getKerberosKeytab() != null) {
            kerberosEnabled = true;
        } else {
            throw new IllegalArgumentException("To enable Kerberos, need to set both KerberosPrincipal & KerberosKeytab");
        }
        if (kerberosEnabled) {
            try {
                ugi = HiveUtils.authenticate(options.getKerberosKeytab(), options.getKerberosPrincipal());
            } catch (HiveUtils.AuthenticationFailed ex) {
                LOG.error("Hive Kerberos authentication failed " + ex.getMessage(), ex);
                throw new IllegalArgumentException(ex);
            }
        }
        this.collector = collector;
        this.batchHelper = new BatchHelper(options.getBatchSize(), collector);
        allWriters = new ConcurrentHashMap<HiveEndPoint, HiveWriter>();
        // Single-thread pool whose worker is named hive-bolt-0 in thread dumps.
        String timeoutName = "hive-bolt-%d";
        this.callTimeoutPool = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setNameFormat(timeoutName).build());
        sendHeartBeat.set(true);
        heartBeatTimer = new Timer();
        setupHeartBeatTimer();
    } catch (Exception e) {
        LOG.warn("Unable to make connection to Hive", e);
    }
}
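Every snippet on this page follows the same pattern: ThreadFactoryBuilder produces a java.util.concurrent.ThreadFactory that stamps each new thread with a readable name, and that factory is handed to an Executors method. A minimal, self-contained sketch of the pattern (the pool size and name format here are illustrative, not taken from the project above):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class NamedPoolSketch {

    public static void main(String[] args) {
        // %d is replaced with an incrementing counter, so the workers show up
        // in thread dumps as hive-bolt-0, hive-bolt-1, ...
        ThreadFactory factory = new ThreadFactoryBuilder().setNameFormat("hive-bolt-%d").build();
        ExecutorService pool = Executors.newFixedThreadPool(1, factory);
        pool.execute(() -> System.out.println(Thread.currentThread().getName()));
        pool.shutdown();
    }
}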
use of com.google.common.util.concurrent.ThreadFactoryBuilder in project che by eclipse.
the class FileWatcherService method start.
@PostConstruct
void start() throws IOException {
    ThreadFactoryBuilder builder = new ThreadFactoryBuilder();
    ThreadFactory factory = builder
            .setUncaughtExceptionHandler(LoggingUncaughtExceptionHandler.getInstance())
            .setNameFormat(FileWatcherService.class.getSimpleName())
            .setDaemon(true)
            .build();
    executor = newSingleThreadExecutor(factory);
    executor.execute(this::run);
}
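LoggingUncaughtExceptionHandler is a Che class, but the same safety net can be supplied with a plain lambda. A sketch under that assumption (the handler body and the sleep are illustrative):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadFactory;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import static java.util.concurrent.Executors.newSingleThreadExecutor;

public class DaemonWatcherSketch {

    public static void main(String[] args) throws InterruptedException {
        ThreadFactory factory = new ThreadFactoryBuilder()
                // Daemon threads do not keep the JVM alive at shutdown.
                .setDaemon(true)
                // Unlike submit(), execute() does not capture exceptions in a
                // Future, so failures reach the thread's uncaught-exception handler.
                .setUncaughtExceptionHandler((thread, error) ->
                        System.err.println(thread.getName() + " failed: " + error))
                .setNameFormat("FileWatcherService")
                .build();
        ExecutorService executor = newSingleThreadExecutor(factory);
        executor.execute(() -> {
            throw new IllegalStateException("boom");
        });
        Thread.sleep(100); // let the daemon thread run before main exits
        executor.shutdown();
    }
}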
use of com.google.common.util.concurrent.ThreadFactoryBuilder in project druid by druid-io.
the class BenchmarkIndexibleWrites method testConcurrentWrites.
/**
 * CALLEN - 2015-01-15 - OSX - Java 1.7.0_71-b14
 * BenchmarkIndexibleWrites.testConcurrentWrites[0]: [measured 100 out of 200 rounds, threads: 1 (sequential)]
 *  round: 0.24 [+- 0.01], round.block: 0.00 [+- 0.00], round.gc: 0.02 [+- 0.00], GC.calls: 396, GC.time: 1.88, time.total: 50.60, time.warmup: 24.84, time.bench: 25.77
 * BenchmarkIndexibleWrites.testConcurrentWrites[1]: [measured 100 out of 200 rounds, threads: 1 (sequential)]
 *  round: 0.15 [+- 0.01], round.block: 0.00 [+- 0.00], round.gc: 0.02 [+- 0.00], GC.calls: 396, GC.time: 2.11, time.total: 33.14, time.warmup: 16.09, time.bench: 17.05
 */
@BenchmarkOptions(warmupRounds = 100, benchmarkRounds = 100, clock = Clock.REAL_TIME, callgc = true)
@Ignore
@Test
public void testConcurrentWrites() throws ExecutionException, InterruptedException {
    final ListeningExecutorService executorService = MoreExecutors.listeningDecorator(
            Executors.newFixedThreadPool(concurrentThreads,
                    new ThreadFactoryBuilder().setDaemon(false).setNameFormat("indexible-writes-benchmark-%d").build()));
    final AtomicInteger index = new AtomicInteger(0);
    List<ListenableFuture<?>> futures = new LinkedList<>();
    final Integer loops = totalIndexSize / concurrentThreads;
    for (int i = 0; i < concurrentThreads; ++i) {
        futures.add(executorService.submit(new Runnable() {

            @Override
            public void run() {
                for (int i = 0; i < loops; ++i) {
                    final Integer idx = index.getAndIncrement();
                    concurrentIndexible.set(idx, idx);
                }
            }
        }));
    }
    // Block until every writer has finished before validating the contents.
    Futures.allAsList(futures).get();
    Assert.assertTrue(String.format("Index too small %d, expected %d across %d loops", index.get(), totalIndexSize, loops), index.get() >= totalIndexSize);
    for (int i = 0; i < index.get(); ++i) {
        Assert.assertEquals(i, concurrentIndexible.get(i).intValue());
    }
    concurrentIndexible.clear();
    futures.clear();
    executorService.shutdown();
}
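The point of MoreExecutors.listeningDecorator here is that it upgrades submit() to return Guava ListenableFutures, which Futures.allAsList can collapse into a single barrier. A stripped-down sketch of that combination (the pool size and task body are placeholders):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class BarrierSketch {

    public static void main(String[] args) throws ExecutionException, InterruptedException {
        ListeningExecutorService pool = MoreExecutors.listeningDecorator(
                Executors.newFixedThreadPool(4, new ThreadFactoryBuilder()
                        .setDaemon(false)
                        .setNameFormat("indexible-writes-benchmark-%d")
                        .build()));
        List<ListenableFuture<?>> futures = new ArrayList<>();
        for (int i = 0; i < 4; i++) {
            futures.add(pool.submit(() -> System.out.println(Thread.currentThread().getName())));
        }
        // Wait for all tasks; get() rethrows the first failure, if any.
        Futures.allAsList(futures).get();
        pool.shutdown();
    }
}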
use of com.google.common.util.concurrent.ThreadFactoryBuilder in project druid by druid-io.
the class OnheapIncrementalIndexBenchmark method testConcurrentAddRead.
@Ignore
@Test
@BenchmarkOptions(callgc = true, clock = Clock.REAL_TIME, warmupRounds = 10, benchmarkRounds = 20)
public void testConcurrentAddRead() throws InterruptedException, ExecutionException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
    final int taskCount = 30;
    final int concurrentThreads = 3;
    final int elementsPerThread = 1 << 15;
    final OnheapIncrementalIndex incrementalIndex = this.incrementalIndex.getConstructor(Long.TYPE, Granularity.class, AggregatorFactory[].class, Integer.TYPE).newInstance(0, Granularities.NONE, factories, elementsPerThread * taskCount);
    final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>(dimensionCount + 1);
    queryAggregatorFactories.add(new CountAggregatorFactory("rows"));
    for (int i = 0; i < dimensionCount; ++i) {
        queryAggregatorFactories.add(new LongSumAggregatorFactory(String.format("sumResult%s", i), String.format("sumResult%s", i)));
        queryAggregatorFactories.add(new DoubleSumAggregatorFactory(String.format("doubleSumResult%s", i), String.format("doubleSumResult%s", i)));
    }
    // Index threads run at minimum priority so the query threads can interleave.
    final ListeningExecutorService indexExecutor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(concurrentThreads, new ThreadFactoryBuilder().setDaemon(false).setNameFormat("index-executor-%d").setPriority(Thread.MIN_PRIORITY).build()));
    final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(concurrentThreads, new ThreadFactoryBuilder().setDaemon(false).setNameFormat("query-executor-%d").build()));
    final long timestamp = System.currentTimeMillis();
    final Interval queryInterval = new Interval("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z");
    final List<ListenableFuture<?>> indexFutures = new LinkedList<>();
    final List<ListenableFuture<?>> queryFutures = new LinkedList<>();
    final Segment incrementalIndexSegment = new IncrementalIndexSegment(incrementalIndex, null);
    final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    final AtomicInteger currentlyRunning = new AtomicInteger(0);
    final AtomicBoolean concurrentlyRan = new AtomicBoolean(false);
    final AtomicBoolean someoneRan = new AtomicBoolean(false);
    for (int j = 0; j < taskCount; j++) {
        indexFutures.add(indexExecutor.submit(new Runnable() {

            @Override
            public void run() {
                currentlyRunning.incrementAndGet();
                try {
                    for (int i = 0; i < elementsPerThread; i++) {
                        incrementalIndex.add(getLongRow(timestamp + i, 1, dimensionCount));
                    }
                } catch (IndexSizeExceededException e) {
                    throw Throwables.propagate(e);
                }
                currentlyRunning.decrementAndGet();
                someoneRan.set(true);
            }
        }));
        queryFutures.add(queryExecutor.submit(new Runnable() {

            @Override
            public void run() {
                QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(factory.createRunner(incrementalIndexSegment), factory.getToolchest());
                TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("xxx").granularity(Granularities.ALL).intervals(ImmutableList.of(queryInterval)).aggregators(queryAggregatorFactories).build();
                Map<String, Object> context = new HashMap<String, Object>();
                for (Result<TimeseriesResultValue> result : Sequences.toList(runner.run(query, context), new LinkedList<Result<TimeseriesResultValue>>())) {
                    if (someoneRan.get()) {
                        Assert.assertTrue(result.getValue().getDoubleMetric("doubleSumResult0") > 0);
                    }
                }
                if (currentlyRunning.get() > 0) {
                    concurrentlyRan.set(true);
                }
            }
        }));
    }
    List<ListenableFuture<?>> allFutures = new ArrayList<>(queryFutures.size() + indexFutures.size());
    allFutures.addAll(queryFutures);
    allFutures.addAll(indexFutures);
    Futures.allAsList(allFutures).get();
    //Assert.assertTrue("Did not hit concurrency, please try again", concurrentlyRan.get());
    queryExecutor.shutdown();
    indexExecutor.shutdown();
    // Final verification pass over the fully-built index.
    QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(factory.createRunner(incrementalIndexSegment), factory.getToolchest());
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("xxx").granularity(Granularities.ALL).intervals(ImmutableList.of(queryInterval)).aggregators(queryAggregatorFactories).build();
    Map<String, Object> context = new HashMap<String, Object>();
    List<Result<TimeseriesResultValue>> results = Sequences.toList(runner.run(query, context), new LinkedList<Result<TimeseriesResultValue>>());
    final int expectedVal = elementsPerThread * taskCount;
    for (Result<TimeseriesResultValue> result : results) {
        Assert.assertEquals(elementsPerThread, result.getValue().getLongMetric("rows").intValue());
        for (int i = 0; i < dimensionCount; ++i) {
            Assert.assertEquals(String.format("Failed long sum on dimension %d", i), expectedVal, result.getValue().getLongMetric(String.format("sumResult%s", i)).intValue());
            Assert.assertEquals(String.format("Failed double sum on dimension %d", i), expectedVal, result.getValue().getDoubleMetric(String.format("doubleSumResult%s", i)).intValue());
        }
    }
}
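setPriority is the one builder option the other snippets on this page do not use; it seeds each new thread's priority before the thread starts. A tiny sketch of just that option (the name format is illustrative; priority is only a scheduler hint, so its effect is platform-dependent):

import java.util.concurrent.ThreadFactory;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class PrioritySketch {

    public static void main(String[] args) throws InterruptedException {
        // Deprioritize background index threads so query threads win CPU time.
        ThreadFactory lowPriority = new ThreadFactoryBuilder()
                .setNameFormat("index-executor-%d")
                .setPriority(Thread.MIN_PRIORITY)
                .build();
        Thread t = lowPriority.newThread(() ->
                System.out.println(Thread.currentThread().getPriority())); // prints 1
        t.start();
        t.join();
    }
}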
use of com.google.common.util.concurrent.ThreadFactoryBuilder in project dropwizard by dropwizard.
the class LifecycleEnvironmentTest method scheduledExecutorServiceThreadFactory.
@Test
public void scheduledExecutorServiceThreadFactory() throws ExecutionException, InterruptedException {
    final String expectedName = "DropWizard ThreadFactory Test";
    final String expectedNamePattern = expectedName + "-%d";
    final ThreadFactory tfactory = new ThreadFactoryBuilder()
            .setDaemon(false)
            .setNameFormat(expectedNamePattern)
            .build();
    final ScheduledExecutorService executorService = environment.scheduledExecutorService("DropWizard Service", tfactory).build();
    final Future<Boolean> isFactoryInUse = executorService.submit(() -> Thread.currentThread().getName().startsWith(expectedName));
    assertThat(isFactoryInUse.get()).isTrue();
}
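The same check works without Dropwizard's LifecycleEnvironment: submit a task that inspects its own thread name. A self-contained sketch along those lines (the scheduler and name format are illustrative):

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class NameCheckSketch {

    public static void main(String[] args) throws ExecutionException, InterruptedException {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(
                new ThreadFactoryBuilder().setNameFormat("my-scheduler-%d").build());
        Future<Boolean> named = scheduler.submit(
                () -> Thread.currentThread().getName().startsWith("my-scheduler"));
        System.out.println(named.get()); // true
        scheduler.shutdown();
    }
}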