
Example 86 with ExecutionException

Use of java.util.concurrent.ExecutionException in project druid by druid-io, in the class ChainedExecutionQueryRunnerTest, method testQueryTimeout:

@Test(timeout = 60000)
public void testQueryTimeout() throws Exception {
    ExecutorService exec = PrioritizedExecutorService.create(new Lifecycle(), new DruidProcessingConfig() {

        @Override
        public String getFormatString() {
            return "test";
        }

        @Override
        public int getNumThreads() {
            return 2;
        }
    });
    final CountDownLatch queriesStarted = new CountDownLatch(2);
    final CountDownLatch queriesInterrupted = new CountDownLatch(2);
    final CountDownLatch queryIsRegistered = new CountDownLatch(1);
    Capture<ListenableFuture> capturedFuture = new Capture<>();
    QueryWatcher watcher = EasyMock.createStrictMock(QueryWatcher.class);
    watcher.registerQuery(EasyMock.<Query>anyObject(), EasyMock.and(EasyMock.<ListenableFuture>anyObject(), EasyMock.capture(capturedFuture)));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {

        @Override
        public Void answer() throws Throwable {
            queryIsRegistered.countDown();
            return null;
        }
    }).once();
    EasyMock.replay(watcher);
    ArrayBlockingQueue<DyingQueryRunner> interrupted = new ArrayBlockingQueue<>(3);
    Set<DyingQueryRunner> runners = Sets.newHashSet(
        new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
        new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
        new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted)
    );
    ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(
        exec,
        watcher,
        Lists.<QueryRunner<Integer>>newArrayList(runners)
    );
    HashMap<String, Object> context = new HashMap<>();
    final Sequence seq = chainedRunner.run(
        Druids.newTimeseriesQueryBuilder()
              .dataSource("test")
              .intervals("2014/2015")
              .aggregators(Lists.<AggregatorFactory>newArrayList(new CountAggregatorFactory("count")))
              .context(ImmutableMap.<String, Object>of(QueryContextKeys.TIMEOUT, 100, "queryId", "test"))
              .build(),
        context
    );
    Future resultFuture = Executors.newFixedThreadPool(1).submit(new Runnable() {

        @Override
        public void run() {
            Sequences.toList(seq, Lists.newArrayList());
        }
    });
    // wait for query to register and start
    queryIsRegistered.await();
    queriesStarted.await();
    Assert.assertTrue(capturedFuture.hasCaptured());
    ListenableFuture future = capturedFuture.getValue();
    // wait for query to time out
    QueryInterruptedException cause = null;
    try {
        resultFuture.get();
    } catch (ExecutionException e) {
        Assert.assertTrue(e.getCause() instanceof QueryInterruptedException);
        Assert.assertEquals("Query timeout", ((QueryInterruptedException) e.getCause()).getErrorCode());
        cause = (QueryInterruptedException) e.getCause();
    }
    queriesInterrupted.await();
    Assert.assertNotNull(cause);
    Assert.assertTrue(future.isCancelled());
    DyingQueryRunner interrupted1 = interrupted.poll();
    synchronized (interrupted1) {
        Assert.assertTrue("runner 1 started", interrupted1.hasStarted);
        Assert.assertTrue("runner 1 interrupted", interrupted1.interrupted);
    }
    DyingQueryRunner interrupted2 = interrupted.poll();
    synchronized (interrupted2) {
        Assert.assertTrue("runner 2 started", interrupted2.hasStarted);
        Assert.assertTrue("runner 2 interrupted", interrupted2.interrupted);
    }
    runners.remove(interrupted1);
    runners.remove(interrupted2);
    DyingQueryRunner remainingRunner = runners.iterator().next();
    synchronized (remainingRunner) {
        Assert.assertTrue("runner 3 should be interrupted or not have started", !remainingRunner.hasStarted || remainingRunner.interrupted);
    }
    Assert.assertFalse("runner 1 not completed", interrupted1.hasCompleted);
    Assert.assertFalse("runner 2 not completed", interrupted2.hasCompleted);
    Assert.assertFalse("runner 3 not completed", remainingRunner.hasCompleted);
    EasyMock.verify(watcher);
}
Also used : HashMap(java.util.HashMap) Capture(org.easymock.Capture) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) ExecutionException(java.util.concurrent.ExecutionException) Lifecycle(io.druid.java.util.common.lifecycle.Lifecycle) Sequence(io.druid.java.util.common.guava.Sequence) CountDownLatch(java.util.concurrent.CountDownLatch) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) IAnswer(org.easymock.IAnswer) ExecutorService(java.util.concurrent.ExecutorService) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) Future(java.util.concurrent.Future) Test(org.junit.Test)
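
The core mechanism this test exercises is worth isolating: an exception thrown inside a submitted task does not surface at submit time but on Future.get(), wrapped in an ExecutionException whose getCause() is the original failure. Below is a minimal self-contained sketch of that pattern, not druid code; the IllegalStateException stands in for QueryInterruptedException.

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class UnwrapCauseSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService exec = Executors.newSingleThreadExecutor();
        // The task fails; nothing is thrown at submit time.
        Future<Void> future = exec.submit(new Callable<Void>() {
            @Override
            public Void call() {
                throw new IllegalStateException("simulated query timeout");
            }
        });
        try {
            future.get();
        } catch (ExecutionException e) {
            // get() wraps the task's failure; the original exception is the
            // cause, which is exactly what the test above asserts on.
            System.out.println("cause: " + e.getCause());
        } finally {
            exec.shutdown();
        }
    }
}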

Example 87 with ExecutionException

Use of java.util.concurrent.ExecutionException in project druid by druid-io, in the class KafkaLookupExtractorFactory, method start:

@Override
public boolean start() {
    synchronized (started) {
        if (started.get()) {
            LOG.warn("Already started, not starting again");
            return started.get();
        }
        if (executorService.isShutdown()) {
            LOG.warn("Already shut down, not starting again");
            return false;
        }
        final Properties kafkaProperties = new Properties();
        kafkaProperties.putAll(getKafkaProperties());
        if (kafkaProperties.containsKey("group.id")) {
            throw new IAE("Cannot set kafka property [group.id]. Property is randomly generated for you. Found [%s]", kafkaProperties.getProperty("group.id"));
        }
        if (kafkaProperties.containsKey("auto.offset.reset")) {
            throw new IAE("Cannot set kafka property [auto.offset.reset]. Property will be forced to [smallest]. Found [%s]", kafkaProperties.getProperty("auto.offset.reset"));
        }
        Preconditions.checkNotNull(kafkaProperties.getProperty("zookeeper.connect"), "zookeeper.connect required property");
        kafkaProperties.setProperty("group.id", factoryId);
        final String topic = getKafkaTopic();
        LOG.debug("About to listen to topic [%s] with group.id [%s]", topic, factoryId);
        cacheHandler = cacheManager.createCache();
        final Map<String, String> map = cacheHandler.getCache();
        mapRef.set(map);
        // Enable publish-subscribe
        kafkaProperties.setProperty("auto.offset.reset", "smallest");
        final CountDownLatch startingReads = new CountDownLatch(1);
        final ListenableFuture<?> future = executorService.submit(new Runnable() {

            @Override
            public void run() {
                while (!executorService.isShutdown()) {
                    consumerConnector = buildConnector(kafkaProperties);
                    try {
                        if (executorService.isShutdown()) {
                            break;
                        }
                        final List<KafkaStream<String, String>> streams = consumerConnector.createMessageStreamsByFilter(
                            new Whitelist(Pattern.quote(topic)),
                            1,
                            DEFAULT_STRING_DECODER,
                            DEFAULT_STRING_DECODER
                        );
                        if (streams == null || streams.isEmpty()) {
                            throw new IAE("Topic [%s] had no streams", topic);
                        }
                        if (streams.size() > 1) {
                            throw new ISE("Topic [%s] has %d streams! expected 1", topic, streams.size());
                        }
                        final KafkaStream<String, String> kafkaStream = streams.get(0);
                        startingReads.countDown();
                        for (final MessageAndMetadata<String, String> messageAndMetadata : kafkaStream) {
                            final String key = messageAndMetadata.key();
                            final String message = messageAndMetadata.message();
                            if (key == null || message == null) {
                                LOG.error("Bad key/message from topic [%s]: [%s]", topic, messageAndMetadata);
                                continue;
                            }
                            doubleEventCount.incrementAndGet();
                            map.put(key, message);
                            doubleEventCount.incrementAndGet();
                            LOG.trace("Placed key[%s] val[%s]", key, message);
                        }
                    } catch (Exception e) {
                        LOG.error(e, "Error reading stream for topic [%s]", topic);
                    } finally {
                        consumerConnector.shutdown();
                    }
                }
            }
        });
        Futures.addCallback(future, new FutureCallback<Object>() {

            @Override
            public void onSuccess(Object result) {
                LOG.debug("Success listening to [%s]", topic);
            }

            @Override
            public void onFailure(Throwable t) {
                if (t instanceof CancellationException) {
                    LOG.debug("Topic [%s] cancelled", topic);
                } else {
                    LOG.error(t, "Error in listening to [%s]", topic);
                }
            }
        }, MoreExecutors.sameThreadExecutor());
        this.future = future;
        final Stopwatch stopwatch = Stopwatch.createStarted();
        try {
            while (!startingReads.await(100, TimeUnit.MILLISECONDS) && connectTimeout > 0L) {
                // Don't return until we have actually connected
                if (future.isDone()) {
                    future.get();
                } else {
                    if (stopwatch.elapsed(TimeUnit.MILLISECONDS) > connectTimeout) {
                        throw new TimeoutException("Failed to connect to kafka in sufficient time");
                    }
                }
            }
        } catch (InterruptedException | ExecutionException | TimeoutException e) {
            executorService.shutdown();
            if (!future.isDone() && !future.cancel(false)) {
                LOG.warn("Could not cancel kafka listening thread");
            }
            LOG.error(e, "Failed to start kafka extraction factory");
            cacheHandler.close();
            return false;
        }
        started.set(true);
        return true;
    }
}
Also used : MessageAndMetadata(kafka.message.MessageAndMetadata) Stopwatch(com.google.common.base.Stopwatch) KafkaStream(kafka.consumer.KafkaStream) Properties(java.util.Properties) IAE(io.druid.java.util.common.IAE) CountDownLatch(java.util.concurrent.CountDownLatch) TimeoutException(java.util.concurrent.TimeoutException) CancellationException(java.util.concurrent.CancellationException) ExecutionException(java.util.concurrent.ExecutionException) Whitelist(kafka.consumer.Whitelist) List(java.util.List) ISE(io.druid.java.util.common.ISE)
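
The connect loop at the end of start() is a reusable shape: poll a readiness latch, rethrow worker failures via future.get(), and enforce an overall deadline. A minimal sketch of the same shape under illustrative assumptions: workerReady and connectTimeoutMs are made-up names, and System.nanoTime() replaces Guava's Stopwatch.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class ConnectWaitSketch {
    public static void main(String[] args) {
        final CountDownLatch workerReady = new CountDownLatch(1);
        ExecutorService exec = Executors.newSingleThreadExecutor();
        Future<?> future = exec.submit(new Runnable() {
            @Override
            public void run() {
                // Simulate connection setup, then signal readiness.
                workerReady.countDown();
            }
        });
        final long connectTimeoutMs = 5000L;
        final long startNanos = System.nanoTime();
        try {
            // Poll the latch so that worker failures and the overall deadline
            // are both observed while waiting, as in start() above.
            while (!workerReady.await(100, TimeUnit.MILLISECONDS)) {
                if (future.isDone()) {
                    future.get(); // rethrows a worker failure as ExecutionException
                    throw new TimeoutException("worker exited without becoming ready");
                }
                if (TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos) > connectTimeoutMs) {
                    throw new TimeoutException("failed to connect in sufficient time");
                }
            }
            System.out.println("connected");
        } catch (InterruptedException | ExecutionException | TimeoutException e) {
            future.cancel(false);
            System.err.println("failed to start: " + e);
        } finally {
            exec.shutdown();
        }
    }
}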

Example 88 with ExecutionException

Use of java.util.concurrent.ExecutionException in project druid by druid-io, in the class OffHeapLoadingCache, method get:

@Override
public V get(K key, final Callable<? extends V> valueLoader) throws ExecutionException {
    synchronized (key) {
        V value = cache.get(key);
        if (value != null) {
            return value;
        }
        try {
            value = valueLoader.call();
            cache.put(key, value);
            return value;
        } catch (Exception e) {
            throw new ISE(e, "got an exception while loading key [%s]", key);
        }
    }
}
Also used : ISE(io.druid.java.util.common.ISE) ExecutionException(java.util.concurrent.ExecutionException)
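
The method above is the classic get-or-load shape. Here is a minimal standalone sketch of the same shape, assuming a ConcurrentHashMap as the backing store instead of druid's off-heap cache; names are illustrative. Note that the druid version serializes loads by synchronizing on the caller-supplied key, which relies on callers passing canonical (e.g. interned) key instances.

import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;

public class GetOrLoadSketch<K, V> {
    private final ConcurrentHashMap<K, V> cache = new ConcurrentHashMap<>();

    // Return a cached value if present; otherwise invoke the loader and cache
    // its result. Loader failures are wrapped in ExecutionException, matching
    // the usual LoadingCache contract (the druid version wraps in ISE instead).
    public V get(K key, Callable<? extends V> valueLoader) throws ExecutionException {
        V value = cache.get(key);
        if (value != null) {
            return value;
        }
        try {
            value = valueLoader.call();
            cache.put(key, value);
            return value;
        } catch (Exception e) {
            throw new ExecutionException("failed to load key [" + key + "]", e);
        }
    }
}

Two concurrent callers missing the same key may both run the loader in this sketch; computeIfAbsent would serialize them, at the cost of holding a map bin lock for the duration of the load.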

Example 89 with ExecutionException

Use of java.util.concurrent.ExecutionException in project druid by druid-io, in the class LoadingCacheTest, method testInvalidateAll:

@Test
public void testInvalidateAll() throws ExecutionException {
    loadingCache.get("key2", new Callable() {

        @Override
        public Object call() throws Exception {
            return "value2";
        }
    });
    Assert.assertEquals("value2", loadingCache.getIfPresent("key2"));
    loadingCache.invalidateAll(Lists.newArrayList("key2"));
    Assert.assertNull(loadingCache.getIfPresent("key2"));
}
Also used : Callable(java.util.concurrent.Callable) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) Test(org.junit.Test)
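
The test targets druid's own LoadingCache implementations, but the contract it checks (get with a loader, then invalidateAll removes the entry) matches Guava's Cache API. A minimal sketch of the same behavior against Guava, assuming Guava is on the classpath:

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.Arrays;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;

public class InvalidateAllSketch {
    public static void main(String[] args) throws ExecutionException {
        Cache<String, Object> cache = CacheBuilder.newBuilder().build();
        // get(key, loader) loads and caches on a miss; loader failures surface
        // as ExecutionException, hence the throws clause on main.
        cache.get("key2", new Callable<Object>() {
            @Override
            public Object call() {
                return "value2";
            }
        });
        System.out.println(cache.getIfPresent("key2")); // value2
        cache.invalidateAll(Arrays.asList("key2"));
        System.out.println(cache.getIfPresent("key2")); // null
    }
}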

Example 90 with ExecutionException

Use of java.util.concurrent.ExecutionException in project futuroid by clemp6r, in the class AsyncTest, method shouldReturnImmediateFailure:

@Test
public void shouldReturnImmediateFailure() {
    Exception exception = new Exception("an exception");
    Future<Object> future = Async.immediateFail(exception);
    assertTrue(future.isDone());
    try {
        future.get();
        fail();
    } catch (InterruptedException e) {
        fail();
    } catch (ExecutionException e) {
        assertEquals(exception, e.getCause());
    }
}
Also used : ExecutionException(java.util.concurrent.ExecutionException) Test(org.junit.Test)
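
futuroid's Async.immediateFail is project-specific, but the contract under test is the general one for pre-failed futures. A minimal sketch of the same behavior using Guava's Futures.immediateFailedFuture:

import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.concurrent.ExecutionException;

public class ImmediateFailureSketch {
    public static void main(String[] args) throws InterruptedException {
        Exception exception = new Exception("an exception");
        // A pre-failed future: done immediately, and get() throws an
        // ExecutionException wrapping the exact exception passed in.
        ListenableFuture<Object> future = Futures.immediateFailedFuture(exception);
        System.out.println(future.isDone()); // true
        try {
            future.get();
        } catch (ExecutionException e) {
            System.out.println(e.getCause() == exception); // true
        }
    }
}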

Aggregations

Classes most often used together with ExecutionException across the indexed sources, with usage counts:

ExecutionException (java.util.concurrent.ExecutionException): 1341
IOException (java.io.IOException): 367
Test (org.junit.Test): 335
TimeoutException (java.util.concurrent.TimeoutException): 258
ArrayList (java.util.ArrayList): 237
Future (java.util.concurrent.Future): 218
ExecutorService (java.util.concurrent.ExecutorService): 152
CountDownLatch (java.util.concurrent.CountDownLatch): 103
List (java.util.List): 98
CancellationException (java.util.concurrent.CancellationException): 98
Callable (java.util.concurrent.Callable): 97
Test (org.testng.annotations.Test): 78
HashMap (java.util.HashMap): 69
Map (java.util.Map): 65
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 64
RejectedExecutionException (java.util.concurrent.RejectedExecutionException): 63
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 56
ParallelTest (com.hazelcast.test.annotation.ParallelTest): 47
QuickTest (com.hazelcast.test.annotation.QuickTest): 47
UncheckedExecutionException (com.google.common.util.concurrent.UncheckedExecutionException): 46