
Example 71 with LinkedBlockingQueue

Use of java.util.concurrent.LinkedBlockingQueue in project netty by netty.

From the class SingleThreadEventLoopTest, method testScheduleTaskAtFixedRate.

private static void testScheduleTaskAtFixedRate(EventLoop loopA) throws InterruptedException {
    final Queue<Long> timestamps = new LinkedBlockingQueue<Long>();
    final int expectedTimeStamps = 5;
    final CountDownLatch allTimeStampsLatch = new CountDownLatch(expectedTimeStamps);
    ScheduledFuture<?> f = loopA.scheduleAtFixedRate(new Runnable() {

        @Override
        public void run() {
            timestamps.add(System.nanoTime());
            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
                // Ignore
            }
            allTimeStampsLatch.countDown();
        }
    }, 100, 100, TimeUnit.MILLISECONDS);
    allTimeStampsLatch.await();
    assertTrue(f.cancel(true));
    Thread.sleep(300);
    assertEquals(expectedTimeStamps, timestamps.size());
    // Check that the task ran without lag: each subsequent timestamp should land within 20 ms of a multiple of 100 ms after the first one.
    Long firstTimestamp = null;
    int cnt = 0;
    for (Long t : timestamps) {
        if (firstTimestamp == null) {
            firstTimestamp = t;
            continue;
        }
        long timepoint = t - firstTimestamp;
        assertThat(timepoint, is(greaterThanOrEqualTo(TimeUnit.MILLISECONDS.toNanos(100 * cnt + 80))));
        assertThat(timepoint, is(lessThan(TimeUnit.MILLISECONDS.toNanos(100 * (cnt + 1) + 20))));
        cnt++;
    }
}
Also used : AtomicLong(java.util.concurrent.atomic.AtomicLong) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) CountDownLatch(java.util.concurrent.CountDownLatch)
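
The pattern in this test can be reproduced without Netty. In the sketch below, a plain ScheduledExecutorService stands in for the EventLoop (an assumption of this sketch, as are the class and variable names), while the LinkedBlockingQueue serves as a thread-safe, unbounded collector shared between the scheduler thread and the main thread, and a CountDownLatch signals when enough runs have been recorded.

import java.util.Queue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

public class FixedRateTimestampSketch {
    public static void main(String[] args) throws InterruptedException {
        // The queue is used purely as a thread-safe collector: the scheduler thread
        // adds timestamps, the main thread reads them once the latch has opened.
        final Queue<Long> timestamps = new LinkedBlockingQueue<>();
        final CountDownLatch fiveRuns = new CountDownLatch(5);
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();

        ScheduledFuture<?> f = scheduler.scheduleAtFixedRate(() -> {
            timestamps.add(System.nanoTime());
            fiveRuns.countDown();
        }, 100, 100, TimeUnit.MILLISECONDS);

        fiveRuns.await();   // block until the task has run five times
        f.cancel(true);     // stop further executions
        scheduler.shutdown();

        // The gaps between consecutive timestamps should be close to the 100 ms period.
        Long previous = null;
        for (Long t : timestamps) {
            if (previous != null) {
                System.out.printf("interval: %d ms%n", TimeUnit.NANOSECONDS.toMillis(t - previous));
            }
            previous = t;
        }
    }
}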

Example 72 with LinkedBlockingQueue

Use of java.util.concurrent.LinkedBlockingQueue in project netty by netty.

From the class SingleThreadEventLoopTest, method testScheduleTaskWithFixedDelay.

private static void testScheduleTaskWithFixedDelay(EventLoop loopA) throws InterruptedException {
    final Queue<Long> timestamps = new LinkedBlockingQueue<Long>();
    final int expectedTimeStamps = 3;
    final CountDownLatch allTimeStampsLatch = new CountDownLatch(expectedTimeStamps);
    ScheduledFuture<?> f = loopA.scheduleWithFixedDelay(new Runnable() {

        @Override
        public void run() {
            timestamps.add(System.nanoTime());
            try {
                Thread.sleep(51);
            } catch (InterruptedException e) {
                // Ignore
            }
            allTimeStampsLatch.countDown();
        }
    }, 100, 100, TimeUnit.MILLISECONDS);
    allTimeStampsLatch.await();
    assertTrue(f.cancel(true));
    Thread.sleep(300);
    assertEquals(expectedTimeStamps, timestamps.size());
    // Check that the fixed delay was honoured: consecutive timestamps should be at least ~150 ms apart (100 ms delay plus ~51 ms of task run time).
    Long previousTimestamp = null;
    for (Long t : timestamps) {
        if (previousTimestamp == null) {
            previousTimestamp = t;
            continue;
        }
        assertThat(t.longValue() - previousTimestamp.longValue(), is(greaterThanOrEqualTo(TimeUnit.MILLISECONDS.toNanos(150))));
        previousTimestamp = t;
    }
}
Also used : AtomicLong(java.util.concurrent.atomic.AtomicLong) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) CountDownLatch(java.util.concurrent.CountDownLatch)
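
The 150 ms lower bound follows from the semantics of scheduleWithFixedDelay: the 100 ms delay is measured from the end of the previous run, and each run sleeps for about 51 ms, so consecutive start times should be at least roughly 151 ms apart. The LinkedBlockingQueue plays the same role as in the previous example, a thread-safe collector shared between the event-loop thread and the test thread, and the sketch after Example 71 applies unchanged apart from swapping scheduleAtFixedRate for scheduleWithFixedDelay.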

Example 73 with LinkedBlockingQueue

Use of java.util.concurrent.LinkedBlockingQueue in project druid by druid-io.

From the class DirectDruidClient, method run.

@Override
public Sequence<T> run(final Query<T> query, final Map<String, Object> context) {
    QueryToolChest<T, Query<T>> toolChest = warehouse.getToolChest(query);
    boolean isBySegment = BaseQuery.getContextBySegment(query, false);
    Pair<JavaType, JavaType> types = typesMap.get(query.getClass());
    if (types == null) {
        final TypeFactory typeFactory = objectMapper.getTypeFactory();
        JavaType baseType = typeFactory.constructType(toolChest.getResultTypeReference());
        JavaType bySegmentType = typeFactory.constructParametricType(Result.class, typeFactory.constructParametricType(BySegmentResultValueClass.class, baseType));
        types = Pair.of(baseType, bySegmentType);
        typesMap.put(query.getClass(), types);
    }
    final JavaType typeRef;
    if (isBySegment) {
        typeRef = types.rhs;
    } else {
        typeRef = types.lhs;
    }
    final ListenableFuture<InputStream> future;
    final String url = String.format("http://%s/druid/v2/", host);
    final String cancelUrl = String.format("http://%s/druid/v2/%s", host, query.getId());
    try {
        log.debug("Querying queryId[%s] url[%s]", query.getId(), url);
        final long requestStartTime = System.currentTimeMillis();
        final ServiceMetricEvent.Builder builder = toolChest.makeMetricBuilder(query);
        builder.setDimension("server", host);
        final HttpResponseHandler<InputStream, InputStream> responseHandler = new HttpResponseHandler<InputStream, InputStream>() {

            private long responseStartTime;

            private final AtomicLong byteCount = new AtomicLong(0);

            private final BlockingQueue<InputStream> queue = new LinkedBlockingQueue<>();

            private final AtomicBoolean done = new AtomicBoolean(false);

            @Override
            public ClientResponse<InputStream> handleResponse(HttpResponse response) {
                log.debug("Initial response from url[%s] for queryId[%s]", url, query.getId());
                responseStartTime = System.currentTimeMillis();
                emitter.emit(builder.build("query/node/ttfb", responseStartTime - requestStartTime));
                try {
                    final String responseContext = response.headers().get("X-Druid-Response-Context");
                    // context may be null in case of error or query timeout
                    if (responseContext != null) {
                        context.putAll(objectMapper.<Map<String, Object>>readValue(responseContext, new TypeReference<Map<String, Object>>() {
                        }));
                    }
                    queue.put(new ChannelBufferInputStream(response.getContent()));
                } catch (final IOException e) {
                    log.error(e, "Error parsing response context from url [%s]", url);
                    return ClientResponse.<InputStream>finished(new InputStream() {

                        @Override
                        public int read() throws IOException {
                            throw e;
                        }
                    });
                } catch (InterruptedException e) {
                    log.error(e, "Queue appending interrupted");
                    Thread.currentThread().interrupt();
                    throw Throwables.propagate(e);
                }
                byteCount.addAndGet(response.getContent().readableBytes());
                return ClientResponse.<InputStream>finished(new SequenceInputStream(new Enumeration<InputStream>() {

                    @Override
                    public boolean hasMoreElements() {
                        // More streams may still arrive until done is set; after that, drain whatever is left in the queue.
                        synchronized (done) {
                            return !done.get() || !queue.isEmpty();
                        }
                    }

                    @Override
                    public InputStream nextElement() {
                        try {
                            return queue.take();
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                            throw Throwables.propagate(e);
                        }
                    }
                }));
            }

            @Override
            public ClientResponse<InputStream> handleChunk(ClientResponse<InputStream> clientResponse, HttpChunk chunk) {
                final ChannelBuffer channelBuffer = chunk.getContent();
                final int bytes = channelBuffer.readableBytes();
                if (bytes > 0) {
                    try {
                        queue.put(new ChannelBufferInputStream(channelBuffer));
                    } catch (InterruptedException e) {
                        log.error(e, "Unable to put finalizing input stream into Sequence queue for url [%s]", url);
                        Thread.currentThread().interrupt();
                        throw Throwables.propagate(e);
                    }
                    byteCount.addAndGet(bytes);
                }
                return clientResponse;
            }

            @Override
            public ClientResponse<InputStream> done(ClientResponse<InputStream> clientResponse) {
                long stopTime = System.currentTimeMillis();
                log.debug("Completed queryId[%s] request to url[%s] with %,d bytes returned in %,d millis [%,f b/s].", query.getId(), url, byteCount.get(), stopTime - responseStartTime, byteCount.get() / (0.0001 * (stopTime - responseStartTime)));
                emitter.emit(builder.build("query/node/time", stopTime - requestStartTime));
                emitter.emit(builder.build("query/node/bytes", byteCount.get()));
                synchronized (done) {
                    try {
                        // An empty stream is put at the end to give SequenceInputStream.close() something to close out
                        // after done is set to true, regardless of the rest of the stream's state.
                        queue.put(ByteSource.empty().openStream());
                    } catch (InterruptedException e) {
                        log.error(e, "Unable to put finalizing input stream into Sequence queue for url [%s]", url);
                        Thread.currentThread().interrupt();
                        throw Throwables.propagate(e);
                    } catch (IOException e) {
                        // This should never happen
                        throw Throwables.propagate(e);
                    } finally {
                        done.set(true);
                    }
                }
                return ClientResponse.<InputStream>finished(clientResponse.getObj());
            }

            @Override
            public void exceptionCaught(final ClientResponse<InputStream> clientResponse, final Throwable e) {
                // Don't wait for lock in case the lock had something to do with the error
                synchronized (done) {
                    done.set(true);
                    // Make a best effort to put an error-rethrowing stream into the queue in case something is waiting on the take().
                    // If nothing is waiting on take(), this will be closed out anyway.
                    queue.offer(new InputStream() {

                        @Override
                        public int read() throws IOException {
                            throw new IOException(e);
                        }
                    });
                }
            }
        };
        future = httpClient.go(new Request(HttpMethod.POST, new URL(url)).setContent(objectMapper.writeValueAsBytes(query)).setHeader(HttpHeaders.Names.CONTENT_TYPE, isSmile ? SmileMediaTypes.APPLICATION_JACKSON_SMILE : MediaType.APPLICATION_JSON), responseHandler);
        queryWatcher.registerQuery(query, future);
        openConnections.getAndIncrement();
        Futures.addCallback(future, new FutureCallback<InputStream>() {

            @Override
            public void onSuccess(InputStream result) {
                openConnections.getAndDecrement();
            }

            @Override
            public void onFailure(Throwable t) {
                openConnections.getAndDecrement();
                if (future.isCancelled()) {
                    // forward the cancellation to underlying queriable node
                    try {
                        StatusResponseHolder res = httpClient.go(new Request(HttpMethod.DELETE, new URL(cancelUrl)).setContent(objectMapper.writeValueAsBytes(query)).setHeader(HttpHeaders.Names.CONTENT_TYPE, isSmile ? SmileMediaTypes.APPLICATION_JACKSON_SMILE : MediaType.APPLICATION_JSON), new StatusResponseHandler(Charsets.UTF_8)).get();
                        if (res.getStatus().getCode() >= 500) {
                            throw new RE("Error cancelling query[%s]: queriable node returned status[%d] [%s].", query, res.getStatus().getCode(), res.getStatus().getReasonPhrase());
                        }
                    } catch (IOException | ExecutionException | InterruptedException e) {
                        Throwables.propagate(e);
                    }
                }
            }
        });
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
    Sequence<T> retVal = new BaseSequence<>(new BaseSequence.IteratorMaker<T, JsonParserIterator<T>>() {

        @Override
        public JsonParserIterator<T> make() {
            return new JsonParserIterator<T>(typeRef, future, url);
        }

        @Override
        public void cleanup(JsonParserIterator<T> iterFromMake) {
            CloseQuietly.close(iterFromMake);
        }
    });
    // avoid the cost of de-serializing and then re-serializing again when adding to cache
    if (!isBySegment) {
        retVal = Sequences.map(retVal, toolChest.makePreComputeManipulatorFn(query, MetricManipulatorFns.deserializing()));
    }
    return retVal;
}
Also used : ClientResponse(com.metamx.http.client.response.ClientResponse) BaseQuery(io.druid.query.BaseQuery) Query(io.druid.query.Query) QueryInterruptedException(io.druid.query.QueryInterruptedException) URL(java.net.URL) ChannelBuffer(org.jboss.netty.buffer.ChannelBuffer) StatusResponseHolder(com.metamx.http.client.response.StatusResponseHolder) TypeReference(com.fasterxml.jackson.core.type.TypeReference) ChannelBufferInputStream(org.jboss.netty.buffer.ChannelBufferInputStream) BlockingQueue(java.util.concurrent.BlockingQueue) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) Enumeration(java.util.Enumeration) ChannelBufferInputStream(org.jboss.netty.buffer.ChannelBufferInputStream) SequenceInputStream(java.io.SequenceInputStream) InputStream(java.io.InputStream) Request(com.metamx.http.client.Request) BySegmentResultValueClass(io.druid.query.BySegmentResultValueClass) HttpResponse(org.jboss.netty.handler.codec.http.HttpResponse) IOException(java.io.IOException) BaseSequence(io.druid.java.util.common.guava.BaseSequence) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) JavaType(com.fasterxml.jackson.databind.JavaType) AtomicLong(java.util.concurrent.atomic.AtomicLong) RE(io.druid.java.util.common.RE) SequenceInputStream(java.io.SequenceInputStream) ServiceMetricEvent(com.metamx.emitter.service.ServiceMetricEvent) StatusResponseHandler(com.metamx.http.client.response.StatusResponseHandler) TypeFactory(com.fasterxml.jackson.databind.type.TypeFactory) HttpResponseHandler(com.metamx.http.client.response.HttpResponseHandler) HttpChunk(org.jboss.netty.handler.codec.http.HttpChunk)
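
The core trick in this handler can be shown in isolation: the I/O thread turns each response chunk into an InputStream and puts it on a LinkedBlockingQueue, while the reader consumes the chunks lazily through a SequenceInputStream whose Enumeration takes from the queue until the done flag is set and the queue has drained. Below is a minimal sketch of that producer/consumer arrangement with the HTTP plumbing replaced by plain byte arrays; the class, thread, and chunk contents are illustrative assumptions, not Druid code.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.nio.charset.StandardCharsets;
import java.util.Enumeration;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;

public class ChunkedStreamSketch {
    public static void main(String[] args) throws IOException, InterruptedException {
        final BlockingQueue<InputStream> queue = new LinkedBlockingQueue<>();
        final AtomicBoolean done = new AtomicBoolean(false);

        // Producer: stands in for handleChunk(), which enqueues one InputStream per chunk.
        Thread producer = new Thread(() -> {
            try {
                for (String chunk : new String[] { "first ", "second ", "third" }) {
                    queue.put(new ByteArrayInputStream(chunk.getBytes(StandardCharsets.UTF_8)));
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            } finally {
                // Mirrors done(): enqueue one final empty stream so a blocked take() can return,
                // and flip the flag under the same lock that hasMoreElements() synchronizes on.
                synchronized (done) {
                    queue.offer(new ByteArrayInputStream(new byte[0]));
                    done.set(true);
                }
            }
        });
        producer.start();

        // Consumer: a SequenceInputStream that stitches the queued chunks together lazily.
        InputStream stitched = new SequenceInputStream(new Enumeration<InputStream>() {

            @Override
            public boolean hasMoreElements() {
                // More chunks may still arrive until done is set; afterwards, drain what is left.
                synchronized (done) {
                    return !done.get() || !queue.isEmpty();
                }
            }

            @Override
            public InputStream nextElement() {
                try {
                    return queue.take();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
            }
        });

        StringBuilder out = new StringBuilder();
        int b;
        while ((b = stitched.read()) != -1) {
            out.append((char) b);
        }
        stitched.close();
        producer.join();
        System.out.println(out); // prints "first second third"
    }
}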

Example 74 with LinkedBlockingQueue

Use of java.util.concurrent.LinkedBlockingQueue in project druid by druid-io.

From the class IrcFirehoseFactory, method connect.

@Override
public Firehose connect(final IrcInputRowParser firehoseParser) throws IOException {
    final IRCApi irc = new IRCApiImpl(false);
    final LinkedBlockingQueue<Pair<DateTime, ChannelPrivMsg>> queue = new LinkedBlockingQueue<Pair<DateTime, ChannelPrivMsg>>();
    irc.addListener(new VariousMessageListenerAdapter() {

        @Override
        public void onChannelMessage(ChannelPrivMsg aMsg) {
            try {
                queue.put(Pair.of(DateTime.now(), aMsg));
            } catch (InterruptedException e) {
                throw new RuntimeException("interrupted adding message to queue", e);
            }
        }
    });
    log.info("connecting to irc server [%s]", host);
    irc.connect(new IServerParameters() {

        @Override
        public String getNickname() {
            return nick;
        }

        @Override
        public List<String> getAlternativeNicknames() {
            return Lists.newArrayList(nick + UUID.randomUUID(), nick + UUID.randomUUID(), nick + UUID.randomUUID());
        }

        @Override
        public String getIdent() {
            return "druid";
        }

        @Override
        public String getRealname() {
            return nick;
        }

        @Override
        public IRCServer getServer() {
            return new IRCServer(host, false);
        }
    }, new Callback<IIRCState>() {

        @Override
        public void onSuccess(IIRCState aObject) {
            log.info("irc connection to server [%s] established", host);
            for (String chan : channels) {
                log.info("Joining channel %s", chan);
                irc.joinChannel(chan);
            }
        }

        @Override
        public void onFailure(Exception e) {
            log.error(e, "Unable to connect to irc server [%s]", host);
            throw new RuntimeException("Unable to connect to server", e);
        }
    });
    closed = false;
    return new Firehose() {

        InputRow nextRow = null;

        @Override
        public boolean hasMore() {
            try {
                while (true) {
                    Pair<DateTime, ChannelPrivMsg> nextMsg = queue.poll(100, TimeUnit.MILLISECONDS);
                    if (closed) {
                        return false;
                    }
                    if (nextMsg == null) {
                        continue;
                    }
                    try {
                        nextRow = firehoseParser.parse(nextMsg);
                        if (nextRow != null) {
                            return true;
                        }
                    } catch (IllegalArgumentException iae) {
                        log.debug("ignoring invalid message in channel [%s]", nextMsg.rhs.getChannelName());
                    }
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore the interrupt flag instead of clearing it
                throw new RuntimeException("interrupted retrieving elements from queue", e);
            }
        }

        @Override
        public InputRow nextRow() {
            return nextRow;
        }

        @Override
        public Runnable commit() {
            return new Runnable() {

                @Override
                public void run() {
                    // nothing to see here
                }
            };
        }

        @Override
        public void close() throws IOException {
            try {
                log.info("disconnecting from irc server [%s]", host);
                irc.disconnect("");
            } finally {
                closed = true;
            }
        }
    };
}
Also used : VariousMessageListenerAdapter(com.ircclouds.irc.api.listeners.VariousMessageListenerAdapter) Firehose(io.druid.data.input.Firehose) IRCApiImpl(com.ircclouds.irc.api.IRCApiImpl) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) DateTime(org.joda.time.DateTime) ChannelPrivMsg(com.ircclouds.irc.api.domain.messages.ChannelPrivMsg) IOException(java.io.IOException) IRCServer(com.ircclouds.irc.api.domain.IRCServer) IRCApi(com.ircclouds.irc.api.IRCApi) IIRCState(com.ircclouds.irc.api.state.IIRCState) InputRow(io.druid.data.input.InputRow) IServerParameters(com.ircclouds.irc.api.IServerParameters) List(java.util.List) Pair(io.druid.java.util.common.Pair)
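
The hasMore() loop above uses a common consumption idiom for LinkedBlockingQueue: poll with a short timeout instead of take(), so that a shutdown flag set by another thread is noticed promptly even while the queue is empty. A reduced sketch of that idiom is below; the class, method, and message names are made up for illustration.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class PollLoopSketch {

    // In the firehose, "closed" is flipped by close(); a volatile flag plays that role here.
    private static volatile boolean closed = false;

    private static final BlockingQueue<String> queue = new LinkedBlockingQueue<>();

    /** Returns the next message, or null once the source has been closed. */
    static String nextMessage() throws InterruptedException {
        while (true) {
            // Poll with a timeout rather than take(), so the closed flag is re-checked
            // periodically and a caller is never stuck waiting behind an empty queue.
            String msg = queue.poll(100, TimeUnit.MILLISECONDS);
            if (closed) {
                return null;
            }
            if (msg != null) {
                return msg;
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        new Thread(() -> {
            try {
                queue.put("hello");
                Thread.sleep(250);
                closed = true; // simulate close() being called
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }).start();

        System.out.println(nextMessage()); // "hello"
        System.out.println(nextMessage()); // null, once the source has been closed
    }
}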

Example 75 with LinkedBlockingQueue

Use of java.util.concurrent.LinkedBlockingQueue in project hadoop by apache.

From the class TestCallQueueManager, method testSchedulerWithoutFCQ.

@Test
public void testSchedulerWithoutFCQ() throws InterruptedException {
    Configuration conf = new Configuration();
    // Test DecayedRpcScheduler without FCQ
    // Ensure the default LinkedBlockingQueue can work with DecayedRpcScheduler
    final String ns = CommonConfigurationKeys.IPC_NAMESPACE + ".0";
    final String schedulerClassName = "org.apache.hadoop.ipc.DecayRpcScheduler";
    conf.setStrings(ns + "." + CommonConfigurationKeys.IPC_SCHEDULER_IMPL_KEY, schedulerClassName);
    Class<? extends BlockingQueue<FakeCall>> queue = (Class<? extends BlockingQueue<FakeCall>>) getQueueClass(ns, conf);
    assertTrue(queue.getCanonicalName().equals("java.util.concurrent.LinkedBlockingQueue"));
    manager = new CallQueueManager<FakeCall>(queue, Server.getSchedulerClass(ns, conf), false, 3, "", conf);
    // LinkedBlockingQueue with a capacity of 3 can put 3 calls
    assertCanPut(manager, 3, 3);
    // LinkedBlockingQueue with a capacity of 3 can't put 1 more call
    assertCanPut(manager, 0, 1);
}
Also used : BlockingQueue(java.util.concurrent.BlockingQueue) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) Configuration(org.apache.hadoop.conf.Configuration) Test(org.junit.Test)
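
This test depends on the LinkedBlockingQueue becoming bounded when CallQueueManager constructs it with an explicit capacity. A minimal sketch of that capacity behaviour outside of Hadoop is shown below; the class and variable names are illustrative.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class BoundedQueueSketch {
    public static void main(String[] args) {
        // A LinkedBlockingQueue built with an explicit capacity is bounded;
        // the no-arg constructor would create an effectively unbounded queue instead.
        BlockingQueue<String> calls = new LinkedBlockingQueue<>(3);

        System.out.println(calls.offer("call-1")); // true
        System.out.println(calls.offer("call-2")); // true
        System.out.println(calls.offer("call-3")); // true
        System.out.println(calls.offer("call-4")); // false: the queue is full and offer() does not block

        // put() on a full bounded queue blocks until space frees up, which is why the
        // test above expects zero of the additional put attempts to complete.
    }
}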

Aggregations

LinkedBlockingQueue (java.util.concurrent.LinkedBlockingQueue): 259
Test (org.junit.Test): 91
ThreadPoolExecutor (java.util.concurrent.ThreadPoolExecutor): 64
IOException (java.io.IOException): 26
ArrayList (java.util.ArrayList): 23
Emitter (io.socket.emitter.Emitter): 19
JSONObject (org.json.JSONObject): 19
CountDownLatch (java.util.concurrent.CountDownLatch): 18
ThreadFactory (java.util.concurrent.ThreadFactory): 16
ExecutorService (java.util.concurrent.ExecutorService): 14
BlockingQueue (java.util.concurrent.BlockingQueue): 13
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 13
List (java.util.List): 12
URI (java.net.URI): 11
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 11
Intent (android.content.Intent): 9
HashMap (java.util.HashMap): 9
ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder): 8
Map (java.util.Map): 8
UUID (java.util.UUID): 8