Search in sources :

Example 1 with ParameterizedMessage

use of org.apache.logging.log4j.message.ParameterizedMessage in project elasticsearch by elastic.

the class RemoteScrollableHitSource method execute.

private <T> void execute(String method, String uri, Map<String, String> params, HttpEntity entity, BiFunction<XContentParser, XContentType, T> parser, Consumer<? super T> listener) {
    // Preserve the thread context so headers survive after the call
    java.util.function.Supplier<ThreadContext.StoredContext> contextSupplier = threadPool.getThreadContext().newRestorableContext(true);
    class RetryHelper extends AbstractRunnable {

        private final Iterator<TimeValue> retries = backoffPolicy.iterator();

        @Override
        protected void doRun() throws Exception {
            client.performRequestAsync(method, uri, params, entity, new ResponseListener() {

                @Override
                public void onSuccess(org.elasticsearch.client.Response response) {
                    // Restore the thread context to get the precious headers
                    try (ThreadContext.StoredContext ctx = contextSupplier.get()) {
                        // eliminates compiler warning
                        assert ctx != null;
                        T parsedResponse;
                        try {
                            HttpEntity responseEntity = response.getEntity();
                            InputStream content = responseEntity.getContent();
                            XContentType xContentType = null;
                            if (responseEntity.getContentType() != null) {
                                final String mimeType = ContentType.parse(responseEntity.getContentType().getValue()).getMimeType();
                                xContentType = XContentType.fromMediaType(mimeType);
                            }
                            if (xContentType == null) {
                                try {
                                    throw new ElasticsearchException("Response didn't include Content-Type: " + bodyMessage(response.getEntity()));
                                } catch (IOException e) {
                                    ElasticsearchException ee = new ElasticsearchException("Error extracting body from response");
                                    ee.addSuppressed(e);
                                    throw ee;
                                }
                            }
                            // EMPTY is safe here because we don't call namedObject
                            try (XContentParser xContentParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, content)) {
                                parsedResponse = parser.apply(xContentParser, xContentType);
                            } catch (ParsingException e) {
                                /* Because we're streaming the response we can't get a copy of it here. The best we can do is hint that it
                                 * is totally wrong and we're probably not talking to Elasticsearch. */
                                throw new ElasticsearchException("Error parsing the response, remote is likely not an Elasticsearch instance", e);
                            }
                        } catch (IOException e) {
                            throw new ElasticsearchException("Error deserializing response, remote is likely not an Elasticsearch instance", e);
                        }
                        listener.accept(parsedResponse);
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    try (ThreadContext.StoredContext ctx = contextSupplier.get()) {
                        // eliminates compiler warning
                        assert ctx != null;
                        if (e instanceof ResponseException) {
                            ResponseException re = (ResponseException) e;
                            if (RestStatus.TOO_MANY_REQUESTS.getStatus() == re.getResponse().getStatusLine().getStatusCode()) {
                                if (retries.hasNext()) {
                                    TimeValue delay = retries.next();
                                    logger.trace((Supplier<?>) () -> new ParameterizedMessage("retrying rejected search after [{}]", delay), e);
                                    countSearchRetry.run();
                                    threadPool.schedule(delay, ThreadPool.Names.SAME, RetryHelper.this);
                                    return;
                                }
                            }
                            e = wrapExceptionToPreserveStatus(re.getResponse().getStatusLine().getStatusCode(), re.getResponse().getEntity(), re);
                        } else if (e instanceof ContentTooLongException) {
                            e = new IllegalArgumentException("Remote responded with a chunk that was too large. Use a smaller batch size.", e);
                        }
                        fail.accept(e);
                    }
                }
            });
        }

        @Override
        public void onFailure(Exception t) {
            fail.accept(t);
        }
    }
    new RetryHelper().run();
}
Also used : AbstractRunnable(org.elasticsearch.common.util.concurrent.AbstractRunnable) HttpEntity(org.apache.http.HttpEntity) ResponseException(org.elasticsearch.client.ResponseException) ElasticsearchException(org.elasticsearch.ElasticsearchException) XContentType(org.elasticsearch.common.xcontent.XContentType) ParsingException(org.elasticsearch.common.ParsingException) Iterator(java.util.Iterator) Supplier(org.apache.logging.log4j.util.Supplier) TimeValue(org.elasticsearch.common.unit.TimeValue) InputStream(java.io.InputStream) ContentTooLongException(org.apache.http.ContentTooLongException) ResponseListener(org.elasticsearch.client.ResponseListener) IOException(java.io.IOException) ElasticsearchException(org.elasticsearch.ElasticsearchException) ResponseException(org.elasticsearch.client.ResponseException) ContentTooLongException(org.apache.http.ContentTooLongException) ParsingException(org.elasticsearch.common.ParsingException) ElasticsearchStatusException(org.elasticsearch.ElasticsearchStatusException) IOException(java.io.IOException) ParameterizedMessage(org.apache.logging.log4j.message.ParameterizedMessage) XContentParser(org.elasticsearch.common.xcontent.XContentParser)
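The core idiom in this example is the trace call in the retry branch: the logger is handed a Supplier, so the ParameterizedMessage and its string formatting are only built if TRACE is actually enabled. A minimal, hypothetical sketch of that idiom, assuming nothing beyond a plain log4j 2 Logger (the class and method names below are invented for illustration):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;

class LazyTraceSketch {

    private static final Logger logger = LogManager.getLogger(LazyTraceSketch.class);

    void onRejectedSearch(long delayMillis, Exception cause) {
        // The cast selects the Supplier overload of trace(); the lambda body,
        // including the ParameterizedMessage construction, runs only when
        // TRACE is enabled for this logger.
        logger.trace((Supplier<?>) () -> new ParameterizedMessage("retrying rejected search after [{}]", delayMillis), cause);
    }
}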

Example 2 with ParameterizedMessage

use of org.apache.logging.log4j.message.ParameterizedMessage in project elasticsearch by elastic.

the class AwsEc2UnicastHostsProvider method fetchDynamicNodes.

protected List<DiscoveryNode> fetchDynamicNodes() {
    List<DiscoveryNode> discoNodes = new ArrayList<>();
    DescribeInstancesResult descInstances;
    try {
        // Query EC2 API based on AZ, instance state, and tag.
        // NOTE: we don't filter by security group during the describe instances request for two reasons:
        // 1. differences in VPCs require different parameters during query (ID vs Name)
        // 2. We want to use two different strategies: (all security groups vs. any security groups)
        descInstances = SocketAccess.doPrivileged(() -> client.describeInstances(buildDescribeInstancesRequest()));
    } catch (AmazonClientException e) {
        logger.info("Exception while retrieving instance list from AWS API: {}", e.getMessage());
        logger.debug("Full exception:", e);
        return discoNodes;
    }
    logger.trace("building dynamic unicast discovery nodes...");
    for (Reservation reservation : descInstances.getReservations()) {
        for (Instance instance : reservation.getInstances()) {
            // let's see if we can filter based on groups
            if (!groups.isEmpty()) {
                List<GroupIdentifier> instanceSecurityGroups = instance.getSecurityGroups();
                ArrayList<String> securityGroupNames = new ArrayList<String>();
                ArrayList<String> securityGroupIds = new ArrayList<String>();
                for (GroupIdentifier sg : instanceSecurityGroups) {
                    securityGroupNames.add(sg.getGroupName());
                    securityGroupIds.add(sg.getGroupId());
                }
                if (bindAnyGroup) {
                    // We check if we can find at least one group name or one group id in groups.
                    if (disjoint(securityGroupNames, groups) && disjoint(securityGroupIds, groups)) {
                        logger.trace("filtering out instance {} based on groups {}, not part of {}", instance.getInstanceId(), instanceSecurityGroups, groups);
                        // continue to the next instance
                        continue;
                    }
                } else {
                    // We need to match all group names or group ids, otherwise we ignore this instance
                    if (!(securityGroupNames.containsAll(groups) || securityGroupIds.containsAll(groups))) {
                        logger.trace("filtering out instance {} based on groups {}, does not include all of {}", instance.getInstanceId(), instanceSecurityGroups, groups);
                        // continue to the next instance
                        continue;
                    }
                }
            }
            String address = null;
            if (hostType.equals(PRIVATE_DNS)) {
                address = instance.getPrivateDnsName();
            } else if (hostType.equals(PRIVATE_IP)) {
                address = instance.getPrivateIpAddress();
            } else if (hostType.equals(PUBLIC_DNS)) {
                address = instance.getPublicDnsName();
            } else if (hostType.equals(PUBLIC_IP)) {
                address = instance.getPublicIpAddress();
            } else if (hostType.startsWith(TAG_PREFIX)) {
                // Reading the node host from its metadata
                String tagName = hostType.substring(TAG_PREFIX.length());
                logger.debug("reading hostname from [{}] instance tag", tagName);
                List<Tag> tags = instance.getTags();
                for (Tag tag : tags) {
                    if (tag.getKey().equals(tagName)) {
                        address = tag.getValue();
                        logger.debug("using [{}] as the instance address", address);
                    }
                }
            } else {
                throw new IllegalArgumentException(hostType + " is unknown for discovery.ec2.host_type");
            }
            if (address != null) {
                try {
                    // we only limit to 1 port per address, makes no sense to ping 100 ports
                    TransportAddress[] addresses = transportService.addressesFromString(address, 1);
                    for (int i = 0; i < addresses.length; i++) {
                        logger.trace("adding {}, address {}, transport_address {}", instance.getInstanceId(), address, addresses[i]);
                        discoNodes.add(new DiscoveryNode(instance.getInstanceId(), "#cloud-" + instance.getInstanceId() + "-" + i, addresses[i], emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion()));
                    }
                } catch (Exception e) {
                    final String finalAddress = address;
                    logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to add {}, address {}", instance.getInstanceId(), finalAddress), e);
                }
            } else {
                logger.trace("not adding {}, address is null, host_type {}", instance.getInstanceId(), hostType);
            }
        }
    }
    logger.debug("using dynamic discovery nodes {}", discoNodes);
    return discoNodes;
}
Also used : DiscoveryNode(org.elasticsearch.cluster.node.DiscoveryNode) Instance(com.amazonaws.services.ec2.model.Instance) TransportAddress(org.elasticsearch.common.transport.TransportAddress) AmazonClientException(com.amazonaws.AmazonClientException) ArrayList(java.util.ArrayList) Collections.disjoint(java.util.Collections.disjoint) AmazonClientException(com.amazonaws.AmazonClientException) GroupIdentifier(com.amazonaws.services.ec2.model.GroupIdentifier) DescribeInstancesResult(com.amazonaws.services.ec2.model.DescribeInstancesResult) Reservation(com.amazonaws.services.ec2.model.Reservation) Supplier(org.apache.logging.log4j.util.Supplier) ParameterizedMessage(org.apache.logging.log4j.message.ParameterizedMessage) Tag(com.amazonaws.services.ec2.model.Tag)
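One detail worth noting in the warn call above: address is reassigned inside the loop, so it is not effectively final and cannot be captured by the message lambda directly, which is why the code copies it into finalAddress first. A hypothetical stand-alone sketch of the same pattern (the instance id and address value are invented for illustration):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;

class CaptureSketch {

    private static final Logger logger = LogManager.getLogger(CaptureSketch.class);

    void reportFailure(String instanceId, Exception e) {
        String address = null;
        address = "10.0.0.1"; // reassigned, so 'address' is not effectively final
        // Copy into a final local so the deferred message lambda may capture it.
        final String finalAddress = address;
        logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to add {}, address {}", instanceId, finalAddress), e);
    }
}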

Example 3 with ParameterizedMessage

use of org.apache.logging.log4j.message.ParameterizedMessage in project elasticsearch by elastic.

the class FileBasedUnicastHostsProvider method buildDynamicNodes.

@Override
public List<DiscoveryNode> buildDynamicNodes() {
    List<String> hostsList;
    try (Stream<String> lines = Files.lines(unicastHostsFilePath)) {
        // lines starting with `#` are comments
        hostsList = lines.filter(line -> line.startsWith("#") == false).collect(Collectors.toList());
    } catch (FileNotFoundException | NoSuchFileException e) {
        logger.warn((Supplier<?>) () -> new ParameterizedMessage("[discovery-file] Failed to find unicast hosts file [{}]", unicastHostsFilePath), e);
        hostsList = Collections.emptyList();
    } catch (IOException e) {
        logger.warn((Supplier<?>) () -> new ParameterizedMessage("[discovery-file] Error reading unicast hosts file [{}]", unicastHostsFilePath), e);
        hostsList = Collections.emptyList();
    }
    final List<DiscoveryNode> discoNodes = new ArrayList<>();
    try {
        discoNodes.addAll(resolveHostsLists(executorService, logger, hostsList, 1, transportService, UNICAST_HOST_PREFIX, resolveTimeout));
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
    logger.debug("[discovery-file] Using dynamic discovery nodes {}", discoNodes);
    return discoNodes;
}
Also used : DiscoveryNode(org.elasticsearch.cluster.node.DiscoveryNode) FileNotFoundException(java.io.FileNotFoundException) NoSuchFileException(java.nio.file.NoSuchFileException) ArrayList(java.util.ArrayList) IOException(java.io.IOException) Supplier(org.apache.logging.log4j.util.Supplier) ParameterizedMessage(org.apache.logging.log4j.message.ParameterizedMessage)
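Both warn calls above rely on ParameterizedMessage substituting its {} placeholders in order when the message is eventually rendered. A small hypothetical sketch showing that substitution directly (the file path is an invented example value, not the plugin's actual default):

import org.apache.logging.log4j.message.ParameterizedMessage;

class PlaceholderSketch {

    public static void main(String[] args) {
        // Formatting is deferred until getFormattedMessage() is called,
        // which in normal logging happens inside the appender.
        ParameterizedMessage msg = new ParameterizedMessage(
                "[discovery-file] Failed to find unicast hosts file [{}]",
                "/tmp/example/unicast_hosts.txt");
        // Prints: [discovery-file] Failed to find unicast hosts file [/tmp/example/unicast_hosts.txt]
        System.out.println(msg.getFormattedMessage());
    }
}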

Example 4 with ParameterizedMessage

use of org.apache.logging.log4j.message.ParameterizedMessage in project elasticsearch by elastic.

the class AbstractSimpleTransportTestCase method testConcurrentSendRespondAndDisconnect.

public void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierException, InterruptedException {
    Set<Exception> sendingErrors = ConcurrentCollections.newConcurrentSet();
    Set<Exception> responseErrors = ConcurrentCollections.newConcurrentSet();
    serviceA.registerRequestHandler("test", TestRequest::new, randomBoolean() ? ThreadPool.Names.SAME : ThreadPool.Names.GENERIC, (request, channel) -> {
        try {
            channel.sendResponse(new TestResponse());
        } catch (Exception e) {
            logger.info("caught exception while responding", e);
            responseErrors.add(e);
        }
    });
    final TransportRequestHandler<TestRequest> ignoringRequestHandler = (request, channel) -> {
        try {
            channel.sendResponse(new TestResponse());
        } catch (Exception e) {
            // we don't really care what's going on on B, we're testing through A
            logger.trace("caught exception while responding from node B", e);
        }
    };
    serviceB.registerRequestHandler("test", TestRequest::new, ThreadPool.Names.SAME, ignoringRequestHandler);
    int halfSenders = scaledRandomIntBetween(3, 10);
    final CyclicBarrier go = new CyclicBarrier(halfSenders * 2 + 1);
    final CountDownLatch done = new CountDownLatch(halfSenders * 2);
    for (int i = 0; i < halfSenders; i++) {
        // B senders just generate activity so serviceA can respond; we don't test what's going on there
        final int sender = i;
        threadPool.executor(ThreadPool.Names.GENERIC).execute(new AbstractRunnable() {

            @Override
            public void onFailure(Exception e) {
                logger.trace("caught exception while sending from B", e);
            }

            @Override
            protected void doRun() throws Exception {
                go.await();
                for (int iter = 0; iter < 10; iter++) {
                    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
                    final String info = sender + "_B_" + iter;
                    serviceB.sendRequest(nodeA, "test", new TestRequest(info), new ActionListenerResponseHandler<>(listener, TestResponse::new));
                    try {
                        listener.actionGet();
                    } catch (Exception e) {
                        logger.trace((Supplier<?>) () -> new ParameterizedMessage("caught exception while sending to node {}", nodeA), e);
                    }
                }
            }

            @Override
            public void onAfter() {
                done.countDown();
            }
        });
    }
    for (int i = 0; i < halfSenders; i++) {
        final int sender = i;
        threadPool.executor(ThreadPool.Names.GENERIC).execute(new AbstractRunnable() {

            @Override
            public void onFailure(Exception e) {
                logger.error("unexpected error", e);
                sendingErrors.add(e);
            }

            @Override
            protected void doRun() throws Exception {
                go.await();
                for (int iter = 0; iter < 10; iter++) {
                    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
                    final String info = sender + "_" + iter;
                    // capture now
                    final DiscoveryNode node = nodeB;
                    try {
                        serviceA.sendRequest(node, "test", new TestRequest(info), new ActionListenerResponseHandler<>(listener, TestResponse::new));
                        try {
                            listener.actionGet();
                        } catch (ConnectTransportException e) {
                        // ok!
                        } catch (Exception e) {
                            logger.error((Supplier<?>) () -> new ParameterizedMessage("caught exception while sending to node {}", node), e);
                            sendingErrors.add(e);
                        }
                    } catch (NodeNotConnectedException ex) {
                    // ok
                    }
                }
            }

            @Override
            public void onAfter() {
                done.countDown();
            }
        });
    }
    go.await();
    for (int i = 0; i <= 10; i++) {
        if (i % 3 == 0) {
            // simulate restart of nodeB
            serviceB.close();
            MockTransportService newService = buildService("TS_B_" + i, version1, null);
            newService.registerRequestHandler("test", TestRequest::new, ThreadPool.Names.SAME, ignoringRequestHandler);
            serviceB = newService;
            nodeB = newService.getLocalDiscoNode();
            serviceB.connectToNode(nodeA);
            serviceA.connectToNode(nodeB);
        } else if (serviceA.nodeConnected(nodeB)) {
            serviceA.disconnectFromNode(nodeB);
        } else {
            serviceA.connectToNode(nodeB);
        }
    }
    done.await();
    assertThat("found non connection errors while sending", sendingErrors, empty());
    assertThat("found non connection errors while responding", responseErrors, empty());
}
Also used : ElasticsearchException(org.elasticsearch.ElasticsearchException) StreamOutput(org.elasticsearch.common.io.stream.StreamOutput) Arrays(java.util.Arrays) BigArrays(org.elasticsearch.common.util.BigArrays) ConcurrentCollections(org.elasticsearch.common.util.concurrent.ConcurrentCollections) InetAddress(java.net.InetAddress) ServerSocket(java.net.ServerSocket) Settings(org.elasticsearch.common.settings.Settings) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) After(org.junit.After) Map(java.util.Map) ThreadPool(org.elasticsearch.threadpool.ThreadPool) CyclicBarrier(java.util.concurrent.CyclicBarrier) Matchers.notNullValue(org.hamcrest.Matchers.notNullValue) PlainActionFuture(org.elasticsearch.action.support.PlainActionFuture) Set(java.util.Set) InetSocketAddress(java.net.InetSocketAddress) Matchers.startsWith(org.hamcrest.Matchers.startsWith) UncheckedIOException(java.io.UncheckedIOException) Matchers.instanceOf(org.hamcrest.Matchers.instanceOf) CountDownLatch(java.util.concurrent.CountDownLatch) AbstractRunnable(org.elasticsearch.common.util.concurrent.AbstractRunnable) List(java.util.List) Version(org.elasticsearch.Version) TransportAddress(org.elasticsearch.common.transport.TransportAddress) Supplier(org.apache.logging.log4j.util.Supplier) Matchers.equalTo(org.hamcrest.Matchers.equalTo) Socket(java.net.Socket) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) ParameterizedMessage(org.apache.logging.log4j.message.ParameterizedMessage) AtomicReference(java.util.concurrent.atomic.AtomicReference) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) DiscoveryNode(org.elasticsearch.cluster.node.DiscoveryNode) NetworkService(org.elasticsearch.common.network.NetworkService) ActionListenerResponseHandler(org.elasticsearch.action.ActionListenerResponseHandler) NoneCircuitBreakerService(org.elasticsearch.indices.breaker.NoneCircuitBreakerService) NamedWriteableRegistry(org.elasticsearch.common.io.stream.NamedWriteableRegistry) TimeValue(org.elasticsearch.common.unit.TimeValue) Node(org.elasticsearch.node.Node) ESTestCase(org.elasticsearch.test.ESTestCase) MockTransportService(org.elasticsearch.test.transport.MockTransportService) Before(org.junit.Before) Collections.emptyMap(java.util.Collections.emptyMap) TestThreadPool(org.elasticsearch.threadpool.TestThreadPool) Matchers.empty(org.hamcrest.Matchers.empty) Collections.emptySet(java.util.Collections.emptySet) Semaphore(java.util.concurrent.Semaphore) IOUtils(org.apache.lucene.util.IOUtils) IOException(java.io.IOException) BrokenBarrierException(java.util.concurrent.BrokenBarrierException) MockServerSocket(org.elasticsearch.mocksocket.MockServerSocket) VersionUtils(org.elasticsearch.test.VersionUtils) CollectionUtil(org.apache.lucene.util.CollectionUtil) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) ExceptionsHelper(org.elasticsearch.ExceptionsHelper) ClusterSettings(org.elasticsearch.common.settings.ClusterSettings) Constants(org.apache.lucene.util.Constants) StreamInput(org.elasticsearch.common.io.stream.StreamInput) Collections(java.util.Collections) AbstractRunnable(org.elasticsearch.common.util.concurrent.AbstractRunnable) DiscoveryNode(org.elasticsearch.cluster.node.DiscoveryNode) MockTransportService(org.elasticsearch.test.transport.MockTransportService) CountDownLatch(java.util.concurrent.CountDownLatch) ElasticsearchException(org.elasticsearch.ElasticsearchException) UncheckedIOException(java.io.UncheckedIOException) IOException(java.io.IOException) BrokenBarrierException(java.util.concurrent.BrokenBarrierException) ExecutionException(java.util.concurrent.ExecutionException) CyclicBarrier(java.util.concurrent.CyclicBarrier) PlainActionFuture(org.elasticsearch.action.support.PlainActionFuture) ActionListenerResponseHandler(org.elasticsearch.action.ActionListenerResponseHandler) ParameterizedMessage(org.apache.logging.log4j.message.ParameterizedMessage)

Example 5 with ParameterizedMessage

use of org.apache.logging.log4j.message.ParameterizedMessage in project elasticsearch by elastic.

the class NodeJoinControllerTests method joinNodeAsync.

private SimpleFuture joinNodeAsync(final DiscoveryNode node) throws InterruptedException {
    final SimpleFuture future = new SimpleFuture("join of " + node + " (id [" + joinId.incrementAndGet() + "]");
    logger.debug("starting {}", future);
    // clone the node before submitting to simulate an incoming join, which is guaranteed to have a new
    // disco node object serialized off the network
    nodeJoinController.handleJoinRequest(cloneNode(node), new MembershipAction.JoinCallback() {

        @Override
        public void onSuccess() {
            logger.debug("{} completed", future);
            future.markAsDone();
        }

        @Override
        public void onFailure(Exception e) {
            logger.error((Supplier<?>) () -> new ParameterizedMessage("unexpected error for {}", future), e);
            future.markAsFailed(e);
        }
    });
    return future;
}
Also used : Supplier(org.apache.logging.log4j.util.Supplier) ParameterizedMessage(org.apache.logging.log4j.message.ParameterizedMessage) NotMasterException(org.elasticsearch.cluster.NotMasterException) BrokenBarrierException(java.util.concurrent.BrokenBarrierException) ExecutionException(java.util.concurrent.ExecutionException)

Aggregations

ParameterizedMessage (org.apache.logging.log4j.message.ParameterizedMessage): 131
Supplier (org.apache.logging.log4j.util.Supplier): 90
IOException (java.io.IOException): 75
ElasticsearchException (org.elasticsearch.ElasticsearchException): 38
ArrayList (java.util.ArrayList): 28
DiscoveryNode (org.elasticsearch.cluster.node.DiscoveryNode): 26
ClusterState (org.elasticsearch.cluster.ClusterState): 25
HashMap (java.util.HashMap): 16
TimeValue (org.elasticsearch.common.unit.TimeValue): 14
TransportException (org.elasticsearch.transport.TransportException): 14
List (java.util.List): 13
Supplier (java.util.function.Supplier): 13
Map (java.util.Map): 12
CountDownLatch (java.util.concurrent.CountDownLatch): 12
ExecutionException (java.util.concurrent.ExecutionException): 12
Settings (org.elasticsearch.common.settings.Settings): 12
EsRejectedExecutionException (org.elasticsearch.common.util.concurrent.EsRejectedExecutionException): 12
AbstractRunnable (org.elasticsearch.common.util.concurrent.AbstractRunnable): 11
Index (org.elasticsearch.index.Index): 11
CopyOnWriteArrayList (java.util.concurrent.CopyOnWriteArrayList): 10