Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpHost in project elasticsearch by elastic.
The class ElasticsearchHostsSniffer, method readHost.
private static HttpHost readHost(String nodeId, JsonParser parser, Scheme scheme) throws IOException {
    HttpHost httpHost = null;
    String fieldName = null;
    while (parser.nextToken() != JsonToken.END_OBJECT) {
        if (parser.getCurrentToken() == JsonToken.FIELD_NAME) {
            fieldName = parser.getCurrentName();
        } else if (parser.getCurrentToken() == JsonToken.START_OBJECT) {
            if ("http".equals(fieldName)) {
                while (parser.nextToken() != JsonToken.END_OBJECT) {
                    if (parser.getCurrentToken() == JsonToken.VALUE_STRING && "publish_address".equals(parser.getCurrentName())) {
                        URI boundAddressAsURI = URI.create(scheme + "://" + parser.getValueAsString());
                        httpHost = new HttpHost(boundAddressAsURI.getHost(), boundAddressAsURI.getPort(), boundAddressAsURI.getScheme());
                    } else if (parser.getCurrentToken() == JsonToken.START_OBJECT) {
                        parser.skipChildren();
                    }
                }
            } else {
                parser.skipChildren();
            }
        }
    }
    // http section is not present if http is not enabled on the node, ignore such nodes
    if (httpHost == null) {
        logger.debug("skipping node [" + nodeId + "] with http disabled");
        return null;
    }
    return httpHost;
}
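To see the publish_address handling in isolation, here is a minimal standalone sketch. The class name PublishAddressExample and the hard-coded sample address are illustrative, not part of the original source; it converts a publish_address value into an HttpHost the same way readHost does, by prepending the scheme and letting java.net.URI split host and port.

import java.net.URI;

import org.graylog.shaded.elasticsearch7.org.apache.http.HttpHost;

public class PublishAddressExample {
    public static void main(String[] args) {
        // value as it appears under nodes.<id>.http.publish_address in a _nodes response (sample only)
        String publishAddress = "127.0.0.1:9200";
        String scheme = "http";
        // same technique as readHost: prepend the scheme so URI can split host and port
        URI uri = URI.create(scheme + "://" + publishAddress);
        HttpHost host = new HttpHost(uri.getHost(), uri.getPort(), uri.getScheme());
        System.out.println(host); // prints http://127.0.0.1:9200
    }
}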
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpHost in project elasticsearch by elastic.
The class ElasticsearchHostsSniffer, method readHosts.
private List<HttpHost> readHosts(HttpEntity entity) throws IOException {
    try (InputStream inputStream = entity.getContent()) {
        JsonParser parser = jsonFactory.createParser(inputStream);
        if (parser.nextToken() != JsonToken.START_OBJECT) {
            throw new IOException("expected data to start with an object");
        }
        List<HttpHost> hosts = new ArrayList<>();
        while (parser.nextToken() != JsonToken.END_OBJECT) {
            if (parser.getCurrentToken() == JsonToken.START_OBJECT) {
                if ("nodes".equals(parser.getCurrentName())) {
                    while (parser.nextToken() != JsonToken.END_OBJECT) {
                        JsonToken token = parser.nextToken();
                        assert token == JsonToken.START_OBJECT;
                        String nodeId = parser.getCurrentName();
                        HttpHost sniffedHost = readHost(nodeId, parser, this.scheme);
                        if (sniffedHost != null) {
                            logger.trace("adding node [" + nodeId + "]");
                            hosts.add(sniffedHost);
                        }
                    }
                } else {
                    parser.skipChildren();
                }
            }
        }
        return hosts;
    }
}
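readHosts walks the streaming parser over the body of a GET _nodes/http response. The simplified sketch below performs the same extraction against a hard-coded sample document using Jackson's tree model (ObjectMapper from jackson-databind, which the real class does not depend on; it uses only the streaming jackson-core API shown above), so the expected response shape is easier to see. Class and variable names are illustrative.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ReadHostsTreeExample {
    public static void main(String[] args) throws Exception {
        // minimal example of the _nodes/http response shape that readHosts() traverses
        String json = "{\"nodes\":{\"node-1\":{\"http\":{\"publish_address\":\"127.0.0.1:9200\"}},"
                + "\"node-2\":{}}}";
        ObjectMapper mapper = new ObjectMapper();
        JsonNode nodes = mapper.readTree(json).path("nodes");
        List<String> addresses = new ArrayList<>();
        for (Iterator<Map.Entry<String, JsonNode>> it = nodes.fields(); it.hasNext(); ) {
            Map.Entry<String, JsonNode> node = it.next();
            JsonNode publishAddress = node.getValue().path("http").path("publish_address");
            if (publishAddress.isTextual()) { // nodes without an http section are skipped, as in readHost
                addresses.add(publishAddress.asText());
            }
        }
        System.out.println(addresses); // prints [127.0.0.1:9200]
    }
}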
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpHost in project elasticsearch by elastic.
The class ElasticsearchHostsSnifferTests, method testConstructorValidation.
public void testConstructorValidation() throws IOException {
    try {
        new ElasticsearchHostsSniffer(null, 1, ElasticsearchHostsSniffer.Scheme.HTTP);
        fail("should have failed");
    } catch (NullPointerException e) {
        assertEquals("restClient cannot be null", e.getMessage());
    }
    HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort());
    try (RestClient restClient = RestClient.builder(httpHost).build()) {
        try {
            new ElasticsearchHostsSniffer(restClient, 1, null);
            fail("should have failed");
        } catch (NullPointerException e) {
            assertEquals(e.getMessage(), "scheme cannot be null");
        }
        try {
            new ElasticsearchHostsSniffer(restClient, RandomNumbers.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0), ElasticsearchHostsSniffer.Scheme.HTTP);
            fail("should have failed");
        } catch (IllegalArgumentException e) {
            assertEquals(e.getMessage(), "sniffRequestTimeoutMillis must be greater than 0");
        }
    }
}
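For contrast with the failure cases above, a valid construction needs a non-null RestClient, a positive timeout in milliseconds, and a non-null scheme. The sketch below is illustrative: the host, port, and the 1000 ms timeout are example values, and the unshaded package names are assumptions (under Graylog's shading the Apache HTTP classes live beneath org.graylog.shaded.elasticsearch7 instead).

import java.util.List;

// Illustrative, unshaded imports.
import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.sniff.ElasticsearchHostsSniffer;

public class ValidSnifferConstruction {
    public static void main(String[] args) throws Exception {
        try (RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
            // a positive timeout (milliseconds) and a non-null scheme satisfy the constructor checks
            ElasticsearchHostsSniffer sniffer =
                    new ElasticsearchHostsSniffer(restClient, 1000, ElasticsearchHostsSniffer.Scheme.HTTP);
            // requires a reachable node; issues GET _nodes/http against the cluster
            List<HttpHost> hosts = sniffer.sniffHosts();
            System.out.println(hosts);
        }
    }
}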
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpHost in project elasticsearch by elastic.
The class SnifferBuilderTests, method testBuild.
public void testBuild() throws Exception {
    int numNodes = RandomNumbers.randomIntBetween(getRandom(), 1, 5);
    HttpHost[] hosts = new HttpHost[numNodes];
    for (int i = 0; i < numNodes; i++) {
        hosts[i] = new HttpHost("localhost", 9200 + i);
    }
    try (RestClient client = RestClient.builder(hosts).build()) {
        try {
            Sniffer.builder(null).build();
            fail("should have failed");
        } catch (NullPointerException e) {
            assertEquals("restClient cannot be null", e.getMessage());
        }
        try {
            Sniffer.builder(client).setSniffIntervalMillis(RandomNumbers.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0));
            fail("should have failed");
        } catch (IllegalArgumentException e) {
            assertEquals("sniffIntervalMillis must be greater than 0", e.getMessage());
        }
        try {
            Sniffer.builder(client).setSniffAfterFailureDelayMillis(RandomNumbers.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0));
            fail("should have failed");
        } catch (IllegalArgumentException e) {
            assertEquals("sniffAfterFailureDelayMillis must be greater than 0", e.getMessage());
        }
        try {
            Sniffer.builder(client).setHostsSniffer(null);
            fail("should have failed");
        } catch (NullPointerException e) {
            assertEquals("hostsSniffer cannot be null", e.getMessage());
        }
        try (Sniffer sniffer = Sniffer.builder(client).build()) {
            assertNotNull(sniffer);
        }
        SnifferBuilder builder = Sniffer.builder(client);
        if (getRandom().nextBoolean()) {
            builder.setSniffIntervalMillis(RandomNumbers.randomIntBetween(getRandom(), 1, Integer.MAX_VALUE));
        }
        if (getRandom().nextBoolean()) {
            builder.setSniffAfterFailureDelayMillis(RandomNumbers.randomIntBetween(getRandom(), 1, Integer.MAX_VALUE));
        }
        if (getRandom().nextBoolean()) {
            builder.setHostsSniffer(new MockHostsSniffer());
        }
        try (Sniffer sniffer = builder.build()) {
            assertNotNull(sniffer);
        }
    }
}
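Beyond the validation paths that testBuild exercises, a common way to wire the builder is sketched below: attach a Sniffer to an existing RestClient with an explicit sniff interval and close the sniffer before the client. The host, port, and 60-second interval are illustrative values, and the unshaded package names are assumptions about the classpath.

import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.sniff.Sniffer;

public class SnifferBuilderExample {
    public static void main(String[] args) throws Exception {
        RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build();
        // re-sniff the cluster every 60 seconds; the default hosts sniffer is used when none is set
        Sniffer sniffer = Sniffer.builder(client)
                .setSniffIntervalMillis(60_000)
                .build();
        try {
            // ... use the client; the sniffer refreshes its host list in the background ...
        } finally {
            sniffer.close(); // close the sniffer first, then the client it depends on
            client.close();
        }
    }
}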
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpHost in project elasticsearch by elastic.
The class ResponseExceptionTests, method testResponseException.
public void testResponseException() throws IOException {
    ProtocolVersion protocolVersion = new ProtocolVersion("http", 1, 1);
    StatusLine statusLine = new BasicStatusLine(protocolVersion, 500, "Internal Server Error");
    HttpResponse httpResponse = new BasicHttpResponse(statusLine);
    String responseBody = "{\"error\":{\"root_cause\": {}}}";
    boolean hasBody = getRandom().nextBoolean();
    if (hasBody) {
        HttpEntity entity;
        if (getRandom().nextBoolean()) {
            entity = new StringEntity(responseBody, ContentType.APPLICATION_JSON);
        } else {
            // test a non repeatable entity
            entity = new InputStreamEntity(new ByteArrayInputStream(responseBody.getBytes(StandardCharsets.UTF_8)), ContentType.APPLICATION_JSON);
        }
        httpResponse.setEntity(entity);
    }
    RequestLine requestLine = new BasicRequestLine("GET", "/", protocolVersion);
    HttpHost httpHost = new HttpHost("localhost", 9200);
    Response response = new Response(requestLine, httpHost, httpResponse);
    ResponseException responseException = new ResponseException(response);
    assertSame(response, responseException.getResponse());
    if (hasBody) {
        assertEquals(responseBody, EntityUtils.toString(responseException.getResponse().getEntity()));
    } else {
        assertNull(responseException.getResponse().getEntity());
    }
    String message = response.getRequestLine().getMethod() + " " + response.getHost() + response.getRequestLine().getUri() + ": " + response.getStatusLine().toString();
    if (hasBody) {
        message += "\n" + responseBody;
    }
    assertEquals(message, responseException.getMessage());
}
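In application code the same fields are normally read from a caught exception rather than assembled by hand as the test does. The sketch below assumes the Request-based performRequest API of the low-level REST client (older client versions take the method and endpoint as separate string arguments); the endpoint, class name, and unshaded package names are illustrative.

import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;

public class ResponseExceptionHandling {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
            try {
                client.performRequest(new Request("GET", "/_does_not_exist"));
            } catch (ResponseException e) {
                Response response = e.getResponse();
                int status = response.getStatusLine().getStatusCode();
                // the entity may be null when the server sent no body, as the test above also covers
                String body = response.getEntity() == null
                        ? ""
                        : EntityUtils.toString(response.getEntity());
                System.err.println("request failed with status " + status + ": " + body);
            }
        }
    }
}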