Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponse in project pinot by linkedin.
From the class SegmentPushControllerAPIs, method deleteSegment.
/**
 * Sends a drop request for the given segment to the Pinot controller and verifies removal.
 *
 * @param tablename   name of the table the segment belongs to
 * @param segmentName name of the segment to drop
 * @return {@code true} iff the controller answered HTTP 200 and
 *         {@link #isDeleteSuccessful} confirms the segment is gone
 * @throws IOException on any transport error while talking to the controller
 */
private boolean deleteSegment(String tablename, String segmentName) throws IOException {
    boolean deleteSuccessful = false;
    HttpClient controllerClient = new DefaultHttpClient();
    HttpGet req = new HttpGet(TABLES_ENDPOINT + URLEncoder.encode(tablename, UTF_8)
        + "/" + SEGMENTS_ENDPOINT + URLEncoder.encode(segmentName, UTF_8) + DROP_PARAMETERS);
    HttpResponse res = controllerClient.execute(controllerHttpHost, req);
    try {
        // Treat a null response, a missing/ non-200 status line, or an unconfirmed
        // delete all as failure; the caller is expected to retry.
        if (res == null || res.getStatusLine() == null
            || res.getStatusLine().getStatusCode() != 200
            || !isDeleteSuccessful(tablename, segmentName)) {
            LOGGER.info("Exception in deleting segment, trying again {}", res);
        } else {
            deleteSuccessful = true;
        }
    } finally {
        // BUGFIX: the success check above tolerates res == null, but the original
        // cleanup dereferenced res unconditionally and would NPE in that exact case.
        if (res != null && res.getEntity() != null) {
            // Fully consume the entity so the underlying connection can be reused.
            EntityUtils.consume(res.getEntity());
        }
    }
    return deleteSuccessful;
}
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponse in project pinot by linkedin.
From the class SegmentPushControllerAPIs, method getAllSegments.
/**
 * Fetches the list of segment names for a table from the controller's segments endpoint.
 *
 * @param tablename   table whose segments are listed
 * @param segmentName unused here; kept for signature compatibility with callers
 * @return the last path component of every segment path returned by the controller
 * @throws IOException on any transport error while talking to the controller
 */
private List<String> getAllSegments(String tablename, String segmentName) throws IOException {
    List<String> allSegments = new ArrayList<>();
    HttpClient controllerClient = new DefaultHttpClient();
    HttpGet req = new HttpGet(SEGMENTS_ENDPOINT + URLEncoder.encode(tablename, UTF_8));
    HttpResponse res = controllerClient.execute(controllerHttpHost, req);
    try {
        if (res.getStatusLine().getStatusCode() != 200) {
            throw new IllegalStateException(res.getStatusLine().toString());
        }
        // BUGFIX: the original called res.getEntity().getContent() unguarded, yet the
        // finally block acknowledges the entity may be null — guard before dereferencing.
        if (res.getEntity() == null) {
            throw new IllegalStateException(res.getStatusLine().toString());
        }
        InputStream content = res.getEntity().getContent();
        String response = IOUtils.toString(content);
        List<String> allSegmentsPaths = getSegmentsFromResponse(response);
        for (String segment : allSegmentsPaths) {
            // Keep only the last path component (the segment name itself).
            allSegments.add(segment.substring(segment.lastIndexOf("/") + 1));
        }
        LOGGER.info("All segments : {}", allSegments);
    } finally {
        if (res.getEntity() != null) {
            // Fully consume the entity so the underlying connection can be reused.
            EntityUtils.consume(res.getEntity());
        }
    }
    return allSegments;
}
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponse in project pinot by linkedin.
From the class AbstractResourceHttpUtils, method callJobEndpoint.
/**
 * Executes the given request against the configured resource host and returns the
 * response body as a string. Throws if the endpoint does not answer HTTP 200.
 *
 * @param req the request to execute
 * @return the response body
 * @throws IOException           on any transport error
 * @throws IllegalStateException if the status code is not 200
 */
protected String callJobEndpoint(HttpRequest req) throws IOException {
    HttpClient client = new DefaultHttpClient();
    HttpResponse httpResponse = client.execute(resourceHttpHost, req);
    String body = null;
    try {
        int statusCode = httpResponse.getStatusLine().getStatusCode();
        if (statusCode != 200) {
            throw new IllegalStateException(httpResponse.getStatusLine().toString());
        }
        InputStream payload = httpResponse.getEntity().getContent();
        body = IOUtils.toString(payload);
    } finally {
        // Drain the entity (if any) so the connection can be returned to the pool.
        if (httpResponse.getEntity() != null) {
            EntityUtils.consume(httpResponse.getEntity());
        }
    }
    return body;
}
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponse in project pinot by linkedin.
From the class FetchMetricDataAndExistingAnomaliesTool, method fetchMetric.
/**
 * Fetch metric from thirdeye.
 * @param host host name (includes http://)
 * @param port port number
 * @param dataset dataset/collection name
 * @param metric metric name
 * @param startTime start time of requested data in DateTime
 * @param endTime end time of requested data in DateTime
 * @param timeGranularity the time granularity
 * @param dimensions the list of dimensions (may be null or empty; omitted from the query if so)
 * @param filterJson filters, in JSON (may be null or empty; omitted from the query if so)
 * @param timezone timezone id used to reinterpret start/end times
 * @return {dimension -> {timestamp millis -> value}}, or null if the response JSON
 *         could not be parsed
 * @throws IOException on any transport error
 */
public Map<String, Map<Long, String>> fetchMetric(String host, int port, String dataset, String metric, DateTime startTime, DateTime endTime, TimeGranularity timeGranularity, String dimensions, String filterJson, String timezone) throws IOException {
    HttpClient client = HttpClientBuilder.create().build();
    DateTimeZone dateTimeZone = DateTimeZone.forID(timezone);
    startTime = new DateTime(startTime, dateTimeZone);
    endTime = new DateTime(endTime, dateTimeZone);
    // format http GET command
    StringBuilder urlBuilder = new StringBuilder(host + ":" + port + DEFAULT_PATH_TO_TIMESERIES);
    urlBuilder.append(DATASET + EQUALS + dataset + AND);
    urlBuilder.append(METRIC + EQUALS + metric + AND);
    urlBuilder.append(VIEW + EQUALS + DEFAULT_VIEW + AND);
    urlBuilder.append(TIME_START + EQUALS + Long.toString(startTime.getMillis()) + AND);
    urlBuilder.append(TIME_END + EQUALS + Long.toString(endTime.getMillis()) + AND);
    urlBuilder.append(GRANULARITY + EQUALS + timeGranularity.toString() + AND);
    // BUGFIX: the original used "||" in both guards — "dimensions != null ||
    // !dimensions.isEmpty()" throws NPE precisely when dimensions is null.
    // "&&" expresses the clearly intended "present and non-empty" check.
    if (dimensions != null && !dimensions.isEmpty()) {
        urlBuilder.append(DIMENSIONS + EQUALS + dimensions + AND);
    }
    if (filterJson != null && !filterJson.isEmpty()) {
        urlBuilder.append(FILTERS + EQUALS + URLEncoder.encode(filterJson, "UTF-8"));
    }
    HttpGet httpGet = new HttpGet(urlBuilder.toString());
    // Execute GET command
    httpGet.addHeader("User-Agent", "User");
    HttpResponse response = client.execute(httpGet);
    LOG.info("Response Code : {}", response.getStatusLine().getStatusCode());
    // StringBuilder instead of the original StringBuffer: single-threaded use,
    // no need for synchronization. Reader closed via try-with-resources.
    StringBuilder content = new StringBuilder();
    try (BufferedReader rd = new BufferedReader(new InputStreamReader(response.getEntity().getContent()))) {
        String line;
        while ((line = rd.readLine()) != null) {
            content.append(line);
        }
    }
    Map<String, Map<Long, String>> resultMap = null;
    try {
        JSONObject jsonObject = new JSONObject(content.toString());
        JSONObject timeSeriesData = (JSONObject) jsonObject.get("timeSeriesData");
        JSONArray timeArray = (JSONArray) timeSeriesData.get("time");
        resultMap = new HashMap<>();
        Iterator<String> timeSeriesDataIterator = timeSeriesData.keys();
        while (timeSeriesDataIterator.hasNext()) {
            String key = timeSeriesDataIterator.next();
            // "time" is the shared timestamp axis, not a dimension series.
            if (key.equalsIgnoreCase("time")) {
                continue;
            }
            Map<Long, String> entry = new HashMap<>();
            JSONArray observed = (JSONArray) timeSeriesData.get(key);
            for (int i = 0; i < timeArray.length(); i++) {
                long timestamp = (long) timeArray.get(i);
                String observedValue = observed.get(i).toString();
                entry.put(timestamp, observedValue);
            }
            resultMap.put(key, entry);
        }
    } catch (JSONException e) {
        // BUGFIX: passing the exception through a "{}" placeholder consumed it as a
        // formatting argument and discarded the stack trace; pass it as the throwable.
        LOG.error("Unable to resolve JSON string", e);
    }
    return resultMap;
}
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponse in project hadoop by apache.
From the class WebAppProxyServlet, method proxyLink.
/**
 * Download link and have it be the response.
 * @param req the http request
 * @param resp the http response
 * @param link the link to download
 * @param c the cookie to set if any
 * @param proxyHost the proxy host
 * @param method the http method
 * @throws IOException on any error.
 */
private static void proxyLink(final HttpServletRequest req, final HttpServletResponse resp, final URI link, final Cookie c, final String proxyHost, final HTTP method) throws IOException {
DefaultHttpClient client = new DefaultHttpClient();
// Lenient cookie handling and circular redirects: the proxied AM web apps are
// not guaranteed to be strictly spec-compliant.
client.getParams().setParameter(ClientPNames.COOKIE_POLICY, CookiePolicy.BROWSER_COMPATIBILITY).setBooleanParameter(ClientPNames.ALLOW_CIRCULAR_REDIRECTS, true);
// Make sure we send the request from the proxy address in the config
// since that is what the AM filter checks against. IP aliasing or
// similar could cause issues otherwise.
InetAddress localAddress = InetAddress.getByName(proxyHost);
if (LOG.isDebugEnabled()) {
LOG.debug("local InetAddress for proxy host: {}", localAddress);
}
client.getParams().setParameter(ConnRoutePNames.LOCAL_ADDRESS, localAddress);
// Build the outbound request: GET is forwarded as-is; PUT re-reads the
// incoming servlet body and replays it; anything else is rejected.
HttpRequestBase base = null;
if (method.equals(HTTP.GET)) {
base = new HttpGet(link);
} else if (method.equals(HTTP.PUT)) {
base = new HttpPut(link);
StringBuilder sb = new StringBuilder();
BufferedReader reader = new BufferedReader(new InputStreamReader(req.getInputStream(), "UTF-8"));
String line;
while ((line = reader.readLine()) != null) {
sb.append(line);
}
((HttpPut) base).setEntity(new StringEntity(sb.toString()));
} else {
resp.setStatus(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
return;
}
// Copy only whitelisted request headers through to the target; everything
// else (e.g. hop-by-hop headers) is deliberately dropped.
@SuppressWarnings("unchecked") Enumeration<String> names = req.getHeaderNames();
while (names.hasMoreElements()) {
String name = names.nextElement();
if (PASS_THROUGH_HEADERS.contains(name)) {
String value = req.getHeader(name);
if (LOG.isDebugEnabled()) {
LOG.debug("REQ HEADER: {} : {}", name, value);
}
base.setHeader(name, value);
}
}
// Propagate the authenticated user identity to the AM via a cookie so the
// AM-side filter can recognize the original caller.
String user = req.getRemoteUser();
if (user != null && !user.isEmpty()) {
base.setHeader("Cookie", PROXY_USER_COOKIE_NAME + "=" + URLEncoder.encode(user, "ASCII"));
}
OutputStream out = resp.getOutputStream();
try {
HttpResponse httpResp = client.execute(base);
// Mirror status and headers from the target onto our response.
// NOTE(review): setHeader overwrites on duplicate names, so repeated
// headers (e.g. multiple Set-Cookie) collapse to the last one — confirm
// this is acceptable for the proxied apps.
resp.setStatus(httpResp.getStatusLine().getStatusCode());
for (Header header : httpResp.getAllHeaders()) {
resp.setHeader(header.getName(), header.getValue());
}
if (c != null) {
resp.addCookie(c);
}
// Stream the body through; copyBytes with close=true closes both streams.
InputStream in = httpResp.getEntity().getContent();
if (in != null) {
IOUtils.copyBytes(in, out, 4096, true);
}
} finally {
// Always release the connection, even when execute/copy throws.
base.releaseConnection();
}
}
Aggregations