use of org.apache.http.client.methods.HttpGet in project pinot by linkedin.
Example from the class SegmentPushControllerAPIs, method getAllSegments.
/**
 * Fetches the segment names for the given table from the controller's segments endpoint.
 *
 * @param tablename table whose segments are listed (URL-encoded into the request path)
 * @param segmentName unused in this method; kept for interface compatibility with callers
 * @return segment names with any leading path component stripped
 * @throws IOException if the HTTP call fails
 * @throws IllegalStateException if the controller responds with a non-200 status
 */
private List<String> getAllSegments(String tablename, String segmentName) throws IOException {
List<String> allSegments = new ArrayList<>();
HttpClient controllerClient = new DefaultHttpClient();
HttpGet req = new HttpGet(SEGMENTS_ENDPOINT + URLEncoder.encode(tablename, UTF_8));
HttpResponse res = controllerClient.execute(controllerHttpHost, req);
try {
if (res.getStatusLine().getStatusCode() != 200) {
throw new IllegalStateException(res.getStatusLine().toString());
}
InputStream content = res.getEntity().getContent();
// Specify the charset explicitly; the single-argument IOUtils.toString overload
// is deprecated and decodes with the platform default charset.
String response = IOUtils.toString(content, UTF_8);
List<String> allSegmentsPaths = getSegmentsFromResponse(response);
for (String segment : allSegmentsPaths) {
// Responses may contain full paths; keep only the part after the last '/'.
allSegments.add(segment.substring(segment.lastIndexOf("/") + 1));
}
LOGGER.info("All segments : {}", allSegments);
} finally {
// Release the connection even when the status check throws.
if (res.getEntity() != null) {
EntityUtils.consume(res.getEntity());
}
}
return allSegments;
}
use of org.apache.http.client.methods.HttpGet in project pinot by linkedin.
Example from the class AutoLoadPinotMetricsUtils, method getAllTablesFromPinot.
/**
 * Retrieves the list of table names from the Pinot controller's tables endpoint.
 *
 * @return the {@code "tables"} node of the controller's JSON response, or {@code null}
 *     if the response body could not be read or parsed
 * @throws IOException if the HTTP request itself fails
 * @throws IllegalStateException if the controller responds with a non-200 status
 */
public JsonNode getAllTablesFromPinot() throws IOException {
HttpGet tablesReq = new HttpGet(PINOT_TABLES_ENDPOINT);
LOG.info("Retrieving datasets: {}", tablesReq);
CloseableHttpResponse tablesRes = pinotControllerClient.execute(pinotControllerHost, tablesReq);
JsonNode tables = null;
try {
if (tablesRes.getStatusLine().getStatusCode() != 200) {
// Intentionally propagated: the catch below is narrowed to IOException so
// this IllegalStateException is no longer swallowed (the previous broad
// catch (Exception) caught it immediately, defeating the status check).
throw new IllegalStateException(tablesRes.getStatusLine().toString());
}
InputStream tablesContent = tablesRes.getEntity().getContent();
tables = new ObjectMapper().readTree(tablesContent).get("tables");
} catch (IOException e) {
// Best-effort: a malformed/unreadable body yields a null return, as before.
LOG.error("Exception in loading collections", e);
} finally {
if (tablesRes.getEntity() != null) {
EntityUtils.consume(tablesRes.getEntity());
}
tablesRes.close();
}
return tables;
}
use of org.apache.http.client.methods.HttpGet in project pinot by linkedin.
Example from the class AutoLoadPinotMetricsUtils, method getSchemaFromTableConfig.
/**
 * Looks up the Pinot schema for {@code dataset} via the table-config schema endpoint.
 *
 * @param dataset dataset/table name (URL-encoded into the request)
 * @return the parsed {@link Schema}, or {@code null} if it was not found or could not be read
 * @throws IOException if the HTTP request itself fails
 */
private Schema getSchemaFromTableConfig(String dataset) throws IOException {
Schema schema = null;
HttpGet schemaReq = new HttpGet(String.format(PINOT_SCHEMA_ENDPOINT_TEMPLATE, URLEncoder.encode(dataset, UTF_8)));
LOG.info("Retrieving schema: {}", schemaReq);
CloseableHttpResponse schemaRes = pinotControllerClient.execute(pinotControllerHost, schemaReq);
try {
if (schemaRes.getStatusLine().getStatusCode() != 200) {
// A missing schema is tolerated; the caller receives null.
LOG.error("Schema {} not found, {}", dataset, schemaRes.getStatusLine().toString());
} else {
InputStream schemaContent = schemaRes.getEntity().getContent();
schema = new org.codehaus.jackson.map.ObjectMapper().readValue(schemaContent, Schema.class);
}
} catch (Exception e) {
// Pass the exception as the last argument so the failure cause is logged
// (it was previously dropped entirely, hiding the reason for the skip).
LOG.error("Exception in retrieving schema collections, skipping {}", dataset, e);
} finally {
if (schemaRes.getEntity() != null) {
EntityUtils.consume(schemaRes.getEntity());
}
schemaRes.close();
}
return schema;
}
use of org.apache.http.client.methods.HttpGet in project pinot by linkedin.
Example from the class AutoLoadPinotMetricsUtils, method getSchemaFromSchemaEndpoint.
/**
 * Looks up the Pinot schema for {@code dataset} via the dedicated schema endpoint.
 *
 * @param dataset dataset/table name (URL-encoded into the request)
 * @return the parsed {@link Schema}, or {@code null} if it was not found or could not be read
 * @throws IOException if the HTTP request itself fails
 */
private Schema getSchemaFromSchemaEndpoint(String dataset) throws IOException {
Schema schema = null;
HttpGet schemaReq = new HttpGet(String.format(PINOT_SCHEMA_ENDPOINT, URLEncoder.encode(dataset, UTF_8)));
LOG.info("Retrieving schema: {}", schemaReq);
CloseableHttpResponse schemaRes = pinotControllerClient.execute(pinotControllerHost, schemaReq);
try {
if (schemaRes.getStatusLine().getStatusCode() != 200) {
// A missing schema is tolerated; the caller receives null.
LOG.error("Schema {} not found, {}", dataset, schemaRes.getStatusLine().toString());
} else {
InputStream schemaContent = schemaRes.getEntity().getContent();
schema = new org.codehaus.jackson.map.ObjectMapper().readValue(schemaContent, Schema.class);
}
} catch (Exception e) {
// Pass the exception as the last argument so the failure cause is logged
// (it was previously dropped entirely, hiding the reason for the skip).
LOG.error("Exception in retrieving schema collections, skipping {}", dataset, e);
} finally {
if (schemaRes.getEntity() != null) {
EntityUtils.consume(schemaRes.getEntity());
}
schemaRes.close();
}
return schema;
}
use of org.apache.http.client.methods.HttpGet in project pinot by linkedin.
Example from the class FetchMetricDataAndExistingAnomaliesTool, method fetchMetric.
/**
 * Fetch a metric's time series from thirdeye's timeseries endpoint.
 *
 * @param host host name (includes http://)
 * @param port port number
 * @param dataset dataset/collection name
 * @param metric metric name
 * @param startTime start time of requested data in DateTime
 * @param endTime end time of requested data in DateTime
 * @param timeGranularity the time granularity
 * @param dimensions the list of dimensions; may be null or empty, in which case it is omitted
 * @param filterJson filters, in JSON; may be null or empty, in which case it is omitted
 * @param timezone timezone ID used to interpret start/end times
 * @return {dimension -> {timestamp millis -> value}}, or {@code null} if the
 *     response could not be parsed as JSON
 * @throws IOException if the HTTP request fails
 */
public Map<String, Map<Long, String>> fetchMetric(String host, int port, String dataset, String metric, DateTime startTime, DateTime endTime, TimeGranularity timeGranularity, String dimensions, String filterJson, String timezone) throws IOException {
HttpClient client = HttpClientBuilder.create().build();
DateTimeZone dateTimeZone = DateTimeZone.forID(timezone);
startTime = new DateTime(startTime, dateTimeZone);
endTime = new DateTime(endTime, dateTimeZone);
// format http GET command
StringBuilder urlBuilder = new StringBuilder(host + ":" + port + DEFAULT_PATH_TO_TIMESERIES);
urlBuilder.append(DATASET + EQUALS + dataset + AND);
urlBuilder.append(METRIC + EQUALS + metric + AND);
urlBuilder.append(VIEW + EQUALS + DEFAULT_VIEW + AND);
urlBuilder.append(TIME_START + EQUALS + Long.toString(startTime.getMillis()) + AND);
urlBuilder.append(TIME_END + EQUALS + Long.toString(endTime.getMillis()) + AND);
urlBuilder.append(GRANULARITY + EQUALS + timeGranularity.toString() + AND);
// BUGFIX: these checks previously used "!= null || !isEmpty()", which
// short-circuits to an NPE whenever the argument is null. "&&" is the intent:
// append the parameter only when it is present and non-empty.
if (dimensions != null && !dimensions.isEmpty()) {
urlBuilder.append(DIMENSIONS + EQUALS + dimensions + AND);
}
if (filterJson != null && !filterJson.isEmpty()) {
urlBuilder.append(FILTERS + EQUALS + URLEncoder.encode(filterJson, "UTF-8"));
}
HttpGet httpGet = new HttpGet(urlBuilder.toString());
// Execute GET command
httpGet.addHeader("User-Agent", "User");
HttpResponse response = client.execute(httpGet);
LOG.info("Response Code : {}", response.getStatusLine().getStatusCode());
StringBuilder content = new StringBuilder();
// try-with-resources so the reader (and underlying entity stream) is always
// closed; decode explicitly as UTF-8 instead of the platform default.
try (BufferedReader rd = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), "UTF-8"))) {
String line;
while ((line = rd.readLine()) != null) {
content.append(line);
}
}
Map<String, Map<Long, String>> resultMap = null;
try {
JSONObject jsonObject = new JSONObject(content.toString());
JSONObject timeSeriesData = (JSONObject) jsonObject.get("timeSeriesData");
JSONArray timeArray = (JSONArray) timeSeriesData.get("time");
resultMap = new HashMap<>();
Iterator<String> timeSeriesDataIterator = timeSeriesData.keys();
while (timeSeriesDataIterator.hasNext()) {
String key = timeSeriesDataIterator.next();
// "time" is the shared timestamp axis, not a dimension series.
if (key.equalsIgnoreCase("time")) {
continue;
}
Map<Long, String> entry = new HashMap<>();
JSONArray observed = (JSONArray) timeSeriesData.get(key);
for (int i = 0; i < timeArray.length(); i++) {
long timestamp = (long) timeArray.get(i);
String observedValue = observed.get(i).toString();
entry.put(timestamp, observedValue);
}
resultMap.put(key, entry);
}
} catch (JSONException e) {
// Throwable goes last with no placeholder so SLF4J logs the stack trace
// (a "{}" consumed by the throwable previously lost it).
LOG.error("Unable to resolve JSON string", e);
}
return resultMap;
}
Aggregations