Use of org.apache.druid.java.util.http.client.Request in project hive by apache.
The class DruidStorageHandler, method fetchKafkaIngestionSpec.
private KafkaSupervisorSpec fetchKafkaIngestionSpec(Table table) {
  // Fetch the Kafka supervisor spec for this datasource from the Druid Overlord.
  final String overlordAddress = Preconditions.checkNotNull(
      HiveConf.getVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_OVERLORD_DEFAULT_ADDRESS),
      "Druid Overlord Address is null");
  String dataSourceName = Preconditions.checkNotNull(
      DruidStorageHandlerUtils.getTableProperty(table, Constants.DRUID_DATA_SOURCE),
      "Druid Datasource name is null");
  try {
    StringFullResponseHolder response = RetryUtils.retry(
        () -> DruidStorageHandlerUtils.getResponseFromCurrentLeader(
            getHttpClient(),
            new Request(HttpMethod.GET,
                new URL(String.format("http://%s/druid/indexer/v1/supervisor/%s", overlordAddress, dataSourceName))),
            new StringFullResponseHandler(Charset.forName("UTF-8"))),
        input -> input instanceof IOException,
        getMaxRetryCount());
    if (response.getStatus().equals(HttpResponseStatus.OK)) {
      return JSON_MAPPER.readValue(response.getContent(), KafkaSupervisorSpec.class);
      // Druid returns 400 Bad Request when the supervisor is not found.
    } else if (response.getStatus().equals(HttpResponseStatus.NOT_FOUND)
        || response.getStatus().equals(HttpResponseStatus.BAD_REQUEST)) {
      LOG.debug("No Kafka Supervisor found for datasource[%s]", dataSourceName);
      return null;
    } else {
      throw new IOException(String.format(
          "Unable to fetch Kafka Ingestion Spec from Druid status [%d] full response [%s]",
          response.getStatus().getCode(), response.getContent()));
    }
  } catch (Exception e) {
    throw new RuntimeException("Exception while fetching kafka ingestion spec from druid", e);
  }
}
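
The core pattern here, a GET issued through Druid's Request and retried only on IOException, can be separated from the Hive-specific plumbing. Below is a minimal sketch, assuming an org.apache.druid.java.util.http.client.HttpClient instance is available; fetchWithRetry and its parameters are illustrative names, not part of either project.

import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import org.apache.druid.java.util.common.RetryUtils;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.Request;
import org.apache.druid.java.util.http.client.response.StringFullResponseHandler;
import org.apache.druid.java.util.http.client.response.StringFullResponseHolder;
import org.jboss.netty.handler.codec.http.HttpMethod;

public class FetchWithRetryExample {
  // Hypothetical helper: issue a GET and retry only when the failure is an IOException,
  // mirroring the retry predicate used in fetchKafkaIngestionSpec above.
  static StringFullResponseHolder fetchWithRetry(HttpClient client, URL url, int maxTries) throws Exception {
    return RetryUtils.retry(
        () -> client.go(new Request(HttpMethod.GET, url), new StringFullResponseHandler(StandardCharsets.UTF_8)).get(),
        t -> t instanceof IOException,
        maxTries);
  }
}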
Use of org.apache.druid.java.util.http.client.Request in project druid by druid-io.
The class HttpShuffleClient, method fetchSegmentFile.
@Override
public File fetchSegmentFile(File partitionDir, String supervisorTaskId, GenericPartitionLocation location) throws IOException {
  // Create a local buffer since this class is not thread-safe.
  // Note that this method can be called by different threads at the same time with ThreadingTaskRunner.
  final byte[] buffer = new byte[BUFFER_SIZE];
  final File zippedFile = new File(partitionDir, StringUtils.format("temp_%s", location.getSubTaskId()));
  final URI uri = location.toIntermediaryDataServerURI(supervisorTaskId);
  FileUtils.copyLarge(
      uri,
      u -> {
        try {
          return httpClient.go(new Request(HttpMethod.GET, u.toURL()), new InputStreamResponseHandler()).get();
        } catch (InterruptedException | ExecutionException e) {
          throw new IOException(e);
        }
      },
      zippedFile,
      buffer,
      t -> t instanceof IOException,
      NUM_FETCH_RETRIES,
      StringUtils.format("Failed to fetch file[%s]", uri));
  final File unzippedDir = new File(partitionDir, StringUtils.format("unzipped_%s", location.getSubTaskId()));
  try {
    FileUtils.mkdirp(unzippedDir);
    CompressionUtils.unzip(zippedFile, unzippedDir);
  } finally {
    if (!zippedFile.delete()) {
      LOG.warn("Failed to delete temp file[%s]", zippedFile);
    }
  }
  return unzippedDir;
}
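
Druid's FileUtils.copyLarge adds buffering and retries around the fetch; when neither is needed, the same InputStreamResponseHandler can feed plain java.nio directly. A minimal sketch, again assuming an HttpClient instance; downloadToFile is a hypothetical helper, not part of HttpShuffleClient.

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.concurrent.ExecutionException;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.Request;
import org.apache.druid.java.util.http.client.response.InputStreamResponseHandler;
import org.jboss.netty.handler.codec.http.HttpMethod;

public class DownloadExample {
  // Hypothetical helper: stream the body of a GET response straight into destFile.
  static void downloadToFile(HttpClient client, URL url, File destFile) throws IOException {
    try (InputStream in = client.go(new Request(HttpMethod.GET, url), new InputStreamResponseHandler()).get()) {
      Files.copy(in, destFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
    } catch (InterruptedException | ExecutionException e) {
      // Same convention as fetchSegmentFile: surface async failures as IOException.
      throw new IOException(e);
    }
  }
}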
Use of org.apache.druid.java.util.http.client.Request in project druid by druid-io.
The class CoordinatorResourceTestClient, method initializeLookups.
public Map<String, Object> initializeLookups(String filePath) throws Exception {
  String url = StringUtils.format("%slookups/config", getCoordinatorURL());
  // First POST an empty map to initialize the lookup config.
  StatusResponseHolder response = httpClient.go(
      new Request(HttpMethod.POST, new URL(url))
          .setContent("application/json", jsonMapper.writeValueAsBytes(ImmutableMap.of())),
      responseHandler).get();
  if (!response.getStatus().equals(HttpResponseStatus.ACCEPTED)) {
    throw new ISE("Error while querying[%s] status[%s] content[%s]", url, response.getStatus(), response.getContent());
  }
  // Then POST the lookup spec read from the classpath resource.
  StatusResponseHolder response2 = httpClient.go(
      new Request(HttpMethod.POST, new URL(url))
          .setContent("application/json", jsonMapper.writeValueAsBytes(
              jsonMapper.readValue(
                  CoordinatorResourceTestClient.class.getResourceAsStream(filePath),
                  new TypeReference<Map<Object, Object>>() {
                  }))),
      responseHandler).get();
  if (!response2.getStatus().equals(HttpResponseStatus.ACCEPTED)) {
    throw new ISE("Error while querying[%s] status[%s] content[%s]", url, response2.getStatus(), response2.getContent());
  }
  // Parse the lookup state from the second response.
  Map<String, Object> results2 = jsonMapper.readValue(response2.getContent(), new TypeReference<Map<String, Object>>() {
  });
  return results2;
}
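
The two POSTs above differ only in their payload, so the request-and-check step can be factored into a helper. A minimal sketch assuming an HttpClient; postJson is a hypothetical name, and StatusResponseHandler.getInstance() is the accessor in recent Druid releases (older ones construct the handler with a Charset).

import java.net.URL;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.Request;
import org.apache.druid.java.util.http.client.response.StatusResponseHandler;
import org.apache.druid.java.util.http.client.response.StatusResponseHolder;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;

public class PostJsonExample {
  // Hypothetical helper: POST a JSON body and fail unless the server answers with the expected status.
  static StatusResponseHolder postJson(HttpClient client, String url, byte[] jsonBody, HttpResponseStatus expected) throws Exception {
    StatusResponseHolder response = client.go(
        new Request(HttpMethod.POST, new URL(url)).setContent("application/json", jsonBody),
        StatusResponseHandler.getInstance()).get();
    if (!response.getStatus().equals(expected)) {
      throw new ISE("Error while querying[%s] status[%s] content[%s]", url, response.getStatus(), response.getContent());
    }
    return response;
  }
}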
Use of org.apache.druid.java.util.http.client.Request in project druid by druid-io.
The class CoordinatorResourceTestClient, method postLoadRules.
public void postLoadRules(String datasourceName, List<Rule> rules) throws Exception {
  String url = StringUtils.format("%srules/%s", getCoordinatorURL(), datasourceName);
  StatusResponseHolder response = httpClient.go(
      new Request(HttpMethod.POST, new URL(url))
          .setContent("application/json", jsonMapper.writeValueAsBytes(rules)),
      responseHandler).get();
  if (!response.getStatus().equals(HttpResponseStatus.OK)) {
    throw new ISE("Error while setting dynamic config[%s] status[%s] content[%s]", url, response.getStatus(), response.getContent());
  }
}
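
Request is a fluent builder: setContent, setHeader, and similar setters return the Request itself, so a POST like the one above can be assembled in a single chain. A minimal sketch; buildRulesPost and the Accept header are illustrative additions, not part of CoordinatorResourceTestClient.

import java.net.MalformedURLException;
import java.net.URL;
import org.apache.druid.java.util.http.client.Request;
import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.jboss.netty.handler.codec.http.HttpMethod;

public class RequestBuilderExample {
  static Request buildRulesPost(String coordinatorUrl, String datasource, byte[] rulesJson) throws MalformedURLException {
    // setContent and setHeader both return the Request, so the calls chain.
    return new Request(HttpMethod.POST, new URL(coordinatorUrl + "rules/" + datasource))
        .setContent("application/json", rulesJson)
        .setHeader(HttpHeaders.Names.ACCEPT, "application/json");
  }
}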
Use of org.apache.druid.java.util.http.client.Request in project druid by druid-io.
The class CoordinatorResourceTestClient, method getLookupLoadStatus.
@Nullable
private Map<String, Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>>> getLookupLoadStatus() {
  String url = StringUtils.format("%slookups/nodeStatus", getCoordinatorURL());
  Map<String, Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>>> status;
  try {
    StatusResponseHolder response = httpClient.go(new Request(HttpMethod.GET, new URL(url)), responseHandler).get();
    if (response.getStatus().getCode() == HttpResponseStatus.NOT_FOUND.getCode()) {
      return null;
    }
    if (response.getStatus().getCode() != HttpResponseStatus.OK.getCode()) {
      throw new ISE("Error while making request to url[%s] status[%s] content[%s]", url, response.getStatus(), response.getContent());
    }
    status = jsonMapper.readValue(
        response.getContent(),
        new TypeReference<Map<String, Map<HostAndPort, LookupsState<LookupExtractorFactoryMapContainer>>>>() {
        });
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
  return status;
}
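
Since getLookupLoadStatus returns null until the Coordinator reports lookup state, callers are expected to poll it (Druid's integration tests wrap such checks in a retry utility). Below is a minimal, generic sketch of that polling loop; pollUntilPresent and its limits are hypothetical.

import java.util.function.Supplier;

public class PollExample {
  // Hypothetical helper: poll a status supplier until it returns non-null or attempts run out.
  static <T> T pollUntilPresent(Supplier<T> statusCheck, int maxAttempts, long sleepMillis) throws InterruptedException {
    for (int i = 0; i < maxAttempts; i++) {
      T status = statusCheck.get();
      if (status != null) {
        return status;
      }
      Thread.sleep(sleepMillis);
    }
    throw new IllegalStateException("Status still absent after " + maxAttempts + " attempts");
  }
}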