Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponse in the project openhab1-addons by openhab — class IhcHttpsClient, method sendQ.
/**
 * POSTs the given XML query to the controller over the shared HTTPS client and
 * returns the response body as a string.
 *
 * @param query   XML payload to send
 * @param timeout socket (read) timeout in milliseconds for this request
 * @return the response entity rendered as a string
 * @throws IOException on any transport-level failure
 */
private String sendQ(String query, int timeout) throws ClientProtocolException, IOException, NoHttpResponseException {
    logger.trace("Send query (timeout={}): {}", timeout, query);

    // Attach the payload and declare its content type.
    postReq.setEntity(new StringEntity(query, "UTF-8"));
    postReq.addHeader("content-type", "text/xml");

    // Per-request configuration: fixed connect timeout, caller-supplied read timeout.
    final RequestConfig requestConfig = RequestConfig.custom()
            .setConnectTimeout(connectTimeout)
            .setSocketTimeout(timeout)
            .build();
    postReq.setConfig(requestConfig);

    // Execute the POST within the connection pool's shared HTTP context.
    final HttpResponse httpResponse =
            client.execute(postReq, IhcConnectionPool.getInstance().getHttpContext());

    final String body = EntityUtils.toString(httpResponse.getEntity());
    logger.trace("Received response: {}", body);
    return body;
}
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponse in the project Xponents by OpenSextant — class DefaultWebCrawl, method collectItemsOnPage.
/**
 * Internal method for parsing and harvesting from a single page, then crawling
 * deeper if instructed to do so. Each hyperlink found on the page is filtered,
 * de-duplicated against the {@code found}/{@code saved} caches, downloaded to
 * the archive, and (for web pages within the depth limit) crawled recursively.
 *
 * @param pageContent raw HTML of the page being processed
 * @param url url for HTML
 * @param site top level url for site
 */
protected void collectItemsOnPage(String pageContent, URL url, URL site) {
// 1. Extract all hyperlinks from the raw HTML.
Collection<HyperLink> items = parseContentPage(pageContent, url, site);
/* 2. Collect items on this page, applying the scope/user filters in order.
 *
 */
for (HyperLink l : items) {
// Built-in filter (e.g. unwanted schemes or file types).
if (filterOut(l)) {
continue;
}
if (this.isAllowCurrentSiteOnly() && !(l.isCurrentSite() || l.isCurrentHost())) {
// Page represented by link, l, is on another website.
log.debug("Not on current site: {}", l);
continue;
}
if (this.isAllowCurrentDirOnly() && !l.isCurrentPage()) {
// Page represented by link, l, is on another directory on same or site.
log.debug("Not on current directory: {}", l);
continue;
}
/* TODO: fix "key", as it represents not just path, but unique URLs
 * different URLs with same path would collide.
 * TODO: in general fix the ability to crawl off requested site.
 * If that is really needed, this is not the crawling capability you want.
 *
 */
String key = l.getNormalPath();
if (key == null) {
// Fall back to the absolute URL when no normalized path is available.
key = l.getAbsoluteURL();
}
if (found.containsKey(key)) {
// We already did this.
continue;
}
if (userFilteredOut(key)) {
// We don't want to do this.
log.debug("Filtered Out by User: {}", key);
continue;
}
// Record the link before attempting download so retries within this crawl are suppressed.
found.put(key, l);
if (saved.contains(l.getId())) {
// Already downloaded in a previous pass; ignore.
continue;
}
// Download artifacts (files and web pages only).
if (l.isFile() || l.isWebPage()) {
// Throttle between requests to be polite to the remote server.
pause();
log.info("Pulling page {}", l);
try {
// Ask the (optional) collection listener whether this item was collected before.
try {
if (listener != null && listener.exists(l.getId())) {
// You already collected this. So it will be ignored.
continue;
}
} catch (Exception err1) {
// Listener failure: skip the item rather than risk double-collecting.
log.error("Collection Listener error", err1);
continue;
}
// create URL for link and download artifact.
HttpResponse itemPage = getPage(l.getURL());
// the relative path.
if (itemPage.getStatusLine().getStatusCode() >= 400) {
// Track failed URLs so they can be reported; do not retry here.
this.errorPages.add(l.getAbsoluteURL());
log.error("Failing on this request, HTTP status>=400, LINK={}", l.getURL());
continue;
}
/*
 * Identify the correct type of file this item is, from HTTP headers & MIME, not just the link
 */
Header contentType = itemPage.getEntity().getContentType();
if (contentType != null) {
l.setMIMEType(contentType.getValue());
}
/*
 * Create a non-trivial path for the item.
 *
 */
String fpath = l.getNormalPath();
if (l.isDynamic()) {
// Dynamic pages get an .html suffix so the saved artifact has a sensible extension.
if (!fpath.endsWith(".html")) {
fpath = fpath + ".html";
}
}
File itemSaved = createArchiveFile(fpath, false);
// Ensure the parent directory for the archive file exists.
File dir = new File(itemSaved.getParentFile().getAbsolutePath());
FileUtility.makeDirectory(dir);
l.setFilepath(itemSaved);
// CACHE the identity of this URL so later passes skip it.
saved.add(l.getId());
// Stream the response entity to disk, then run content conversion on it.
WebClient.downloadFile(itemPage.getEntity(), itemSaved.getAbsolutePath());
convertContent(itemSaved, l);
// Recurse into web pages while within the crawl depth limit.
if (l.isWebPage() && depth <= MAX_DEPTH) {
collectItems(l.getAbsoluteURL(), site);
}
} catch (Exception fileErr) {
// Best-effort crawl: log and move on to the next link.
log.error("Item for URL {} was not saved due to a net or IO issue.", l.getAbsoluteURL(), fileErr);
}
}
}
// Unwind one level of crawl depth (incremented by the caller — presumably collectItems; verify).
--depth;
}
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponse in the project Xponents by OpenSextant — class WebClient, method getPage.
/**
 * Fetches the given URL using the configured HTTP client; works for sites that
 * require NTLM authentication.
 *
 * @param siteURL
 *            URL
 * @return response for the URL
 * @throws IOException
 *             if the URL has invalid syntax or the page returns HTTP 404
 */
public HttpResponse getPage(URL siteURL) throws IOException {
    HttpClient httpClient = getClient();
    HttpGet getRequest = new HttpGet();

    // Only toURI() can raise URISyntaxException; wrap it so callers deal
    // solely with IOException.
    try {
        getRequest.setURI(siteURL.toURI());
    } catch (URISyntaxException badUri) {
        throw new IOException(badUri);
    }

    HttpResponse response = httpClient.execute(getRequest);
    if (response.getStatusLine().getStatusCode() == 404) {
        // NOTE(review): the response entity is not consumed before throwing,
        // which can leak the pooled connection — consider EntityUtils.consumeQuietly.
        throw new IOException("HTTP Page " + siteURL + " not found");
    }
    return response;
}
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponse in the project android_frameworks_base by ResurrectionRemix — class AbstractProxyTest, method testConnectToHttps.
/**
 * Verifies a direct HTTPS request: the client trusts the test certificate,
 * fetches /foo from the mock TLS server, and the body round-trips intact.
 */
public void testConnectToHttps() throws Exception {
    TestSSLContext testSSLContext = TestSSLContext.create();

    // Serve a single canned response over TLS.
    server.useHttps(testSSLContext.serverContext.getSocketFactory(), false);
    server.enqueue(new MockResponse().setResponseCode(200).setBody("this response comes via HTTPS"));
    server.play();

    // Build a client that accepts the test certificate for any host name.
    HttpClient client = newHttpClient();
    SSLSocketFactory socketFactory = newSslSocketFactory(testSSLContext);
    socketFactory.setHostnameVerifier(new AllowAllHostnameVerifier());
    client.getConnectionManager().getSchemeRegistry()
            .register(new Scheme("https", socketFactory, server.getPort()));

    HttpResponse response = client.execute(
            new HttpGet("https://localhost:" + server.getPort() + "/foo"));
    assertEquals("this response comes via HTTPS", contentToString(response));

    RecordedRequest recorded = server.takeRequest();
    assertEquals("GET /foo HTTP/1.1", recorded.getRequestLine());
}
Use of org.graylog.shaded.elasticsearch7.org.apache.http.HttpResponse in the project android_frameworks_base by ResurrectionRemix — class AbstractProxyTest, method testConnectViaHttpProxyToHttps.
/**
 * Verifies HTTPS-via-proxy: the client must send a CONNECT to the proxy, then
 * issue the real GET over the TLS-upgraded tunnel.
 */
private void testConnectViaHttpProxyToHttps(ProxyConfig proxyConfig) throws Exception {
    TestSSLContext testSSLContext = TestSSLContext.create();

    // Mock server first accepts the CONNECT (then upgrades the socket to SSL),
    // then answers the tunnelled GET.
    server.useHttps(testSSLContext.serverContext.getSocketFactory(), true);
    server.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.UPGRADE_TO_SSL_AT_END).clearHeaders());
    server.enqueue(new MockResponse().setResponseCode(200).setBody("this response comes via a secure proxy"));
    server.play();

    // Client trusting the test certificate, routed through the proxy under test.
    HttpClient proxyClient = newHttpClient();
    SSLSocketFactory socketFactory = newSslSocketFactory(testSSLContext);
    socketFactory.setHostnameVerifier(new AllowAllHostnameVerifier());
    proxyClient.getConnectionManager().getSchemeRegistry()
            .register(new Scheme("https", socketFactory, 443));

    HttpGet get = new HttpGet("https://android.com/foo");
    proxyConfig.configure(server, proxyClient, get);

    HttpResponse response = proxyClient.execute(get);
    assertEquals("this response comes via a secure proxy", contentToString(response));

    // First recorded request: the CONNECT handshake with the proxy.
    RecordedRequest connectRequest = server.takeRequest();
    assertEquals("Connect line failure on proxy " + proxyConfig,
            "CONNECT android.com:443 HTTP/1.1", connectRequest.getRequestLine());
    assertContains(connectRequest.getHeaders(), "Host: android.com");

    // Second recorded request: the GET carried over the tunnel.
    RecordedRequest getRequest = server.takeRequest();
    assertEquals("GET /foo HTTP/1.1", getRequest.getRequestLine());
    assertContains(getRequest.getHeaders(), "Host: android.com");
}
Aggregations