Use of org.apache.http.client.methods.HttpHead in project zm-mailbox by Zimbra.
The class TritonIncomingBlob, method getRemoteSize:
@Override
protected long getRemoteSize() throws IOException {
    outStream.flush();
    HttpClient client = ZimbraHttpConnectionManager.getInternalHttpConnMgr().newHttpClient().build();
    HttpHead head = new HttpHead(baseUrl + uploadUrl);
    ZimbraLog.store.info("heading %s", head.getURI());
    try {
        head.addHeader(TritonHeaders.SERVER_TOKEN, serverToken.getToken());
        HttpResponse httpResp = HttpClientUtil.executeMethod(client, head);
        int statusCode = httpResp.getStatusLine().getStatusCode();
        if (statusCode == HttpStatus.SC_OK) {
            String contentLength = httpResp.getFirstHeader(TritonHeaders.CONTENT_LENGTH).getValue();
            long remoteSize = -1;
            try {
                remoteSize = Long.valueOf(contentLength);
            } catch (NumberFormatException nfe) {
                throw new IOException("Content length can't be parsed to Long", nfe);
            }
            return remoteSize;
        } else {
            ZimbraLog.store.error("failed with code %d response: %s", statusCode, EntityUtils.toString(httpResp.getEntity()));
            throw new IOException("unable to head blob " + statusCode + ":" + httpResp.getStatusLine().getReasonPhrase(), null);
        }
    } catch (HttpException e) {
        throw new IOException("unexpected error during getremotesize() operation: " + e.getMessage());
    } finally {
        head.releaseConnection();
    }
}
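Stripped of the Zimbra-specific helpers (ZimbraHttpConnectionManager, HttpClientUtil, TritonHeaders), the pattern here is simply a HEAD request whose Content-Length response header is parsed as the remote size. A minimal standalone sketch of that pattern, assuming a stock Apache HttpClient 4.x default client and an illustrative class name (RemoteSizeSketch is not zm-mailbox code):

import java.io.IOException;

import org.apache.http.Header;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class RemoteSizeSketch {

    /** Returns the Content-Length reported by a HEAD request, or throws if it is missing or unparsable. */
    public static long remoteSize(String url) throws IOException {
        try (CloseableHttpClient client = HttpClients.createDefault();
                CloseableHttpResponse response = client.execute(new HttpHead(url))) {
            if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
                throw new IOException("HEAD failed: " + response.getStatusLine());
            }
            Header contentLength = response.getFirstHeader("Content-Length");
            if (contentLength == null) {
                throw new IOException("No Content-Length header in HEAD response");
            }
            try {
                return Long.parseLong(contentLength.getValue());
            } catch (NumberFormatException nfe) {
                throw new IOException("Content-Length is not a number", nfe);
            }
        }
    }
}

Unlike the Triton version, this sketch carries no server token and no pooled connection manager; it only illustrates the HEAD/Content-Length handshake.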
Use of org.apache.http.client.methods.HttpHead in project ats-framework by Axway.
The class HttpClient, method head:
/**
 * Invoke the endpoint URL using an HTTP HEAD.
 *
 * @return The response
 * @throws HttpException
 */
@PublicAtsApi
public HttpResponse head() throws HttpException {
    final URI uri = constructURI();
    HttpHead method = new HttpHead(uri);
    log.info("We will run a HEAD request from " + uri);
    return execute(method);
}
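constructURI() and execute() are ATS-framework internals, so the method above is essentially a thin wrapper: build an HttpHead from a URI and hand it to the shared request executor. A rough standalone equivalent with a plain Apache client (the class and method names below are illustrative, not part of the ATS API):

import java.net.URI;

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class HeadRequestSketch {

    /** Issues a bare HTTP HEAD against the given URI and returns the status code. */
    public static int head(URI uri) throws Exception {
        HttpHead method = new HttpHead(uri);
        try (CloseableHttpClient client = HttpClients.createDefault();
                CloseableHttpResponse response = client.execute(method)) {
            // A HEAD response carries headers only, so there is no entity to consume here.
            return response.getStatusLine().getStatusCode();
        }
    }

    public static void main(String[] args) throws Exception {
        System.out.println(head(new URI("https://example.com/")));
    }
}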
Use of org.apache.http.client.methods.HttpHead in project syncany by syncany.
The class ApplicationLink, method resolveLink:
private String resolveLink(String httpApplicationLink, int redirectCount) throws IllegalArgumentException, StorageException {
    if (redirectCount >= LINK_HTTP_MAX_REDIRECT_COUNT) {
        throw new IllegalArgumentException("Max. redirect count of " + LINK_HTTP_MAX_REDIRECT_COUNT + " for URL reached. Cannot find syncany:// link.");
    }
    try {
        logger.log(Level.INFO, "- Retrieving HTTP HEAD for " + httpApplicationLink + " ...");
        HttpHead headMethod = new HttpHead(httpApplicationLink);
        HttpResponse httpResponse = createHttpClient().execute(headMethod);
        // Find syncany:// link
        Header locationHeader = httpResponse.getLastHeader("Location");
        if (locationHeader == null) {
            throw new Exception("Link does not redirect to a syncany:// link.");
        }
        String locationHeaderUrl = locationHeader.getValue();
        Matcher locationHeaderMatcher = LINK_PATTERN.matcher(locationHeaderUrl);
        boolean isApplicationLink = locationHeaderMatcher.find();
        if (isApplicationLink) {
            String applicationLink = locationHeaderMatcher.group(0);
            logger.log(Level.INFO, "Resolved application link is: " + applicationLink);
            return applicationLink;
        } else {
            return resolveLink(locationHeaderUrl, ++redirectCount);
        }
    } catch (StorageException | IllegalArgumentException e) {
        throw e;
    } catch (Exception e) {
        throw new StorageException(e.getMessage(), e);
    }
}
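createHttpClient() is syncany project code; for getLastHeader("Location") to see the redirect target, automatic redirect handling must not swallow the 3xx response. A hedged sketch of that one detail with HttpClientBuilder, assuming redirect handling is explicitly disabled (the helper below is illustrative, not syncany's actual client setup):

import org.apache.http.Header;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class LocationHeaderSketch {

    /** Issues a HEAD without following redirects and returns the raw Location header value, or null. */
    public static String locationOf(String url) throws Exception {
        try (CloseableHttpClient client = HttpClients.custom()
                .disableRedirectHandling()   // keep the 3xx response so the Location header stays visible
                .build();
                CloseableHttpResponse response = client.execute(new HttpHead(url))) {
            Header location = response.getLastHeader("Location");
            return location != null ? location.getValue() : null;
        }
    }
}

Feeding that value back into the same method, as resolveLink does, walks the redirect chain one hop at a time until the syncany:// pattern matches or the redirect limit is hit.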
Use of org.apache.http.client.methods.HttpHead in project ddf by codice.
The class HttpSolrClientFactory, method createSolrCore:
public static void createSolrCore(String url, String coreName, String configFileName, CloseableHttpClient httpClient) throws IOException, SolrServerException {
    try (CloseableHttpClient closeableHttpClient = httpClient;
            HttpSolrClient client = (httpClient != null ? new HttpSolrClient.Builder(url).withHttpClient(closeableHttpClient).build() : new HttpSolrClient.Builder(url).build())) {
        HttpResponse ping = client.getHttpClient().execute(new HttpHead(url));
        if (ping != null && ping.getStatusLine().getStatusCode() == 200) {
            ConfigurationFileProxy configProxy = new ConfigurationFileProxy(ConfigurationStore.getInstance());
            configProxy.writeSolrConfiguration(coreName);
            if (!solrCoreExists(client, coreName)) {
                LOGGER.debug("Solr({}): Creating Solr core", coreName);
                String configFile = StringUtils.defaultIfBlank(configFileName, DEFAULT_SOLRCONFIG_XML);
                String solrDir;
                if (AccessController.doPrivileged((PrivilegedAction<Boolean>) () -> System.getProperty(SOLR_DATA_DIR) != null)) {
                    solrDir = AccessController.doPrivileged((PrivilegedAction<String>) () -> System.getProperty(SOLR_DATA_DIR));
                } else {
                    solrDir = Paths.get(AccessController.doPrivileged((PrivilegedAction<String>) () -> System.getProperty("karaf.home")), "data", "solr").toString();
                }
                String instanceDir = Paths.get(solrDir, coreName).toString();
                String dataDir = Paths.get(instanceDir, "data").toString();
                CoreAdminRequest.createCore(coreName, instanceDir, client, configFile, DEFAULT_SCHEMA_XML, dataDir, dataDir);
            } else {
                LOGGER.debug("Solr({}): Solr core already exists; reloading it", coreName);
                CoreAdminRequest.reloadCore(coreName, client);
            }
        } else {
            LOGGER.debug("Solr({}): Unable to ping Solr core at {}", coreName, url);
            throw new SolrServerException("Unable to ping Solr core");
        }
    }
}
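The core-creation machinery (ConfigurationFileProxy, CoreAdminRequest, the privileged property lookups) is DDF/Solr specific; the HttpHead itself serves only as a cheap reachability probe before any admin request is sent. A reduced sketch of just that probe, assuming a default Apache client rather than the one borrowed from HttpSolrClient (SolrPingSketch is an illustrative name):

import java.io.IOException;

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class SolrPingSketch {

    /** Returns true if a HEAD against the Solr base URL answers with HTTP 200. */
    public static boolean isReachable(String solrUrl) {
        try (CloseableHttpClient client = HttpClients.createDefault();
                CloseableHttpResponse response = client.execute(new HttpHead(solrUrl))) {
            return response.getStatusLine().getStatusCode() == 200;
        } catch (IOException e) {
            // Treat any transport failure as "not reachable" rather than propagating it.
            return false;
        }
    }
}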
Use of org.apache.http.client.methods.HttpHead in project PlayerHater by chrisrhoden.
The class PlaylistParser, method parsePlaylist:
public static Uri[] parsePlaylist(Uri uri) {
    try {
        HttpClient httpclient = new DefaultHttpClient();
        HttpResponse response = httpclient.execute(new HttpHead(uri.toString()));
        Header contentType = response.getEntity().getContentType();
        if (contentType != null) {
            String mimeType = contentType.getValue().split(";")[0].trim();
            for (String plsMimeType : PLS_MIME_TYPES) {
                if (plsMimeType.equalsIgnoreCase(mimeType)) {
                    return parsePls(uri);
                }
            }
            for (String m3uMimeType : M3U_MIME_TYPES) {
                if (m3uMimeType.equalsIgnoreCase(mimeType)) {
                    return parseM3u(uri);
                }
            }
        }
    } catch (Exception e) {
    }
    return new Uri[] { uri };
}
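PlayerHater uses the legacy DefaultHttpClient and reads the Content-Type through response.getEntity(), which can be null for a HEAD response since HEAD carries no body. A hedged variation of the same MIME-sniffing idea that reads the Content-Type response header directly with HttpClient 4.x (MimeSniffSketch and the header-based lookup are assumptions, not PlayerHater code):

import org.apache.http.Header;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class MimeSniffSketch {

    /** Returns the bare MIME type of the resource (e.g. "audio/x-scpls"), or null if none is reported. */
    public static String mimeTypeOf(String url) throws Exception {
        try (CloseableHttpClient client = HttpClients.createDefault();
                CloseableHttpResponse response = client.execute(new HttpHead(url))) {
            // Read the Content-Type response header directly instead of going through the entity,
            // which may be absent on a HEAD response.
            Header contentType = response.getFirstHeader("Content-Type");
            if (contentType == null) {
                return null;
            }
            // Drop any charset parameter, e.g. "audio/x-mpegurl; charset=UTF-8" -> "audio/x-mpegurl".
            return contentType.getValue().split(";")[0].trim();
        }
    }
}

The caller can then compare the returned type against its PLS and M3U MIME-type lists, mirroring the two loops in parsePlaylist above.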