Use of java.net.HttpURLConnection in project hadoop by apache.
The class ByteRangeInputStream, method openInputStream.
@VisibleForTesting
protected InputStreamAndFileLength openInputStream(long startOffset)
    throws IOException {
  if (startOffset < 0) {
    throw new EOFException("Negative Position");
  }
  // Use the original url if no resolved url exists, e.g. if
  // it's the first time a request is made.
  final boolean resolved = resolvedURL.getURL() != null;
  final URLOpener opener = resolved ? resolvedURL : originalURL;
  final HttpURLConnection connection = opener.connect(startOffset, resolved);
  resolvedURL.setURL(getResolvedUrl(connection));
  InputStream in = connection.getInputStream();
  final Long length;
  final Map<String, List<String>> headers = connection.getHeaderFields();
  if (isChunkedTransferEncoding(headers)) {
    // file length is not known
    length = null;
  } else {
    // for non-chunked transfer-encoding, get content-length
    final String cl = connection.getHeaderField(HttpHeaders.CONTENT_LENGTH);
    if (cl == null) {
      throw new IOException(
          HttpHeaders.CONTENT_LENGTH + " is missing: " + headers);
    }
    final long streamlength = Long.parseLong(cl);
    length = startOffset + streamlength;
    // Java has a bug with >2GB request streams. It won't bounds check
    // the reads so the transfer blocks until the server times out.
    in = new BoundedInputStream(in, streamlength);
  }
  return new InputStreamAndFileLength(length, in);
}
Use of java.net.HttpURLConnection in project hadoop by apache.
The class TestWebHdfsCreatePermissions, method testPermissions.
private void testPermissions(int expectedResponse, String expectedPermission,
    String path, String... params) throws Exception {
  final String user = System.getProperty("user.name");
  final StringBuilder uri = new StringBuilder(cluster.getHttpUri(0));
  uri.append("/webhdfs/v1").append(path)
      .append("?user.name=").append(user).append("&");
  for (String param : params) {
    uri.append(param).append("&");
  }
  LOG.info(uri.toString());
  try {
    URL url = new URL(uri.toString());
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    Assert.assertEquals(expectedResponse, conn.getResponseCode());
    NamenodeProtocols namenode = cluster.getNameNode().getRpcServer();
    FsPermission resultingPermission = namenode.getFileInfo(path).getPermission();
    Assert.assertEquals(expectedPermission, resultingPermission.toString());
  } finally {
    cluster.shutdown();
  }
}
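A hypothetical invocation of this helper might look like the sketch below. The path, operation, and expected values are illustrative assumptions; WebHDFS accepts the permission as an octal string, so "750" should come back as "rwxr-x---".

// Hypothetical call: create a directory over WebHDFS with an explicit
// octal permission, then verify both the HTTP response code and the
// resulting FsPermission. Path and expected values are illustrative.
testPermissions(HttpURLConnection.HTTP_OK, "rwxr-x---", "/test-dir",
    "op=MKDIRS", "permission=750");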
Use of java.net.HttpURLConnection in project hadoop by apache.
The class TestHttpServerLogs, method testLogsDisabled.
@Test
public void testLogsDisabled() throws Exception {
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeysPublic.HADOOP_HTTP_LOGS_ENABLED, false);
  startServer(conf);
  URL url = new URL(baseUrl + "/logs");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  assertEquals(HttpStatus.SC_NOT_FOUND, conn.getResponseCode());
}
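For contrast, a companion test with the flag flipped could assert the opposite. This sketch assumes the same startServer helper and baseUrl field shown above, and that the server's log directory is configured so the /logs servlet resolves.

@Test
public void testLogsEnabled() throws Exception {
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeysPublic.HADOOP_HTTP_LOGS_ENABLED, true);
  startServer(conf);
  URL url = new URL(baseUrl + "/logs");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  // With the logs servlet enabled, the endpoint should no longer 404.
  assertEquals(HttpStatus.SC_OK, conn.getResponseCode());
}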
Use of java.net.HttpURLConnection in project hadoop by apache.
The class DFSZKFailoverController, method getLocalNNThreadDump.
/**
 * Capture the local NN's thread dump and write it to ZKFC's log.
 */
private void getLocalNNThreadDump() {
  isThreadDumpCaptured = false;
  // We use the same timeout value for both connection establishment
  // timeout and read timeout.
  int httpTimeOut = conf.getInt(
      DFSConfigKeys.DFS_HA_ZKFC_NN_HTTP_TIMEOUT_KEY,
      DFSConfigKeys.DFS_HA_ZKFC_NN_HTTP_TIMEOUT_KEY_DEFAULT);
  if (httpTimeOut == 0) {
    // If timeout value is set to zero, the feature is turned off.
    return;
  }
  try {
    String stacksUrl = DFSUtil.getInfoServer(localNNTarget.getAddress(),
        conf, DFSUtil.getHttpClientScheme(conf)) + "/stacks";
    URL url = new URL(stacksUrl);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setReadTimeout(httpTimeOut);
    conn.setConnectTimeout(httpTimeOut);
    conn.connect();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    IOUtils.copyBytes(conn.getInputStream(), out, 4096, true);
    StringBuilder localNNThreadDumpContent =
        new StringBuilder("-- Local NN thread dump -- \n");
    localNNThreadDumpContent.append(out);
    localNNThreadDumpContent.append("\n -- Local NN thread dump -- ");
    LOG.info(localNNThreadDumpContent);
    isThreadDumpCaptured = true;
  } catch (IOException e) {
    LOG.warn("Can't get local NN thread dump due to " + e.getMessage());
  }
}
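Stripped of the Hadoop plumbing, the HTTP pattern here is: open the connection, apply the same value to both timeouts, then drain the response. A self-contained sketch of that pattern follows; the class name, endpoint URL, and timeout value are illustrative, not taken from Hadoop.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class StacksFetcher {
  // Fetches the body of an HTTP endpoint, bounding both connection
  // establishment and reads by the same timeout, as the ZKFC does above.
  public static String fetch(String stacksUrl, int timeoutMs)
      throws IOException {
    HttpURLConnection conn =
        (HttpURLConnection) new URL(stacksUrl).openConnection();
    conn.setConnectTimeout(timeoutMs); // give up if the server is unreachable
    conn.setReadTimeout(timeoutMs);    // and if it stalls mid-response
    conn.connect();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (InputStream in = conn.getInputStream()) {
      byte[] buf = new byte[4096];
      int n;
      while ((n = in.read(buf)) != -1) {
        out.write(buf, 0, n);
      }
    }
    return out.toString("UTF-8");
  }

  public static void main(String[] args) throws IOException {
    // URL and timeout are illustrative placeholders.
    System.out.println(fetch("http://localhost:9870/stacks", 20000));
  }
}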
Use of java.net.HttpURLConnection in project Hystrix by Netflix.
The class UrlUtils, method readXmlInputStream.
public static InputStream readXmlInputStream(String uri) {
  if (uri == null || "".equals(uri)) {
    throw new IllegalArgumentException("Invalid uri. URI cannot be null or blank. ");
  }
  try {
    URL url = new URL(uri);
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod("GET");
    connection.setRequestProperty("Accept", "application/xml");
    return connection.getInputStream();
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
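Since the method hands back the raw connection stream, the caller owns both parsing and cleanup. A hedged usage sketch with the standard JAXP parser (the feed URL is illustrative):

import java.io.InputStream;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;

public class ReadXmlExample {
  public static void main(String[] args) throws Exception {
    // URL is illustrative; any endpoint serving XML will do.
    InputStream in = UrlUtils.readXmlInputStream("http://example.com/feed.xml");
    try {
      Document doc = DocumentBuilderFactory.newInstance()
          .newDocumentBuilder()
          .parse(in);
      System.out.println("Root element: "
          + doc.getDocumentElement().getNodeName());
    } finally {
      // UrlUtils never disconnects, so closing the stream is on the caller.
      in.close();
    }
  }
}

Note that the helper wraps every failure in an unchecked RuntimeException, including error responses: for 4xx/5xx status codes, HttpURLConnection.getInputStream itself throws an IOException.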