Use of java.net.HttpURLConnection in the Apache Hadoop project: class TestRMWebServicesDelegationTokenAuthentication, method testDoAs.
// Superuser "client" should be able to get a delegation token
// for user "client2" when authenticated using Kerberos.
// The same request must fail when authenticated with a delegation
// token, and when the doAs caller is not a superuser.
@Test
public void testDoAs() throws Exception {
  KerberosTestUtils.doAsClient(new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      String token = "";
      String owner = "";
      String renewer = "renewer";
      String body = "{\"renewer\":\"" + renewer + "\"}";
      URL url = new URL("http://localhost:8088/ws/v1/cluster/delegation-token?doAs=client2");
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      setupConn(conn, "POST", MediaType.APPLICATION_JSON, body);
      InputStream response = conn.getInputStream();
      assertEquals(Status.OK.getStatusCode(), conn.getResponseCode());
      // try-with-resources closes the reader and the wrapped response
      // stream, replacing the manual IOUtils.closeQuietly() calls.
      // "UTF-8" is the canonical charset name (was the "UTF8" alias).
      try (BufferedReader reader =
          new BufferedReader(new InputStreamReader(response, "UTF-8"))) {
        for (String line; (line = reader.readLine()) != null; ) {
          JSONObject obj = new JSONObject(line);
          if (obj.has("token")) {
            token = obj.getString("token");
          }
          if (obj.has("owner")) {
            owner = obj.getString("owner");
          }
        }
      }
      // The token must be owned by the impersonated user, not the caller.
      // (Bare assertEquals used consistently; the method already relies on
      // the static import elsewhere.)
      assertEquals("client2", owner);
      Token<RMDelegationTokenIdentifier> realToken = new Token<RMDelegationTokenIdentifier>();
      realToken.decodeFromUrlString(token);
      assertEquals("client2", realToken.decodeIdentifier().getOwner().toString());
      return null;
    }
  });
  // Impersonation via a delegation token must be rejected even for the
  // superuser "client".
  final String token = getDelegationToken("client");
  String renewer = "renewer";
  String body = "{\"renewer\":\"" + renewer + "\"}";
  URL url = new URL("http://localhost:8088/ws/v1/cluster/delegation-token?doAs=client2");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  conn.setRequestProperty(delegationTokenHeader, token);
  setupConn(conn, "POST", MediaType.APPLICATION_JSON, body);
  try {
    conn.getInputStream();
    fail("Client should not be allowed to impersonate using delegation tokens");
  } catch (IOException ie) {
    assertEquals(Status.FORBIDDEN.getStatusCode(), conn.getResponseCode());
  }
  // This should also fail because "client2" is not a superuser.
  KerberosTestUtils.doAs("client2@EXAMPLE.COM", new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      String renewer = "renewer";
      String body = "{\"renewer\":\"" + renewer + "\"}";
      URL url = new URL("http://localhost:8088/ws/v1/cluster/delegation-token?doAs=client");
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      setupConn(conn, "POST", MediaType.APPLICATION_JSON, body);
      try {
        conn.getInputStream();
        fail("Non superuser client should not be allowed to carry out doAs");
      } catch (IOException ie) {
        assertEquals(Status.FORBIDDEN.getStatusCode(), conn.getResponseCode());
      }
      return null;
    }
  });
}
Use of java.net.HttpURLConnection in the Apache Hadoop project: class TestRMWebServicesDelegationTokenAuthentication, method testCancelledDelegationToken.
// Test to make sure that cancelled delegation tokens
// are rejected.
@Test
public void testCancelledDelegationToken() throws Exception {
  // Obtain a valid token, then cancel it so the subsequent request
  // authenticates with a token the RM no longer recognizes.
  String token = getDelegationToken("client");
  cancelDelegationToken(token);
  ApplicationSubmissionContextInfo app = new ApplicationSubmissionContextInfo();
  String appid = "application_123_0";
  app.setApplicationId(appid);
  String requestBody = getMarshalledAppInfo(app);
  URL url = new URL("http://localhost:8088/ws/v1/cluster/apps");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  conn.setRequestProperty(delegationTokenHeader, token);
  setupConn(conn, "POST", MediaType.APPLICATION_XML, requestBody);
  // Auth is kerberos or delegation token; the cancelled token must be
  // rejected with 403 FORBIDDEN.
  try {
    conn.getInputStream();
    // Message fixed: the token was cancelled, not expired.
    fail("Authentication should fail with cancelled delegation tokens");
  } catch (IOException e) {
    assertEquals(Status.FORBIDDEN.getStatusCode(), conn.getResponseCode());
  }
}
Use of java.net.HttpURLConnection in the Apache Kafka project: class RestServer, method httpRequest.
/**
 * Executes an HTTP request against {@code url} and deserializes the JSON response.
 *
 * @param url HTTP connection will be established with this url.
 * @param method HTTP method ("GET", "POST", "PUT", etc.)
 * @param requestBodyData Object to serialize as JSON and send in the request body, or null for no body.
 * @param responseFormat Expected format of the response to the HTTP request.
 * @param <T> The type of the deserialized response to the HTTP request.
 * @return The deserialized response to the HTTP request, or null if no data is expected.
 * @throws ConnectRestException on an error status code, an unexpected status code, or an I/O failure.
 */
public static <T> HttpResponse<T> httpRequest(String url, String method, Object requestBodyData, TypeReference<T> responseFormat) {
    HttpURLConnection connection = null;
    try {
        String serializedBody = requestBodyData == null ? null : JSON_SERDE.writeValueAsString(requestBodyData);
        log.debug("Sending {} with input {} to {}", method, serializedBody, url);
        connection = (HttpURLConnection) new URL(url).openConnection();
        connection.setRequestMethod(method);
        connection.setRequestProperty("User-Agent", "kafka-connect");
        connection.setRequestProperty("Accept", "application/json");
        // connection.getResponseCode() implicitly calls getInputStream, so always set to true.
        // On the other hand, leaving this out breaks nothing.
        connection.setDoInput(true);
        connection.setUseCaches(false);
        if (requestBodyData != null) {
            connection.setRequestProperty("Content-Type", "application/json");
            connection.setDoOutput(true);
            // Encode explicitly as UTF-8: the no-arg getBytes() uses the
            // platform default charset, which is not portable. The stream is
            // closed by try-with-resources even if write() throws.
            try (OutputStream os = connection.getOutputStream()) {
                os.write(serializedBody.getBytes(java.nio.charset.StandardCharsets.UTF_8));
                os.flush();
            }
        }
        int responseCode = connection.getResponseCode();
        if (responseCode == HttpURLConnection.HTTP_NO_CONTENT) {
            return new HttpResponse<>(responseCode, connection.getHeaderFields(), null);
        } else if (responseCode >= 400) {
            // getErrorStream() may return null when the server sent no body;
            // guard against passing null into the JSON parser.
            try (InputStream es = connection.getErrorStream()) {
                if (es == null) {
                    throw new ConnectRestException(responseCode, responseCode, "Request failed with status " + responseCode + " and no error body");
                }
                ErrorMessage errorMessage = JSON_SERDE.readValue(es, ErrorMessage.class);
                throw new ConnectRestException(responseCode, errorMessage.errorCode(), errorMessage.message());
            }
        } else if (responseCode >= 200 && responseCode < 300) {
            // Stream is closed even if deserialization throws.
            try (InputStream is = connection.getInputStream()) {
                T result = JSON_SERDE.readValue(is, responseFormat);
                return new HttpResponse<>(responseCode, connection.getHeaderFields(), result);
            }
        } else {
            throw new ConnectRestException(Response.Status.INTERNAL_SERVER_ERROR, Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), "Unexpected status code when handling forwarded request: " + responseCode);
        }
    } catch (IOException e) {
        log.error("IO error forwarding REST request: ", e);
        throw new ConnectRestException(Response.Status.INTERNAL_SERVER_ERROR, "IO Error trying to forward REST request: " + e.getMessage(), e);
    } finally {
        if (connection != null)
            connection.disconnect();
    }
}
Use of java.net.HttpURLConnection in the Apache Hadoop project: class Fetcher, method openConnection.
// Opens an HTTP(S) connection to the given shuffle URL and stores it in the
// 'connection' field. When SSL shuffle is enabled, the connection is also
// configured with the shared SSL socket factory and hostname verifier.
@VisibleForTesting
protected synchronized void openConnection(URL url) throws IOException {
    HttpURLConnection newConnection = (HttpURLConnection) url.openConnection();
    if (sslShuffle) {
        // Secure shuffle: the connection is actually an HTTPS one.
        HttpsURLConnection secureConnection = (HttpsURLConnection) newConnection;
        try {
            secureConnection.setSSLSocketFactory(sslFactory.createSSLSocketFactory());
        } catch (GeneralSecurityException ex) {
            // Surface key/trust-store problems to callers as I/O failures.
            throw new IOException(ex);
        }
        secureConnection.setHostnameVerifier(sslFactory.getHostnameVerifier());
    }
    connection = newConnection;
}
Use of java.net.HttpURLConnection in the Apache Hadoop project: class SelfThrottlingIntercept, method sendingRequest.
/**
 * Throttling hook invoked just before a request is sent. Sleeps for a
 * duration proportional to the last observed end-to-end latency, scaled by
 * the configured read or write factor, to self-limit the request rate.
 */
public void sendingRequest(SendingRequestEvent sendEvent) {
    // Snapshot the shared latency value under the lock, then work on the copy.
    final long observedLatency;
    synchronized (this) {
        observedLatency = this.lastE2Elatency;
    }
    HttpURLConnection urlConnection = (HttpURLConnection) sendEvent.getConnectionObject();
    // PUT requests are uploads; everything else counts as a read.
    final boolean operationIsRead = !"PUT".equalsIgnoreCase(urlConnection.getRequestMethod());
    final float factor = operationIsRead ? readFactor : writeFactor;
    final float sleepMultiple = (1 / factor) - 1;
    long sleepDuration = (long) (sleepMultiple * observedLatency);
    if (sleepDuration < 0) {
        sleepDuration = 0;
    }
    if (sleepDuration > 0) {
        try {
            // Thread.sleep() is not exact but it seems sufficiently accurate
            // for our needs. If needed this could become a loop of small
            // waits that tracks actual elapsed time.
            Thread.sleep(sleepDuration);
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
        }
        // Reset the start date so the sleep is not counted as request latency.
        sendEvent.getRequestResult().setStartDate(new Date());
    }
    if (LOG.isDebugEnabled()) {
        boolean isFirstRequest = (observedLatency == 0);
        long threadId = Thread.currentThread().getId();
        LOG.debug(String.format(" SelfThrottlingIntercept:: SendingRequest: threadId=%d, requestType=%s, isFirstRequest=%b, sleepDuration=%d", threadId, operationIsRead ? "read " : "write", isFirstRequest, sleepDuration));
    }
}
Aggregations