Use of javax.ws.rs.Path in project hadoop by apache.
From the class KMS, method getKeyNames.
@GET
@Path(KMSRESTConstants.KEYS_NAMES_RESOURCE)
@Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8)
public Response getKeyNames() throws Exception {
  try {
    LOG.trace("Entering getKeyNames method.");
    KMSWebApp.getAdminCallsMeter().mark();
    UserGroupInformation user = HttpUserGroupInformation.get();
    assertAccess(KMSACLs.Type.GET_KEYS, user, KMSOp.GET_KEYS);
    List<String> json = user.doAs(new PrivilegedExceptionAction<List<String>>() {
      @Override
      public List<String> run() throws Exception {
        return provider.getKeys();
      }
    });
    kmsAudit.ok(user, KMSOp.GET_KEYS, "");
    LOG.trace("Exiting getKeyNames method.");
    return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
  } catch (Exception e) {
    LOG.debug("Exception in getKeyNames.", e);
    throw e;
  }
}
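For orientation, the endpoint above can be exercised with the standard JAX-RS 2.0 client API. The sketch below is a minimal, unauthenticated example; the base URL http://localhost:9600/kms/v1 and the keys/names sub-path are assumptions about a local KMS deployment rather than values taken from the snippet, and a real KMS also requires Hadoop authentication (SPNEGO or delegation tokens).

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class KmsKeyNamesClientSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    try {
      // Assumed KMS base URL; adjust host, port and context path for your deployment.
      Response response = client.target("http://localhost:9600/kms/v1")
          // Assumed to resolve against @Path(KMSRESTConstants.KEYS_NAMES_RESOURCE).
          .path("keys/names")
          .request(MediaType.APPLICATION_JSON)
          .get();
      System.out.println("HTTP " + response.getStatus());
      System.out.println(response.readEntity(String.class));
    } finally {
      client.close();
    }
  }
}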
Use of javax.ws.rs.Path in project hadoop by apache.
From the class KMS, method generateEncryptedKeys.
@SuppressWarnings({ "rawtypes", "unchecked" })
@GET
@Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" + KMSRESTConstants.EEK_SUB_RESOURCE)
@Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8)
public Response generateEncryptedKeys(@PathParam("name") final String name,
    @QueryParam(KMSRESTConstants.EEK_OP) String edekOp,
    @DefaultValue("1") @QueryParam(KMSRESTConstants.EEK_NUM_KEYS) final int numKeys)
    throws Exception {
  try {
    LOG.trace("Entering generateEncryptedKeys method.");
    UserGroupInformation user = HttpUserGroupInformation.get();
    KMSClientProvider.checkNotEmpty(name, "name");
    KMSClientProvider.checkNotNull(edekOp, "eekOp");
    LOG.debug("Generating encrypted key with name {}, the edek Operation is {}.", name, edekOp);
    Object retJSON;
    if (edekOp.equals(KMSRESTConstants.EEK_GENERATE)) {
      LOG.debug("edek Operation is Generate.");
      assertAccess(KMSACLs.Type.GENERATE_EEK, user, KMSOp.GENERATE_EEK, name);
      final List<EncryptedKeyVersion> retEdeks = new LinkedList<EncryptedKeyVersion>();
      try {
        user.doAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws Exception {
            LOG.debug("Generated Encrypted key for {} number of keys.", numKeys);
            for (int i = 0; i < numKeys; i++) {
              retEdeks.add(provider.generateEncryptedKey(name));
            }
            return null;
          }
        });
      } catch (Exception e) {
        LOG.error("Exception in generateEncryptedKeys:", e);
        throw new IOException(e);
      }
      kmsAudit.ok(user, KMSOp.GENERATE_EEK, name, "");
      retJSON = new ArrayList();
      for (EncryptedKeyVersion edek : retEdeks) {
        ((ArrayList) retJSON).add(KMSServerJSONUtils.toJSON(edek));
      }
    } else {
      StringBuilder error;
      error = new StringBuilder("IllegalArgumentException Wrong ");
      error.append(KMSRESTConstants.EEK_OP);
      error.append(" value, it must be ");
      error.append(KMSRESTConstants.EEK_GENERATE);
      error.append(" or ");
      error.append(KMSRESTConstants.EEK_DECRYPT);
      LOG.error(error.toString());
      throw new IllegalArgumentException(error.toString());
    }
    KMSWebApp.getGenerateEEKCallsMeter().mark();
    LOG.trace("Exiting generateEncryptedKeys method.");
    return Response.ok().type(MediaType.APPLICATION_JSON).entity(retJSON).build();
  } catch (Exception e) {
    LOG.debug("Exception in generateEncryptedKeys.", e);
    throw e;
  }
}
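The provider.generateEncryptedKey(name) call in the snippet is the KeyProviderCryptoExtension operation that this endpoint exposes over REST. As a rough illustration of doing the same thing programmatically, the sketch below assumes a KMS reachable at kms://http@localhost:9600/kms and an existing encryption key named "mykey"; both names are assumptions, not taken from the snippet.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
import org.apache.hadoop.crypto.key.KeyProviderFactory;

public class GenerateEekSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumed provider URI pointing at a local KMS instance.
    KeyProvider provider =
        KeyProviderFactory.get(new URI("kms://http@localhost:9600/kms"), conf);
    KeyProviderCryptoExtension ext =
        KeyProviderCryptoExtension.createKeyProviderCryptoExtension(provider);
    // "mykey" is a hypothetical encryption key that must already exist in the KMS.
    EncryptedKeyVersion eek = ext.generateEncryptedKey("mykey");
    System.out.println("EDEK generated against key version: "
        + eek.getEncryptionKeyVersionName());
  }
}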
Use of javax.ws.rs.Path in project hadoop by apache.
From the class HttpFSServer, method post.
/**
 * Binding to handle POST requests.
 *
 * @param is the input stream with the request payload.
 * @param uriInfo the URI info of the request.
 * @param path the path for the operation.
 * @param op the HttpFS operation of the request.
 * @param params the HttpFS parameters of the request.
 *
 * @return the request response.
 *
 * @throws IOException thrown if an IO error occurred. Thrown exceptions are
 * handled by {@link HttpFSExceptionProvider}.
 * @throws FileSystemAccessException thrown if a FileSystemAccess related
 * error occurred. Thrown exceptions are handled by
 * {@link HttpFSExceptionProvider}.
 */
@POST
@Path("{path:.*}")
@Consumes({ "*/*" })
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8 })
public Response post(InputStream is, @Context UriInfo uriInfo,
    @PathParam("path") String path,
    @QueryParam(OperationParam.NAME) OperationParam op,
    @Context Parameters params, @Context HttpServletRequest request)
    throws IOException, FileSystemAccessException {
  UserGroupInformation user = HttpUserGroupInformation.get();
  Response response;
  path = makeAbsolute(path);
  MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
  MDC.put("hostname", request.getRemoteAddr());
  switch (op.value()) {
    case APPEND: {
      Boolean hasData = params.get(DataParam.NAME, DataParam.class);
      if (!hasData) {
        response = Response.temporaryRedirect(
            createUploadRedirectionURL(uriInfo, HttpFSFileSystem.Operation.APPEND)).build();
      } else {
        FSOperations.FSAppend command = new FSOperations.FSAppend(is, path);
        fsExecute(user, command);
        AUDIT_LOG.info("[{}]", path);
        response = Response.ok().type(MediaType.APPLICATION_JSON).build();
      }
      break;
    }
    case CONCAT: {
      System.out.println("HTTPFS SERVER CONCAT");
      String sources = params.get(SourcesParam.NAME, SourcesParam.class);
      FSOperations.FSConcat command = new FSOperations.FSConcat(path, sources.split(","));
      fsExecute(user, command);
      AUDIT_LOG.info("[{}]", path);
      System.out.println("SENT RESPONSE");
      response = Response.ok().build();
      break;
    }
    case TRUNCATE: {
      Long newLength = params.get(NewLengthParam.NAME, NewLengthParam.class);
      FSOperations.FSTruncate command = new FSOperations.FSTruncate(path, newLength);
      JSONObject json = fsExecute(user, command);
      AUDIT_LOG.info("Truncate [{}] to length [{}]", path, newLength);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case UNSETSTORAGEPOLICY: {
      FSOperations.FSUnsetStoragePolicy command = new FSOperations.FSUnsetStoragePolicy(path);
      fsExecute(user, command);
      AUDIT_LOG.info("Unset storage policy [{}]", path);
      response = Response.ok().build();
      break;
    }
    default: {
      throw new IOException(
          MessageFormat.format("Invalid HTTP POST operation [{0}]", op.value()));
    }
  }
  return response;
}
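To show how one of these branches is reached from a client, the sketch below issues a TRUNCATE over HTTP with the JAX-RS client API. The host and port (HttpFS commonly listens on 14000), the newlength and user.name parameter names, and the target path are assumptions; authentication is omitted.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Response;

public class HttpFSTruncateSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    try {
      // Assumed HttpFS endpoint and parameter names; adjust for your deployment.
      Response response = client.target("http://localhost:14000/webhdfs/v1")
          .path("tmp/data.bin")
          .queryParam("op", "TRUNCATE")
          .queryParam("newlength", 1024)
          .queryParam("user.name", "hdfs")
          .request()
          .post(Entity.text(""));   // TRUNCATE carries no request payload
      System.out.println("HTTP " + response.getStatus());
      System.out.println(response.readEntity(String.class));
    } finally {
      client.close();
    }
  }
}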
Use of javax.ws.rs.Path in project hadoop by apache.
From the class HttpFSServer, method get.
/**
 * Binding to handle GET requests for the supported HttpFS operations.
 *
 * @param path the path for the operation.
 * @param op the HttpFS operation of the request.
 * @param params the HttpFS parameters of the request.
 *
 * @return the request response.
 *
 * @throws IOException thrown if an IO error occurred. Thrown exceptions are
 * handled by {@link HttpFSExceptionProvider}.
 * @throws FileSystemAccessException thrown if a FileSystemAccess related
 * error occurred. Thrown exceptions are handled by
 * {@link HttpFSExceptionProvider}.
 */
@GET
@Path("{path:.*}")
@Produces({ MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8 })
public Response get(@PathParam("path") String path,
    @QueryParam(OperationParam.NAME) OperationParam op,
    @Context Parameters params, @Context HttpServletRequest request)
    throws IOException, FileSystemAccessException {
  UserGroupInformation user = HttpUserGroupInformation.get();
  Response response;
  path = makeAbsolute(path);
  MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
  MDC.put("hostname", request.getRemoteAddr());
  switch (op.value()) {
    case OPEN: {
      // Invoking the command directly using an unmanaged FileSystem that is
      // released by the FileSystemReleaseFilter.
      final FSOperations.FSOpen command = new FSOperations.FSOpen(path);
      final FileSystem fs = createFileSystem(user);
      InputStream is = null;
      UserGroupInformation ugi = UserGroupInformation.createProxyUser(
          user.getShortUserName(), UserGroupInformation.getLoginUser());
      try {
        is = ugi.doAs(new PrivilegedExceptionAction<InputStream>() {
          @Override
          public InputStream run() throws Exception {
            return command.execute(fs);
          }
        });
      } catch (InterruptedException ie) {
        LOG.info("Open interrupted.", ie);
        Thread.currentThread().interrupt();
      }
      Long offset = params.get(OffsetParam.NAME, OffsetParam.class);
      Long len = params.get(LenParam.NAME, LenParam.class);
      AUDIT_LOG.info("[{}] offset [{}] len [{}]", new Object[] { path, offset, len });
      InputStreamEntity entity = new InputStreamEntity(is, offset, len);
      response = Response.ok(entity).type(MediaType.APPLICATION_OCTET_STREAM).build();
      break;
    }
    case GETFILESTATUS: {
      FSOperations.FSFileStatus command = new FSOperations.FSFileStatus(path);
      Map json = fsExecute(user, command);
      AUDIT_LOG.info("[{}]", path);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case LISTSTATUS: {
      String filter = params.get(FilterParam.NAME, FilterParam.class);
      FSOperations.FSListStatus command = new FSOperations.FSListStatus(path, filter);
      Map json = fsExecute(user, command);
      AUDIT_LOG.info("[{}] filter [{}]", path, (filter != null) ? filter : "-");
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case GETHOMEDIRECTORY: {
      enforceRootPath(op.value(), path);
      FSOperations.FSHomeDir command = new FSOperations.FSHomeDir();
      JSONObject json = fsExecute(user, command);
      AUDIT_LOG.info("");
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case INSTRUMENTATION: {
      enforceRootPath(op.value(), path);
      Groups groups = HttpFSServerWebApp.get().get(Groups.class);
      List<String> userGroups = groups.getGroups(user.getShortUserName());
      if (!userGroups.contains(HttpFSServerWebApp.get().getAdminGroup())) {
        throw new AccessControlException("User not in HttpFSServer admin group");
      }
      Instrumentation instrumentation = HttpFSServerWebApp.get().get(Instrumentation.class);
      Map snapshot = instrumentation.getSnapshot();
      response = Response.ok(snapshot).build();
      break;
    }
    case GETCONTENTSUMMARY: {
      FSOperations.FSContentSummary command = new FSOperations.FSContentSummary(path);
      Map json = fsExecute(user, command);
      AUDIT_LOG.info("[{}]", path);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case GETFILECHECKSUM: {
      FSOperations.FSFileChecksum command = new FSOperations.FSFileChecksum(path);
      Map json = fsExecute(user, command);
      AUDIT_LOG.info("[{}]", path);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case GETFILEBLOCKLOCATIONS: {
      response = Response.status(Response.Status.BAD_REQUEST).build();
      break;
    }
    case GETACLSTATUS: {
      FSOperations.FSAclStatus command = new FSOperations.FSAclStatus(path);
      Map json = fsExecute(user, command);
      AUDIT_LOG.info("ACL status for [{}]", path);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case GETXATTRS: {
      List<String> xattrNames = params.getValues(XAttrNameParam.NAME, XAttrNameParam.class);
      XAttrCodec encoding = params.get(XAttrEncodingParam.NAME, XAttrEncodingParam.class);
      FSOperations.FSGetXAttrs command = new FSOperations.FSGetXAttrs(path, xattrNames, encoding);
      @SuppressWarnings("rawtypes")
      Map json = fsExecute(user, command);
      AUDIT_LOG.info("XAttrs for [{}]", path);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case LISTXATTRS: {
      FSOperations.FSListXAttrs command = new FSOperations.FSListXAttrs(path);
      @SuppressWarnings("rawtypes")
      Map json = fsExecute(user, command);
      AUDIT_LOG.info("XAttr names for [{}]", path);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case LISTSTATUS_BATCH: {
      String startAfter = params.get(HttpFSParametersProvider.StartAfterParam.NAME,
          HttpFSParametersProvider.StartAfterParam.class);
      byte[] token = HttpFSUtils.EMPTY_BYTES;
      if (startAfter != null) {
        token = startAfter.getBytes(Charsets.UTF_8);
      }
      FSOperations.FSListStatusBatch command = new FSOperations.FSListStatusBatch(path, token);
      @SuppressWarnings("rawtypes")
      Map json = fsExecute(user, command);
      AUDIT_LOG.info("[{}] token [{}]", path, token);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case GETTRASHROOT: {
      FSOperations.FSTrashRoot command = new FSOperations.FSTrashRoot(path);
      JSONObject json = fsExecute(user, command);
      AUDIT_LOG.info("[{}]", path);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case GETALLSTORAGEPOLICY: {
      FSOperations.FSGetAllStoragePolicies command = new FSOperations.FSGetAllStoragePolicies();
      JSONObject json = fsExecute(user, command);
      AUDIT_LOG.info("[{}]", path);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    case GETSTORAGEPOLICY: {
      FSOperations.FSGetStoragePolicy command = new FSOperations.FSGetStoragePolicy(path);
      JSONObject json = fsExecute(user, command);
      AUDIT_LOG.info("[{}]", path);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
      break;
    }
    default: {
      throw new IOException(
          MessageFormat.format("Invalid HTTP GET operation [{0}]", op.value()));
    }
  }
  return response;
}
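Most of these GET branches mirror WebHDFS semantics, so instead of issuing raw HTTP a client can also go through the Hadoop FileSystem API with the webhdfs:// scheme pointed at an HttpFS gateway. The sketch below lists a directory (served by the LISTSTATUS case above); the host, port, and directory path are assumptions about a particular deployment.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HttpFSListStatusSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumed HttpFS gateway address; the webhdfs client speaks the same REST protocol.
    FileSystem fs = FileSystem.get(new URI("webhdfs://localhost:14000"), conf);
    // LISTSTATUS over the GET binding shown above.
    for (FileStatus status : fs.listStatus(new Path("/user/hdfs"))) {
      System.out.println(status.getPath() + " " + status.getLen());
    }
    fs.close();
  }
}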
Use of javax.ws.rs.Path in project hadoop by apache.
From the class AMWebServices, method getJobTasks.
@GET
@Path("/jobs/{jobid}/tasks")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public TasksInfo getJobTasks(@Context HttpServletRequest hsr,
    @PathParam("jobid") String jid, @QueryParam("type") String type) {
  init();
  Job job = getJobFromJobIdString(jid, appCtx);
  checkAccess(job, hsr);
  TasksInfo allTasks = new TasksInfo();
  for (Task task : job.getTasks().values()) {
    TaskType ttype = null;
    if (type != null && !type.isEmpty()) {
      try {
        ttype = MRApps.taskType(type);
      } catch (YarnRuntimeException e) {
        throw new BadRequestException("tasktype must be either m or r");
      }
    }
    if (ttype != null && task.getType() != ttype) {
      continue;
    }
    allTasks.add(new TaskInfo(task));
  }
  return allTasks;
}
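A client would normally reach this resource through the MapReduce application master's web services URL. The sketch below is a minimal JAX-RS client call; the proxy base URL, the application and job ids, and the /ws/v1/mapreduce prefix are assumptions about a particular cluster, and type=m restricts the result to map tasks as handled by MRApps.taskType above.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class AmTasksClientSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    try {
      // Assumed RM proxy URL for a running AM; ids are hypothetical placeholders.
      Response response = client
          .target("http://rm-host:8088/proxy/application_1_0001/ws/v1/mapreduce")
          .path("jobs/job_1_0001/tasks")
          .queryParam("type", "m")                 // map tasks only
          .request(MediaType.APPLICATION_JSON)     // XML is also produced by this resource
          .get();
      System.out.println("HTTP " + response.getStatus());
      System.out.println(response.readEntity(String.class));
    } finally {
      client.close();
    }
  }
}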