Usage of org.apache.hadoop.security.AccessControlException in project hadoop (by apache):
class FileSystem, method checkAccessPermissions.
/**
 * Default implementation backing {@link #access(Path, FsAction)}: verifies the
 * requested access mode against the POSIX-style permission bits of the given
 * file status for the current user.
 *
 * @param stat FileStatus to check
 * @param mode type of access to check
 * @throws AccessControlException if access is denied
 * @throws IOException for any error
 */
@InterfaceAudience.Private
static void checkAccessPermissions(FileStatus stat, FsAction mode) throws AccessControlException, IOException {
    FsPermission perm = stat.getPermission();
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    String user = ugi.getShortUserName();

    // Exactly one permission class applies, checked in precedence order:
    // owner first, then group membership, then "other". The classes are not
    // combined — an owner who lacks the bit is denied even if "other" has it.
    final FsAction granted;
    if (user.equals(stat.getOwner())) {
        granted = perm.getUserAction();
    } else if (ugi.getGroups().contains(stat.getGroup())) {
        granted = perm.getGroupAction();
    } else {
        granted = perm.getOtherAction();
    }

    if (granted.implies(mode)) {
        return;
    }
    throw new AccessControlException(String.format(
        "Permission denied: user=%s, path=\"%s\":%s:%s:%s%s",
        user, stat.getPath(), stat.getOwner(), stat.getGroup(),
        stat.isDirectory() ? "d" : "-", perm));
}
Usage of org.apache.hadoop.security.AccessControlException in project nifi (by apache):
class AbstractFetchHDFSRecord, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // do this before getting a flow file so that we always get a chance to attempt Kerberos relogin
    final FileSystem fileSystem = getFileSystem();
    final Configuration configuration = getConfiguration();
    final UserGroupInformation ugi = getUserGroupInformation();
    if (configuration == null || fileSystem == null || ugi == null) {
        getLogger().error("Processor not configured properly because Configuration, FileSystem, or UserGroupInformation was null");
        context.yield();
        return;
    }
    final FlowFile originalFlowFile = session.get();
    if (originalFlowFile == null) {
        // nothing queued; yield so the framework does not immediately re-trigger
        context.yield();
        return;
    }
    // Perform all HDFS access as the configured Hadoop user (Kerberos principal when secured).
    ugi.doAs((PrivilegedAction<Object>) () -> {
        FlowFile child = null;
        final String filenameValue = context.getProperty(FILENAME).evaluateAttributeExpressions(originalFlowFile).getValue();
        try {
            final Path path = new Path(filenameValue);
            // Exceptions thrown inside the session.write() callback cannot propagate out of the
            // lambda directly, so they are captured here and rethrown afterwards.
            final AtomicReference<Throwable> exceptionHolder = new AtomicReference<>(null);
            final AtomicReference<WriteResult> writeResult = new AtomicReference<>();
            final RecordSetWriterFactory recordSetWriterFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
            final StopWatch stopWatch = new StopWatch(true);
            // use a child FlowFile so that if any error occurs we can route the original untouched FlowFile to retry/failure
            child = session.create(originalFlowFile);
            final AtomicReference<String> mimeTypeRef = new AtomicReference<>();
            child = session.write(child, (final OutputStream rawOut) -> {
                try (final BufferedOutputStream out = new BufferedOutputStream(rawOut);
                     final HDFSRecordReader recordReader = createHDFSRecordReader(context, originalFlowFile, configuration, path)) {
                    // Read the first record up front so the writer schema can fall back to the
                    // record's own schema when one is available.
                    Record record = recordReader.nextRecord();
                    final RecordSchema schema = recordSetWriterFactory.getSchema(originalFlowFile.getAttributes(), record == null ? null : record.getSchema());
                    try (final RecordSetWriter recordSetWriter = recordSetWriterFactory.createWriter(getLogger(), schema, out)) {
                        recordSetWriter.beginRecordSet();
                        if (record != null) {
                            recordSetWriter.write(record);
                        }
                        while ((record = recordReader.nextRecord()) != null) {
                            recordSetWriter.write(record);
                        }
                        writeResult.set(recordSetWriter.finishRecordSet());
                        mimeTypeRef.set(recordSetWriter.getMimeType());
                    }
                } catch (Exception e) {
                    exceptionHolder.set(e);
                }
            });
            stopWatch.stop();
            // Rethrow anything captured inside the write callback so it is routed
            // into one of the appropriate catch blocks below
            if (exceptionHolder.get() != null) {
                throw exceptionHolder.get();
            }
            // NOTE(review): postProcess is a subclass hook — presumably allows adjusting the
            // fetched FlowFile (e.g. source cleanup); confirm against concrete subclasses.
            FlowFile successFlowFile = postProcess(context, session, child, path);
            // Attach the writer's attributes plus record count and MIME type to the result.
            final Map<String, String> attributes = new HashMap<>(writeResult.get().getAttributes());
            attributes.put(RECORD_COUNT_ATTR, String.valueOf(writeResult.get().getRecordCount()));
            attributes.put(CoreAttributes.MIME_TYPE.key(), mimeTypeRef.get());
            successFlowFile = session.putAllAttributes(successFlowFile, attributes);
            final Path qualifiedPath = path.makeQualified(fileSystem.getUri(), fileSystem.getWorkingDirectory());
            getLogger().info("Successfully received content from {} for {} in {} milliseconds", new Object[] { qualifiedPath, successFlowFile, stopWatch.getDuration() });
            session.getProvenanceReporter().fetch(successFlowFile, qualifiedPath.toString(), stopWatch.getDuration(TimeUnit.MILLISECONDS));
            session.transfer(successFlowFile, REL_SUCCESS);
            // The child carries the fetched content; the original is no longer needed.
            session.remove(originalFlowFile);
            return null;
        } catch (final FileNotFoundException | AccessControlException e) {
            // Non-retryable: file missing or permission denied — route original to failure.
            getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", new Object[] { filenameValue, originalFlowFile, e });
            final FlowFile failureFlowFile = session.putAttribute(originalFlowFile, FETCH_FAILURE_REASON_ATTR, e.getMessage() == null ? e.toString() : e.getMessage());
            session.transfer(failureFlowFile, REL_FAILURE);
        } catch (final IOException | FlowFileAccessException e) {
            // Possibly transient (network/HDFS hiccup) — penalize and retry.
            getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to retry", new Object[] { filenameValue, originalFlowFile, e });
            session.transfer(session.penalize(originalFlowFile), REL_RETRY);
            context.yield();
        } catch (final Throwable t) {
            // Anything else (including rethrown callback exceptions) is treated as permanent failure.
            getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", new Object[] { filenameValue, originalFlowFile, t });
            final FlowFile failureFlowFile = session.putAttribute(originalFlowFile, FETCH_FAILURE_REASON_ATTR, t.getMessage() == null ? t.toString() : t.getMessage());
            session.transfer(failureFlowFile, REL_FAILURE);
        }
        // if we got this far then we weren't successful so we need to clean up the child flow file if it got initialized
        if (child != null) {
            session.remove(child);
        }
        return null;
    });
}
Usage of org.apache.hadoop.security.AccessControlException in project ranger (by apache):
class RangerAdminJersey2RESTClient, method revokeAccess.
/**
 * Revokes access described by the given request by calling the Ranger admin
 * server's revoke-access REST endpoint for this plugin's service.
 *
 * @param request the grant/revoke request describing what to revoke
 * @throws AccessControlException if the server rejects the caller (HTTP 401)
 * @throws Exception if the server returns no response or an unexpected status
 */
@Override
public void revokeAccess(GrantRevokeRequest request) throws Exception {
    if (LOG.isDebugEnabled()) {
        // Fixed copy-paste: previously logged "RangerAdminRESTClient.grantAccess".
        LOG.debug("==> RangerAdminJersey2RESTClient.revokeAccess(" + request + ")");
    }
    Map<String, String> queryParams = new HashMap<String, String>();
    queryParams.put(RangerRESTUtils.REST_PARAM_PLUGIN_ID, _pluginId);
    String relativeURL = RangerRESTUtils.REST_URL_SERVICE_REVOKE_ACCESS + _serviceName;
    // NOTE(review): this issues a GET for a revoke operation — confirm the server
    // endpoint actually accepts GET here.
    Response response = get(queryParams, relativeURL);
    int httpResponseCode = response == null ? -1 : response.getStatus();
    switch(httpResponseCode) {
    case -1:
        LOG.warn("Unexpected: Null response from policy server while revoking access!");
        throw new Exception("unknown error!");
    case 200:
        LOG.debug("revokeAccess() succeeded: HTTP status=" + httpResponseCode);
        break;
    case 401:
        // Server refused the caller's credentials/authorization.
        throw new AccessControlException();
    default:
        String body = response.readEntity(String.class);
        String message = String.format("Unexpected: Received status[%d] with body[%s] from url[%s]", httpResponseCode, body, relativeURL);
        LOG.warn(message);
        throw new Exception("HTTP status: " + httpResponseCode);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== RangerAdminJersey2RESTClient.revokeAccess(" + request + ")");
    }
}
Usage of org.apache.hadoop.security.AccessControlException in project ranger (by apache):
class RangerAdminJersey2RESTClient, method grantAccess.
/**
 * Grants access described by the given request by calling the Ranger admin
 * server's grant-access REST endpoint for this plugin's service.
 *
 * @param request the grant/revoke request describing what to grant
 * @throws AccessControlException if the server rejects the caller (HTTP 401)
 * @throws Exception if the server returns no response or an unexpected status
 */
@Override
public void grantAccess(GrantRevokeRequest request) throws Exception {
    if (LOG.isDebugEnabled()) {
        // Fixed log prefix: this class is RangerAdminJersey2RESTClient.
        LOG.debug("==> RangerAdminJersey2RESTClient.grantAccess(" + request + ")");
    }
    Map<String, String> queryParams = new HashMap<String, String>();
    queryParams.put(RangerRESTUtils.REST_PARAM_PLUGIN_ID, _pluginId);
    String relativeURL = RangerRESTUtils.REST_URL_SERVICE_GRANT_ACCESS + _serviceName;
    // NOTE(review): this issues a GET for a grant operation — confirm the server
    // endpoint actually accepts GET here.
    Response response = get(queryParams, relativeURL);
    int httpResponseCode = response == null ? -1 : response.getStatus();
    switch(httpResponseCode) {
    case -1:
        LOG.warn("Unexpected: Null response from policy server while granting access!");
        throw new Exception("unknown error!");
    case 200:
        LOG.debug("grantAccess() succeeded: HTTP status=" + httpResponseCode);
        break;
    case 401:
        // Server refused the caller's credentials/authorization.
        throw new AccessControlException();
    default:
        String body = response.readEntity(String.class);
        String message = String.format("Unexpected: Received status[%d] with body[%s] from url[%s]", httpResponseCode, body, relativeURL);
        LOG.warn(message);
        throw new Exception("HTTP status: " + httpResponseCode);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== RangerAdminJersey2RESTClient.grantAccess(" + request + ")");
    }
}
Usage of org.apache.hadoop.security.AccessControlException in project ranger (by apache):
class RangerAdminRESTClient, method dropRole.
/**
 * Drops the named role on the Ranger admin server via a REST DELETE,
 * executing as the logged-in Kerberos user when security is enabled.
 *
 * @param execUser user on whose behalf the role is dropped (sent as a query param)
 * @param roleName name of the role to drop
 * @throws AccessControlException if the server responds with HTTP 401
 * @throws Exception if no response is received or the status is not OK/NO_CONTENT
 */
@Override
public void dropRole(final String execUser, final String roleName) throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> RangerAdminRESTClient.dropRole(" + roleName + ")");
    }
    ClientResponse response = null;
    UserGroupInformation user = MiscUtil.getUGILoginUser();
    boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled();
    Map<String, String> queryParams = new HashMap<String, String>();
    queryParams.put(RangerRESTUtils.SERVICE_NAME_PARAM, serviceNameUrlParam);
    queryParams.put(RangerRESTUtils.REST_PARAM_EXEC_USER, execUser);
    String relativeURL = RangerRESTUtils.REST_URL_SERVICE_DROP_ROLE + roleName;
    if (isSecureMode) {
        // In a Kerberized deployment, run the REST call under the login UGI.
        PrivilegedAction<ClientResponse> action = new PrivilegedAction<ClientResponse>() {
            public ClientResponse run() {
                ClientResponse clientRes = null;
                try {
                    clientRes = restClient.delete(relativeURL, queryParams);
                } catch (Exception e) {
                    // Log the full exception (not just the message) so the cause is preserved.
                    LOG.error("Failed to get response, Error is : " + e.getMessage(), e);
                }
                return clientRes;
            }
        };
        if (LOG.isDebugEnabled()) {
            LOG.debug("drop role as user " + user);
        }
        response = user.doAs(action);
    } else {
        response = restClient.delete(relativeURL, queryParams);
    }
    if (response == null) {
        throw new Exception("unknown error during dropRole. roleName=" + roleName);
    } else if (response.getStatus() != HttpServletResponse.SC_OK && response.getStatus() != HttpServletResponse.SC_NO_CONTENT) {
        RESTResponse resp = RESTResponse.fromClientResponse(response);
        // Fixed copy-paste: previously said "createRole() failed".
        LOG.error("dropRole() failed: HTTP status=" + response.getStatus() + ", message=" + resp.getMessage() + ", isSecure=" + isSecureMode + (isSecureMode ? (", user=" + user) : ""));
        if (response.getStatus() == HttpServletResponse.SC_UNAUTHORIZED) {
            throw new AccessControlException();
        }
        throw new Exception("HTTP " + response.getStatus() + " Error: " + resp.getMessage());
    }
    if (LOG.isDebugEnabled()) {
        // Fixed copy-paste: previously said "deleteRole".
        LOG.debug("<== RangerAdminRESTClient.dropRole(" + roleName + ")");
    }
}
Aggregations