Example 71 with AccessControlException

Use of org.apache.hadoop.security.AccessControlException in project hadoop by apache.

From class FileSystem, method checkAccessPermissions.

/**
   * This method provides the default implementation of
   * {@link #access(Path, FsAction)}.
   *
   * @param stat FileStatus to check
   * @param mode type of access to check
   * @throws AccessControlException if access is denied
   * @throws IOException for any error
   */
@InterfaceAudience.Private
static void checkAccessPermissions(FileStatus stat, FsAction mode) throws AccessControlException, IOException {
    FsPermission perm = stat.getPermission();
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    String user = ugi.getShortUserName();
    if (user.equals(stat.getOwner())) {
        // Owner class: only the user bits apply, even if group/other bits would allow access.
        if (perm.getUserAction().implies(mode)) {
            return;
        }
    } else if (ugi.getGroups().contains(stat.getGroup())) {
        // Group class: the caller is in the file's group, so only the group bits apply.
        if (perm.getGroupAction().implies(mode)) {
            return;
        }
    } else {
        // Others class: the caller is neither the owner nor a group member.
        if (perm.getOtherAction().implies(mode)) {
            return;
        }
    }
    throw new AccessControlException(String.format(
            "Permission denied: user=%s, path=\"%s\":%s:%s:%s%s",
            user, stat.getPath(), stat.getOwner(), stat.getGroup(),
            stat.isDirectory() ? "d" : "-", perm));
}
Also used : AccessControlException(org.apache.hadoop.security.AccessControlException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
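
The check above is what backs the public FileSystem#access(Path, FsAction) API. Below is a minimal caller sketch, assuming a Hadoop 2.6+ client on the classpath; the path /tmp/example.txt is hypothetical.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.security.AccessControlException;

public class AccessCheckDemo {

    public static void main(String[] args) throws Exception {
        // Hypothetical path; point this at a file that exists in your cluster.
        Path path = new Path("/tmp/example.txt");
        FileSystem fs = FileSystem.get(new Configuration());
        try {
            // The default access() implementation delegates to checkAccessPermissions().
            fs.access(path, FsAction.READ);
            System.out.println("Read access granted for " + path);
        } catch (AccessControlException e) {
            System.out.println("Read access denied: " + e.getMessage());
        }
    }
}

Note that the check evaluates exactly one permission class: an owner whose user bits deny the action is rejected even if the group or other bits would allow it, mirroring POSIX semantics.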

Example 72 with AccessControlException

Use of org.apache.hadoop.security.AccessControlException in project nifi by apache.

From class AbstractFetchHDFSRecord, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // do this before getting a flow file so that we always get a chance to attempt Kerberos relogin
    final FileSystem fileSystem = getFileSystem();
    final Configuration configuration = getConfiguration();
    final UserGroupInformation ugi = getUserGroupInformation();
    if (configuration == null || fileSystem == null || ugi == null) {
        getLogger().error("Processor not configured properly because Configuration, FileSystem, or UserGroupInformation was null");
        context.yield();
        return;
    }
    final FlowFile originalFlowFile = session.get();
    if (originalFlowFile == null) {
        context.yield();
        return;
    }
    ugi.doAs((PrivilegedAction<Object>) () -> {
        FlowFile child = null;
        final String filenameValue = context.getProperty(FILENAME).evaluateAttributeExpressions(originalFlowFile).getValue();
        try {
            final Path path = new Path(filenameValue);
            final AtomicReference<Throwable> exceptionHolder = new AtomicReference<>(null);
            final AtomicReference<WriteResult> writeResult = new AtomicReference<>();
            final RecordSetWriterFactory recordSetWriterFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
            final StopWatch stopWatch = new StopWatch(true);
            // use a child FlowFile so that if any error occurs we can route the original untouched FlowFile to retry/failure
            child = session.create(originalFlowFile);
            final AtomicReference<String> mimeTypeRef = new AtomicReference<>();
            child = session.write(child, (final OutputStream rawOut) -> {
                try (final BufferedOutputStream out = new BufferedOutputStream(rawOut);
                    final HDFSRecordReader recordReader = createHDFSRecordReader(context, originalFlowFile, configuration, path)) {
                    Record record = recordReader.nextRecord();
                    final RecordSchema schema = recordSetWriterFactory.getSchema(originalFlowFile.getAttributes(), record == null ? null : record.getSchema());
                    try (final RecordSetWriter recordSetWriter = recordSetWriterFactory.createWriter(getLogger(), schema, out)) {
                        recordSetWriter.beginRecordSet();
                        if (record != null) {
                            recordSetWriter.write(record);
                        }
                        while ((record = recordReader.nextRecord()) != null) {
                            recordSetWriter.write(record);
                        }
                        writeResult.set(recordSetWriter.finishRecordSet());
                        mimeTypeRef.set(recordSetWriter.getMimeType());
                    }
                } catch (Exception e) {
                    exceptionHolder.set(e);
                }
            });
            stopWatch.stop();
            // if any errors happened inside the session.write() callback, rethrow them here so they get routed
            // into one of the appropriate catch blocks below
            if (exceptionHolder.get() != null) {
                throw exceptionHolder.get();
            }
            FlowFile successFlowFile = postProcess(context, session, child, path);
            final Map<String, String> attributes = new HashMap<>(writeResult.get().getAttributes());
            attributes.put(RECORD_COUNT_ATTR, String.valueOf(writeResult.get().getRecordCount()));
            attributes.put(CoreAttributes.MIME_TYPE.key(), mimeTypeRef.get());
            successFlowFile = session.putAllAttributes(successFlowFile, attributes);
            final Path qualifiedPath = path.makeQualified(fileSystem.getUri(), fileSystem.getWorkingDirectory());
            getLogger().info("Successfully received content from {} for {} in {} milliseconds", new Object[] { qualifiedPath, successFlowFile, stopWatch.getDuration() });
            session.getProvenanceReporter().fetch(successFlowFile, qualifiedPath.toString(), stopWatch.getDuration(TimeUnit.MILLISECONDS));
            session.transfer(successFlowFile, REL_SUCCESS);
            session.remove(originalFlowFile);
            return null;
        } catch (final FileNotFoundException | AccessControlException e) {
            getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", new Object[] { filenameValue, originalFlowFile, e });
            final FlowFile failureFlowFile = session.putAttribute(originalFlowFile, FETCH_FAILURE_REASON_ATTR, e.getMessage() == null ? e.toString() : e.getMessage());
            session.transfer(failureFlowFile, REL_FAILURE);
        } catch (final IOException | FlowFileAccessException e) {
            getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to retry", new Object[] { filenameValue, originalFlowFile, e });
            session.transfer(session.penalize(originalFlowFile), REL_RETRY);
            context.yield();
        } catch (final Throwable t) {
            getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure", new Object[] { filenameValue, originalFlowFile, t });
            final FlowFile failureFlowFile = session.putAttribute(originalFlowFile, FETCH_FAILURE_REASON_ATTR, t.getMessage() == null ? t.toString() : t.getMessage());
            session.transfer(failureFlowFile, REL_FAILURE);
        }
        // if we got this far then we weren't successful so we need to clean up the child flow file if it got initialized
        if (child != null) {
            session.remove(child);
        }
        return null;
    });
}
Also used : Path(org.apache.hadoop.fs.Path) FlowFile(org.apache.nifi.flowfile.FlowFile) Configuration(org.apache.hadoop.conf.Configuration) BufferedOutputStream(java.io.BufferedOutputStream) OutputStream(java.io.OutputStream) AtomicReference(java.util.concurrent.atomic.AtomicReference) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) ProcessException(org.apache.nifi.processor.exception.ProcessException) FlowFileAccessException(org.apache.nifi.processor.exception.FlowFileAccessException) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException) AccessControlException(org.apache.hadoop.security.AccessControlException) StopWatch(org.apache.nifi.util.StopWatch) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) FileSystem(org.apache.hadoop.fs.FileSystem) Record(org.apache.nifi.serialization.record.Record) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) HashMap(java.util.HashMap) Map(java.util.Map) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) HDFSRecordReader(org.apache.nifi.processors.hadoop.record.HDFSRecordReader)
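
One pattern worth calling out: exceptions raised inside the session.write() callback would not reach the specific catch blocks below with their original type intact, so the processor captures them in an AtomicReference and rethrows after the callback returns. A stripped-down sketch of that holder pattern follows; the WriteCallback interface is an illustrative stand-in for the NiFi callback, not a NiFi API.

import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;

public class ExceptionHolderDemo {

    // Illustrative stand-in for a callback interface that cannot throw checked exceptions.
    interface WriteCallback {
        void process();
    }

    static void write(WriteCallback callback) {
        callback.process();
    }

    public static void main(String[] args) throws Throwable {
        final AtomicReference<Throwable> exceptionHolder = new AtomicReference<>(null);
        write(() -> {
            try {
                throw new IOException("simulated failure inside the callback");
            } catch (Exception e) {
                // Capture instead of throwing: the callback signature forbids checked exceptions.
                exceptionHolder.set(e);
            }
        });
        // Rethrow outside the callback so ordinary catch blocks can route on the original type.
        if (exceptionHolder.get() != null) {
            throw exceptionHolder.get();
        }
        System.out.println("write completed without error");
    }
}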

Example 73 with AccessControlException

Use of org.apache.hadoop.security.AccessControlException in project ranger by apache.

From class RangerAdminJersey2RESTClient, method revokeAccess.

@Override
public void revokeAccess(GrantRevokeRequest request) throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> RangerAdminJersey2RESTClient.revokeAccess(" + request + ")");
    }
    Map<String, String> queryParams = new HashMap<String, String>();
    queryParams.put(RangerRESTUtils.REST_PARAM_PLUGIN_ID, _pluginId);
    String relativeURL = RangerRESTUtils.REST_URL_SERVICE_REVOKE_ACCESS + _serviceName;
    Response response = get(queryParams, relativeURL);
    int httpResponseCode = response == null ? -1 : response.getStatus();
    switch (httpResponseCode) {
        case -1:
            LOG.warn("Unexpected: Null response from policy server while revoking access!");
            throw new Exception("unknown error!");
        case 200:
            LOG.debug("revokeAccess() succeeded: HTTP status=" + httpResponseCode);
            break;
        case 401:
            throw new AccessControlException();
        default:
            String body = response.readEntity(String.class);
            String message = String.format("Unexpected: Received status[%d] with body[%s] from url[%s]", httpResponseCode, body, relativeURL);
            LOG.warn(message);
            throw new Exception("HTTP status: " + httpResponseCode);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== RangerAdminJersey2RESTClient.revokeAccess(" + request + ")");
    }
}
Also used : Response(javax.ws.rs.core.Response) HashMap(java.util.HashMap) AccessControlException(org.apache.hadoop.security.AccessControlException) JsonParseException(com.google.gson.JsonParseException) UnknownHostException(java.net.UnknownHostException) ClientHandlerException(com.sun.jersey.api.client.ClientHandlerException) ProcessingException(javax.ws.rs.ProcessingException) UnsupportedEncodingException(java.io.UnsupportedEncodingException)

Example 74 with AccessControlException

Use of org.apache.hadoop.security.AccessControlException in project ranger by apache.

From class RangerAdminJersey2RESTClient, method grantAccess.

@Override
public void grantAccess(GrantRevokeRequest request) throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> RangerAdminJersey2RESTClient.grantAccess(" + request + ")");
    }
    Map<String, String> queryParams = new HashMap<String, String>();
    queryParams.put(RangerRESTUtils.REST_PARAM_PLUGIN_ID, _pluginId);
    String relativeURL = RangerRESTUtils.REST_URL_SERVICE_GRANT_ACCESS + _serviceName;
    Response response = get(queryParams, relativeURL);
    int httpResponseCode = response == null ? -1 : response.getStatus();
    switch (httpResponseCode) {
        case -1:
            LOG.warn("Unexpected: Null response from policy server while granting access!");
            throw new Exception("unknown error!");
        case 200:
            LOG.debug("grantAccess() succeeded: HTTP status=" + httpResponseCode);
            break;
        case 401:
            throw new AccessControlException();
        default:
            String body = response.readEntity(String.class);
            String message = String.format("Unexpected: Received status[%d] with body[%s] from url[%s]", httpResponseCode, body, relativeURL);
            LOG.warn(message);
            throw new Exception("HTTP status: " + httpResponseCode);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== RangerAdminJersey2RESTClient.grantAccess(" + request + ")");
    }
}
Also used : Response(javax.ws.rs.core.Response) HashMap(java.util.HashMap) AccessControlException(org.apache.hadoop.security.AccessControlException) JsonParseException(com.google.gson.JsonParseException) UnknownHostException(java.net.UnknownHostException) ClientHandlerException(com.sun.jersey.api.client.ClientHandlerException) ProcessingException(javax.ws.rs.ProcessingException) UnsupportedEncodingException(java.io.UnsupportedEncodingException)
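
grantAccess() and revokeAccess() share the same response handling: a null response or an unrecognized status becomes a generic Exception, HTTP 200 is success, and HTTP 401 is translated into Hadoop's AccessControlException so authorization failures stay distinguishable. A sketch of that mapping factored into a standalone helper follows; handleStatus() is illustrative, not a Ranger API.

import org.apache.hadoop.security.AccessControlException;

public class StatusMapping {

    // Illustrative helper mirroring the switch used by grantAccess()/revokeAccess().
    static void handleStatus(int httpResponseCode, String body, String relativeURL) throws Exception {
        switch (httpResponseCode) {
            case -1:
                // No response at all, e.g. the policy server was unreachable.
                throw new Exception("unknown error!");
            case 200:
                // Success; nothing to do.
                break;
            case 401:
                // Unauthorized maps onto Hadoop's AccessControlException.
                throw new AccessControlException();
            default:
                throw new Exception(String.format(
                        "Unexpected: Received status[%d] with body[%s] from url[%s]",
                        httpResponseCode, body, relativeURL));
        }
    }

    public static void main(String[] args) {
        try {
            handleStatus(401, null, "/hypothetical/grant/url");
        } catch (Exception e) {
            System.out.println("Caught: " + e.getClass().getName());
        }
    }
}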

Example 75 with AccessControlException

Use of org.apache.hadoop.security.AccessControlException in project ranger by apache.

From class RangerAdminRESTClient, method dropRole.

@Override
public void dropRole(final String execUser, final String roleName) throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> RangerAdminRESTClient.dropRole(" + roleName + ")");
    }
    ClientResponse response = null;
    UserGroupInformation user = MiscUtil.getUGILoginUser();
    boolean isSecureMode = user != null && UserGroupInformation.isSecurityEnabled();
    Map<String, String> queryParams = new HashMap<String, String>();
    queryParams.put(RangerRESTUtils.SERVICE_NAME_PARAM, serviceNameUrlParam);
    queryParams.put(RangerRESTUtils.REST_PARAM_EXEC_USER, execUser);
    String relativeURL = RangerRESTUtils.REST_URL_SERVICE_DROP_ROLE + roleName;
    if (isSecureMode) {
        PrivilegedAction<ClientResponse> action = new PrivilegedAction<ClientResponse>() {

            public ClientResponse run() {
                ClientResponse clientRes = null;
                try {
                    clientRes = restClient.delete(relativeURL, queryParams);
                } catch (Exception e) {
                    LOG.error("Failed to get response, Error is : " + e.getMessage());
                }
                return clientRes;
            }
        };
        if (LOG.isDebugEnabled()) {
            LOG.debug("drop role as user " + user);
        }
        response = user.doAs(action);
    } else {
        response = restClient.delete(relativeURL, queryParams);
    }
    if (response == null) {
        throw new Exception("unknown error during dropRole. roleName=" + roleName);
    } else if (response.getStatus() != HttpServletResponse.SC_OK && response.getStatus() != HttpServletResponse.SC_NO_CONTENT) {
        RESTResponse resp = RESTResponse.fromClientResponse(response);
        LOG.error("dropRole() failed: HTTP status=" + response.getStatus() + ", message=" + resp.getMessage() + ", isSecure=" + isSecureMode + (isSecureMode ? (", user=" + user) : ""));
        if (response.getStatus() == HttpServletResponse.SC_UNAUTHORIZED) {
            throw new AccessControlException();
        }
        throw new Exception("HTTP " + response.getStatus() + " Error: " + resp.getMessage());
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== RangerAdminRESTClient.deleteRole(" + roleName + ")");
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) HashMap(java.util.HashMap) PrivilegedAction(java.security.PrivilegedAction) RESTResponse(org.apache.ranger.admin.client.datatype.RESTResponse) AccessControlException(org.apache.hadoop.security.AccessControlException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
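
The Kerberos-aware call pattern in dropRole() is reusable on its own: when security is enabled, the REST call is wrapped in a PrivilegedAction and executed through UserGroupInformation.doAs() so it runs with the logged-in user's credentials; otherwise it is called directly. A minimal sketch follows; fetchUrl() is a hypothetical stand-in for the real REST client call.

import java.security.PrivilegedAction;

import org.apache.hadoop.security.UserGroupInformation;

public class DoAsDemo {

    // Hypothetical stand-in for an HTTP call; a real client would issue the request here.
    static String fetchUrl(String relativeURL) {
        return "response-for:" + relativeURL;
    }

    public static void main(String[] args) throws Exception {
        UserGroupInformation user = UserGroupInformation.getLoginUser();
        String relativeURL = "/hypothetical/roles/drop/some-role";
        String response;
        if (UserGroupInformation.isSecurityEnabled()) {
            // Run the call with the Kerberos credentials of the logged-in user.
            response = user.doAs((PrivilegedAction<String>) () -> fetchUrl(relativeURL));
        } else {
            response = fetchUrl(relativeURL);
        }
        System.out.println(response);
    }
}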

Aggregations

AccessControlException (org.apache.hadoop.security.AccessControlException): 165
Path (org.apache.hadoop.fs.Path): 72
IOException (java.io.IOException): 69
Test (org.junit.Test): 60
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 44
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 41
SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException): 35
FileSystem (org.apache.hadoop.fs.FileSystem): 33
Configuration (org.apache.hadoop.conf.Configuration): 25
HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus): 21
FileNotFoundException (java.io.FileNotFoundException): 19
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 14
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 13
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 11
HashMap (java.util.HashMap): 10
FileStatus (org.apache.hadoop.fs.FileStatus): 10
ClientResponse (com.sun.jersey.api.client.ClientResponse): 9
PrivilegedAction (java.security.PrivilegedAction): 9
ParentNotDirectoryException (org.apache.hadoop.fs.ParentNotDirectoryException): 9
RESTResponse (org.apache.ranger.admin.client.datatype.RESTResponse): 9