
Example 41 with DoNotRetryIOException

Use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

From class AccessController, method preCheckAndDelete.

@Override
public boolean preCheckAndDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
        final byte[] row, final byte[] family, final byte[] qualifier,
        final CompareFilter.CompareOp compareOp, final ByteArrayComparable comparator,
        final Delete delete, final boolean result) throws IOException {
    // An ACL on a delete is useless, we shouldn't allow it
    if (delete.getAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL) != null) {
        throw new DoNotRetryIOException("ACL on checkAndDelete has no effect: " + delete.toString());
    }
    // Require READ and WRITE permissions on the table, CF, and the KV covered
    // by the delete
    RegionCoprocessorEnvironment env = c.getEnvironment();
    Map<byte[], ? extends Collection<byte[]>> families = makeFamilyMap(family, qualifier);
    User user = getActiveUser(c);
    AuthResult authResult = permissionGranted(OpType.CHECK_AND_DELETE, user, env, families, Action.READ, Action.WRITE);
    logResult(authResult);
    if (!authResult.isAllowed()) {
        if (cellFeaturesEnabled && !compatibleEarlyTermination) {
            delete.setAttribute(CHECK_COVERING_PERM, TRUE);
        } else if (authorizationEnabled) {
            throw new AccessDeniedException("Insufficient permissions " + authResult.toContextString());
        }
    }
    return result;
}
Also used: RegionCoprocessorEnvironment (org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment), AccessDeniedException (org.apache.hadoop.hbase.security.AccessDeniedException), User (org.apache.hadoop.hbase.security.User), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException)
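
The guard above matters because DoNotRetryIOException tells the HBase client to fail immediately rather than retry. A minimal client-side sketch of tripping it, assuming the HBase 1.x Table API and that Mutation.setACL stores the permission under the same AccessControlConstants.OP_ATTRIBUTE_ACL attribute checked here; the table, row, column, and user names are hypothetical:

import java.io.IOException;

import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndDeleteAclSketch {

    // Attaching a cell ACL to the Delete of a checkAndDelete trips the
    // preCheckAndDelete guard above; the server answers with a
    // DoNotRetryIOException and the client gives up instead of retrying.
    static boolean tryCheckAndDelete(Table table) throws IOException {
        Delete delete = new Delete(Bytes.toBytes("row1")); // hypothetical row
        delete.addColumns(Bytes.toBytes("cf"), Bytes.toBytes("q"));
        // setACL carries the permission in the operation's ACL attribute,
        // which is exactly what the coprocessor rejects for checkAndDelete.
        delete.setACL("someUser", new Permission(Permission.Action.READ));
        try {
            return table.checkAndDelete(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
                Bytes.toBytes("q"), Bytes.toBytes("expected"), delete);
        } catch (DoNotRetryIOException e) {
            return false; // permanent failure: retrying would fail the same way
        }
    }
}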

Example 42 with DoNotRetryIOException

Use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

From class VisibilityController, method addLabels.

/****************************** VisibilityEndpoint service related methods ******************************/
@Override
public synchronized void addLabels(RpcController controller, VisibilityLabelsRequest request, RpcCallback<VisibilityLabelsResponse> done) {
    VisibilityLabelsResponse.Builder response = VisibilityLabelsResponse.newBuilder();
    List<VisibilityLabel> visLabels = request.getVisLabelList();
    if (!initialized) {
        setExceptionResults(visLabels.size(), new VisibilityControllerNotReadyException("VisibilityController not yet initialized!"), response);
    } else {
        List<byte[]> labels = new ArrayList<>(visLabels.size());
        try {
            if (authorizationEnabled) {
                checkCallingUserAuth();
            }
            RegionActionResult successResult = RegionActionResult.newBuilder().build();
            for (VisibilityLabel visLabel : visLabels) {
                byte[] label = visLabel.getLabel().toByteArray();
                labels.add(label);
                // Mark each result as success for now; it is reset below based
                // on the per-label result from visibilityLabelService.addLabels().
                response.addResult(successResult);
            }
            if (!labels.isEmpty()) {
                OperationStatus[] opStatus = this.visibilityLabelService.addLabels(labels);
                logResult(true, "addLabels", "Adding labels allowed", null, labels, null);
                int i = 0;
                for (OperationStatus status : opStatus) {
                    while (response.getResult(i) != successResult) i++;
                    if (status.getOperationStatusCode() != SUCCESS) {
                        RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder();
                        failureResultBuilder.setException(buildException(new DoNotRetryIOException(status.getExceptionMsg())));
                        response.setResult(i, failureResultBuilder.build());
                    }
                    i++;
                }
            }
        } catch (AccessDeniedException e) {
            logResult(false, "addLabels", e.getMessage(), null, labels, null);
            LOG.error("User is not having required permissions to add labels", e);
            setExceptionResults(visLabels.size(), e, response);
        } catch (IOException e) {
            LOG.error(e);
            setExceptionResults(visLabels.size(), e, response);
        }
    }
    done.run(response.build());
}
Also used: AccessDeniedException (org.apache.hadoop.hbase.security.AccessDeniedException), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), ArrayList (java.util.ArrayList), RegionActionResult (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult), IOException (java.io.IOException), ReplicationEndpoint (org.apache.hadoop.hbase.replication.ReplicationEndpoint), VisibilityLabel (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel), OperationStatus (org.apache.hadoop.hbase.regionserver.OperationStatus), VisibilityLabelsResponse (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse)
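
Both error branches lean on the setExceptionResults helper, whose body is not shown in this example. A plausible minimal sketch of what it does, assuming buildException above is the static import of ResponseConverter.buildException; this is an assumption, not the actual HBase helper:

import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;

// Sketch only: fill every slot of the response with the same failure, so the
// client sees one serialized exception per requested label.
static void setExceptionResults(int size, Throwable t,
        VisibilityLabelsResponse.Builder response) {
    RegionActionResult failure = RegionActionResult.newBuilder()
        .setException(ResponseConverter.buildException(t))
        .build();
    for (int i = 0; i < size; i++) {
        response.addResult(failure);
    }
}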

Example 43 with DoNotRetryIOException

Use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

From class VisibilityController, method clearAuths.

@Override
public synchronized void clearAuths(RpcController controller, SetAuthsRequest request, RpcCallback<VisibilityLabelsResponse> done) {
    VisibilityLabelsResponse.Builder response = VisibilityLabelsResponse.newBuilder();
    List<ByteString> auths = request.getAuthList();
    if (!initialized) {
        setExceptionResults(auths.size(), new CoprocessorException("VisibilityController not yet initialized"), response);
    } else {
        byte[] requestUser = request.getUser().toByteArray();
        List<byte[]> labelAuths = new ArrayList<>(auths.size());
        try {
            // When AC is ON, do AC based user auth check
            if (authorizationEnabled && accessControllerAvailable && !isSystemOrSuperUser()) {
                User user = VisibilityUtils.getActiveUser();
                throw new AccessDeniedException("User '" + (user != null ? user.getShortName() : "null") + "' is not authorized to perform this action.");
            }
            if (authorizationEnabled) {
                // When AC is not in place, the calling user should have
                // SYSTEM_LABEL auth to perform this action.
                checkCallingUserAuth();
            }
            for (ByteString authBS : auths) {
                labelAuths.add(authBS.toByteArray());
            }
            OperationStatus[] opStatus = this.visibilityLabelService.clearAuths(requestUser, labelAuths);
            logResult(true, "clearAuths", "Removing authorization for labels allowed", requestUser, labelAuths, null);
            RegionActionResult successResult = RegionActionResult.newBuilder().build();
            for (OperationStatus status : opStatus) {
                if (status.getOperationStatusCode() == SUCCESS) {
                    response.addResult(successResult);
                } else {
                    RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder();
                    failureResultBuilder.setException(buildException(new DoNotRetryIOException(status.getExceptionMsg())));
                    response.addResult(failureResultBuilder.build());
                }
            }
        } catch (AccessDeniedException e) {
            logResult(false, "clearAuths", e.getMessage(), requestUser, labelAuths, null);
            LOG.error("User is not having required permissions to clear authorization", e);
            setExceptionResults(auths.size(), e, response);
        } catch (IOException e) {
            LOG.error(e);
            setExceptionResults(auths.size(), e, response);
        }
    }
    done.run(response.build());
}
Also used: AccessDeniedException (org.apache.hadoop.hbase.security.AccessDeniedException), User (org.apache.hadoop.hbase.security.User), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), ByteString (com.google.protobuf.ByteString), ArrayList (java.util.ArrayList), RegionActionResult (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult), IOException (java.io.IOException), OperationStatus (org.apache.hadoop.hbase.regionserver.OperationStatus), VisibilityLabelsResponse (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse), CoprocessorException (org.apache.hadoop.hbase.coprocessor.CoprocessorException)
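
On the wire, each failed slot of the response carries that serialized DoNotRetryIOException back to the caller. A hedged client-side sketch, assuming the HBase 1.x VisibilityClient.clearAuths(Connection, String[], String) API; the auth labels and the user name are hypothetical:

import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
import org.apache.hadoop.hbase.security.visibility.VisibilityClient;

// Remove two auths from user "bob" (names are made up) and inspect the
// per-auth results; failed slots carry the DoNotRetryIOException built
// by clearAuths above as a serialized exception.
static void clearBobsAuths(Connection connection) throws Throwable {
    VisibilityLabelsResponse resp =
        VisibilityClient.clearAuths(connection, new String[] { "secret", "topsecret" }, "bob");
    for (RegionActionResult r : resp.getResultList()) {
        if (r.hasException()) {
            System.out.println("clearAuths failed: " + r.getException().getName());
        }
    }
}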

Example 44 with DoNotRetryIOException

Use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

From class AccessController, method preAppend.

@Override
public Result preAppend(ObserverContext<RegionCoprocessorEnvironment> c, Append append) throws IOException {
    User user = getActiveUser(c);
    checkForReservedTagPresence(user, append);
    // Require WRITE permission to the table, CF, and the KV to be appended
    RegionCoprocessorEnvironment env = c.getEnvironment();
    Map<byte[], ? extends Collection<Cell>> families = append.getFamilyCellMap();
    AuthResult authResult = permissionGranted(OpType.APPEND, user, env, families, Action.WRITE);
    logResult(authResult);
    if (!authResult.isAllowed()) {
        if (cellFeaturesEnabled && !compatibleEarlyTermination) {
            append.setAttribute(CHECK_COVERING_PERM, TRUE);
        } else if (authorizationEnabled) {
            throw new AccessDeniedException("Insufficient permissions " + authResult.toContextString());
        }
    }
    byte[] bytes = append.getAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL);
    if (bytes != null) {
        if (cellFeaturesEnabled) {
            addCellPermissions(bytes, append.getFamilyCellMap());
        } else {
            throw new DoNotRetryIOException("Cell ACLs cannot be persisted");
        }
    }
    return null;
}
Also used: RegionCoprocessorEnvironment (org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment), AccessDeniedException (org.apache.hadoop.hbase.security.AccessDeniedException), User (org.apache.hadoop.hbase.security.User), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), Cell (org.apache.hadoop.hbase.Cell)
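
The same fail-fast contract applies to the last branch: a client that sets a cell ACL on an Append against a cluster without cell features enabled gets the "Cell ACLs cannot be persisted" error once and is not retried. A minimal sketch under the same assumptions as the checkAndDelete example above, with hypothetical names:

import java.io.IOException;

import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.util.Bytes;

static void appendWithCellAcl(Table table) throws IOException {
    Append append = new Append(Bytes.toBytes("row1")); // hypothetical row
    append.add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("-suffix"));
    // Carried in the same ACL operation attribute that preAppend inspects.
    append.setACL("alice", new Permission(Permission.Action.READ, Permission.Action.WRITE));
    try {
        table.append(append);
    } catch (DoNotRetryIOException e) {
        // Reached when cellFeaturesEnabled is false on the server.
    }
}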

Example 45 with DoNotRetryIOException

Use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

From class BlockingRpcConnection, method setupIOstreams.

private void setupIOstreams() throws IOException {
    if (socket != null) {
        // The connection is already available. Perfect.
        return;
    }
    if (this.rpcClient.failedServers.isFailedServer(remoteId.getAddress())) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Not trying to connect to " + remoteId.address + " this server is in the failed servers list");
        }
        throw new FailedServerException("This server is in the failed servers list: " + remoteId.address);
    }
    try {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Connecting to " + remoteId.address);
        }
        short numRetries = 0;
        final short MAX_RETRIES = 5;
        while (true) {
            setupConnection();
            InputStream inStream = NetUtils.getInputStream(socket);
            // This creates a socket with a write timeout. This timeout cannot be changed.
            OutputStream outStream = NetUtils.getOutputStream(socket, this.rpcClient.writeTO);
            // Write out the preamble -- MAGIC, version, and auth to use.
            writeConnectionHeaderPreamble(outStream);
            if (useSasl) {
                final InputStream in2 = inStream;
                final OutputStream out2 = outStream;
                UserGroupInformation ticket = getUGI();
                boolean continueSasl;
                if (ticket == null) {
                    throw new FatalConnectionException("ticket/user is null");
                }
                try {
                    continueSasl = ticket.doAs(new PrivilegedExceptionAction<Boolean>() {

                        @Override
                        public Boolean run() throws IOException {
                            return setupSaslConnection(in2, out2);
                        }
                    });
                } catch (Exception ex) {
                    ExceptionUtil.rethrowIfInterrupt(ex);
                    handleSaslConnectionFailure(numRetries++, MAX_RETRIES, ex, ticket);
                    continue;
                }
                if (continueSasl) {
                    // Sasl connect is successful. Let's set up Sasl i/o streams.
                    inStream = saslRpcClient.getInputStream();
                    outStream = saslRpcClient.getOutputStream();
                } else {
                    // Fall back to simple auth because the server told us so.
                    // Do not change authMethod and useSasl here; we should start from
                    // secure when reconnecting because the regionserver may change its
                    // sasl config after restart.
                }
            }
            this.in = new DataInputStream(new BufferedInputStream(inStream));
            this.out = new DataOutputStream(new BufferedOutputStream(outStream));
            // Now write out the connection header
            writeConnectionHeader();
            // process the response from server for connection header if necessary
            processResponseForConnectionHeader();
            break;
        }
    } catch (Throwable t) {
        closeSocket();
        IOException e = ExceptionUtil.asInterrupt(t);
        if (e == null) {
            this.rpcClient.failedServers.addToFailedServers(remoteId.address);
            if (t instanceof LinkageError) {
                // probably the hbase hadoop version does not match the running hadoop version
                e = new DoNotRetryIOException(t);
            } else if (t instanceof IOException) {
                e = (IOException) t;
            } else {
                e = new IOException("Could not set up IO Streams to " + remoteId.address, t);
            }
        }
        throw e;
    }
    // start the receiver thread after the socket connection has been set up
    thread = new Thread(this, threadName);
    thread.setDaemon(true);
    thread.start();
}
Also used: DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), BufferedInputStream (java.io.BufferedInputStream), DataInputStream (java.io.DataInputStream), InputStream (java.io.InputStream), DataOutputStream (java.io.DataOutputStream), BufferedOutputStream (java.io.BufferedOutputStream), OutputStream (java.io.OutputStream), ByteArrayOutputStream (org.apache.hadoop.hbase.io.ByteArrayOutputStream), PrivilegedExceptionAction (java.security.PrivilegedExceptionAction), InterruptedIOException (java.io.InterruptedIOException), IOException (java.io.IOException), SaslException (javax.security.sasl.SaslException), ConnectionClosingException (org.apache.hadoop.hbase.exceptions.ConnectionClosingException), IPCUtil.isFatalConnectionException (org.apache.hadoop.hbase.ipc.IPCUtil.isFatalConnectionException), IPCUtil.createRemoteException (org.apache.hadoop.hbase.ipc.IPCUtil.createRemoteException), SocketTimeoutException (java.net.SocketTimeoutException), UnknownHostException (java.net.UnknownHostException), RemoteException (org.apache.hadoop.ipc.RemoteException), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
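
Wrapping the LinkageError in a DoNotRetryIOException is what stops the client's retry machinery from hammering a server it can never talk to: a Hadoop version mismatch will not fix itself on the next attempt. A simplified sketch of that retry contract (an illustration, not HBase's actual RpcRetryingCaller), relying only on the fact that DoNotRetryIOException is a subclass of IOException:

import java.io.IOException;
import java.util.concurrent.Callable;

import org.apache.hadoop.hbase.DoNotRetryIOException;

static <T> T callWithRetries(Callable<T> call, int maxAttempts) throws IOException {
    IOException last = null;
    for (int attempt = 0; attempt < maxAttempts; attempt++) {
        try {
            return call.call();
        } catch (DoNotRetryIOException e) {
            throw e;  // permanent: give up immediately
        } catch (IOException e) {
            last = e; // possibly transient: try again
        } catch (Exception e) {
            throw new IOException(e);
        }
    }
    throw last != null ? last : new IOException("no attempts were made");
}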

Aggregations

DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 77 uses
IOException (java.io.IOException): 28 uses
Cell (org.apache.hadoop.hbase.Cell): 18 uses
ArrayList (java.util.ArrayList): 12 uses
ServiceException (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException): 12 uses
MutationType (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType): 12 uses
TableName (org.apache.hadoop.hbase.TableName): 11 uses
InterruptedIOException (java.io.InterruptedIOException): 10 uses
HBaseIOException (org.apache.hadoop.hbase.HBaseIOException): 10 uses
Delete (org.apache.hadoop.hbase.client.Delete): 10 uses
Put (org.apache.hadoop.hbase.client.Put): 10 uses
Test (org.junit.Test): 10 uses
AccessDeniedException (org.apache.hadoop.hbase.security.AccessDeniedException): 9 uses
User (org.apache.hadoop.hbase.security.User): 8 uses
Mutation (org.apache.hadoop.hbase.client.Mutation): 7 uses
ByteString (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString): 7 uses
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 6 uses
NameBytesPair (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair): 6 uses
ByteBufferCell (org.apache.hadoop.hbase.ByteBufferCell): 5 uses
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 5 uses