
Example 1 with CodedInputStream

use of org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream in project hbase by apache.

The class PBCell, method skip().

@Override
public int skip(PositionedByteRange src) {
    CellMessage.Cell.Builder builder = CellMessage.Cell.newBuilder();
    CodedInputStream is = inputStreamFromByteRange(src);
    is.setSizeLimit(src.getLength());
    try {
        builder.mergeFrom(is);
        int consumed = is.getTotalBytesRead();
        src.setPosition(src.getPosition() + consumed);
        return consumed;
    } catch (IOException e) {
        throw new RuntimeException("Error while skipping type.", e);
    }
}
Also used : CodedInputStream(org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream) IOException(java.io.IOException)
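
The pattern above is small enough to lift out: bound the stream to the range with setSizeLimit, mergeFrom into a throwaway builder, then use getTotalBytesRead() to advance the caller's position. A minimal sketch over a plain byte[] slice, reusing this example's CellMessage.Cell; the helper name skipCell is made up for illustration.

import java.io.IOException;
import org.apache.hadoop.hbase.example.protobuf.generated.CellMessage;
import org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream;

// Skip the encoded Cell occupying buf[offset, offset + length) and report how many
// bytes were consumed, mirroring PBCell.skip above. (Hypothetical helper, not HBase API.)
static int skipCell(byte[] buf, int offset, int length) throws IOException {
    CodedInputStream is = CodedInputStream.newInstance(buf, offset, length);
    is.setSizeLimit(length);                     // never read past the slice
    CellMessage.Cell.newBuilder().mergeFrom(is); // parse and discard the message
    return is.getTotalBytesRead();               // caller adds this to its position
}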

Example 2 with CodedInputStream

use of org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream in project hbase by apache.

The class PBCell, method decode().

@Override
public CellMessage.Cell decode(PositionedByteRange src) {
    CellMessage.Cell.Builder builder = CellMessage.Cell.newBuilder();
    CodedInputStream is = inputStreamFromByteRange(src);
    is.setSizeLimit(src.getLength());
    try {
        CellMessage.Cell ret = builder.mergeFrom(is).build();
        src.setPosition(src.getPosition() + is.getTotalBytesRead());
        return ret;
    } catch (IOException e) {
        throw new RuntimeException("Error while decoding type.", e);
    }
}
Also used : CellMessage(org.apache.hadoop.hbase.example.protobuf.generated.CellMessage) CodedInputStream(org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream) IOException(java.io.IOException)
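
decode follows the same recipe but keeps the parsed message. A hedged round-trip sketch of those steps, written as bare statements that assume a surrounding method declaring IOException; the Cell row field is an assumption taken from hbase-examples' CellMessage.proto and does not appear in the code above.

import org.apache.hadoop.hbase.example.protobuf.generated.CellMessage;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
import org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream;

// Serialize one Cell, then decode it back. mergeFrom reads to the end of the stream,
// so the range handed to decode is expected to hold exactly one encoded message.
byte[] encoded = CellMessage.Cell.newBuilder()
    .setRow(ByteString.copyFromUtf8("row-1")).build().toByteArray();

CodedInputStream is = CodedInputStream.newInstance(encoded);
is.setSizeLimit(encoded.length);
CellMessage.Cell decoded = CellMessage.Cell.newBuilder().mergeFrom(is).build();
int consumed = is.getTotalBytesRead(); // equals encoded.length; decode adds this to src's position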

Example 3 with CodedInputStream

use of org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream in project hbase by apache.

The class ServerRpcConnection, method processConnectionHeader().

// Reads the connection header following version
private void processConnectionHeader(ByteBuff buf) throws IOException {
    if (buf.hasArray()) {
        this.connectionHeader = ConnectionHeader.parseFrom(buf.array());
    } else {
        CodedInputStream cis = UnsafeByteOperations.unsafeWrap(new ByteBuffByteInput(buf, 0, buf.limit()), 0, buf.limit()).newCodedInput();
        cis.enableAliasing(true);
        this.connectionHeader = ConnectionHeader.parseFrom(cis);
    }
    String serviceName = connectionHeader.getServiceName();
    if (serviceName == null)
        throw new EmptyServiceNameException();
    this.service = RpcServer.getService(this.rpcServer.services, serviceName);
    if (this.service == null)
        throw new UnknownServiceException(serviceName);
    setupCellBlockCodecs(this.connectionHeader);
    RPCProtos.ConnectionHeaderResponse.Builder chrBuilder = RPCProtos.ConnectionHeaderResponse.newBuilder();
    setupCryptoCipher(this.connectionHeader, chrBuilder);
    responseConnectionHeader(chrBuilder);
    UserGroupInformation protocolUser = createUser(connectionHeader);
    if (!useSasl) {
        ugi = protocolUser;
        if (ugi != null) {
            ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
        }
        // audit logging for SASL authenticated users happens in saslReadAndProcess()
        if (authenticatedWithFallback) {
            RpcServer.LOG.warn("Allowed fallback to SIMPLE auth for {} connecting from {}", ugi, getHostAddress());
        }
    } else {
        // user is authenticated
        ugi.setAuthenticationMethod(provider.getSaslAuthMethod().getAuthMethod());
        // this is not allowed if user authenticated with DIGEST.
        if ((protocolUser != null) && (!protocolUser.getUserName().equals(ugi.getUserName()))) {
            if (!provider.supportsProtocolAuthentication()) {
                // Not allowed to doAs if token authentication is used
                throw new AccessDeniedException("Authenticated user (" + ugi + ") doesn't match what the client claims to be (" + protocolUser + ")");
            } else {
                // Effective user can be different from authenticated user
                // for simple auth or kerberos auth
                // The user is the real user. Now we create a proxy user
                UserGroupInformation realUser = ugi;
                ugi = UserGroupInformation.createProxyUser(protocolUser.getUserName(), realUser);
                // Now the user is a proxy user, set Authentication method Proxy.
                ugi.setAuthenticationMethod(AuthenticationMethod.PROXY);
            }
        }
    }
    String version;
    if (this.connectionHeader.hasVersionInfo()) {
        // see if this connection will support RetryImmediatelyException
        this.retryImmediatelySupported = VersionInfoUtil.hasMinimumVersion(getVersionInfo(), 1, 2);
        version = this.connectionHeader.getVersionInfo().getVersion();
    } else {
        version = "UNKNOWN";
    }
    RpcServer.AUDITLOG.info("Connection from {}:{}, version={}, sasl={}, ugi={}, service={}", this.hostAddress, this.remotePort, version, this.useSasl, this.ugi, serviceName);
}
Also used : AccessDeniedException(org.apache.hadoop.hbase.security.AccessDeniedException) CodedInputStream(org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
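
The else branch above avoids copying an off-heap buffer into a byte[] by wrapping it as a ByteString, opening a CodedInputStream over it, and enabling aliasing so parsed fields can reference the underlying bytes. A reduced sketch of that pattern over a plain java.nio.ByteBuffer, written as bare statements; the ByteBuffByteInput adapter used by the server is omitted, so treat this as an illustration of the aliasing idea rather than the server's actual read path.

import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
import org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;

// A serialized ConnectionHeader sitting in a ByteBuffer (could equally be a direct buffer).
ByteBuffer headerBuf = ByteBuffer.wrap(
    ConnectionHeader.newBuilder().setServiceName("ClientService").build().toByteArray());

// unsafeWrap exposes the buffer as a ByteString without copying it; enableAliasing lets
// parsed bytes/string fields point into the wrapped buffer instead of copying them out.
CodedInputStream cis = UnsafeByteOperations.unsafeWrap(headerBuf).newCodedInput();
cis.enableAliasing(true);
ConnectionHeader header = ConnectionHeader.parseFrom(cis);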

Example 4 with CodedInputStream

use of org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream in project hbase by apache.

The class SimpleServerRpcConnection, method readAndProcess().

/**
 * Read off the wire. If there is not enough data to read, update the connection state with what
 * we have so far and return.
 * @return -1 on failure (the caller will close the connection), else zero or more bytes read.
 * @throws IOException
 * @throws InterruptedException
 */
public int readAndProcess() throws IOException, InterruptedException {
    // If we have not read the connection setup preamble, look to see if that is on the wire.
    if (!connectionPreambleRead) {
        int count = readPreamble();
        if (!connectionPreambleRead) {
            return count;
        }
    }
    // Try and read in an int. It will be the length of the data to read (or -1 if a ping). We read
    // the integer length into the 4-byte this.dataLengthBuffer.
    int count = read4Bytes();
    if (count < 0 || dataLengthBuffer.remaining() > 0) {
        return count;
    }
    // We have read the length; what follows is either the connection header or a request.
    if (data == null) {
        dataLengthBuffer.flip();
        int dataLength = dataLengthBuffer.getInt();
        if (dataLength == RpcClient.PING_CALL_ID) {
            if (!useWrap) {
                // covers the !useSasl too
                dataLengthBuffer.clear();
                // ping message
                return 0;
            }
        }
        if (dataLength < 0) {
            // A data length of zero is legal.
            throw new DoNotRetryIOException("Unexpected data length " + dataLength + "!! from " + getHostAddress());
        }
        if (dataLength > this.rpcServer.maxRequestSize) {
            String msg = "RPC data length of " + dataLength + " received from " + getHostAddress() + " is greater than max allowed " + this.rpcServer.maxRequestSize + ". Set \"" + SimpleRpcServer.MAX_REQUEST_SIZE + "\" on server to override this limit (not recommended)";
            SimpleRpcServer.LOG.warn(msg);
            if (connectionHeaderRead && connectionPreambleRead) {
                incRpcCount();
                // Construct InputStream for the non-blocking SocketChannel
                // We need the InputStream because we want to read only the request header
                // instead of the whole rpc.
                ByteBuffer buf = ByteBuffer.allocate(1);
                InputStream is = new InputStream() {

                    @Override
                    public int read() throws IOException {
                        SimpleServerRpcConnection.this.rpcServer.channelRead(channel, buf);
                        buf.flip();
                        int x = buf.get();
                        buf.flip();
                        return x;
                    }
                };
                CodedInputStream cis = CodedInputStream.newInstance(is);
                int headerSize = cis.readRawVarint32();
                Message.Builder builder = RequestHeader.newBuilder();
                ProtobufUtil.mergeFrom(builder, cis, headerSize);
                RequestHeader header = (RequestHeader) builder.build();
                // Notify the client about the offending request
                SimpleServerCall reqTooBig = new SimpleServerCall(header.getCallId(), this.service, null, null, null, null, this, 0, this.addr, EnvironmentEdgeManager.currentTime(), 0, this.rpcServer.bbAllocator, this.rpcServer.cellBlockBuilder, null, responder);
                RequestTooBigException reqTooBigEx = new RequestTooBigException(msg);
                this.rpcServer.metrics.exception(reqTooBigEx);
                // If the client version understands RequestTooBigException, send it;
                // otherwise, fall back to a DoNotRetryIOException.
                if (VersionInfoUtil.hasMinimumVersion(connectionHeader.getVersionInfo(), RequestTooBigException.MAJOR_VERSION, RequestTooBigException.MINOR_VERSION)) {
                    reqTooBig.setResponse(null, null, reqTooBigEx, msg);
                } else {
                    reqTooBig.setResponse(null, null, new DoNotRetryIOException(msg), msg);
                }
                // In most cases we will write out the response directly. If not, it is still OK to
                // just close the connection without writing out the reqTooBig response: do not try
                // to write out directly here, as that can cause a deserialization error on the
                // client if the connection is slow and we have a half-written response in the queue.
                reqTooBig.sendResponseIfReady();
            }
            // Close the connection
            return -1;
        }
        // Initialize this.data with a ByteBuff.
        // This call will allocate a ByteBuff to read request into and assign to this.data
    // Also, when we use buffer(s) from the pool, it will create a CallCleanup instance and
    // assign it to this.callCleanup
        initByteBuffToReadInto(dataLength);
        // Increment the rpc count. This counter will be decreased when we write
        // the response. If we want the connection to be detected as idle properly, we
        // need to keep the inc / dec correct.
        incRpcCount();
    }
    count = channelDataRead(channel, data);
    if (count >= 0 && data.remaining() == 0) {
        // count==0 if dataLength == 0
        process();
    }
    return count;
}
Also used : Message(org.apache.hbase.thirdparty.com.google.protobuf.Message) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) CodedInputStream(org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream) InputStream(java.io.InputStream) RequestTooBigException(org.apache.hadoop.hbase.exceptions.RequestTooBigException) RequestHeader(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader) ByteBuffer(java.nio.ByteBuffer)
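
The too-big-request branch only needs the RequestHeader, so it reads a varint length from the channel and then parses exactly that many header bytes. A stand-alone sketch of that delimited-read shape against a plain InputStream; pushLimit/popLimit are used here in place of ProtobufUtil.mergeFrom(builder, cis, headerSize), an equivalent but assumed substitution.

import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
import org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream;

// Read a varint length prefix, then exactly that many bytes as a RequestHeader.
static RequestHeader readDelimitedHeader(InputStream in) throws IOException {
    CodedInputStream cis = CodedInputStream.newInstance(in);
    int headerSize = cis.readRawVarint32();   // length prefix written by the client
    int oldLimit = cis.pushLimit(headerSize); // confine parsing to the header bytes
    RequestHeader header = RequestHeader.newBuilder().mergeFrom(cis).build();
    cis.popLimit(oldLimit);                   // restore, in case more data follows on the stream
    return header;
}

Bytes produced with header.writeDelimitedTo(out) (varint size followed by the message) parse back through this method.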

Example 5 with CodedInputStream

use of org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream in project hbase by apache.

The class ProtobufUtil, method mergeFrom().

/**
 * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
 * buffers when working with byte arrays.
 * @param builder current message builder
 * @param b byte array holding the serialized message
 * @param offset offset in b at which the serialized message starts
 * @param length number of bytes to decode
 * @throws IOException
 */
public static void mergeFrom(Message.Builder builder, byte[] b, int offset, int length) throws IOException {
    final CodedInputStream codedInput = CodedInputStream.newInstance(b, offset, length);
    codedInput.setSizeLimit(length);
    builder.mergeFrom(codedInput);
    codedInput.checkLastTagWas(0);
}
Also used : CodedInputStream(org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream)
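
A hedged usage sketch for this helper, assuming the shaded org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil variant and reusing RPCProtos.ConnectionHeader as the message type; written as bare statements inside some method that declares IOException. The point is that parsing is bounded by the slice length, so bytes after the message in the larger frame are never read.

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;

// A serialized message occupying only a slice of a larger frame.
byte[] payload = ConnectionHeader.newBuilder().setServiceName("ClientService").build().toByteArray();
byte[] frame = new byte[payload.length + 8];
System.arraycopy(payload, 0, frame, 4, payload.length); // message starts at offset 4

ConnectionHeader.Builder builder = ConnectionHeader.newBuilder();
ProtobufUtil.mergeFrom(builder, frame, 4, payload.length); // size limit == slice length
ConnectionHeader header = builder.build();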

Aggregations

CodedInputStream (org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream): 13
IOException (java.io.IOException): 3
InputStream (java.io.InputStream): 2
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 2
Path (org.apache.hadoop.fs.Path): 2
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 2
RequestHeader (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader): 2
ByteString (org.apache.hbase.thirdparty.com.google.protobuf.ByteString): 2
InvalidProtocolBufferException (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException): 2
Message (org.apache.hbase.thirdparty.com.google.protobuf.Message): 2
Span (io.opentelemetry.api.trace.Span): 1
Context (io.opentelemetry.context.Context): 1
Scope (io.opentelemetry.context.Scope): 1
TextMapGetter (io.opentelemetry.context.propagation.TextMapGetter): 1
FileNotFoundException (java.io.FileNotFoundException): 1
InterruptedIOException (java.io.InterruptedIOException): 1
InetSocketAddress (java.net.InetSocketAddress): 1
ByteBuffer (java.nio.ByteBuffer): 1
ArrayList (java.util.ArrayList): 1
ExecutionException (java.util.concurrent.ExecutionException): 1