Example 41 with CellScanner

Use of org.apache.hadoop.hbase.CellScanner in project hbase by apache.

In class NettyRpcDuplexHandler, the method readResponse:

private void readResponse(ChannelHandlerContext ctx, ByteBuf buf) throws IOException {
    int totalSize = buf.readInt();
    ByteBufInputStream in = new ByteBufInputStream(buf);
    ResponseHeader responseHeader = ResponseHeader.parseDelimitedFrom(in);
    int id = responseHeader.getCallId();
    if (LOG.isTraceEnabled()) {
        LOG.trace("got response header " + TextFormat.shortDebugString(responseHeader) + ", totalSize: " + totalSize + " bytes");
    }
    RemoteException remoteExc;
    if (responseHeader.hasException()) {
        ExceptionResponse exceptionResponse = responseHeader.getException();
        remoteExc = IPCUtil.createRemoteException(exceptionResponse);
        if (IPCUtil.isFatalConnectionException(exceptionResponse)) {
            // Here we will clean up all calls, so there is no need to fall through; just return.
            exceptionCaught(ctx, remoteExc);
            return;
        }
    } else {
        remoteExc = null;
    }
    Call call = id2Call.remove(id);
    if (call == null) {
        // So we got a response for which we have no corresponding 'call' here on the client side.
        // We probably timed out waiting, cleaned up all references, and now the server decides
        // to return a response. There is nothing we can do with the response at this stage. Clean
        // the response off the wire so it is out of the way and we can receive other responses on
        // this connection.
        int readSoFar = IPCUtil.getTotalSizeWhenWrittenDelimited(responseHeader);
        int whatIsLeftToRead = totalSize - readSoFar;
        if (LOG.isDebugEnabled()) {
            LOG.debug("Unknown callId: " + id + ", skipping over this response of " + whatIsLeftToRead + " bytes");
        }
        return;
    }
    if (remoteExc != null) {
        call.setException(remoteExc);
        return;
    }
    Message value;
    if (call.responseDefaultType != null) {
        Builder builder = call.responseDefaultType.newBuilderForType();
        builder.mergeDelimitedFrom(in);
        value = builder.build();
    } else {
        value = null;
    }
    CellScanner cellBlockScanner;
    if (responseHeader.hasCellBlockMeta()) {
        int size = responseHeader.getCellBlockMeta().getLength();
        // Maybe we could read directly from the ByteBuf.
        // The problem here is that we do not know when to release it.
        byte[] cellBlock = new byte[size];
        buf.readBytes(cellBlock);
        cellBlockScanner = cellBlockBuilder.createCellScanner(this.codec, this.compressor, cellBlock);
    } else {
        cellBlockScanner = null;
    }
    call.setResponse(value, cellBlockScanner);
}
Also used: ResponseHeader (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader), ExceptionResponse (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse), Message (org.apache.hadoop.hbase.shaded.com.google.protobuf.Message), Builder (org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder), ByteBufInputStream (io.netty.buffer.ByteBufInputStream), RemoteException (org.apache.hadoop.ipc.RemoteException), CellScanner (org.apache.hadoop.hbase.CellScanner)
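For context, the CellScanner handed to call.setResponse above is consumed by the caller through the advance()/current() protocol. Below is a minimal, illustrative sketch of that consumption pattern; the class and the helper name drainCells are hypothetical and not part of HBase.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;

public final class CellScannerDrainExample {
    // Hypothetical helper: copy every Cell out of a CellScanner into a List.
    static List<Cell> drainCells(CellScanner scanner) throws IOException {
        List<Cell> cells = new ArrayList<>();
        if (scanner != null) {
            // advance() must be called before current(); it returns false once exhausted.
            while (scanner.advance()) {
                cells.add(scanner.current());
            }
        }
        return cells;
    }
}

Note that a CellScanner is forward-only; once drained it cannot be rewound, which is why the echo example below collects the cells into a list before building a fresh scanner from that list.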

Example 42 with CellScanner

Use of org.apache.hadoop.hbase.CellScanner in project hbase by apache.

In class TestProtobufRpcServiceImpl, the method echo:

@Override
public EchoResponseProto echo(RpcController controller, EchoRequestProto request) throws ServiceException {
    if (controller instanceof HBaseRpcController) {
        HBaseRpcController pcrc = (HBaseRpcController) controller;
        // If cells were passed, scan them to check that we can iterate what we were given; since
        // this is an echo, put them back on the controller by creating a new block. This exercises
        // our block building.
        CellScanner cellScanner = pcrc.cellScanner();
        List<Cell> list = null;
        if (cellScanner != null) {
            list = new ArrayList<>();
            try {
                while (cellScanner.advance()) {
                    list.add(cellScanner.current());
                }
            } catch (IOException e) {
                throw new ServiceException(e);
            }
        }
        cellScanner = CellUtil.createCellScanner(list);
        pcrc.setCellScanner(cellScanner);
    }
    return EchoResponseProto.newBuilder().setMessage(request.getMessage()).build();
}
Also used: ServiceException (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), IOException (java.io.IOException), CellScanner (org.apache.hadoop.hbase.CellScanner), Cell (org.apache.hadoop.hbase.Cell)

Example 43 with CellScanner

Use of org.apache.hadoop.hbase.CellScanner in project hbase by apache.

In class AbstractTestIPC, the method testCompressCellBlock:

/**
   * It is hard to verify that compression is actually happening under the wraps. The hope is that,
   * if compression is unsupported, we will eventually get an exception out of it (in the meantime,
   * one has to trace manually to confirm that compression is happening down in the client and the
   * server).
   */
@Test
public void testCompressCellBlock() throws IOException, ServiceException {
    Configuration conf = new Configuration(HBaseConfiguration.create());
    conf.set("hbase.client.rpc.compressor", GzipCodec.class.getCanonicalName());
    List<Cell> cells = new ArrayList<>();
    int count = 3;
    for (int i = 0; i < count; i++) {
        cells.add(CELL);
    }
    RpcServer rpcServer = RpcServerFactory.createRpcServer(null, "testRpcServer", Lists.newArrayList(new BlockingServiceAndInterface(SERVICE, null)), new InetSocketAddress("localhost", 0), CONF, new FifoRpcScheduler(CONF, 1));
    try (AbstractRpcClient<?> client = createRpcClient(conf)) {
        rpcServer.start();
        BlockingInterface stub = newBlockingStub(client, rpcServer.getListenerAddress());
        HBaseRpcController pcrc = new HBaseRpcControllerImpl(CellUtil.createCellScanner(cells));
        String message = "hello";
        assertEquals(message, stub.echo(pcrc, EchoRequestProto.newBuilder().setMessage(message).build()).getMessage());
        int index = 0;
        CellScanner cellScanner = pcrc.cellScanner();
        assertNotNull(cellScanner);
        while (cellScanner.advance()) {
            assertEquals(CELL, cellScanner.current());
            index++;
        }
        assertEquals(count, index);
    } finally {
        rpcServer.stop();
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), BlockingServiceAndInterface (org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface), InetSocketAddress (java.net.InetSocketAddress), GzipCodec (org.apache.hadoop.io.compress.GzipCodec), ArrayList (java.util.ArrayList), CellScanner (org.apache.hadoop.hbase.CellScanner), BlockingInterface (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface), Cell (org.apache.hadoop.hbase.Cell), Test (org.junit.Test)
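As a companion to the test above: both the cell-block codec and the compressor are plain Configuration entries. A minimal sketch, assuming the standard hbase.client.rpc.codec key with the stock KeyValueCodec; class names are spelled out as strings to keep the sketch self-contained, and the class name here is illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public final class RpcCompressionConfExample {
    static Configuration compressedClientConf() {
        Configuration conf = HBaseConfiguration.create();
        // Codec that encodes Cells into the cell block; KeyValueCodec is the usual choice.
        conf.set("hbase.client.rpc.codec", "org.apache.hadoop.hbase.codec.KeyValueCodec");
        // Hadoop compression codec applied to the encoded cell block, as in the test above.
        conf.set("hbase.client.rpc.compressor", "org.apache.hadoop.io.compress.GzipCodec");
        return conf;
    }
}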

Example 44 with CellScanner

Use of org.apache.hadoop.hbase.CellScanner in project hbase by apache.

In class TableSnapshotInputFormatTestBase, the method verifyRowFromMap:

protected static void verifyRowFromMap(ImmutableBytesWritable key, Result result) throws IOException {
    byte[] row = key.get();
    CellScanner scanner = result.cellScanner();
    while (scanner.advance()) {
        Cell cell = scanner.current();
        // Assert that all Cells in the Result share the row from the map key.
        Assert.assertEquals(0, Bytes.compareTo(row, 0, row.length, cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
    }
    for (int j = 0; j < FAMILIES.length; j++) {
        byte[] actual = result.getValue(FAMILIES[j], FAMILIES[j]);
        Assert.assertArrayEquals("Row in snapshot does not match, expected: " + Bytes.toString(row) + ", actual: " + Bytes.toString(actual), row, actual);
    }
}
Also used: CellScanner (org.apache.hadoop.hbase.CellScanner), Cell (org.apache.hadoop.hbase.Cell)
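Because a Result can be built directly from an in-memory list of Cells, a verifier like verifyRowFromMap can be exercised without a cluster. A minimal sketch, assuming Result.create(List<Cell>) and a KeyValue-backed cell; the class name and the row/family/qualifier/value literals are illustrative only.

import java.util.Arrays;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public final class ResultCellScannerExample {
    public static void main(String[] args) throws Exception {
        byte[] row = Bytes.toBytes("row1");
        Cell cell = new KeyValue(row, Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
        // Result.create wraps the list, so result.cellScanner() works entirely in memory.
        Result result = Result.create(Arrays.asList(cell));
        CellScanner scanner = result.cellScanner();
        while (scanner.advance()) {
            Cell current = scanner.current();
            // Same row comparison as in verifyRowFromMap above.
            if (Bytes.compareTo(row, 0, row.length,
                    current.getRowArray(), current.getRowOffset(), current.getRowLength()) != 0) {
                throw new AssertionError("row mismatch");
            }
        }
    }
}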

Example 45 with CellScanner

Use of org.apache.hadoop.hbase.CellScanner in project hbase by apache.

In class TestVisibilityLabelsWithDeletes, the method testDeleteFamilyLatestTimeStampWithMulipleVersionsWithoutCellVisibilityInPuts:

@Test
public void testDeleteFamilyLatestTimeStampWithMulipleVersionsWithoutCellVisibilityInPuts() throws Exception {
    setAuths();
    final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
    try (Table table = doPutsWithoutVisibility(tableName)) {
        PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                try (Connection connection = ConnectionFactory.createConnection(conf);
                    Table table = connection.getTable(tableName)) {
                    Delete d = new Delete(row1);
                    d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
                    d.addFamily(fam);
                    table.delete(d);
                } catch (Throwable t) {
                    throw new IOException(t);
                }
                return null;
            }
        };
        SUPERUSER.runAs(actiona);
        TEST_UTIL.getAdmin().flush(tableName);
        Scan s = new Scan();
        s.setMaxVersions(5);
        s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
        ResultScanner scanner = table.getScanner(s);
        Result[] next = scanner.next(3);
        assertEquals(2, next.length);
        CellScanner cellScanner = next[0].cellScanner();
        // row1 should surface five versions, newest first.
        for (long expectedTs : new long[] { 127L, 126L, 125L, 124L, 123L }) {
            cellScanner.advance();
            Cell current = cellScanner.current();
            assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row1, 0, row1.length));
            assertEquals(expectedTs, current.getTimestamp());
        }
        cellScanner = next[1].cellScanner();
        cellScanner.advance();
        Cell current = cellScanner.current();
        assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row2, 0, row2.length));
    }
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete), Table (org.apache.hadoop.hbase.client.Table), ResultScanner (org.apache.hadoop.hbase.client.ResultScanner), Connection (org.apache.hadoop.hbase.client.Connection), PrivilegedExceptionAction (java.security.PrivilegedExceptionAction), InterruptedIOException (java.io.InterruptedIOException), IOException (java.io.IOException), CellScanner (org.apache.hadoop.hbase.CellScanner), Result (org.apache.hadoop.hbase.client.Result), TableName (org.apache.hadoop.hbase.TableName), Scan (org.apache.hadoop.hbase.client.Scan), Cell (org.apache.hadoop.hbase.Cell), Test (org.junit.Test)

Aggregations

CellScanner (org.apache.hadoop.hbase.CellScanner): 82 usages
Cell (org.apache.hadoop.hbase.Cell): 69 usages
Test (org.junit.Test): 58 usages
Result (org.apache.hadoop.hbase.client.Result): 47 usages
Scan (org.apache.hadoop.hbase.client.Scan): 43 usages
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 42 usages
Table (org.apache.hadoop.hbase.client.Table): 41 usages
IOException (java.io.IOException): 40 usages
TableName (org.apache.hadoop.hbase.TableName): 40 usages
InterruptedIOException (java.io.InterruptedIOException): 35 usages
Connection (org.apache.hadoop.hbase.client.Connection): 34 usages
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 32 usages
Delete (org.apache.hadoop.hbase.client.Delete): 32 usages
Put (org.apache.hadoop.hbase.client.Put): 15 usages
KeyValue (org.apache.hadoop.hbase.KeyValue): 14 usages
ArrayList (java.util.ArrayList): 10 usages
RetriesExhaustedWithDetailsException (org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException): 7 usages
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 6 usages
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 6 usages
HBaseRpcController (org.apache.hadoop.hbase.ipc.HBaseRpcController): 5 usages