Example 1 with ColumnValue

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue in project hbase by apache.

From the class ProtobufUtil, method toGet.

/**
   * Convert a protocol buffer Mutate to a Get.
   * @param proto the protocol buffer Mutate to convert.
   * @param cellScanner if non-null, the Cell data that goes with this proto.
   * @return the converted client Get.
   * @throws IOException if the cell data promised by the proto cannot be read.
   */
public static Get toGet(final MutationProto proto, final CellScanner cellScanner) throws IOException {
    MutationType type = proto.getMutateType();
    assert type == MutationType.INCREMENT || type == MutationType.APPEND : type.name();
    byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null;
    Get get = null;
    int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
    if (cellCount > 0) {
        // The proto has metadata only and the data is separate to be found in the cellScanner.
        if (cellScanner == null) {
            throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + TextFormat.shortDebugString(proto));
        }
        for (int i = 0; i < cellCount; i++) {
            if (!cellScanner.advance()) {
                throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + " no cell returned: " + TextFormat.shortDebugString(proto));
            }
            Cell cell = cellScanner.current();
            if (get == null) {
                get = new Get(CellUtil.cloneRow(cell));
            }
            get.addColumn(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell));
        }
    } else {
        get = new Get(row);
        for (ColumnValue column : proto.getColumnValueList()) {
            byte[] family = column.getFamily().toByteArray();
            for (QualifierValue qv : column.getQualifierValueList()) {
                byte[] qualifier = qv.getQualifier().toByteArray();
                if (!qv.hasValue()) {
                    throw new DoNotRetryIOException("Missing required field: qualifier value");
                }
                get.addColumn(family, qualifier);
            }
        }
    }
    if (proto.hasTimeRange()) {
        TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
        get.setTimeRange(timeRange.getMin(), timeRange.getMax());
    }
    for (NameBytesPair attribute : proto.getAttributeList()) {
        get.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    return get;
}
Also used : TimeRange(org.apache.hadoop.hbase.io.TimeRange) MutationType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType) NameBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Get(org.apache.hadoop.hbase.client.Get) QualifierValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) ColumnValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) Cell(org.apache.hadoop.hbase.Cell) ByteBufferCell(org.apache.hadoop.hbase.ByteBufferCell)
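
For reference, here is a minimal sketch of driving toGet through its ColumnValue branch: it hand-builds an INCREMENT-style MutationProto whose cell data is carried inline as ColumnValue/QualifierValue entries and passes a null CellScanner, so the metadata-only branch is skipped. The class name ToGetSketch, the row/family/qualifier literals, and the thirdparty ByteString import (inferred from the shaded protobuf imports above) are illustrative assumptions, not part of the original example.

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;

public class ToGetSketch {
    public static void main(String[] args) throws Exception {
        // An INCREMENT-style proto carrying its cell data inline as a ColumnValue entry.
        MutationProto proto = MutationProto.newBuilder()
            .setRow(ByteString.copyFromUtf8("row-1"))
            .setMutateType(MutationType.INCREMENT)
            .addColumnValue(ColumnValue.newBuilder()
                .setFamily(ByteString.copyFromUtf8("cf"))
                .addQualifierValue(QualifierValue.newBuilder()
                    .setQualifier(ByteString.copyFromUtf8("counter"))
                    // toGet requires a value to be present, even though it only keeps coordinates.
                    .setValue(ByteString.copyFrom(new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 }))))
            .build();

        // associatedCellCount is unset, so toGet walks the ColumnValue list and
        // adds each family/qualifier pair to the resulting Get.
        Get get = ProtobufUtil.toGet(proto, null);
        System.out.println(get);
    }
}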

Example 2 with ColumnValue

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue in project hbase by apache.

From the class ProtobufUtil, method toDelta.

private static <T extends Mutation> T toDelta(Function<Bytes, T> supplier, ConsumerWithException<T, Cell> consumer, final MutationProto proto, final CellScanner cellScanner) throws IOException {
    byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null;
    T mutation = row == null ? null : supplier.apply(new Bytes(row));
    int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
    if (cellCount > 0) {
        // The proto has metadata only and the data is separate to be found in the cellScanner.
        if (cellScanner == null) {
            throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + toShortString(proto));
        }
        for (int i = 0; i < cellCount; i++) {
            if (!cellScanner.advance()) {
                throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + " no cell returned: " + toShortString(proto));
            }
            Cell cell = cellScanner.current();
            if (mutation == null) {
                mutation = supplier.apply(new Bytes(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
            }
            consumer.accept(mutation, cell);
        }
    } else {
        if (mutation == null) {
            throw new IllegalArgumentException("row cannot be null");
        }
        for (ColumnValue column : proto.getColumnValueList()) {
            byte[] family = column.getFamily().toByteArray();
            for (QualifierValue qv : column.getQualifierValueList()) {
                byte[] qualifier = qv.getQualifier().toByteArray();
                if (!qv.hasValue()) {
                    throw new DoNotRetryIOException("Missing required field: qualifier value");
                }
                byte[] value = qv.getValue().toByteArray();
                byte[] tags = null;
                if (qv.hasTags()) {
                    tags = qv.getTags().toByteArray();
                }
                consumer.accept(mutation, ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
                    .setRow(mutation.getRow())
                    .setFamily(family)
                    .setQualifier(qualifier)
                    .setTimestamp(cellTimestampOrLatest(qv))
                    .setType(KeyValue.Type.Put.getCode())
                    .setValue(value)
                    .setTags(tags)
                    .build());
            }
        }
    }
    mutation.setDurability(toDurability(proto.getDurability()));
    for (NameBytesPair attribute : proto.getAttributeList()) {
        mutation.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    return mutation;
}
Also used : Bytes(org.apache.hadoop.hbase.util.Bytes) NameBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) QualifierValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) ColumnValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) Cell(org.apache.hadoop.hbase.Cell) ByteBufferExtendedCell(org.apache.hadoop.hbase.ByteBufferExtendedCell)
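
toDelta itself is private; it backs the public Increment/Append converters. Below is a hedged round-trip sketch written under the assumption that this version of ProtobufUtil also exposes toMutation(MutationType, Mutation) and toIncrement(MutationProto, CellScanner), which is how the ColumnValue branch of toDelta is normally reached. The class name ToDeltaSketch and the row/column literals are made up for illustration.

import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.util.Bytes;

public class ToDeltaSketch {
    public static void main(String[] args) throws Exception {
        // A client-side Increment of cf:counter by 1.
        Increment increment = new Increment(Bytes.toBytes("row-1"))
            .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("counter"), 1L);

        // Serializing yields a MutationProto whose cell becomes a ColumnValue/QualifierValue pair.
        MutationProto proto = ProtobufUtil.toMutation(MutationType.INCREMENT, increment);

        // With no CellScanner supplied, the ColumnValue branch of toDelta rebuilds the cells.
        Increment roundTripped = ProtobufUtil.toIncrement(proto, null);
        System.out.println(roundTripped);
    }
}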

Example 3 with ColumnValue

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue in project hbase by apache.

From the class TestAsyncTableTracing, method setUp.

@Before
public void setUp() throws IOException {
    stub = mock(ClientService.Interface.class);
    AtomicInteger scanNextCalled = new AtomicInteger(0);
    doAnswer(new Answer<Void>() {

        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            ScanRequest req = invocation.getArgument(1);
            RpcCallback<ScanResponse> done = invocation.getArgument(2);
            if (!req.hasScannerId()) {
                done.run(ScanResponse.newBuilder().setScannerId(1).setTtl(800).setMoreResultsInRegion(true).setMoreResults(true).build());
            } else {
                if (req.hasCloseScanner() && req.getCloseScanner()) {
                    done.run(ScanResponse.getDefaultInstance());
                } else {
                    Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
                        .setType(Type.Put)
                        .setRow(Bytes.toBytes(scanNextCalled.incrementAndGet()))
                        .setFamily(Bytes.toBytes("cf"))
                        .setQualifier(Bytes.toBytes("cq"))
                        .setValue(Bytes.toBytes("v"))
                        .build();
                    Result result = Result.create(Arrays.asList(cell));
                    ScanResponse.Builder builder = ScanResponse.newBuilder().setScannerId(1).setTtl(800).addResults(ProtobufUtil.toResult(result));
                    if (req.getLimitOfRows() == 1) {
                        builder.setMoreResultsInRegion(false).setMoreResults(false);
                    } else {
                        builder.setMoreResultsInRegion(true).setMoreResults(true);
                    }
                    ForkJoinPool.commonPool().execute(() -> done.run(builder.build()));
                }
            }
            return null;
        }
    }).when(stub).scan(any(HBaseRpcController.class), any(ScanRequest.class), any());
    doAnswer(new Answer<Void>() {

        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            ClientProtos.MultiRequest req = invocation.getArgument(1);
            ClientProtos.MultiResponse.Builder builder = ClientProtos.MultiResponse.newBuilder();
            for (ClientProtos.RegionAction regionAction : req.getRegionActionList()) {
                RegionActionResult.Builder raBuilder = RegionActionResult.newBuilder();
                for (ClientProtos.Action ignored : regionAction.getActionList()) {
                    raBuilder.addResultOrException(ResultOrException.newBuilder().setResult(ProtobufUtil.toResult(new Result())));
                }
                builder.addRegionActionResult(raBuilder);
            }
            ClientProtos.MultiResponse resp = builder.build();
            RpcCallback<ClientProtos.MultiResponse> done = invocation.getArgument(2);
            ForkJoinPool.commonPool().execute(() -> done.run(resp));
            return null;
        }
    }).when(stub).multi(any(HBaseRpcController.class), any(ClientProtos.MultiRequest.class), any());
    doAnswer(new Answer<Void>() {

        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            MutationProto req = ((MutateRequest) invocation.getArgument(1)).getMutation();
            MutateResponse resp;
            switch(req.getMutateType()) {
                case INCREMENT:
                    ColumnValue value = req.getColumnValue(0);
                    QualifierValue qvalue = value.getQualifierValue(0);
                    Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
                        .setType(Type.Put)
                        .setRow(req.getRow().toByteArray())
                        .setFamily(value.getFamily().toByteArray())
                        .setQualifier(qvalue.getQualifier().toByteArray())
                        .setValue(qvalue.getValue().toByteArray())
                        .build();
                    resp = MutateResponse.newBuilder().setResult(ProtobufUtil.toResult(Result.create(Arrays.asList(cell)))).build();
                    break;
                default:
                    resp = MutateResponse.getDefaultInstance();
                    break;
            }
            RpcCallback<MutateResponse> done = invocation.getArgument(2);
            ForkJoinPool.commonPool().execute(() -> done.run(resp));
            return null;
        }
    }).when(stub).mutate(any(HBaseRpcController.class), any(MutateRequest.class), any());
    doAnswer(new Answer<Void>() {

        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            RpcCallback<GetResponse> done = invocation.getArgument(2);
            ForkJoinPool.commonPool().execute(() -> done.run(GetResponse.getDefaultInstance()));
            return null;
        }
    }).when(stub).get(any(HBaseRpcController.class), any(GetRequest.class), any());
    final User user = UserProvider.instantiate(CONF).getCurrent();
    conn = new AsyncConnectionImpl(CONF, new DoNothingConnectionRegistry(CONF), "test", null, user) {

        @Override
        AsyncRegionLocator getLocator() {
            AsyncRegionLocator locator = mock(AsyncRegionLocator.class);
            Answer<CompletableFuture<HRegionLocation>> answer = new Answer<CompletableFuture<HRegionLocation>>() {

                @Override
                public CompletableFuture<HRegionLocation> answer(InvocationOnMock invocation) throws Throwable {
                    TableName tableName = invocation.getArgument(0);
                    RegionInfo info = RegionInfoBuilder.newBuilder(tableName).build();
                    ServerName serverName = ServerName.valueOf("rs", 16010, 12345);
                    HRegionLocation loc = new HRegionLocation(info, serverName);
                    return CompletableFuture.completedFuture(loc);
                }
            };
            doAnswer(answer).when(locator).getRegionLocation(any(TableName.class), any(byte[].class), any(RegionLocateType.class), anyLong());
            doAnswer(answer).when(locator).getRegionLocation(any(TableName.class), any(byte[].class), anyInt(), any(RegionLocateType.class), anyLong());
            return locator;
        }

        @Override
        ClientService.Interface getRegionServerStub(ServerName serverName) throws IOException {
            return stub;
        }
    };
    table = conn.getTable(TableName.valueOf("table"), ForkJoinPool.commonPool());
}
Also used : User(org.apache.hadoop.hbase.security.User) MutationProto(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) RegionActionResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult) MutateResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse) CompletableFuture(java.util.concurrent.CompletableFuture) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) GetRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest) RpcCallback(org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback) Cell(org.apache.hadoop.hbase.Cell) QualifierValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) MutateRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest) IOException(java.io.IOException) ScanRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest) Mockito.doAnswer(org.mockito.Mockito.doAnswer) Answer(org.mockito.stubbing.Answer) TableName(org.apache.hadoop.hbase.TableName) HBaseRpcController(org.apache.hadoop.hbase.ipc.HBaseRpcController) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) InvocationOnMock(org.mockito.invocation.InvocationOnMock) ServerName(org.apache.hadoop.hbase.ServerName) ColumnValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) ClientProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos) Before(org.junit.Before)
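
The INCREMENT branch of the mocked stub above is the part that exercises ColumnValue: it reads the first ColumnValue/QualifierValue off the request proto and echoes them back as a Cell. The small helper below isolates that logic; the class and method names (EchoCellSketch, echoFirstColumn) are invented for this sketch, and it assumes, like the test, that the proto carries at least one column.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue;

public final class EchoCellSketch {

    private EchoCellSketch() {
    }

    // Same shape as the stub's INCREMENT case: first ColumnValue, first QualifierValue,
    // copied into a freshly built Put-type Cell.
    static Cell echoFirstColumn(MutationProto req) {
        ColumnValue value = req.getColumnValue(0);
        QualifierValue qvalue = value.getQualifierValue(0);
        return CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
            .setType(Cell.Type.Put)
            .setRow(req.getRow().toByteArray())
            .setFamily(value.getFamily().toByteArray())
            .setQualifier(qvalue.getQualifier().toByteArray())
            .setValue(qvalue.getValue().toByteArray())
            .build();
    }
}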

Example 4 with ColumnValue

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue in project hbase by apache.

From the class ProtobufUtil, method toPut.

/**
 * Convert a protocol buffer Mutate to a Put.
 *
 * @param proto The protocol buffer MutationProto to convert
 * @param cellScanner If non-null, the Cell data that goes with this proto.
 * @return A client Put.
 * @throws IOException if the cell data promised by the proto cannot be read.
 */
public static Put toPut(final MutationProto proto, final CellScanner cellScanner) throws IOException {
    // TODO: Server-side at least why do we convert back to the Client types?  Why not just pb it?
    MutationType type = proto.getMutateType();
    assert type == MutationType.PUT : type.name();
    long timestamp = proto.hasTimestamp() ? proto.getTimestamp() : HConstants.LATEST_TIMESTAMP;
    Put put = proto.hasRow() ? new Put(proto.getRow().toByteArray(), timestamp) : null;
    int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
    if (cellCount > 0) {
        // The proto has metadata only and the data is separate to be found in the cellScanner.
        if (cellScanner == null) {
            throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + toShortString(proto));
        }
        for (int i = 0; i < cellCount; i++) {
            if (!cellScanner.advance()) {
                throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + " no cell returned: " + toShortString(proto));
            }
            Cell cell = cellScanner.current();
            if (put == null) {
                put = new Put(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), timestamp);
            }
            put.add(cell);
        }
    } else {
        if (put == null) {
            throw new IllegalArgumentException("row cannot be null");
        }
        // The proto has the metadata and the data itself
        ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
        for (ColumnValue column : proto.getColumnValueList()) {
            byte[] family = column.getFamily().toByteArray();
            for (QualifierValue qv : column.getQualifierValueList()) {
                if (!qv.hasValue()) {
                    throw new DoNotRetryIOException("Missing required field: qualifier value");
                }
                long ts = timestamp;
                if (qv.hasTimestamp()) {
                    ts = qv.getTimestamp();
                }
                byte[] allTagsBytes;
                if (qv.hasTags()) {
                    allTagsBytes = qv.getTags().toByteArray();
                    if (qv.hasDeleteType()) {
                        put.add(cellBuilder.clear()
                            .setRow(proto.getRow().toByteArray())
                            .setFamily(family)
                            .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
                            .setTimestamp(ts)
                            .setType(fromDeleteType(qv.getDeleteType()).getCode())
                            .setTags(allTagsBytes)
                            .build());
                    } else {
                        put.add(cellBuilder.clear()
                            .setRow(put.getRow())
                            .setFamily(family)
                            .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
                            .setTimestamp(ts)
                            .setType(Cell.Type.Put)
                            .setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
                            .setTags(allTagsBytes)
                            .build());
                    }
                } else {
                    if (qv.hasDeleteType()) {
                        put.add(cellBuilder.clear()
                            .setRow(put.getRow())
                            .setFamily(family)
                            .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
                            .setTimestamp(ts)
                            .setType(fromDeleteType(qv.getDeleteType()).getCode())
                            .build());
                    } else {
                        put.add(cellBuilder.clear()
                            .setRow(put.getRow())
                            .setFamily(family)
                            .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
                            .setTimestamp(ts)
                            .setType(Type.Put)
                            .setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
                            .build());
                    }
                }
            }
        }
    }
    put.setDurability(toDurability(proto.getDurability()));
    for (NameBytesPair attribute : proto.getAttributeList()) {
        put.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    return put;
}
Also used : ExtendedCellBuilder(org.apache.hadoop.hbase.ExtendedCellBuilder) MutationType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType) NameBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) QualifierValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) ColumnValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) Cell(org.apache.hadoop.hbase.Cell) ByteBufferExtendedCell(org.apache.hadoop.hbase.ByteBufferExtendedCell) Put(org.apache.hadoop.hbase.client.Put)
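
As a quick illustration of the else branch (no associated cells), here is a hedged sketch that hand-builds a PUT-type MutationProto with one inline ColumnValue/QualifierValue and converts it with toPut and a null CellScanner. The class name ToPutSketch, the literals, and the thirdparty ByteString import are assumptions made for the sketch.

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;

public class ToPutSketch {
    public static void main(String[] args) throws Exception {
        MutationProto proto = MutationProto.newBuilder()
            .setRow(ByteString.copyFromUtf8("row-1"))
            .setMutateType(MutationType.PUT)
            .addColumnValue(ColumnValue.newBuilder()
                .setFamily(ByteString.copyFromUtf8("cf"))
                .addQualifierValue(QualifierValue.newBuilder()
                    .setQualifier(ByteString.copyFromUtf8("cq"))
                    .setValue(ByteString.copyFromUtf8("v"))
                    .setTimestamp(1234L)))
            .build();

        // No associatedCellCount, so toPut materializes cells straight from the
        // ColumnValue list; the per-qualifier timestamp wins over the proto default.
        Put put = ProtobufUtil.toPut(proto, null);
        System.out.println(put);
    }
}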

Example 5 with ColumnValue

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue in project hbase by apache.

From the class ProtobufUtil, method toDelete.

/**
 * Convert a protocol buffer Mutate to a Delete.
 *
 * @param proto the protocol buffer Mutate to convert
 * @param cellScanner if non-null, the data that goes with this delete.
 * @return the converted client Delete
 * @throws IOException if the cell data promised by the proto cannot be read.
 */
public static Delete toDelete(final MutationProto proto, final CellScanner cellScanner) throws IOException {
    MutationType type = proto.getMutateType();
    assert type == MutationType.DELETE : type.name();
    long timestamp = proto.hasTimestamp() ? proto.getTimestamp() : HConstants.LATEST_TIMESTAMP;
    Delete delete = proto.hasRow() ? new Delete(proto.getRow().toByteArray(), timestamp) : null;
    int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
    if (cellCount > 0) {
        // The proto has metadata only and the data is separate to be found in the cellScanner.
        if (cellScanner == null) {
            // TextFormat should be fine for a Delete since it carries no data, just coordinates.
            throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + TextFormat.shortDebugString(proto));
        }
        for (int i = 0; i < cellCount; i++) {
            if (!cellScanner.advance()) {
                // TextFormat should be fine for a Delete since it carries no data, just coordinates.
                throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + " no cell returned: " + TextFormat.shortDebugString(proto));
            }
            Cell cell = cellScanner.current();
            if (delete == null) {
                delete = new Delete(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), timestamp);
            }
            delete.add(cell);
        }
    } else {
        if (delete == null) {
            throw new IllegalArgumentException("row cannot be null");
        }
        for (ColumnValue column : proto.getColumnValueList()) {
            byte[] family = column.getFamily().toByteArray();
            for (QualifierValue qv : column.getQualifierValueList()) {
                DeleteType deleteType = qv.getDeleteType();
                byte[] qualifier = null;
                if (qv.hasQualifier()) {
                    qualifier = qv.getQualifier().toByteArray();
                }
                long ts = cellTimestampOrLatest(qv);
                if (deleteType == DeleteType.DELETE_ONE_VERSION) {
                    delete.addColumn(family, qualifier, ts);
                } else if (deleteType == DeleteType.DELETE_MULTIPLE_VERSIONS) {
                    delete.addColumns(family, qualifier, ts);
                } else if (deleteType == DeleteType.DELETE_FAMILY_VERSION) {
                    delete.addFamilyVersion(family, ts);
                } else {
                    delete.addFamily(family, ts);
                }
            }
        }
    }
    delete.setDurability(toDurability(proto.getDurability()));
    for (NameBytesPair attribute : proto.getAttributeList()) {
        delete.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    return delete;
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) MutationType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType) NameBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) QualifierValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) ColumnValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) DeleteType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType) Cell(org.apache.hadoop.hbase.Cell) ByteBufferExtendedCell(org.apache.hadoop.hbase.ByteBufferExtendedCell)
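
For completeness, a similar sketch for the Delete path: a DELETE-type MutationProto whose QualifierValue carries a DeleteType instead of a value, converted with a null CellScanner. DELETE_MULTIPLE_VERSIONS should map to Delete.addColumns in the loop above; the class name ToDeleteSketch, the literals, and the thirdparty ByteString import are illustrative assumptions.

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;

public class ToDeleteSketch {
    public static void main(String[] args) throws Exception {
        MutationProto proto = MutationProto.newBuilder()
            .setRow(ByteString.copyFromUtf8("row-1"))
            .setMutateType(MutationType.DELETE)
            .addColumnValue(ColumnValue.newBuilder()
                .setFamily(ByteString.copyFromUtf8("cf"))
                .addQualifierValue(QualifierValue.newBuilder()
                    .setQualifier(ByteString.copyFromUtf8("cq"))
                    // No value is needed; DELETE_MULTIPLE_VERSIONS selects Delete.addColumns(family, qualifier, ts).
                    .setDeleteType(DeleteType.DELETE_MULTIPLE_VERSIONS)))
            .build();

        Delete delete = ProtobufUtil.toDelete(proto, null);
        System.out.println(delete);
    }
}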

Aggregations

Cell (org.apache.hadoop.hbase.Cell): 6 usages
ColumnValue (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue): 6 usages
QualifierValue (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue): 6 usages
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 4 usages
NameBytesPair (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair): 4 usages
ByteBufferExtendedCell (org.apache.hadoop.hbase.ByteBufferExtendedCell): 3 usages
MutationType (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType): 3 usages
IOException (java.io.IOException): 2 usages
CompletableFuture (java.util.concurrent.CompletableFuture): 2 usages
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 2 usages
HRegionLocation (org.apache.hadoop.hbase.HRegionLocation): 2 usages
ServerName (org.apache.hadoop.hbase.ServerName): 2 usages
TableName (org.apache.hadoop.hbase.TableName): 2 usages
HBaseRpcController (org.apache.hadoop.hbase.ipc.HBaseRpcController): 2 usages
ClientProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos): 2 usages
GetRequest (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest): 2 usages
MutateRequest (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest): 2 usages
MutateResponse (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse): 2 usages
MutationProto (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto): 2 usages
RegionActionResult (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult): 2 usages