Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto in project hbase by apache.
From the class TestFromClientSide5, the method testMultiRowMutationWithFilterConditionWhenConditionMatches:
@Test
public void testMultiRowMutationWithFilterConditionWhenConditionMatches() throws Exception {
  final TableName tableName = name.getTableName();
  final byte[] ROW1 = Bytes.toBytes("testRow1");
  final byte[] ROW2 = Bytes.toBytes("testRow2");
  final byte[] QUALIFIER2 = Bytes.toBytes("testQualifier2");
  final byte[] VALUE1 = Bytes.toBytes("testValue1");
  final byte[] VALUE2 = Bytes.toBytes("testValue2");
  final byte[] VALUE3 = Bytes.toBytes("testValue3");
  try (Table t = TEST_UTIL.createTable(tableName, FAMILY)) {
    // Add initial data
    t.put(new Put(ROW2).addColumn(FAMILY, QUALIFIER, VALUE2)
      .addColumn(FAMILY, QUALIFIER2, VALUE3));
    // Execute MultiRowMutation with conditions
    Put put1 = new Put(ROW).addColumn(FAMILY, QUALIFIER, VALUE);
    MutationProto m1 = ProtobufUtil.toMutation(MutationType.PUT, put1);
    Put put2 = new Put(ROW1).addColumn(FAMILY, QUALIFIER, VALUE1);
    MutationProto m2 = ProtobufUtil.toMutation(MutationType.PUT, put2);
    Delete delete = new Delete(ROW2);
    MutationProto m3 = ProtobufUtil.toMutation(MutationType.DELETE, delete);
    MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
    mrmBuilder.addMutationRequest(m1);
    mrmBuilder.addMutationRequest(m2);
    mrmBuilder.addMutationRequest(m3);
    mrmBuilder.addCondition(ProtobufUtil.toCondition(ROW2,
      new FilterList(
        new SingleColumnValueFilter(FAMILY, QUALIFIER, CompareOperator.EQUAL, VALUE2),
        new SingleColumnValueFilter(FAMILY, QUALIFIER2, CompareOperator.EQUAL, VALUE3)),
      null));
    CoprocessorRpcChannel channel = t.coprocessorService(ROW);
    MultiRowMutationService.BlockingInterface service =
      MultiRowMutationService.newBlockingStub(channel);
    MutateRowsResponse response = service.mutateRows(null, mrmBuilder.build());
    // Assert
    assertTrue(response.getProcessed());
    Result r = t.get(new Get(ROW));
    assertEquals(Bytes.toString(VALUE), Bytes.toString(r.getValue(FAMILY, QUALIFIER)));
    r = t.get(new Get(ROW1));
    assertEquals(Bytes.toString(VALUE1), Bytes.toString(r.getValue(FAMILY, QUALIFIER)));
    r = t.get(new Get(ROW2));
    assertTrue(r.isEmpty());
  }
}
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto in project hbase by apache.
From the class TestFromClientSide5, the method testMultiRowMutationWithMultipleConditionsWhenConditionsNotMatch:
@Test
public void testMultiRowMutationWithMultipleConditionsWhenConditionsNotMatch() throws Exception {
  final TableName tableName = name.getTableName();
  final byte[] ROW1 = Bytes.toBytes("testRow1");
  final byte[] ROW2 = Bytes.toBytes("testRow2");
  final byte[] VALUE1 = Bytes.toBytes("testValue1");
  final byte[] VALUE2 = Bytes.toBytes("testValue2");
  try (Table t = TEST_UTIL.createTable(tableName, FAMILY)) {
    // Add initial data
    t.put(new Put(ROW2).addColumn(FAMILY, QUALIFIER, VALUE2));
    // Execute MultiRowMutation with conditions
    Put put1 = new Put(ROW).addColumn(FAMILY, QUALIFIER, VALUE);
    MutationProto m1 = ProtobufUtil.toMutation(MutationType.PUT, put1);
    Put put2 = new Put(ROW1).addColumn(FAMILY, QUALIFIER, VALUE1);
    MutationProto m2 = ProtobufUtil.toMutation(MutationType.PUT, put2);
    Delete delete = new Delete(ROW2);
    MutationProto m3 = ProtobufUtil.toMutation(MutationType.DELETE, delete);
    MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
    mrmBuilder.addMutationRequest(m1);
    mrmBuilder.addMutationRequest(m2);
    mrmBuilder.addMutationRequest(m3);
    mrmBuilder.addCondition(ProtobufUtil.toCondition(ROW1, FAMILY, QUALIFIER,
      CompareOperator.EQUAL, null, null));
    mrmBuilder.addCondition(ProtobufUtil.toCondition(ROW2, FAMILY, QUALIFIER,
      CompareOperator.EQUAL, VALUE1, null));
    CoprocessorRpcChannel channel = t.coprocessorService(ROW);
    MultiRowMutationService.BlockingInterface service =
      MultiRowMutationService.newBlockingStub(channel);
    MutateRowsResponse response = service.mutateRows(null, mrmBuilder.build());
    // Assert
    assertFalse(response.getProcessed());
    Result r = t.get(new Get(ROW));
    assertTrue(r.isEmpty());
    r = t.get(new Get(ROW1));
    assertTrue(r.isEmpty());
    r = t.get(new Get(ROW2));
    assertEquals(Bytes.toString(VALUE2), Bytes.toString(r.getValue(FAMILY, QUALIFIER)));
  }
}
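The two tests above exercise the two ProtobufUtil.toCondition overloads: one takes an arbitrary Filter, the other a single column/value comparison. A minimal sketch contrasting them, reusing the ROW2/FAMILY/QUALIFIER constants from the tests above (a fragment, not a complete test):

// Minimal sketch reusing the constants from the tests above; adjust to your schema.
MutateRowsRequest.Builder builder = MutateRowsRequest.newBuilder();

// Variant 1: the condition is an arbitrary Filter evaluated against the row.
builder.addCondition(ProtobufUtil.toCondition(ROW2,
  new FilterList(
    new SingleColumnValueFilter(FAMILY, QUALIFIER, CompareOperator.EQUAL, VALUE2),
    new SingleColumnValueFilter(FAMILY, QUALIFIER2, CompareOperator.EQUAL, VALUE3)),
  null));

// Variant 2: the condition is a single column/value comparison. As in checkAndMutate,
// passing a null expected value is the "column does not exist" check used in the
// second test above.
builder.addCondition(ProtobufUtil.toCondition(ROW2, FAMILY, QUALIFIER,
  CompareOperator.EQUAL, VALUE2, null));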
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto in project hbase by apache.
From the class RequestConverter, the method buildMultiRequest:
private static ClientProtos.MultiRequest buildMultiRequest(final byte[] regionName,
    final RowMutations rowMutations, final Condition condition, long nonceGroup, long nonce)
    throws IOException {
  RegionAction.Builder builder =
    getRegionActionBuilderWithRegion(RegionAction.newBuilder(), regionName);
  // The region action is atomic: all mutations in the RowMutations apply as one unit.
  builder.setAtomic(true);
  boolean hasNonce = false;
  ClientProtos.Action.Builder actionBuilder = ClientProtos.Action.newBuilder();
  MutationProto.Builder mutationBuilder = MutationProto.newBuilder();
  for (Mutation mutation : rowMutations.getMutations()) {
    mutationBuilder.clear();
    MutationProto mp;
    if (mutation instanceof Increment || mutation instanceof Append) {
      // Increments and Appends are not idempotent, so they carry a nonce for retry detection.
      mp = ProtobufUtil.toMutation(getMutationType(mutation), mutation, mutationBuilder, nonce);
      hasNonce = true;
    } else {
      mp = ProtobufUtil.toMutation(getMutationType(mutation), mutation, mutationBuilder);
    }
    actionBuilder.clear();
    actionBuilder.setMutation(mp);
    builder.addAction(actionBuilder.build());
  }
  if (condition != null) {
    builder.setCondition(condition);
  }
  MultiRequest.Builder multiRequestBuilder = MultiRequest.newBuilder();
  if (hasNonce) {
    multiRequestBuilder.setNonceGroup(nonceGroup);
  }
  return multiRequestBuilder.addRegionAction(builder.build()).build();
}
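The loop above picks between two ProtobufUtil.toMutation overloads depending on the mutation type. A minimal sketch of both paths; the row, family, qualifier, and value bytes are illustrative placeholders and the nonce is an arbitrary example value:

// Puts and Deletes are converted without a nonce.
MutationProto.Builder mb = MutationProto.newBuilder();
Put put = new Put(Bytes.toBytes("row"))
  .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
MutationProto putProto = ProtobufUtil.toMutation(MutationType.PUT, put, mb);

// Increments (and Appends) are converted with a nonce so the server can detect
// retries of non-idempotent operations.
mb.clear();
Increment inc = new Increment(Bytes.toBytes("row"))
  .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), 1L);
MutationProto incProto = ProtobufUtil.toMutation(MutationType.INCREMENT, inc, mb, 42L);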
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto in project hbase by apache.
From the class TestAsyncTableRpcPriority, the setUp method:
@Before
public void setUp() throws IOException {
  stub = mock(ClientService.Interface.class);
  AtomicInteger scanNextCalled = new AtomicInteger(0);
  // Scan: the first call opens a scanner, later calls return one synthetic row each,
  // and a close request gets an empty response.
  doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      ScanRequest req = invocation.getArgument(1);
      RpcCallback<ScanResponse> done = invocation.getArgument(2);
      if (!req.hasScannerId()) {
        done.run(ScanResponse.newBuilder().setScannerId(1).setTtl(800)
          .setMoreResultsInRegion(true).setMoreResults(true).build());
      } else {
        if (req.hasCloseScanner() && req.getCloseScanner()) {
          done.run(ScanResponse.getDefaultInstance());
        } else {
          Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setType(Type.Put)
            .setRow(Bytes.toBytes(scanNextCalled.incrementAndGet()))
            .setFamily(Bytes.toBytes("cf")).setQualifier(Bytes.toBytes("cq"))
            .setValue(Bytes.toBytes("v")).build();
          Result result = Result.create(Arrays.asList(cell));
          done.run(ScanResponse.newBuilder().setScannerId(1).setTtl(800)
            .setMoreResultsInRegion(true).setMoreResults(true)
            .addResults(ProtobufUtil.toResult(result)).build());
        }
      }
      return null;
    }
  }).when(stub).scan(any(HBaseRpcController.class), any(ScanRequest.class), any());
  // Multi: always answer with a single empty result.
  doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      ClientProtos.MultiResponse resp = ClientProtos.MultiResponse.newBuilder()
        .addRegionActionResult(RegionActionResult.newBuilder()
          .addResultOrException(ResultOrException.newBuilder()
            .setResult(ProtobufUtil.toResult(new Result()))))
        .build();
      RpcCallback<ClientProtos.MultiResponse> done = invocation.getArgument(2);
      done.run(resp);
      return null;
    }
  }).when(stub).multi(any(HBaseRpcController.class), any(ClientProtos.MultiRequest.class), any());
  // Mutate: echo the incremented column back for INCREMENT, otherwise return an empty response.
  doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      MutationProto req = ((MutateRequest) invocation.getArgument(1)).getMutation();
      MutateResponse resp;
      switch (req.getMutateType()) {
        case INCREMENT:
          ColumnValue value = req.getColumnValue(0);
          QualifierValue qvalue = value.getQualifierValue(0);
          Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setType(Type.Put)
            .setRow(req.getRow().toByteArray()).setFamily(value.getFamily().toByteArray())
            .setQualifier(qvalue.getQualifier().toByteArray())
            .setValue(qvalue.getValue().toByteArray()).build();
          resp = MutateResponse.newBuilder()
            .setResult(ProtobufUtil.toResult(Result.create(Arrays.asList(cell)))).build();
          break;
        default:
          resp = MutateResponse.getDefaultInstance();
          break;
      }
      RpcCallback<MutateResponse> done = invocation.getArgument(2);
      done.run(resp);
      return null;
    }
  }).when(stub).mutate(any(HBaseRpcController.class), any(MutateRequest.class), any());
  // Get: always answer with an empty response.
  doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      RpcCallback<GetResponse> done = invocation.getArgument(2);
      done.run(GetResponse.getDefaultInstance());
      return null;
    }
  }).when(stub).get(any(HBaseRpcController.class), any(GetRequest.class), any());
  // Connection whose region locator always resolves to a fixed fake server and whose
  // region server stub is the mock configured above.
  conn = new AsyncConnectionImpl(CONF, new DoNothingConnectionRegistry(CONF), "test", null,
    UserProvider.instantiate(CONF).getCurrent()) {
    @Override
    AsyncRegionLocator getLocator() {
      AsyncRegionLocator locator = mock(AsyncRegionLocator.class);
      Answer<CompletableFuture<HRegionLocation>> answer =
        new Answer<CompletableFuture<HRegionLocation>>() {
          @Override
          public CompletableFuture<HRegionLocation> answer(InvocationOnMock invocation)
              throws Throwable {
            TableName tableName = invocation.getArgument(0);
            RegionInfo info = RegionInfoBuilder.newBuilder(tableName).build();
            ServerName serverName = ServerName.valueOf("rs", 16010, 12345);
            HRegionLocation loc = new HRegionLocation(info, serverName);
            return CompletableFuture.completedFuture(loc);
          }
        };
      doAnswer(answer).when(locator).getRegionLocation(any(TableName.class), any(byte[].class),
        any(RegionLocateType.class), anyLong());
      doAnswer(answer).when(locator).getRegionLocation(any(TableName.class), any(byte[].class),
        anyInt(), any(RegionLocateType.class), anyLong());
      return locator;
    }

    @Override
    ClientService.Interface getRegionServerStub(ServerName serverName) throws IOException {
      return stub;
    }
  };
}
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto in project hbase by apache.
From the class TestProtobufUtil, the method testIncrement:
/**
 * Test Increment Mutate conversions.
 *
 * @throws IOException if converting to an {@link Increment} or
 *   {@link org.apache.hadoop.hbase.client.Mutation} fails
 */
@Test
public void testIncrement() throws IOException {
  MutationProto proto = getIncrementMutation(111111L);
  // default fields
  assertEquals(MutationProto.Durability.USE_DEFAULT, proto.getDurability());
  // set the default value for equal comparison
  MutationProto.Builder mutateBuilder = MutationProto.newBuilder(proto);
  mutateBuilder.setDurability(MutationProto.Durability.USE_DEFAULT);
  Increment increment = ProtobufUtil.toIncrement(proto, null);
  mutateBuilder.setTimestamp(increment.getTimestamp());
  mutateBuilder.setTimeRange(ProtobufUtil.toTimeRange(increment.getTimeRange()));
  assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.INCREMENT, increment));
}
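For comparison, a minimal sketch of the analogous round trip for a Put; the row, family, qualifier, and value bytes are illustrative placeholders, and ProtobufUtil.toPut is assumed to be the inverse conversion available in the same utility class as toMutation:

// Hypothetical round-trip check for a Put, analogous to the Increment test above.
Put put = new Put(Bytes.toBytes("row"));
put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));

MutationProto proto = ProtobufUtil.toMutation(MutationType.PUT, put);
Put roundTripped = ProtobufUtil.toPut(proto);

// The row (and, with more assertions, the cells) should survive the round trip.
assertTrue(Bytes.equals(put.getRow(), roundTripped.getRow()));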