Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto in project hbase by apache.
The class RequestConverter, method buildMutateRequest.
/**
 * Create a protocol buffer MultiRequest for conditional row mutations
 *
 * @param regionName the name of the region the mutations are applied to
 * @param row the row the condition is checked against
 * @param family the column family of the checked cell
 * @param qualifier the column qualifier of the checked cell
 * @param comparator the comparator used to evaluate the checked cell's value
 * @param compareType the comparison operation to apply
 * @param rowMutations the Puts and Deletes to apply atomically if the condition holds
 * @return a multi request wrapping the conditional row mutations
 * @throws IOException if a mutation cannot be converted to a protocol buffer
 */
public static ClientProtos.MultiRequest buildMutateRequest(final byte[] regionName,
    final byte[] row, final byte[] family, final byte[] qualifier,
    final ByteArrayComparable comparator, final CompareType compareType,
    final RowMutations rowMutations) throws IOException {
  RegionAction.Builder builder =
    getRegionActionBuilderWithRegion(RegionAction.newBuilder(), regionName);
  builder.setAtomic(true);
  ClientProtos.Action.Builder actionBuilder = ClientProtos.Action.newBuilder();
  MutationProto.Builder mutationBuilder = MutationProto.newBuilder();
  Condition condition = buildCondition(row, family, qualifier, comparator, compareType);
  for (Mutation mutation : rowMutations.getMutations()) {
    MutationType mutateType = null;
    if (mutation instanceof Put) {
      mutateType = MutationType.PUT;
    } else if (mutation instanceof Delete) {
      mutateType = MutationType.DELETE;
    } else {
      throw new DoNotRetryIOException(
        "RowMutations supports only put and delete, not " + mutation.getClass().getName());
    }
    mutationBuilder.clear();
    MutationProto mp = ProtobufUtil.toMutation(mutateType, mutation, mutationBuilder);
    actionBuilder.clear();
    actionBuilder.setMutation(mp);
    builder.addAction(actionBuilder.build());
  }
  ClientProtos.MultiRequest request = ClientProtos.MultiRequest.newBuilder()
    .addRegionAction(builder.build()).setCondition(condition).build();
  return request;
}
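For context, a minimal sketch of how a caller might use this overload, assuming the usual client classes (Bytes, Put, Delete, RowMutations, BinaryComparator) and the shaded RequestConverter and generated CompareType are imported; the region name is obtained elsewhere, and the row, family, qualifier, and value literals below are placeholders rather than anything taken from the HBase code above.
static ClientProtos.MultiRequest buildConditionalExample(byte[] regionName) throws IOException {
  // Illustrative values; not from the HBase source above.
  byte[] row = Bytes.toBytes("row1");
  byte[] family = Bytes.toBytes("cf");
  byte[] qualifier = Bytes.toBytes("q");
  // Apply a Put and a Delete atomically on the same row, but only if cf:q equals "expected".
  RowMutations mutations = new RowMutations(row);
  mutations.add(new Put(row).addColumn(family, Bytes.toBytes("other"), Bytes.toBytes("v")));
  mutations.add(new Delete(row).addColumns(family, Bytes.toBytes("stale")));
  return RequestConverter.buildMutateRequest(regionName, row, family, qualifier,
    new BinaryComparator(Bytes.toBytes("expected")), CompareType.EQUAL, mutations);
}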
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto in project hbase by apache.
The class RSRpcServices, method isReplicationRequest.
private boolean isReplicationRequest(Action action) {
  // a replication request can only be a put or a delete.
  if (!action.hasMutation()) {
    return false;
  }
  MutationProto mutation = action.getMutation();
  MutationType type = mutation.getMutateType();
  if (type != MutationType.PUT && type != MutationType.DELETE) {
    return false;
  }
  // it is a replication request only if the mutation carries the replication attribute.
  return mutation.getAttributeList().stream().map(p -> p.getName())
    .filter(n -> n.equals(ReplicationUtils.REPLICATION_ATTR_NAME)).findAny().isPresent();
}
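For illustration only (this is not the actual replication sink code): a mutation satisfies this check when it carries the replication attribute, which the sending side can set on the client-side Mutation before it is converted to a MutationProto. A hedged sketch, assuming Put, Bytes, ReplicationUtils, ProtobufUtil, and the generated MutationProto types are imported; the row, column, and value literals are placeholders.
static MutationProto buildReplicationTaggedPut() throws IOException {
  Put put = new Put(Bytes.toBytes("row"))
    .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
  // The attribute name is what isReplicationRequest looks for; the value is not inspected.
  put.setAttribute(ReplicationUtils.REPLICATION_ATTR_NAME, Bytes.toBytes(true));
  // ProtobufUtil.toMutation copies the mutation's attributes into MutationProto's attribute list.
  return ProtobufUtil.toMutation(MutationType.PUT, put);
}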
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto in project hbase by apache.
The class TestProtobufUtil, method testAppendNoTimestamp.
/**
* Older clients may not send along a timestamp in the MutationProto. Check that we
* default correctly.
*/
@Test
public void testAppendNoTimestamp() throws IOException {
  MutationProto mutation = getAppendMutation(null);
  Append append = ProtobufUtil.toAppend(mutation, null);
  assertEquals(HConstants.LATEST_TIMESTAMP, append.getTimestamp());
  append.getFamilyCellMap().values().forEach(cells ->
    cells.forEach(cell -> assertEquals(HConstants.LATEST_TIMESTAMP, cell.getTimestamp())));
}
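The getAppendMutation helper is not shown in this excerpt. A plausible sketch, assuming it lives in the same test class, mirrors the builder pattern used in testPut below, and only sets timestamps when one is supplied; the row, family, qualifier, and value literals are guesses.
private MutationProto getAppendMutation(Long timestamp) {
  MutationProto.Builder mutateBuilder = MutationProto.newBuilder()
    .setRow(ByteString.copyFromUtf8("row"))
    .setMutateType(MutationType.APPEND);
  QualifierValue.Builder qualifierBuilder = QualifierValue.newBuilder()
    .setQualifier(ByteString.copyFromUtf8("c1"))
    .setValue(ByteString.copyFromUtf8("v1"));
  if (timestamp != null) {
    // Older clients omit these fields; passing null simulates that case.
    mutateBuilder.setTimestamp(timestamp);
    qualifierBuilder.setTimestamp(timestamp);
  }
  mutateBuilder.addColumnValue(ColumnValue.newBuilder()
    .setFamily(ByteString.copyFromUtf8("f1"))
    .addQualifierValue(qualifierBuilder.build())
    .build());
  return mutateBuilder.build();
}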
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto in project hbase by apache.
The class TestProtobufUtil, method testPut.
/**
* Test Put Mutate conversions.
*
* @throws IOException if the conversion to a {@link Put} or a
* {@link org.apache.hadoop.hbase.client.Mutation} fails
*/
@Test
public void testPut() throws IOException {
  MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
  mutateBuilder.setRow(ByteString.copyFromUtf8("row"));
  mutateBuilder.setMutateType(MutationType.PUT);
  mutateBuilder.setTimestamp(111111);
  ColumnValue.Builder valueBuilder = ColumnValue.newBuilder();
  valueBuilder.setFamily(ByteString.copyFromUtf8("f1"));
  QualifierValue.Builder qualifierBuilder = QualifierValue.newBuilder();
  qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c1"));
  qualifierBuilder.setValue(ByteString.copyFromUtf8("v1"));
  valueBuilder.addQualifierValue(qualifierBuilder.build());
  qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c2"));
  qualifierBuilder.setValue(ByteString.copyFromUtf8("v2"));
  qualifierBuilder.setTimestamp(222222);
  valueBuilder.addQualifierValue(qualifierBuilder.build());
  mutateBuilder.addColumnValue(valueBuilder.build());
  MutationProto proto = mutateBuilder.build();
  // default fields
  assertEquals(MutationProto.Durability.USE_DEFAULT, proto.getDurability());
  // set the default value for equal comparison
  mutateBuilder = MutationProto.newBuilder(proto);
  mutateBuilder.setDurability(MutationProto.Durability.USE_DEFAULT);
  Put put = ProtobufUtil.toPut(proto);
  // Cells in a Put fall back to the Put-level timestamp when no cell-level timestamp
  // is specified, so backfill that timestamp into the expected proto before comparing.
  long timestamp = put.getTimestamp();
  for (ColumnValue.Builder column : mutateBuilder.getColumnValueBuilderList()) {
    for (QualifierValue.Builder qualifier : column.getQualifierValueBuilderList()) {
      if (!qualifier.hasTimestamp()) {
        qualifier.setTimestamp(timestamp);
      }
    }
  }
  assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.PUT, put));
}
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto in project hbase by apache.
The class TestProtobufUtil, method testIncrementNoTimestamp.
/**
* Older clients may not send along a timestamp in the MutationProto. Check that we
* default correctly.
*/
@Test
public void testIncrementNoTimestamp() throws IOException {
  MutationProto mutation = getIncrementMutation(null);
  Increment increment = ProtobufUtil.toIncrement(mutation, null);
  assertEquals(HConstants.LATEST_TIMESTAMP, increment.getTimestamp());
  increment.getFamilyCellMap().values().forEach(cells ->
    cells.forEach(cell -> assertEquals(HConstants.LATEST_TIMESTAMP, cell.getTimestamp())));
}