Use of org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto in project phoenix by apache:
class ProtobufUtil, method getMutations.
/**
 * Deserializes a list of {@link ByteString}s, each holding a serialized
 * {@link MutationProto}, back into HBase {@link Mutation} instances.
 *
 * @param mutations serialized MutationProto entries
 * @return the deserialized mutations, in the same order as the input
 * @throws IOException if any entry cannot be parsed or converted
 */
private static List<Mutation> getMutations(List<ByteString> mutations) throws IOException {
    List<Mutation> converted = new ArrayList<Mutation>();
    for (ByteString serialized : mutations) {
        MutationProto proto = MutationProto.parseFrom(serialized);
        converted.add(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(proto));
    }
    return converted;
}
Use of org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto in project phoenix by apache:
class IndexedKeyValue, method readFields.
/**
 * This method shouldn't be used - you should use {@link KeyValueCodec#readKeyValue(DataInput)} instead. Its the
 * complement to {@link #writeData(DataOutput)}.
 */
@SuppressWarnings("javadoc")
public void readFields(DataInput in) throws IOException {
    // Wire format: length-prefixed table name, then a length-prefixed serialized MutationProto.
    byte[] tableNameBytes = Bytes.readByteArray(in);
    byte[] serializedMutation = Bytes.readByteArray(in);
    this.indexTableName = new ImmutableBytesPtr(tableNameBytes);
    this.mutation = org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(
            MutationProto.parseFrom(serializedMutation));
    // Cache the hash now that both components are known.
    this.hashCode = calcHashCode(indexTableName, mutation);
}
Use of org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto in project phoenix by apache:
class IndexUtil, method updateIndexState.
/**
 * Transitions an index to {@code newState} by invoking the MetaDataService coprocessor
 * endpoint on the SYSTEM.CATALOG region that owns the index header row, mimicking the
 * Put a client would issue for an index-state change.
 *
 * @param indexTableKey row key of the index header row in SYSTEM.CATALOG
 * @param minTimeStamp  value written to INDEX_DISABLE_TIMESTAMP (0 clears it)
 * @param metaTable     handle to the SYSTEM.CATALOG table
 * @param newState      target index state
 * @return the coprocessor's mutation result
 * @throws Throwable rethrows any RPC or coprocessor failure
 */
public static MetaDataMutationResult updateIndexState(byte[] indexTableKey, long minTimeStamp, HTableInterface metaTable, PIndexState newState) throws Throwable {
// Mimic the Put that gets generated by the client on an update of the index state
Put put = new Put(indexTableKey);
put.add(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.INDEX_STATE_BYTES, newState.getSerializedBytes());
put.add(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP_BYTES, PLong.INSTANCE.toBytes(minTimeStamp));
// Zero out the async-rebuild timestamp alongside the state change.
put.add(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.ASYNC_REBUILD_TIMESTAMP_BYTES, PLong.INSTANCE.toBytes(0));
final List<Mutation> tableMetadata = Collections.<Mutation>singletonList(put);
// Start and stop key are both indexTableKey, so exactly one region (the one
// hosting the index header row) executes the endpoint call.
final Map<byte[], MetaDataResponse> results = metaTable.coprocessorService(MetaDataService.class, indexTableKey, indexTableKey, new Batch.Call<MetaDataService, MetaDataResponse>() {
@Override
public MetaDataResponse call(MetaDataService instance) throws IOException {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<MetaDataResponse> rpcCallback = new BlockingRpcCallback<MetaDataResponse>();
UpdateIndexStateRequest.Builder builder = UpdateIndexStateRequest.newBuilder();
// Ship the mutations as serialized MutationProto byte strings.
for (Mutation m : tableMetadata) {
MutationProto mp = ProtobufUtil.toProto(m);
builder.addTableMetadataMutations(mp.toByteString());
}
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
instance.updateIndexState(controller, builder.build(), rpcCallback);
// ServerRpcController reports server-side failures out of band; surface them here.
if (controller.getFailedOn() != null) {
throw controller.getFailedOn();
}
return rpcCallback.get();
}
});
if (results.isEmpty()) {
throw new IOException("Didn't get expected result size");
}
// Exactly one region was targeted, so take the single response.
MetaDataResponse tmpResponse = results.values().iterator().next();
return MetaDataMutationResult.constructFromProto(tmpResponse);
}
Use of org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto in project beam by apache:
class HBaseMutationCoder, method encode.
/**
 * Serializes the given {@link Mutation} to the stream as a length-delimited
 * {@link MutationProto}, so successive mutations can be decoded back one at a time.
 */
@Override
public void encode(Mutation mutation, OutputStream outStream) throws IOException {
    ProtobufUtil.toMutation(getType(mutation), mutation).writeDelimitedTo(outStream);
}
Use of org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto in project hbase by apache:
class TestProtobufUtil, method testAppend.
/**
 * Test Append Mutate conversions: builds a MutationProto by hand, converts it to an
 * {@link Append} via {@code ProtobufUtil.toAppend}, and verifies that converting back
 * with {@code ProtobufUtil.toMutation} round-trips to the same proto.
 *
 * @throws IOException on conversion failure
 */
@Test
public void testAppend() throws IOException {
    long timeStamp = 111111;
    MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
    mutateBuilder.setRow(ByteString.copyFromUtf8("row"));
    mutateBuilder.setMutateType(MutationType.APPEND);
    mutateBuilder.setTimestamp(timeStamp);
    ColumnValue.Builder valueBuilder = ColumnValue.newBuilder();
    valueBuilder.setFamily(ByteString.copyFromUtf8("f1"));
    QualifierValue.Builder qualifierBuilder = QualifierValue.newBuilder();
    // First qualifier: c1 -> v1 at timeStamp.
    qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c1"));
    qualifierBuilder.setValue(ByteString.copyFromUtf8("v1"));
    qualifierBuilder.setTimestamp(timeStamp);
    valueBuilder.addQualifierValue(qualifierBuilder.build());
    // Second qualifier: c2 -> v2 at timeStamp. The original code called
    // setTimestamp AFTER addQualifierValue, which was a no-op for the already-built
    // message and only worked because the reused builder retained the timestamp
    // from the first qualifier; set it explicitly before building instead.
    qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c2"));
    qualifierBuilder.setValue(ByteString.copyFromUtf8("v2"));
    qualifierBuilder.setTimestamp(timeStamp);
    valueBuilder.addQualifierValue(qualifierBuilder.build());
    mutateBuilder.addColumnValue(valueBuilder.build());
    MutationProto proto = mutateBuilder.build();
    // default fields
    assertEquals(MutationProto.Durability.USE_DEFAULT, proto.getDurability());
    // set the default value for equal comparison
    mutateBuilder = MutationProto.newBuilder(proto);
    mutateBuilder.setDurability(MutationProto.Durability.USE_DEFAULT);
    Append append = ProtobufUtil.toAppend(proto, null);
    // append always use the latest timestamp,
    // reset the timestamp to the original mutate
    mutateBuilder.setTimestamp(append.getTimeStamp());
    assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.APPEND, append));
}
Aggregations