Search in sources :

Example 11 with MutationProto

Use of org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto in the Apache Phoenix project.

From the class ProtobufUtil, method getMutations:

/**
 * Decodes a list of serialized mutations back into HBase client objects.
 * Each ByteString entry holds the wire form of a single MutationProto.
 *
 * @param mutations serialized MutationProto payloads
 * @return the decoded mutations, in the same order as the input
 * @throws IOException if any entry cannot be parsed as a MutationProto
 */
private static List<Mutation> getMutations(List<ByteString> mutations) throws IOException {
    final List<Mutation> decoded = new ArrayList<Mutation>();
    for (ByteString serialized : mutations) {
        MutationProto proto = MutationProto.parseFrom(serialized);
        decoded.add(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(proto));
    }
    return decoded;
}
Also used : ByteString(com.google.protobuf.ByteString) ArrayList(java.util.ArrayList) Mutation(org.apache.hadoop.hbase.client.Mutation) MutationProto(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)

Example 12 with MutationProto

Use of org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto in the Apache Phoenix project.

From the class IndexedKeyValue, method readFields:

/**
 * Prefer {@link KeyValueCodec#readKeyValue(DataInput)} over calling this
 * directly; it's the read-side complement to {@link #writeData(DataOutput)}.
 */
@SuppressWarnings("javadoc")
public void readFields(DataInput in) throws IOException {
    // Wire layout: index table name, then one serialized MutationProto.
    byte[] tableNameBytes = Bytes.readByteArray(in);
    this.indexTableName = new ImmutableBytesPtr(tableNameBytes);
    MutationProto proto = MutationProto.parseFrom(Bytes.readByteArray(in));
    this.mutation = org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(proto);
    // Hash is derived state; recompute it from the freshly read fields.
    this.hashCode = calcHashCode(indexTableName, mutation);
}
Also used : ImmutableBytesPtr(org.apache.phoenix.hbase.index.util.ImmutableBytesPtr) MutationProto(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)

Example 13 with MutationProto

Use of org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto in the Apache Phoenix project.

From the class IndexUtil, method updateIndexState:

/**
 * Transitions an index's state in the SYSTEM.CATALOG metadata table by invoking
 * the MetaData coprocessor endpoint directly on the region holding the index row.
 *
 * @param indexTableKey row key of the index's metadata row (also used as the
 *        coprocessor scan range so exactly one region is targeted)
 * @param minTimeStamp value written to INDEX_DISABLE_TIMESTAMP
 * @param metaTable handle to the system catalog table
 * @param newState target index state to record
 * @return the coprocessor's result, decoded from its protobuf response
 * @throws Throwable rethrows any server-side failure surfaced by the RPC controller
 */
public static MetaDataMutationResult updateIndexState(byte[] indexTableKey, long minTimeStamp, HTableInterface metaTable, PIndexState newState) throws Throwable {
    // Mimic the Put that gets generated by the client on an update of the index state
    Put put = new Put(indexTableKey);
    put.add(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.INDEX_STATE_BYTES, newState.getSerializedBytes());
    put.add(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP_BYTES, PLong.INSTANCE.toBytes(minTimeStamp));
    put.add(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.ASYNC_REBUILD_TIMESTAMP_BYTES, PLong.INSTANCE.toBytes(0));
    final List<Mutation> tableMetadata = Collections.<Mutation>singletonList(put);
    // Start key == end key, so the call executes on the single region owning this row.
    final Map<byte[], MetaDataResponse> results = metaTable.coprocessorService(MetaDataService.class, indexTableKey, indexTableKey, new Batch.Call<MetaDataService, MetaDataResponse>() {

        @Override
        public MetaDataResponse call(MetaDataService instance) throws IOException {
            ServerRpcController controller = new ServerRpcController();
            BlockingRpcCallback<MetaDataResponse> rpcCallback = new BlockingRpcCallback<MetaDataResponse>();
            UpdateIndexStateRequest.Builder builder = UpdateIndexStateRequest.newBuilder();
            // Ship each mutation in its serialized MutationProto form.
            for (Mutation m : tableMetadata) {
                MutationProto mp = ProtobufUtil.toProto(m);
                builder.addTableMetadataMutations(mp.toByteString());
            }
            builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
            instance.updateIndexState(controller, builder.build(), rpcCallback);
            // Surface any server-side exception captured by the controller.
            if (controller.getFailedOn() != null) {
                throw controller.getFailedOn();
            }
            return rpcCallback.get();
        }
    });
    if (results.isEmpty()) {
        throw new IOException("Didn't get expected result size");
    }
    // Exactly one region was targeted, so take the single response.
    MetaDataResponse tmpResponse = results.values().iterator().next();
    return MetaDataMutationResult.constructFromProto(tmpResponse);
}
Also used : MetaDataResponse(org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) KeyValueBuilder(org.apache.phoenix.hbase.index.util.KeyValueBuilder) IOException(java.io.IOException) ServerRpcController(org.apache.hadoop.hbase.ipc.ServerRpcController) Put(org.apache.hadoop.hbase.client.Put) MutationProto(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) MetaDataService(org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService) Batch(org.apache.hadoop.hbase.client.coprocessor.Batch) BlockingRpcCallback(org.apache.hadoop.hbase.ipc.BlockingRpcCallback) Mutation(org.apache.hadoop.hbase.client.Mutation)

Example 14 with MutationProto

Use of org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto in the Apache Beam project.

From the class HBaseMutationCoder, method encode:

/**
 * Serializes a single HBase {@code Mutation} to the stream as a
 * length-delimited MutationProto, so consecutive mutations can be
 * read back individually.
 */
@Override
public void encode(Mutation mutation, OutputStream outStream) throws IOException {
    MutationType mutationType = getType(mutation);
    ProtobufUtil.toMutation(mutationType, mutation).writeDelimitedTo(outStream);
}
Also used : MutationType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType) MutationProto(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)

Example 15 with MutationProto

Use of org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto in the Apache HBase project.

From the class TestProtobufUtil, method testAppend:

/**
 * Test Append Mutate conversions: builds an APPEND MutationProto with two
 * qualifier/value pairs, converts it to a client {@link Append} and back,
 * and asserts the round trip is lossless (modulo the timestamp, which
 * Append always resets to "latest").
 *
 * @throws IOException if proto conversion fails
 */
@Test
public void testAppend() throws IOException {
    long timeStamp = 111111;
    MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
    mutateBuilder.setRow(ByteString.copyFromUtf8("row"));
    mutateBuilder.setMutateType(MutationType.APPEND);
    mutateBuilder.setTimestamp(timeStamp);
    ColumnValue.Builder valueBuilder = ColumnValue.newBuilder();
    valueBuilder.setFamily(ByteString.copyFromUtf8("f1"));
    QualifierValue.Builder qualifierBuilder = QualifierValue.newBuilder();
    qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c1"));
    qualifierBuilder.setValue(ByteString.copyFromUtf8("v1"));
    qualifierBuilder.setTimestamp(timeStamp);
    valueBuilder.addQualifierValue(qualifierBuilder.build());
    qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c2"));
    qualifierBuilder.setValue(ByteString.copyFromUtf8("v2"));
    // Set the timestamp BEFORE adding the qualifier value; the original code
    // called setTimestamp after addQualifierValue, which was a no-op (the
    // builder already carried the timestamp from the c1 setup above).
    qualifierBuilder.setTimestamp(timeStamp);
    valueBuilder.addQualifierValue(qualifierBuilder.build());
    mutateBuilder.addColumnValue(valueBuilder.build());
    MutationProto proto = mutateBuilder.build();
    // default fields
    assertEquals(MutationProto.Durability.USE_DEFAULT, proto.getDurability());
    // set the default value for equal comparison
    mutateBuilder = MutationProto.newBuilder(proto);
    mutateBuilder.setDurability(MutationProto.Durability.USE_DEFAULT);
    Append append = ProtobufUtil.toAppend(proto, null);
    // append always use the latest timestamp,
    // reset the timestamp to the original mutate
    mutateBuilder.setTimestamp(append.getTimeStamp());
    assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.APPEND, append));
}
Also used : Append(org.apache.hadoop.hbase.client.Append) QualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) ColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) MutationProto(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) Test(org.junit.Test)

Aggregations

MutationProto (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)20 Mutation (org.apache.hadoop.hbase.client.Mutation)12 BlockingRpcCallback (org.apache.hadoop.hbase.ipc.BlockingRpcCallback)11 ServerRpcController (org.apache.hadoop.hbase.ipc.ServerRpcController)11 MetaDataResponse (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse)11 MetaDataService (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService)11 IOException (java.io.IOException)10 Batch (org.apache.hadoop.hbase.client.coprocessor.Batch)10 KeyValueBuilder (org.apache.phoenix.hbase.index.util.KeyValueBuilder)10 MetaDataMutationResult (org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult)8 PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException)8 NonTxIndexBuilder (org.apache.phoenix.hbase.index.covered.NonTxIndexBuilder)8 PhoenixIndexBuilder (org.apache.phoenix.index.PhoenixIndexBuilder)8 Test (org.junit.Test)5 ColumnValue (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)4 QualifierValue (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)4 ArrayList (java.util.ArrayList)3 Put (org.apache.hadoop.hbase.client.Put)3 MutationCode (org.apache.phoenix.coprocessor.MetaDataProtocol.MutationCode)3 ReadOnlyProps (org.apache.phoenix.util.ReadOnlyProps)3