
Example 6 with CheckAndMutate

Use of org.apache.hadoop.hbase.client.CheckAndMutate in project hbase by apache.

The class RSRpcServices, method checkAndMutate:

private CheckAndMutateResult checkAndMutate(HRegion region, List<ClientProtos.Action> actions, CellScanner cellScanner, Condition condition, long nonceGroup, ActivePolicyEnforcement spaceQuotaEnforcement) throws IOException {
    int countOfCompleteMutation = 0;
    try {
        if (!region.getRegionInfo().isMetaRegion()) {
            server.getMemStoreFlusher().reclaimMemStoreMemory();
        }
        List<Mutation> mutations = new ArrayList<>();
        long nonce = HConstants.NO_NONCE;
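        // Translate each proto action into a client-side Mutation, applying space-quota checks
        // (and cell-size checks for non-Delete mutations); only Increment and Append carry a nonce.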
        for (ClientProtos.Action action : actions) {
            if (action.hasGet()) {
                throw new DoNotRetryIOException("Atomic put and/or delete only, not a Get=" + action.getGet());
            }
            MutationProto mutation = action.getMutation();
            MutationType type = mutation.getMutateType();
            switch(type) {
                case PUT:
                    Put put = ProtobufUtil.toPut(mutation, cellScanner);
                    ++countOfCompleteMutation;
                    checkCellSizeLimit(region, put);
                    spaceQuotaEnforcement.getPolicyEnforcement(region).check(put);
                    mutations.add(put);
                    break;
                case DELETE:
                    Delete del = ProtobufUtil.toDelete(mutation, cellScanner);
                    ++countOfCompleteMutation;
                    spaceQuotaEnforcement.getPolicyEnforcement(region).check(del);
                    mutations.add(del);
                    break;
                case INCREMENT:
                    Increment increment = ProtobufUtil.toIncrement(mutation, cellScanner);
                    nonce = mutation.hasNonce() ? mutation.getNonce() : HConstants.NO_NONCE;
                    ++countOfCompleteMutation;
                    checkCellSizeLimit(region, increment);
                    spaceQuotaEnforcement.getPolicyEnforcement(region).check(increment);
                    mutations.add(increment);
                    break;
                case APPEND:
                    Append append = ProtobufUtil.toAppend(mutation, cellScanner);
                    nonce = mutation.hasNonce() ? mutation.getNonce() : HConstants.NO_NONCE;
                    ++countOfCompleteMutation;
                    checkCellSizeLimit(region, append);
                    spaceQuotaEnforcement.getPolicyEnforcement(region).check(append);
                    mutations.add(append);
                    break;
                default:
                    throw new DoNotRetryIOException("invalid mutation type : " + type);
            }
        }
        if (mutations.size() == 0) {
            return new CheckAndMutateResult(true, null);
        } else {
            CheckAndMutate checkAndMutate = ProtobufUtil.toCheckAndMutate(condition, mutations);
            CheckAndMutateResult result = null;
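            // The coprocessor pre-hook runs first; a non-null result from it bypasses both the
            // region-level checkAndMutate call and the post-hook.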
            if (region.getCoprocessorHost() != null) {
                result = region.getCoprocessorHost().preCheckAndMutate(checkAndMutate);
            }
            if (result == null) {
                result = region.checkAndMutate(checkAndMutate, nonceGroup, nonce);
                if (region.getCoprocessorHost() != null) {
                    result = region.getCoprocessorHost().postCheckAndMutate(checkAndMutate, result);
                }
            }
            return result;
        }
    } finally {
        // Currently, checkAndMutate isn't supported by batch, so it won't mess up the cell scanner
        // even if the malformed cells are not skipped.
        for (int i = countOfCompleteMutation; i < actions.size(); ++i) {
            skipCellsForMutation(actions.get(i), cellScanner);
        }
    }
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete), MutationType (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), CheckAndMutateResult (org.apache.hadoop.hbase.client.CheckAndMutateResult), ArrayList (java.util.ArrayList), Action (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action), CheckAndMutate (org.apache.hadoop.hbase.client.CheckAndMutate), MutationProto (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto), Put (org.apache.hadoop.hbase.client.Put), Append (org.apache.hadoop.hbase.client.Append), Increment (org.apache.hadoop.hbase.client.Increment), Mutation (org.apache.hadoop.hbase.client.Mutation), ClientProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos)
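
For context, here is a minimal client-side sketch (not taken from the hbase sources) of the kind of request that ends up in RSRpcServices.checkAndMutate above: a conditional Put plus Delete issued atomically through the Table API. The table name "my_table", the row key, and the column names are placeholder assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.CheckAndMutate;
import org.apache.hadoop.hbase.client.CheckAndMutateResult;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndMutateClientSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("my_table"))) {
            byte[] row = Bytes.toBytes("row1");
            byte[] family = Bytes.toBytes("cf");
            // Apply the Put and the Delete atomically, but only if cf:flag currently equals "ready".
            Put put = new Put(row).addColumn(family, Bytes.toBytes("state"), Bytes.toBytes("done"));
            Delete delete = new Delete(row).addColumns(family, Bytes.toBytes("pending"));
            CheckAndMutate checkAndMutate = CheckAndMutate.newBuilder(row)
                .ifEquals(family, Bytes.toBytes("flag"), Bytes.toBytes("ready"))
                .build(new RowMutations(row).add((Mutation) put).add((Mutation) delete));
            CheckAndMutateResult result = table.checkAndMutate(checkAndMutate);
            System.out.println("condition matched, mutations applied: " + result.isSuccess());
        }
    }
}

On the server side, the two mutations arrive as ClientProtos.Action entries and are converted back into Put and Delete objects by the loop shown in the method above.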

Example 7 with CheckAndMutate

Use of org.apache.hadoop.hbase.client.CheckAndMutate in project hbase by apache.

The class ProtobufUtil, method toCheckAndMutate:

public static CheckAndMutate toCheckAndMutate(ClientProtos.Condition condition, MutationProto mutation, CellScanner cellScanner) throws IOException {
    byte[] row = condition.getRow().toByteArray();
    CheckAndMutate.Builder builder = CheckAndMutate.newBuilder(row);
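    // A Condition carries either a Filter or a family/qualifier/comparator triple; only one of the two forms is used.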
    Filter filter = condition.hasFilter() ? ProtobufUtil.toFilter(condition.getFilter()) : null;
    if (filter != null) {
        builder.ifMatches(filter);
    } else {
        builder.ifMatches(condition.getFamily().toByteArray(), condition.getQualifier().toByteArray(), CompareOperator.valueOf(condition.getCompareType().name()), ProtobufUtil.toComparator(condition.getComparator()).getValue());
    }
    TimeRange timeRange = condition.hasTimeRange() ? ProtobufUtil.toTimeRange(condition.getTimeRange()) : TimeRange.allTime();
    builder.timeRange(timeRange);
    try {
        MutationType type = mutation.getMutateType();
        switch(type) {
            case PUT:
                return builder.build(ProtobufUtil.toPut(mutation, cellScanner));
            case DELETE:
                return builder.build(ProtobufUtil.toDelete(mutation, cellScanner));
            case INCREMENT:
                return builder.build(ProtobufUtil.toIncrement(mutation, cellScanner));
            case APPEND:
                return builder.build(ProtobufUtil.toAppend(mutation, cellScanner));
            default:
                throw new DoNotRetryIOException("Unsupported mutate type: " + type.name());
        }
    } catch (IllegalArgumentException e) {
        throw new DoNotRetryIOException(e.getMessage());
    }
}
Also used: TimeRange (org.apache.hadoop.hbase.io.TimeRange), MutationType (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType), Filter (org.apache.hadoop.hbase.filter.Filter), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), CheckAndMutate (org.apache.hadoop.hbase.client.CheckAndMutate)
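
As a companion to the conversion above, here is a hedged sketch (again not from the hbase sources) of the client-side object that the filter branch reconstructs: a CheckAndMutate guarded by a Filter and a TimeRange instead of a plain family/qualifier check. The row key, column names, and values are placeholders.

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.CheckAndMutate;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndMutateFilterSketch {
    public static CheckAndMutate buildExample() {
        byte[] row = Bytes.toBytes("row1");
        byte[] family = Bytes.toBytes("cf");
        // Condition: cf:status equals "active", evaluated only against cells with timestamps up to "now".
        SingleColumnValueFilter filter = new SingleColumnValueFilter(
            family, Bytes.toBytes("status"), CompareOperator.EQUAL,
            new BinaryComparator(Bytes.toBytes("active")));
        Put put = new Put(row).addColumn(family, Bytes.toBytes("visits"), Bytes.toBytes("1"));
        return CheckAndMutate.newBuilder(row)
            .ifMatches(filter)
            .timeRange(TimeRange.between(0, System.currentTimeMillis()))
            .build(put);
    }
}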

Example 8 with CheckAndMutate

Use of org.apache.hadoop.hbase.client.CheckAndMutate in project hbase by apache.

The class TestHRegion, method testCheckAndIncrementAndAppend:

@Test
public void testCheckAndIncrementAndAppend() throws Throwable {
    // Setting up region
    this.region = initHRegion(tableName, method, CONF, fam1);
    // CheckAndMutate with Increment and Append
    CheckAndMutate checkAndMutate = CheckAndMutate.newBuilder(row).ifNotExists(fam1, qual)
        .build(new RowMutations(row)
            .add((Mutation) new Increment(row).addColumn(fam1, qual1, 1L))
            .add((Mutation) new Append(row).addColumn(fam1, qual2, Bytes.toBytes("a"))));
    CheckAndMutateResult result = region.checkAndMutate(checkAndMutate);
    assertTrue(result.isSuccess());
    assertEquals(1L, Bytes.toLong(result.getResult().getValue(fam1, qual1)));
    assertEquals("a", Bytes.toString(result.getResult().getValue(fam1, qual2)));
    Result r = region.get(new Get(row));
    assertEquals(1L, Bytes.toLong(r.getValue(fam1, qual1)));
    assertEquals("a", Bytes.toString(r.getValue(fam1, qual2)));
    // Set return results to false
    checkAndMutate = CheckAndMutate.newBuilder(row).ifNotExists(fam1, qual)
        .build(new RowMutations(row)
            .add((Mutation) new Increment(row).addColumn(fam1, qual1, 1L).setReturnResults(false))
            .add((Mutation) new Append(row).addColumn(fam1, qual2, Bytes.toBytes("a")).setReturnResults(false)));
    result = region.checkAndMutate(checkAndMutate);
    assertTrue(result.isSuccess());
    assertNull(result.getResult().getValue(fam1, qual1));
    assertNull(result.getResult().getValue(fam1, qual2));
    r = region.get(new Get(row));
    assertEquals(2L, Bytes.toLong(r.getValue(fam1, qual1)));
    assertEquals("aa", Bytes.toString(r.getValue(fam1, qual2)));
    checkAndMutate = CheckAndMutate.newBuilder(row).ifNotExists(fam1, qual)
        .build(new RowMutations(row)
            .add((Mutation) new Increment(row).addColumn(fam1, qual1, 1L))
            .add((Mutation) new Append(row).addColumn(fam1, qual2, Bytes.toBytes("a")).setReturnResults(false)));
    result = region.checkAndMutate(checkAndMutate);
    assertTrue(result.isSuccess());
    assertEquals(3L, Bytes.toLong(result.getResult().getValue(fam1, qual1)));
    assertNull(result.getResult().getValue(fam1, qual2));
    r = region.get(new Get(row));
    assertEquals(3L, Bytes.toLong(r.getValue(fam1, qual1)));
    assertEquals("aaa", Bytes.toString(r.getValue(fam1, qual2)));
}
Also used: Append (org.apache.hadoop.hbase.client.Append), CheckAndMutateResult (org.apache.hadoop.hbase.client.CheckAndMutateResult), Increment (org.apache.hadoop.hbase.client.Increment), Get (org.apache.hadoop.hbase.client.Get), CheckAndMutate (org.apache.hadoop.hbase.client.CheckAndMutate), Mutation (org.apache.hadoop.hbase.client.Mutation), RowMutations (org.apache.hadoop.hbase.client.RowMutations), Result (org.apache.hadoop.hbase.client.Result), Test (org.junit.Test)
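
For comparison, a short client-side sketch (not part of TestHRegion) of the same check-then-increment-and-append pattern issued through the Table API; a live Connection is assumed, and the table, row, and column names are placeholders.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.CheckAndMutate;
import org.apache.hadoop.hbase.client.CheckAndMutateResult;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndIncrementAppendSketch {
    public static void run(Connection connection) throws Exception {
        byte[] row = Bytes.toBytes("row1");
        byte[] fam = Bytes.toBytes("cf");
        try (Table table = connection.getTable(TableName.valueOf("my_table"))) {
            // Increment cf:counter and append to cf:log atomically, but only while cf:lock is absent.
            CheckAndMutate checkAndMutate = CheckAndMutate.newBuilder(row)
                .ifNotExists(fam, Bytes.toBytes("lock"))
                .build(new RowMutations(row)
                    .add((Mutation) new Increment(row).addColumn(fam, Bytes.toBytes("counter"), 1L))
                    .add((Mutation) new Append(row).addColumn(fam, Bytes.toBytes("log"), Bytes.toBytes("x"))));
            CheckAndMutateResult result = table.checkAndMutate(checkAndMutate);
            if (result.isSuccess()) {
                long counter = Bytes.toLong(result.getResult().getValue(fam, Bytes.toBytes("counter")));
                System.out.println("new counter value: " + counter);
            }
        }
    }
}

As in the test above, the returned Result carries the post-mutation values because setReturnResults was left at its default of true.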

Aggregations

CheckAndMutate (org.apache.hadoop.hbase.client.CheckAndMutate): 8 usages
Append (org.apache.hadoop.hbase.client.Append): 5 usages
Increment (org.apache.hadoop.hbase.client.Increment): 5 usages
RowMutations (org.apache.hadoop.hbase.client.RowMutations): 5 usages
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 4 usages
CheckAndMutateResult (org.apache.hadoop.hbase.client.CheckAndMutateResult): 4 usages
Delete (org.apache.hadoop.hbase.client.Delete): 4 usages
Put (org.apache.hadoop.hbase.client.Put): 4 usages
Get (org.apache.hadoop.hbase.client.Get): 3 usages
Mutation (org.apache.hadoop.hbase.client.Mutation): 3 usages
MutationType (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType): 3 usages
ArrayList (java.util.ArrayList): 2 usages
Result (org.apache.hadoop.hbase.client.Result): 2 usages
Filter (org.apache.hadoop.hbase.filter.Filter): 2 usages
TimeRange (org.apache.hadoop.hbase.io.TimeRange): 2 usages
ByteString (org.apache.hbase.thirdparty.com.google.protobuf.ByteString): 2 usages
Test (org.junit.Test): 2 usages
HashSet (java.util.HashSet): 1 usage
Action (org.apache.hadoop.hbase.client.Action): 1 usage
RegionCoprocessorServiceExec (org.apache.hadoop.hbase.client.RegionCoprocessorServiceExec): 1 usage