Example 1 with MutateRowsResponse

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse in project hbase by apache.

The class RegionStateStore, method multiMutate:

/**
 * Performs an atomic multi-mutate operation against the given table. Used by the likes of merge
 * and split as these want to make atomic mutations across multiple rows.
 */
private void multiMutate(RegionInfo ri, List<Mutation> mutations) throws IOException {
    debugLogMutations(mutations);
    byte[] row = Bytes.toBytes(RegionReplicaUtil.getRegionInfoForDefaultReplica(ri).getRegionNameAsString() + HConstants.DELIMITER);
    MutateRowsRequest.Builder builder = MutateRowsRequest.newBuilder();
    for (Mutation mutation : mutations) {
        if (mutation instanceof Put) {
            builder.addMutationRequest(ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.PUT, mutation));
        } else if (mutation instanceof Delete) {
            builder.addMutationRequest(ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.DELETE, mutation));
        } else {
            throw new DoNotRetryIOException("multi in MetaEditor doesn't support " + mutation.getClass().getName());
        }
    }
    MutateRowsRequest request = builder.build();
    AsyncTable<?> table = master.getConnection().toAsyncConnection().getTable(TableName.META_TABLE_NAME);
    CompletableFuture<MutateRowsResponse> future =
        table.<MultiRowMutationService, MutateRowsResponse>coprocessorService(
            MultiRowMutationService::newStub,
            (stub, controller, done) -> stub.mutateRows(controller, request, done), row);
    FutureUtils.get(future);
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) MutateRowsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)
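
For reference, a caller of this helper only builds ordinary client-side Mutation objects; the protobuf conversion happens inside multiMutate. A minimal sketch, using hypothetical row keys and column names, of what such an input list could look like:

// Hypothetical input for multiMutate(ri, mutations): one Put and one Delete that should be
// applied atomically. Row keys, family and qualifier are made up for illustration.
// (java.util.Arrays, java.util.List and the org.apache.hadoop.hbase.client classes are assumed imported.)
List<Mutation> mutations = Arrays.asList(
    new Put(Bytes.toBytes("row-a"))
        .addColumn(Bytes.toBytes("info"), Bytes.toBytes("state"), Bytes.toBytes("OPEN")),
    new Delete(Bytes.toBytes("row-b")));
// multiMutate(regionInfo, mutations);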

Example 2 with MutateRowsResponse

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse in project hbase by apache.

The class TestConnection, method testClosedConnection:

@Test(expected = DoNotRetryIOException.class)
public void testClosedConnection() throws ServiceException, Throwable {
    byte[] family = Bytes.toBytes("cf");
    TableName tableName = TableName.valueOf(name.getMethodName());
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName)
        .setCoprocessor(MultiRowMutationEndpoint.class.getName())
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
    TEST_UTIL.getAdmin().createTable(builder.build());
    Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
    // cache the location
    try (Table table = conn.getTable(tableName)) {
        table.get(new Get(Bytes.toBytes(0)));
    } finally {
        conn.close();
    }
    Batch.Call<MultiRowMutationService, MutateRowsResponse> callable = service -> {
        throw new RuntimeException("Should not arrive here");
    };
    conn.getTable(tableName).coprocessorService(MultiRowMutationService.class,
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, callable);
}
Also used : IntStream(java.util.stream.IntStream) Waiter(org.apache.hadoop.hbase.Waiter) BeforeClass(org.junit.BeforeClass) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) LoggerFactory(org.slf4j.LoggerFactory) AtomicReference(java.util.concurrent.atomic.AtomicReference) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) HConstants(org.apache.hadoop.hbase.HConstants) Batch(org.apache.hadoop.hbase.client.coprocessor.Batch) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) TestName(org.junit.rules.TestName) ThreadLocalRandom(java.util.concurrent.ThreadLocalRandom) Configuration(org.apache.hadoop.conf.Configuration) After(org.junit.After) ClassRule(org.junit.ClassRule) ServerName(org.apache.hadoop.hbase.ServerName) Level(org.apache.hbase.thirdparty.io.netty.util.ResourceLeakDetector.Level) Bytes(org.apache.hadoop.hbase.util.Bytes) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) TableName(org.apache.hadoop.hbase.TableName) ManualEnvironmentEdge(org.apache.hadoop.hbase.util.ManualEnvironmentEdge) AfterClass(org.junit.AfterClass) Logger(org.slf4j.Logger) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Assert.assertTrue(org.junit.Assert.assertTrue) Set(java.util.Set) HBaseClassTestRule(org.apache.hadoop.hbase.HBaseClassTestRule) LargeTests(org.apache.hadoop.hbase.testclassification.LargeTests) IOException(java.io.IOException) Test(org.junit.Test) Category(org.junit.experimental.categories.Category) Collectors(java.util.stream.Collectors) MultiRowMutationEndpoint(org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint) ResourceLeakDetector(org.apache.hbase.thirdparty.io.netty.util.ResourceLeakDetector) List(java.util.List) Rule(org.junit.Rule) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) Assert.assertFalse(org.junit.Assert.assertFalse) RpcClient(org.apache.hadoop.hbase.ipc.RpcClient) EnvironmentEdgeManager(org.apache.hadoop.hbase.util.EnvironmentEdgeManager) Assert(org.junit.Assert) Assert.assertEquals(org.junit.Assert.assertEquals) TableName(org.apache.hadoop.hbase.TableName) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) Batch(org.apache.hadoop.hbase.client.coprocessor.Batch) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) Test(org.junit.Test)
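
The point of the test is that a Table obtained from an already closed Connection fails fast with DoNotRetryIOException, so the callable never runs. A minimal sketch of the same expectation written with assertThrows, assuming a JUnit version that provides it (4.13+) and the same fixtures as the test above:

// Sketch: reuses the test's conn and tableName; org.junit.Assert.assertThrows is assumed available.
Batch.Call<MultiRowMutationService, MutateRowsResponse> callable = service -> {
    throw new RuntimeException("Should not arrive here");
};
assertThrows(DoNotRetryIOException.class,
    () -> conn.getTable(tableName).coprocessorService(MultiRowMutationService.class,
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, callable));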

Example 3 with MutateRowsResponse

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse in project hbase by apache.

The class TestFromClientSide5, method testMultiRowMutationWithSingleConditionWhenConditionMatches:

@Test
public void testMultiRowMutationWithSingleConditionWhenConditionMatches() throws Exception {
    final TableName tableName = name.getTableName();
    final byte[] ROW1 = Bytes.toBytes("testRow1");
    final byte[] ROW2 = Bytes.toBytes("testRow2");
    final byte[] VALUE1 = Bytes.toBytes("testValue1");
    final byte[] VALUE2 = Bytes.toBytes("testValue2");
    try (Table t = TEST_UTIL.createTable(tableName, FAMILY)) {
        // Add initial data
        t.put(new Put(ROW2).addColumn(FAMILY, QUALIFIER, VALUE2));
        // Execute MultiRowMutation with conditions
        Put put1 = new Put(ROW).addColumn(FAMILY, QUALIFIER, VALUE);
        MutationProto m1 = ProtobufUtil.toMutation(MutationType.PUT, put1);
        Put put2 = new Put(ROW1).addColumn(FAMILY, QUALIFIER, VALUE1);
        MutationProto m2 = ProtobufUtil.toMutation(MutationType.PUT, put2);
        Delete delete = new Delete(ROW2);
        MutationProto m3 = ProtobufUtil.toMutation(MutationType.DELETE, delete);
        MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
        mrmBuilder.addMutationRequest(m1);
        mrmBuilder.addMutationRequest(m2);
        mrmBuilder.addMutationRequest(m3);
        mrmBuilder.addCondition(ProtobufUtil.toCondition(ROW2, FAMILY, QUALIFIER, CompareOperator.EQUAL, VALUE2, null));
        CoprocessorRpcChannel channel = t.coprocessorService(ROW);
        MultiRowMutationService.BlockingInterface service = MultiRowMutationService.newBlockingStub(channel);
        MutateRowsResponse response = service.mutateRows(null, mrmBuilder.build());
        // Assert
        assertTrue(response.getProcessed());
        Result r = t.get(new Get(ROW));
        assertEquals(Bytes.toString(VALUE), Bytes.toString(r.getValue(FAMILY, QUALIFIER)));
        r = t.get(new Get(ROW1));
        assertEquals(Bytes.toString(VALUE1), Bytes.toString(r.getValue(FAMILY, QUALIFIER)));
        r = t.get(new Get(ROW2));
        assertTrue(r.isEmpty());
    }
}
Also used : CoprocessorRpcChannel(org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) MutationProto(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) TableName(org.apache.hadoop.hbase.TableName) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) MutateRowsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) Test(org.junit.Test)
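
The complementary case, where the condition does not match, should leave the rows untouched and report processed as false. A hedged sketch (not the verbatim HBase test) of how that request could be built against the same initial data, reusing the builder and stub from above with a deliberately wrong expected value:

// Sketch: expect VALUE1 in ROW2 while VALUE2 is stored, so the endpoint is expected
// to apply nothing and return processed == false.
mrmBuilder.clearCondition();
mrmBuilder.addCondition(ProtobufUtil.toCondition(ROW2, FAMILY, QUALIFIER,
    CompareOperator.EQUAL, VALUE1, null));
MutateRowsResponse notProcessed = service.mutateRows(null, mrmBuilder.build());
assertFalse(notProcessed.getProcessed());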

Example 4 with MutateRowsResponse

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse in project hbase by apache.

The class RSGroupInfoManagerImpl, method multiMutate:

private void multiMutate(List<Mutation> mutations) throws IOException {
    MutateRowsRequest.Builder builder = MutateRowsRequest.newBuilder();
    for (Mutation mutation : mutations) {
        if (mutation instanceof Put) {
            builder.addMutationRequest(ProtobufUtil.toMutation(MutationProto.MutationType.PUT, mutation));
        } else if (mutation instanceof Delete) {
            builder.addMutationRequest(ProtobufUtil.toMutation(MutationProto.MutationType.DELETE, mutation));
        } else {
            throw new DoNotRetryIOException("multiMutate doesn't support " + mutation.getClass().getName());
        }
    }
    MutateRowsRequest request = builder.build();
    AsyncTable<?> table = conn.getTable(RSGROUP_TABLE_NAME);
    LOG.debug("Multimutating {} with {} mutations", RSGROUP_TABLE_NAME, mutations.size());
    FutureUtils.get(table.<MultiRowMutationService, MutateRowsResponse>coprocessorService(
        MultiRowMutationService::newStub,
        (stub, controller, done) -> stub.mutateRows(controller, request, done), ROW_KEY));
    LOG.info("Multimutating {} with {} mutations done", RSGROUP_TABLE_NAME, mutations.size());
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Mutation(org.apache.hadoop.hbase.client.Mutation) SortedSet(java.util.SortedSet) Result(org.apache.hadoop.hbase.client.Result) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) LoggerFactory(org.slf4j.LoggerFactory) Shell(org.apache.hadoop.util.Shell) StringUtils(org.apache.commons.lang3.StringUtils) ServerManager(org.apache.hadoop.hbase.master.ServerManager) RegionState(org.apache.hadoop.hbase.master.RegionState) Future(java.util.concurrent.Future) CoprocessorDescriptorBuilder(org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder) Delete(org.apache.hadoop.hbase.client.Delete) ByteArrayInputStream(java.io.ByteArrayInputStream) ProcedureExecutor(org.apache.hadoop.hbase.procedure2.ProcedureExecutor) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) Map(java.util.Map) Configuration(org.apache.hadoop.conf.Configuration) MasterServices(org.apache.hadoop.hbase.master.MasterServices) ZNodePaths(org.apache.hadoop.hbase.zookeeper.ZNodePaths) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException) Pair(org.apache.hadoop.hbase.util.Pair) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Maps(org.apache.hbase.thirdparty.com.google.common.collect.Maps) TableStateManager(org.apache.hadoop.hbase.master.TableStateManager) Address(org.apache.hadoop.hbase.net.Address) ProtobufUtil(org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil) ZKUtil(org.apache.hadoop.hbase.zookeeper.ZKUtil) RSGroupProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.RSGroupProtos) Collection(java.util.Collection) Get(org.apache.hadoop.hbase.client.Get) Set(java.util.Set) ZKWatcher(org.apache.hadoop.hbase.zookeeper.ZKWatcher) TableDescriptors(org.apache.hadoop.hbase.TableDescriptors) MasterProcedureEnv(org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv) Collectors(java.util.stream.Collectors) List(java.util.List) FutureUtils(org.apache.hadoop.hbase.util.FutureUtils) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Optional(java.util.Optional) AsyncTable(org.apache.hadoop.hbase.client.AsyncTable) ProtobufMagic(org.apache.hadoop.hbase.protobuf.ProtobufMagic) Coprocessor(org.apache.hadoop.hbase.Coprocessor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) ServerListener(org.apache.hadoop.hbase.master.ServerListener) ImmutableMap(org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap) LoadBalancer(org.apache.hadoop.hbase.master.LoadBalancer) NamespaceDescriptor(org.apache.hadoop.hbase.NamespaceDescriptor) HashMap(java.util.HashMap) TableState(org.apache.hadoop.hbase.client.TableState) Function(java.util.function.Function) TreeSet(java.util.TreeSet) RegionStateNode(org.apache.hadoop.hbase.master.assignment.RegionStateNode) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) OptionalLong(java.util.OptionalLong) HConstants(org.apache.hadoop.hbase.HConstants) MasterProcedureUtil(org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) AsyncClusterConnection(org.apache.hadoop.hbase.client.AsyncClusterConnection) LinkedList(java.util.LinkedList) ServerName(org.apache.hadoop.hbase.ServerName) Threads(org.apache.hadoop.hbase.util.Threads) 
Bytes(org.apache.hadoop.hbase.util.Bytes) TableName(org.apache.hadoop.hbase.TableName) Logger(org.slf4j.Logger) KeeperException(org.apache.zookeeper.KeeperException) Put(org.apache.hadoop.hbase.client.Put) Sets(org.apache.hbase.thirdparty.com.google.common.collect.Sets) IOException(java.io.IOException) BalanceRequest(org.apache.hadoop.hbase.client.BalanceRequest) CreateTableProcedure(org.apache.hadoop.hbase.master.procedure.CreateTableProcedure) ProcedureSyncWait(org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait) Procedure(org.apache.hadoop.hbase.procedure2.Procedure) Lists(org.apache.hbase.thirdparty.com.google.common.collect.Lists) MultiRowMutationEndpoint(org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint) RegionPlan(org.apache.hadoop.hbase.master.RegionPlan) DisabledRegionSplitPolicy(org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy) InterfaceAudience(org.apache.yetus.audience.InterfaceAudience) ConstraintException(org.apache.hadoop.hbase.constraint.ConstraintException) MutateRowsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) MutationProto(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) BalanceResponse(org.apache.hadoop.hbase.client.BalanceResponse) Collections(java.util.Collections) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) MutateRowsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)
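
The call above blocks on the coprocessor future via FutureUtils.get. Purely for illustration, a non-blocking variant of the same invocation (a sketch, not what RSGroupInfoManagerImpl actually does) could consume the CompletableFuture directly:

// Sketch: handle the MutateRowsResponse asynchronously instead of blocking on it.
table.<MultiRowMutationService, MutateRowsResponse>coprocessorService(
        MultiRowMutationService::newStub,
        (stub, controller, done) -> stub.mutateRows(controller, request, done), ROW_KEY)
    .whenComplete((resp, err) -> {
        if (err != null) {
            LOG.warn("Multimutating {} failed", RSGROUP_TABLE_NAME, err);
        } else {
            LOG.debug("Multimutating {} done, processed={}", RSGROUP_TABLE_NAME, resp.getProcessed());
        }
    });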

Example 5 with MutateRowsResponse

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse in project hbase by apache.

The class TestMetaUpdatesGoToPriorityQueue, method multiMutate:

private void multiMutate(byte[] row, List<Mutation> mutations) throws IOException {
    MutateRowsRequest.Builder builder = MutateRowsRequest.newBuilder();
    for (Mutation mutation : mutations) {
        if (mutation instanceof Put) {
            builder.addMutationRequest(ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.PUT, mutation));
        } else if (mutation instanceof Delete) {
            builder.addMutationRequest(ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.DELETE, mutation));
        } else {
            throw new DoNotRetryIOException("multi in MetaEditor doesn't support " + mutation.getClass().getName());
        }
    }
    MutateRowsRequest request = builder.build();
    AsyncTable<?> table = UTIL.getAsyncConnection().getTable(TableName.META_TABLE_NAME);
    CompletableFuture<MutateRowsResponse> future =
        table.<MultiRowMutationService, MutateRowsResponse>coprocessorService(
            MultiRowMutationService::newStub,
            (stub, controller, done) -> stub.mutateRows(controller, request, done), row);
    FutureUtils.get(future);
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) MutateRowsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)
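
The request-building loop is identical in Examples 1, 4, and 5. If the duplication ever became a concern, it could be factored into a small helper along the lines of the following sketch (a hypothetical utility, not an existing HBase API):

// Hypothetical helper: convert client Mutations into a MutateRowsRequest, rejecting
// anything other than Put or Delete, exactly as the snippets above do.
private static MutateRowsRequest toMutateRowsRequest(List<Mutation> mutations) throws IOException {
    MutateRowsRequest.Builder builder = MutateRowsRequest.newBuilder();
    for (Mutation mutation : mutations) {
        if (mutation instanceof Put) {
            builder.addMutationRequest(
                ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.PUT, mutation));
        } else if (mutation instanceof Delete) {
            builder.addMutationRequest(
                ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.DELETE, mutation));
        } else {
            throw new DoNotRetryIOException(
                "multi mutation doesn't support " + mutation.getClass().getName());
        }
    }
    return builder.build();
}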

Aggregations

MultiRowMutationService (org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService): 11 usages
MutateRowsResponse (org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse): 11 usages
MutateRowsRequest (org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest): 10 usages
TableName (org.apache.hadoop.hbase.TableName): 9 usages
MutationProto (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto): 8 usages
Test (org.junit.Test): 8 usages
CoprocessorRpcChannel (org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel): 6 usages
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 3 usages
Delete (org.apache.hadoop.hbase.client.Delete): 3 usages
Mutation (org.apache.hadoop.hbase.client.Mutation): 3 usages
Put (org.apache.hadoop.hbase.client.Put): 3 usages
IOException (java.io.IOException): 2 usages
List (java.util.List): 2 usages
Set (java.util.Set): 2 usages
Collectors (java.util.stream.Collectors): 2 usages
Configuration (org.apache.hadoop.conf.Configuration): 2 usages
HConstants (org.apache.hadoop.hbase.HConstants): 2 usages
ServerName (org.apache.hadoop.hbase.ServerName): 2 usages
MultiRowMutationEndpoint (org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint): 2 usages
FilterList (org.apache.hadoop.hbase.filter.FilterList): 2 usages