
Example 1 with MultiRowMutationService

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService in project hbase by apache.

Source: class RegionStateStore, method multiMutate.

/**
 * Performs an atomic multi-mutate operation against the given table. Used by the likes of merge
 * and split as these want to make atomic mutations across multiple rows.
 */
private void multiMutate(RegionInfo ri, List<Mutation> mutations) throws IOException {
    debugLogMutations(mutations);
    byte[] row = Bytes.toBytes(RegionReplicaUtil.getRegionInfoForDefaultReplica(ri).getRegionNameAsString() + HConstants.DELIMITER);
    MutateRowsRequest.Builder builder = MutateRowsRequest.newBuilder();
    for (Mutation mutation : mutations) {
        if (mutation instanceof Put) {
            builder.addMutationRequest(ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.PUT, mutation));
        } else if (mutation instanceof Delete) {
            builder.addMutationRequest(ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.DELETE, mutation));
        } else {
            throw new DoNotRetryIOException("multi in MetaEditor doesn't support " + mutation.getClass().getName());
        }
    }
    MutateRowsRequest request = builder.build();
    AsyncTable<?> table = master.getConnection().toAsyncConnection().getTable(TableName.META_TABLE_NAME);
    CompletableFuture<MutateRowsResponse> future =
        table.<MultiRowMutationService, MutateRowsResponse>coprocessorService(
            MultiRowMutationService::newStub,
            (stub, controller, done) -> stub.mutateRows(controller, request, done), row);
    FutureUtils.get(future);
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) MutateRowsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)
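Distilled from the method above, a minimal standalone sketch of the endpoint-invocation pattern it uses. The helper class and method names here are illustrative only and are not part of HBase:

import java.io.IOException;
import java.util.concurrent.CompletableFuture;

import org.apache.hadoop.hbase.client.AsyncTable;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse;
import org.apache.hadoop.hbase.util.FutureUtils;

// Hypothetical helper (sketch only): invoke the MultiRowMutation endpoint on the region
// hosting the given row and block until the atomic mutation completes.
final class MultiRowMutationInvoker {

    private MultiRowMutationInvoker() {
    }

    static MutateRowsResponse invokeMutateRows(AsyncTable<?> table, MutateRowsRequest request,
            byte[] row) throws IOException {
        CompletableFuture<MutateRowsResponse> future =
            table.<MultiRowMutationService, MutateRowsResponse>coprocessorService(
                MultiRowMutationService::newStub,
                (stub, controller, done) -> stub.mutateRows(controller, request, done), row);
        // Block on the future and surface failures as IOException, matching the usage above.
        return FutureUtils.get(future);
    }
}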

Example 2 with MultiRowMutationService

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService in project hbase by apache.

Source: class TestConnection, method testClosedConnection.

@Test(expected = DoNotRetryIOException.class)
public void testClosedConnection() throws ServiceException, Throwable {
    byte[] family = Bytes.toBytes("cf");
    TableName tableName = TableName.valueOf(name.getMethodName());
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName)
        .setCoprocessor(MultiRowMutationEndpoint.class.getName())
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
    TEST_UTIL.getAdmin().createTable(builder.build());
    Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
    // cache the location
    try (Table table = conn.getTable(tableName)) {
        table.get(new Get(Bytes.toBytes(0)));
    } finally {
        conn.close();
    }
    Batch.Call<MultiRowMutationService, MutateRowsResponse> callable = service -> {
        throw new RuntimeException("Should not arrive here");
    };
    conn.getTable(tableName).coprocessorService(MultiRowMutationService.class,
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, callable);
}
Also used : IntStream(java.util.stream.IntStream) Waiter(org.apache.hadoop.hbase.Waiter) BeforeClass(org.junit.BeforeClass) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) LoggerFactory(org.slf4j.LoggerFactory) AtomicReference(java.util.concurrent.atomic.AtomicReference) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) HConstants(org.apache.hadoop.hbase.HConstants) Batch(org.apache.hadoop.hbase.client.coprocessor.Batch) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) TestName(org.junit.rules.TestName) ThreadLocalRandom(java.util.concurrent.ThreadLocalRandom) Configuration(org.apache.hadoop.conf.Configuration) After(org.junit.After) ClassRule(org.junit.ClassRule) ServerName(org.apache.hadoop.hbase.ServerName) Level(org.apache.hbase.thirdparty.io.netty.util.ResourceLeakDetector.Level) Bytes(org.apache.hadoop.hbase.util.Bytes) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) TableName(org.apache.hadoop.hbase.TableName) ManualEnvironmentEdge(org.apache.hadoop.hbase.util.ManualEnvironmentEdge) AfterClass(org.junit.AfterClass) Logger(org.slf4j.Logger) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Assert.assertTrue(org.junit.Assert.assertTrue) Set(java.util.Set) HBaseClassTestRule(org.apache.hadoop.hbase.HBaseClassTestRule) LargeTests(org.apache.hadoop.hbase.testclassification.LargeTests) IOException(java.io.IOException) Test(org.junit.Test) Category(org.junit.experimental.categories.Category) Collectors(java.util.stream.Collectors) MultiRowMutationEndpoint(org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint) ResourceLeakDetector(org.apache.hbase.thirdparty.io.netty.util.ResourceLeakDetector) List(java.util.List) Rule(org.junit.Rule) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) Assert.assertFalse(org.junit.Assert.assertFalse) RpcClient(org.apache.hadoop.hbase.ipc.RpcClient) EnvironmentEdgeManager(org.apache.hadoop.hbase.util.EnvironmentEdgeManager) Assert(org.junit.Assert) Assert.assertEquals(org.junit.Assert.assertEquals) TableName(org.apache.hadoop.hbase.TableName) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) Batch(org.apache.hadoop.hbase.client.coprocessor.Batch) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) Test(org.junit.Test)
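The test asserts the failure through @Test(expected = DoNotRetryIOException.class). A sketch of the same check written with assertThrows (assuming JUnit 4.13+ and a static import of org.junit.Assert.assertThrows) would look like this:

    // Sketch only: equivalent assertion with assertThrows instead of the
    // @Test(expected = ...) annotation used above.
    assertThrows(DoNotRetryIOException.class,
        () -> conn.getTable(tableName).coprocessorService(MultiRowMutationService.class,
            HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, callable));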

Example 3 with MultiRowMutationService

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService in project hbase by apache.

Source: class RSGroupInfoManagerImpl, method multiMutate.

private void multiMutate(List<Mutation> mutations) throws IOException {
    MutateRowsRequest.Builder builder = MutateRowsRequest.newBuilder();
    for (Mutation mutation : mutations) {
        if (mutation instanceof Put) {
            builder.addMutationRequest(ProtobufUtil.toMutation(MutationProto.MutationType.PUT, mutation));
        } else if (mutation instanceof Delete) {
            builder.addMutationRequest(ProtobufUtil.toMutation(MutationProto.MutationType.DELETE, mutation));
        } else {
            throw new DoNotRetryIOException("multiMutate doesn't support " + mutation.getClass().getName());
        }
    }
    MutateRowsRequest request = builder.build();
    AsyncTable<?> table = conn.getTable(RSGROUP_TABLE_NAME);
    LOG.debug("Multimutating {} with {} mutations", RSGROUP_TABLE_NAME, mutations.size());
    FutureUtils.get(table.<MultiRowMutationService, MutateRowsResponse>coprocessorService(
        MultiRowMutationService::newStub,
        (stub, controller, done) -> stub.mutateRows(controller, request, done), ROW_KEY));
    LOG.info("Multimutating {} with {} mutations done", RSGROUP_TABLE_NAME, mutations.size());
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Mutation(org.apache.hadoop.hbase.client.Mutation) SortedSet(java.util.SortedSet) Result(org.apache.hadoop.hbase.client.Result) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) LoggerFactory(org.slf4j.LoggerFactory) Shell(org.apache.hadoop.util.Shell) StringUtils(org.apache.commons.lang3.StringUtils) ServerManager(org.apache.hadoop.hbase.master.ServerManager) RegionState(org.apache.hadoop.hbase.master.RegionState) Future(java.util.concurrent.Future) CoprocessorDescriptorBuilder(org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder) Delete(org.apache.hadoop.hbase.client.Delete) ByteArrayInputStream(java.io.ByteArrayInputStream) ProcedureExecutor(org.apache.hadoop.hbase.procedure2.ProcedureExecutor) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) Map(java.util.Map) Configuration(org.apache.hadoop.conf.Configuration) MasterServices(org.apache.hadoop.hbase.master.MasterServices) ZNodePaths(org.apache.hadoop.hbase.zookeeper.ZNodePaths) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException) Pair(org.apache.hadoop.hbase.util.Pair) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Maps(org.apache.hbase.thirdparty.com.google.common.collect.Maps) TableStateManager(org.apache.hadoop.hbase.master.TableStateManager) Address(org.apache.hadoop.hbase.net.Address) ProtobufUtil(org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil) ZKUtil(org.apache.hadoop.hbase.zookeeper.ZKUtil) RSGroupProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.RSGroupProtos) Collection(java.util.Collection) Get(org.apache.hadoop.hbase.client.Get) Set(java.util.Set) ZKWatcher(org.apache.hadoop.hbase.zookeeper.ZKWatcher) TableDescriptors(org.apache.hadoop.hbase.TableDescriptors) MasterProcedureEnv(org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv) Collectors(java.util.stream.Collectors) List(java.util.List) FutureUtils(org.apache.hadoop.hbase.util.FutureUtils) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Optional(java.util.Optional) AsyncTable(org.apache.hadoop.hbase.client.AsyncTable) ProtobufMagic(org.apache.hadoop.hbase.protobuf.ProtobufMagic) Coprocessor(org.apache.hadoop.hbase.Coprocessor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) ServerListener(org.apache.hadoop.hbase.master.ServerListener) ImmutableMap(org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap) LoadBalancer(org.apache.hadoop.hbase.master.LoadBalancer) NamespaceDescriptor(org.apache.hadoop.hbase.NamespaceDescriptor) HashMap(java.util.HashMap) TableState(org.apache.hadoop.hbase.client.TableState) Function(java.util.function.Function) TreeSet(java.util.TreeSet) RegionStateNode(org.apache.hadoop.hbase.master.assignment.RegionStateNode) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) OptionalLong(java.util.OptionalLong) HConstants(org.apache.hadoop.hbase.HConstants) MasterProcedureUtil(org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) AsyncClusterConnection(org.apache.hadoop.hbase.client.AsyncClusterConnection) LinkedList(java.util.LinkedList) ServerName(org.apache.hadoop.hbase.ServerName) Threads(org.apache.hadoop.hbase.util.Threads) 
Bytes(org.apache.hadoop.hbase.util.Bytes) TableName(org.apache.hadoop.hbase.TableName) Logger(org.slf4j.Logger) KeeperException(org.apache.zookeeper.KeeperException) Put(org.apache.hadoop.hbase.client.Put) Sets(org.apache.hbase.thirdparty.com.google.common.collect.Sets) IOException(java.io.IOException) BalanceRequest(org.apache.hadoop.hbase.client.BalanceRequest) CreateTableProcedure(org.apache.hadoop.hbase.master.procedure.CreateTableProcedure) ProcedureSyncWait(org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait) Procedure(org.apache.hadoop.hbase.procedure2.Procedure) Lists(org.apache.hbase.thirdparty.com.google.common.collect.Lists) MultiRowMutationEndpoint(org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint) RegionPlan(org.apache.hadoop.hbase.master.RegionPlan) DisabledRegionSplitPolicy(org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy) InterfaceAudience(org.apache.yetus.audience.InterfaceAudience) ConstraintException(org.apache.hadoop.hbase.constraint.ConstraintException) MutateRowsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) MutationProto(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) BalanceResponse(org.apache.hadoop.hbase.client.BalanceResponse) Collections(java.util.Collections) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) MutateRowsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)
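A minimal caller-side sketch of staging mutations for this method, written as if it lived alongside multiMutate in the same class. The row keys, the "m" family, the "i" qualifier, and the serialized group bytes are illustrative assumptions rather than the actual RSGroupInfoManagerImpl constants:

// Hypothetical caller (sketch only): replace an old group row with a new one in a single
// atomic call to the multiMutate method shown above.
private void moveGroupEntry(String oldGroup, String newGroup, byte[] serializedGroupInfo)
        throws IOException {
    List<Mutation> mutations = new ArrayList<>();
    Put put = new Put(Bytes.toBytes(newGroup));
    // Family "m" and qualifier "i" are assumptions used only for illustration.
    put.addColumn(Bytes.toBytes("m"), Bytes.toBytes("i"), serializedGroupInfo);
    mutations.add(put);
    mutations.add(new Delete(Bytes.toBytes(oldGroup)));
    multiMutate(mutations);
}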

Example 4 with MultiRowMutationService

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService in project hbase by apache.

Source: class TestMetaUpdatesGoToPriorityQueue, method multiMutate.

private void multiMutate(byte[] row, List<Mutation> mutations) throws IOException {
    MutateRowsRequest.Builder builder = MutateRowsRequest.newBuilder();
    for (Mutation mutation : mutations) {
        if (mutation instanceof Put) {
            builder.addMutationRequest(ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.PUT, mutation));
        } else if (mutation instanceof Delete) {
            builder.addMutationRequest(ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.DELETE, mutation));
        } else {
            throw new DoNotRetryIOException("multi in MetaEditor doesn't support " + mutation.getClass().getName());
        }
    }
    MutateRowsRequest request = builder.build();
    AsyncTable<?> table = UTIL.getAsyncConnection().getTable(TableName.META_TABLE_NAME);
    CompletableFuture<MutateRowsResponse> future =
        table.<MultiRowMutationService, MutateRowsResponse>coprocessorService(
            MultiRowMutationService::newStub,
            (stub, controller, done) -> stub.mutateRows(controller, request, done), row);
    FutureUtils.get(future);
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) MutateRowsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)
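A sketch of how the test might invoke this helper for a single hbase:meta row. The regionInfo variable, the "state" qualifier, and the "OPEN" value are illustrative assumptions, not the exact column the test writes:

    // Hypothetical invocation (sketch only): put one column on a region's hbase:meta row
    // through the atomic multiMutate path defined above.
    byte[] row = regionInfo.getRegionName();
    Put put = new Put(row);
    put.addColumn(HConstants.CATALOG_FAMILY, Bytes.toBytes("state"), Bytes.toBytes("OPEN"));
    List<Mutation> mutations = new ArrayList<>();
    mutations.add(put);
    multiMutate(row, mutations);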

Aggregations

MultiRowMutationService (org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService): 4
MutateRowsResponse (org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse): 4
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 3
Delete (org.apache.hadoop.hbase.client.Delete): 3
Mutation (org.apache.hadoop.hbase.client.Mutation): 3
Put (org.apache.hadoop.hbase.client.Put): 3
MutateRowsRequest (org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest): 3
IOException (java.io.IOException): 2
List (java.util.List): 2
Set (java.util.Set): 2
Collectors (java.util.stream.Collectors): 2
Configuration (org.apache.hadoop.conf.Configuration): 2
HConstants (org.apache.hadoop.hbase.HConstants): 2
ServerName (org.apache.hadoop.hbase.ServerName): 2
TableName (org.apache.hadoop.hbase.TableName): 2
MultiRowMutationEndpoint (org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint): 2
Bytes (org.apache.hadoop.hbase.util.Bytes): 2
Logger (org.slf4j.Logger): 2
LoggerFactory (org.slf4j.LoggerFactory): 2
ByteArrayInputStream (java.io.ByteArrayInputStream): 1