Search in sources :

Example 1 with ServerRpcController

use of org.apache.hadoop.hbase.ipc.ServerRpcController in project hbase by apache.

From the class TestCoprocessorEndpoint, method testCoprocessorServiceNullResponse:

@Test
public void testCoprocessorServiceNullResponse() throws Throwable {
    List<HRegionLocation> regions;
    try (RegionLocator rl = util.getConnection().getRegionLocator(TEST_TABLE)) {
        regions = rl.getAllRegionLocations();
    }
    final TestProtos.EchoRequestProto request = TestProtos.EchoRequestProto.newBuilder().setMessage("hello").build();
    // try-with-resources: the original opened the table before fetching region
    // locations, so a failure there leaked the table (close() was only reached
    // from the later try/finally). Acquiring it here guarantees close() runs.
    try (Table table = util.getConnection().getTable(TEST_TABLE)) {
        // scan: for all regions
        final RpcController controller = new ServerRpcController();
        // test that null results are supported
        Map<byte[], String> results = table.coprocessorService(TestRpcServiceProtos.TestProtobufRpcProto.class, ROWS[0], ROWS[ROWS.length - 1], new Batch.Call<TestRpcServiceProtos.TestProtobufRpcProto, String>() {

            @Override
            public String call(TestRpcServiceProtos.TestProtobufRpcProto instance) throws IOException {
                CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto> callback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
                instance.echo(controller, request, callback);
                TestProtos.EchoResponseProto response = callback.get();
                LOG.debug("Batch.Call got result " + response);
                // Deliberately discard the echo response: this test verifies that
                // a null per-region result is propagated through coprocessorService.
                return null;
            }
        });
        for (Map.Entry<byte[], String> e : results.entrySet()) {
            LOG.info("Got value " + e.getValue() + " for region " + Bytes.toStringBinary(e.getKey()));
        }
        assertEquals(3, results.size());
        // Every region must be present as a key, and every value must be null.
        for (HRegionLocation region : regions) {
            HRegionInfo info = region.getRegionInfo();
            LOG.info("Region info is " + info.getRegionNameAsString());
            assertTrue(results.containsKey(info.getRegionName()));
            assertNull(results.get(info.getRegionName()));
        }
    }
}
Also used : TestRpcServiceProtos(org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos) RegionLocator(org.apache.hadoop.hbase.client.RegionLocator) Table(org.apache.hadoop.hbase.client.Table) IOException(java.io.IOException) TestProtos(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos) ServerRpcController(org.apache.hadoop.hbase.ipc.ServerRpcController) ServerRpcController(org.apache.hadoop.hbase.ipc.ServerRpcController) RpcController(com.google.protobuf.RpcController) HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) CoprocessorRpcUtils(org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils) Batch(org.apache.hadoop.hbase.client.coprocessor.Batch) Map(java.util.Map) TreeMap(java.util.TreeMap) Test(org.junit.Test)

Example 2 with ServerRpcController

use of org.apache.hadoop.hbase.ipc.ServerRpcController in project hbase by apache.

From the class SecureBulkLoadEndpointClient, method bulkLoadHFiles:

/**
 * Performs a secure bulk load of the given HFiles through the
 * SecureBulkLoadService coprocessor endpoint on the region containing
 * {@code startRow}.
 *
 * @param familyPaths pairs of (column family, HFile path) to load
 * @param userToken delegation token for the requesting user; may be null,
 *        in which case an empty token is sent
 * @param bulkToken server-issued token identifying this bulk-load session
 * @param startRow row key used to route the call to a single region
 * @return whether the server reported the files as loaded
 * @throws IOException if the endpoint call fails for any reason
 */
public boolean bulkLoadHFiles(final List<Pair<byte[], String>> familyPaths, final Token<?> userToken, final String bulkToken, final byte[] startRow) throws IOException {
    // HTable#coprocessorService methods that take start and end rowkeys; see HBASE-9639
    try {
        CoprocessorRpcChannel channel = table.coprocessorService(startRow);
        SecureBulkLoadProtos.SecureBulkLoadService instance = ProtobufUtil.newServiceStub(SecureBulkLoadProtos.SecureBulkLoadService.class, channel);
        DelegationToken protoDT = DelegationToken.newBuilder().build();
        if (userToken != null) {
            protoDT = DelegationToken.newBuilder().setIdentifier(ByteStringer.wrap(userToken.getIdentifier())).setPassword(ByteStringer.wrap(userToken.getPassword())).setKind(userToken.getKind().toString()).setService(userToken.getService().toString()).build();
        }
        List<ClientProtos.BulkLoadHFileRequest.FamilyPath> protoFamilyPaths = new ArrayList<>(familyPaths.size());
        for (Pair<byte[], String> el : familyPaths) {
            protoFamilyPaths.add(ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder().setFamily(ByteStringer.wrap(el.getFirst())).setPath(el.getSecond()).build());
        }
        SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request = SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.newBuilder().setFsToken(protoDT).addAllFamilyPath(protoFamilyPaths).setBulkToken(bulkToken).build();
        ServerRpcController controller = new ServerRpcController();
        CoprocessorRpcUtils.BlockingRpcCallback<SecureBulkLoadProtos.SecureBulkLoadHFilesResponse> rpcCallback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
        instance.secureBulkLoadHFiles(controller, request, rpcCallback);
        SecureBulkLoadProtos.SecureBulkLoadHFilesResponse response = rpcCallback.get();
        if (controller.failedOnException()) {
            throw controller.getFailedOn();
        }
        return response.getLoaded();
    } catch (IOException ioe) {
        // getFailedOn() already yields an IOException; rethrow it directly
        // instead of double-wrapping it in another IOException, so callers
        // see the original failure type (e.g. DoNotRetryIOException).
        throw ioe;
    } catch (Throwable throwable) {
        throw new IOException(throwable);
    }
}
Also used : CoprocessorRpcChannel(org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel) DelegationToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken) ArrayList(java.util.ArrayList) IOException(java.io.IOException) ServerRpcController(org.apache.hadoop.hbase.ipc.ServerRpcController) SecureBulkLoadProtos(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos) CoprocessorRpcUtils(org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils) ClientProtos(org.apache.hadoop.hbase.protobuf.generated.ClientProtos)

Example 3 with ServerRpcController

use of org.apache.hadoop.hbase.ipc.ServerRpcController in project hbase by apache.

From the class TestCoprocessorEndpoint, method testCoprocessorService:

@Test
public void testCoprocessorService() throws Throwable {
    List<HRegionLocation> regions;
    try (RegionLocator rl = util.getConnection().getRegionLocator(TEST_TABLE)) {
        regions = rl.getAllRegionLocations();
    }
    final TestProtos.EchoRequestProto request = TestProtos.EchoRequestProto.newBuilder().setMessage("hello").build();
    final Map<byte[], String> results = Collections.synchronizedMap(new TreeMap<byte[], String>(Bytes.BYTES_COMPARATOR));
    // try-with-resources: the original opened the table before fetching region
    // locations, so a failure there leaked the table. Acquiring it here
    // guarantees close() runs on every path.
    try (Table table = util.getConnection().getTable(TEST_TABLE)) {
        final RpcController controller = new ServerRpcController();
        // The call and callback were duplicated verbatim for both scans below;
        // define each once and reuse it.
        final Batch.Call<TestRpcServiceProtos.TestProtobufRpcProto, TestProtos.EchoResponseProto> echoCall = new Batch.Call<TestRpcServiceProtos.TestProtobufRpcProto, TestProtos.EchoResponseProto>() {

            @Override
            public TestProtos.EchoResponseProto call(TestRpcServiceProtos.TestProtobufRpcProto instance) throws IOException {
                LOG.debug("Default response is " + TestProtos.EchoRequestProto.getDefaultInstance());
                CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto> callback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
                instance.echo(controller, request, callback);
                TestProtos.EchoResponseProto response = callback.get();
                LOG.debug("Batch.Call returning result " + response);
                return response;
            }
        };
        final Batch.Callback<TestProtos.EchoResponseProto> collector = new Batch.Callback<TestProtos.EchoResponseProto>() {

            @Override
            public void update(byte[] region, byte[] row, TestProtos.EchoResponseProto result) {
                assertNotNull(result);
                assertEquals("hello", result.getMessage());
                results.put(region, result.getMessage());
            }
        };
        // scan: for all regions
        table.coprocessorService(TestRpcServiceProtos.TestProtobufRpcProto.class, ROWS[0], ROWS[ROWS.length - 1], echoCall, collector);
        for (Map.Entry<byte[], String> e : results.entrySet()) {
            LOG.info("Got value " + e.getValue() + " for region " + Bytes.toStringBinary(e.getKey()));
        }
        assertEquals(3, results.size());
        for (HRegionLocation info : regions) {
            LOG.info("Region info is " + info.getRegionInfo().getRegionNameAsString());
            assertTrue(results.containsKey(info.getRegionInfo().getRegionName()));
        }
        results.clear();
        // scan: for region 2 and region 3
        table.coprocessorService(TestRpcServiceProtos.TestProtobufRpcProto.class, ROWS[rowSeperator1], ROWS[ROWS.length - 1], echoCall, collector);
        for (Map.Entry<byte[], String> e : results.entrySet()) {
            LOG.info("Got value " + e.getValue() + " for region " + Bytes.toStringBinary(e.getKey()));
        }
        assertEquals(2, results.size());
    }
}
Also used : TestRpcServiceProtos(org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos) TestProtos(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos) ServerRpcController(org.apache.hadoop.hbase.ipc.ServerRpcController) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) Batch(org.apache.hadoop.hbase.client.coprocessor.Batch) RegionLocator(org.apache.hadoop.hbase.client.RegionLocator) Table(org.apache.hadoop.hbase.client.Table) IOException(java.io.IOException) ServerRpcController(org.apache.hadoop.hbase.ipc.ServerRpcController) RpcController(com.google.protobuf.RpcController) CoprocessorRpcUtils(org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils) Map(java.util.Map) TreeMap(java.util.TreeMap) Test(org.junit.Test)

Example 4 with ServerRpcController

use of org.apache.hadoop.hbase.ipc.ServerRpcController in project hbase by apache.

From the class MasterRpcServices, method execMasterService:

@Override
public ClientProtos.CoprocessorServiceResponse execMasterService(final RpcController controller, final ClientProtos.CoprocessorServiceRequest request) throws ServiceException {
    try {
        // Refuse coprocessor calls until the master has fully initialized.
        master.checkInitialized();
        ClientProtos.CoprocessorServiceCall call = request.getCall();
        String serviceName = call.getServiceName();
        String methodName = call.getMethodName();
        if (!master.coprocessorServiceHandlers.containsKey(serviceName)) {
            throw new UnknownProtocolException(null, "No registered Master Coprocessor Endpoint found for " + serviceName + ". Has it been enabled?");
        }
        // Resolve the registered service and the requested method, then
        // deserialize the embedded request message for that method.
        com.google.protobuf.Service service = master.coprocessorServiceHandlers.get(serviceName);
        com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc = service.getDescriptorForType();
        com.google.protobuf.Descriptors.MethodDescriptor methodDesc = CoprocessorRpcUtils.getMethodDescriptor(methodName, serviceDesc);
        com.google.protobuf.Message serviceRequest = CoprocessorRpcUtils.getRequest(service, methodDesc, call.getRequest());
        // Invoke the endpoint; the callback accumulates the (possibly null)
        // response message into a builder of the method's response type.
        ServerRpcController serverController = new ServerRpcController();
        final com.google.protobuf.Message.Builder builder = service.getResponsePrototype(methodDesc).newBuilderForType();
        service.callMethod(methodDesc, serverController, serviceRequest, new com.google.protobuf.RpcCallback<com.google.protobuf.Message>() {

            @Override
            public void run(com.google.protobuf.Message message) {
                if (message == null) {
                    return;
                }
                builder.mergeFrom(message);
            }
        });
        // Surface any failure the endpoint recorded on the controller.
        if (serverController.getFailedOn() != null) {
            throw serverController.getFailedOn();
        }
        return CoprocessorRpcUtils.getResponse(builder.build(), HConstants.EMPTY_BYTE_ARRAY);
    } catch (IOException ie) {
        throw new ServiceException(ie);
    }
}
Also used : IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) ServerRpcController(org.apache.hadoop.hbase.ipc.ServerRpcController) ServiceException(org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException) UnknownProtocolException(org.apache.hadoop.hbase.exceptions.UnknownProtocolException)

Example 5 with ServerRpcController

use of org.apache.hadoop.hbase.ipc.ServerRpcController in project hbase by apache.

From the class VisibilityClient, method setOrClearAuths:

/**
 * Grants or revokes visibility-label authorizations for {@code user} by
 * invoking the VisibilityLabelsService coprocessor endpoint on the labels
 * table.
 *
 * @param connection connection used to reach the labels table
 * @param auths labels to grant or revoke; empty strings are skipped
 * @param user the user whose authorizations are being modified
 * @param setOrClear {@code true} to grant ({@code setAuths}), {@code false}
 *        to revoke ({@code clearAuths})
 * @return the response from the single labels-table region
 * @throws Throwable the server-side failure recorded on the controller, if any
 */
private static VisibilityLabelsResponse setOrClearAuths(Connection connection, final String[] auths, final String user, final boolean setOrClear) throws IOException, ServiceException, Throwable {
    try (Table table = connection.getTable(LABELS_TABLE_NAME)) {
        Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse> callable = new Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse>() {

            // NOTE(review): controller and rpcCallback are shared across all
            // invocations of call(); this looks safe only because the labels
            // table has a single region (so call() runs once) — confirm.
            ServerRpcController controller = new ServerRpcController();

            CoprocessorRpcUtils.BlockingRpcCallback<VisibilityLabelsResponse> rpcCallback = new CoprocessorRpcUtils.BlockingRpcCallback<>();

            public VisibilityLabelsResponse call(VisibilityLabelsService service) throws IOException {
                // Build the request: target user plus every non-empty label.
                SetAuthsRequest.Builder setAuthReqBuilder = SetAuthsRequest.newBuilder();
                setAuthReqBuilder.setUser(ByteStringer.wrap(Bytes.toBytes(user)));
                for (String auth : auths) {
                    if (auth.length() > 0) {
                        setAuthReqBuilder.addAuth((ByteString.copyFromUtf8(auth)));
                    }
                }
                // The same request message serves both operations; only the
                // endpoint method invoked differs.
                if (setOrClear) {
                    service.setAuths(controller, setAuthReqBuilder.build(), rpcCallback);
                } else {
                    service.clearAuths(controller, setAuthReqBuilder.build(), rpcCallback);
                }
                VisibilityLabelsResponse response = rpcCallback.get();
                // Re-throw any exception the endpoint recorded server-side.
                if (controller.failedOnException()) {
                    throw controller.getFailedOn();
                }
                return response;
            }
        };
        // Empty start/end rows: run against every region of the labels table.
        Map<byte[], VisibilityLabelsResponse> result = table.coprocessorService(VisibilityLabelsService.class, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, callable);
        // There will be exactly one region for the labels table, and so
        // exactly one entry in the result map.
        return result.values().iterator().next();
    }
}
Also used : SetAuthsRequest(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest) VisibilityLabelsService(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService) Table(org.apache.hadoop.hbase.client.Table) ByteString(com.google.protobuf.ByteString) ServerRpcController(org.apache.hadoop.hbase.ipc.ServerRpcController) CoprocessorRpcUtils(org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils) Batch(org.apache.hadoop.hbase.client.coprocessor.Batch) VisibilityLabelsResponse(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse)

Aggregations

ServerRpcController (org.apache.hadoop.hbase.ipc.ServerRpcController)31 IOException (java.io.IOException)24 Batch (org.apache.hadoop.hbase.client.coprocessor.Batch)21 BlockingRpcCallback (org.apache.hadoop.hbase.ipc.BlockingRpcCallback)17 MetaDataService (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService)15 KeyValueBuilder (org.apache.phoenix.hbase.index.util.KeyValueBuilder)13 ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder)12 CoprocessorRpcUtils (org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils)12 PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException)12 NonTxIndexBuilder (org.apache.phoenix.hbase.index.covered.NonTxIndexBuilder)12 PhoenixIndexBuilder (org.apache.phoenix.index.PhoenixIndexBuilder)12 MetaDataResponse (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse)11 Mutation (org.apache.hadoop.hbase.client.Mutation)10 MutationProto (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)10 MetaDataMutationResult (org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult)8 Table (org.apache.hadoop.hbase.client.Table)6 SQLException (java.sql.SQLException)5 Map (java.util.Map)5 TreeMap (java.util.TreeMap)5 HTableInterface (org.apache.hadoop.hbase.client.HTableInterface)5