Use of org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel in project hbase by apache.
The class TestGenerateDelegationToken, method testTokenAuth.
private void testTokenAuth(Class<? extends RpcClient> rpcImplClass)
    throws IOException, ServiceException {
  TEST_UTIL.getConfiguration().set(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY,
      rpcImplClass.getName());
  try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
      Table table = conn.getTable(TableName.META_TABLE_NAME)) {
    CoprocessorRpcChannel rpcChannel = table.coprocessorService(HConstants.EMPTY_START_ROW);
    AuthenticationProtos.AuthenticationService.BlockingInterface service =
        AuthenticationProtos.AuthenticationService.newBlockingStub(rpcChannel);
    WhoAmIResponse response = service.whoAmI(null, WhoAmIRequest.getDefaultInstance());
    assertEquals(USERNAME, response.getUsername());
    assertEquals(AuthenticationMethod.TOKEN.name(), response.getAuthMethod());
    try {
      service.getAuthenticationToken(null, GetAuthenticationTokenRequest.getDefaultInstance());
    } catch (ServiceException e) {
      AccessDeniedException exc = (AccessDeniedException) ProtobufUtil.handleRemoteException(e);
      assertTrue(exc.getMessage().contains(
          "Token generation only allowed for Kerberos authenticated clients"));
    }
  }
}
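For context, the calling test typically drives this helper once per RPC client implementation. A minimal driver sketch, assuming the BlockingRpcClient and NettyRpcClient implementations shipped with this HBase version (the test method name below is illustrative, not taken from the test source):

@Test
public void test() throws Exception {
  // Exercise token authentication over both RPC client implementations.
  testTokenAuth(BlockingRpcClient.class);
  testTokenAuth(NettyRpcClient.class);
}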
Use of org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel in project phoenix by apache.
The class StatisticsWriter, method commitStats.
public void commitStats(List<Mutation> mutations, StatisticsCollector statsCollector)
    throws IOException {
  commitLastStatsUpdatedTime(statsCollector);
  if (mutations.size() > 0) {
    byte[] row = mutations.get(0).getRow();
    MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
    for (Mutation m : mutations) {
      mrmBuilder.addMutationRequest(ProtobufUtil.toMutation(getMutationType(m), m));
    }
    MutateRowsRequest mrm = mrmBuilder.build();
    CoprocessorRpcChannel channel = statsWriterTable.coprocessorService(row);
    MultiRowMutationService.BlockingInterface service =
        MultiRowMutationService.newBlockingStub(channel);
    try {
      service.mutateRows(null, mrm);
    } catch (ServiceException ex) {
      // Propagate the converted failure instead of silently swallowing it.
      throw ProtobufUtil.toIOException(ex);
    }
  }
}
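The getMutationType helper used above is not shown in this snippet. A hypothetical sketch that matches the ProtobufUtil.toMutation(type, mutation) call would simply map each client-side mutation onto the corresponding protobuf mutation type:

// Hypothetical helper (not from the Phoenix source shown here): choose the
// protobuf MutationType for each client-side Mutation.
private static ClientProtos.MutationProto.MutationType getMutationType(Mutation m) {
  return (m instanceof Put)
      ? ClientProtos.MutationProto.MutationType.PUT
      : ClientProtos.MutationProto.MutationType.DELETE;
}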
Use of org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel in project hbase by apache.
The class TokenUtil, method obtainToken.
/**
* Obtain and return an authentication token for the current user.
* @param conn The HBase cluster connection
* @return the authentication token instance
*/
public static Token<AuthenticationTokenIdentifier> obtainToken(Connection conn)
    throws IOException {
  Table meta = null;
  try {
    meta = conn.getTable(TableName.META_TABLE_NAME);
    CoprocessorRpcChannel rpcChannel = meta.coprocessorService(HConstants.EMPTY_START_ROW);
    AuthenticationProtos.AuthenticationService.BlockingInterface service =
        AuthenticationProtos.AuthenticationService.newBlockingStub(rpcChannel);
    AuthenticationProtos.GetAuthenticationTokenResponse response =
        service.getAuthenticationToken(null,
            AuthenticationProtos.GetAuthenticationTokenRequest.getDefaultInstance());
    return toToken(response.getToken());
  } catch (ServiceException se) {
    ProtobufUtil.handleRemoteException(se);
  } finally {
    if (meta != null) {
      meta.close();
    }
  }
  // dummy return for ServiceException block
  return null;
}
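A typical caller fetches the token once and attaches it to the current user so that later connections can authenticate with it. A minimal usage sketch, assuming an existing Configuration named conf:

// Usage sketch: obtain a delegation token and add it to the current user's
// credentials so that subsequent RPCs can use TOKEN authentication.
static void attachDelegationToken(Configuration conf) throws IOException {
  try (Connection conn = ConnectionFactory.createConnection(conf)) {
    Token<AuthenticationTokenIdentifier> token = TokenUtil.obtainToken(conn);
    User.getCurrent().addToken(token);
  }
}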
Use of org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel in project hbase by apache.
The class TestRowProcessorEndpoint, method testDoubleScan.
@Test
public void testDoubleScan() throws Throwable {
  prepareTestData();
  CoprocessorRpcChannel channel = table.coprocessorService(ROW);
  RowProcessorEndpoint.FriendsOfFriendsProcessor processor =
      new RowProcessorEndpoint.FriendsOfFriendsProcessor(ROW, A);
  RowProcessorService.BlockingInterface service = RowProcessorService.newBlockingStub(channel);
  ProcessRequest request = RowProcessorClient.getRowProcessorPB(processor);
  ProcessResponse protoResult = service.process(null, request);
  FriendsOfFriendsProcessorResponse response =
      FriendsOfFriendsProcessorResponse.parseFrom(protoResult.getRowProcessorResult());
  Set<String> result = new HashSet<>();
  result.addAll(response.getResultList());
  Set<String> expected = new HashSet<>(Arrays.asList(new String[] { "d", "e", "f", "g" }));
  Get get = new Get(ROW);
  LOG.debug("row keyvalues:" + stringifyKvs(table.get(get).listCells()));
  assertEquals(expected, result);
}
Use of org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel in project hbase by apache.
The class SecureBulkLoadEndpointClient, method prepareBulkLoad.
public String prepareBulkLoad(final TableName tableName) throws IOException {
  try {
    CoprocessorRpcChannel channel = table.coprocessorService(HConstants.EMPTY_START_ROW);
    SecureBulkLoadProtos.SecureBulkLoadService instance =
        ProtobufUtil.newServiceStub(SecureBulkLoadProtos.SecureBulkLoadService.class, channel);
    ServerRpcController controller = new ServerRpcController();
    CoprocessorRpcUtils.BlockingRpcCallback<PrepareBulkLoadResponse> rpcCallback =
        new CoprocessorRpcUtils.BlockingRpcCallback<>();
    PrepareBulkLoadRequest request = PrepareBulkLoadRequest.newBuilder()
        .setTableName(ProtobufUtil.toProtoTableName(tableName)).build();
    instance.prepareBulkLoad(controller, request, rpcCallback);
    PrepareBulkLoadResponse response = rpcCallback.get();
    if (controller.failedOnException()) {
      throw controller.getFailedOn();
    }
    return response.getBulkToken();
  } catch (Throwable throwable) {
    throw new IOException(throwable);
  }
}
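The returned bulk token identifies the server-side staging directory created for the load, and callers hand it back once loading finishes. A usage sketch, assuming client is an instance of this endpoint client and that it also exposes a matching cleanupBulkLoad(String) call:

String bulkToken = client.prepareBulkLoad(tableName);
try {
  // ... perform the secure bulk load of HFiles using bulkToken ...
} finally {
  // Assumed companion call: releases the server-side staging directory for this token.
  client.cleanupBulkLoad(bulkToken);
}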