
Example 1 with HiveAuthzContext

Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext in the Apache Hive project.

From the class TestThriftHttpCLIServiceFeatures, the method verifyForwardedHeaders:

private void verifyForwardedHeaders(ArrayList<String> headerIPs, String cmd) throws Exception {
    TTransport transport;
    DefaultHttpClient hClient = new DefaultHttpClient();
    String httpUrl = getHttpUrl();
    // add an interceptor that adds the X-Forwarded-For header with given ips
    if (!headerIPs.isEmpty()) {
        Header xForwardHeader = new BasicHeader("X-Forwarded-For", Joiner.on(",").join(headerIPs));
        RequestDefaultHeaders headerInterceptor = new RequestDefaultHeaders(Arrays.asList(xForwardHeader));
        hClient.addRequestInterceptor(headerInterceptor);
    }
    // interceptor for adding username, pwd
    HttpBasicAuthInterceptor authInt = new HttpBasicAuthInterceptor(ThriftCLIServiceTest.USERNAME, ThriftCLIServiceTest.PASSWORD, null, null, false, null);
    hClient.addRequestInterceptor(authInt);
    transport = new THttpClient(httpUrl, hClient);
    TCLIService.Client httpClient = getClient(transport);
    // Create a new open session request object
    TOpenSessionReq openReq = new TOpenSessionReq();
    TOpenSessionResp openResp = httpClient.OpenSession(openReq);
    //execute a query
    TExecuteStatementReq execReq = new TExecuteStatementReq(openResp.getSessionHandle(), "show tables");
    httpClient.ExecuteStatement(execReq);
    // capture arguments to authorizer impl call and verify ip addresses passed
    ArgumentCaptor<HiveAuthzContext> contextCapturer = ArgumentCaptor.forClass(HiveAuthzContext.class);
    verify(mockedAuthorizer).checkPrivileges(any(HiveOperationType.class), Matchers.anyListOf(HivePrivilegeObject.class), Matchers.anyListOf(HivePrivilegeObject.class), contextCapturer.capture());
    HiveAuthzContext context = contextCapturer.getValue();
    System.err.println("Forwarded IP Addresses " + context.getForwardedAddresses());
    List<String> auditIPAddresses = new ArrayList<String>(context.getForwardedAddresses());
    Collections.sort(auditIPAddresses);
    Collections.sort(headerIPs);
    Assert.assertEquals("Checking forwarded IP Address", headerIPs, auditIPAddresses);
}
Also used :
RequestDefaultHeaders(org.apache.http.client.protocol.RequestDefaultHeaders)
HiveAuthzContext(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext)
ArrayList(java.util.ArrayList)
THttpClient(org.apache.thrift.transport.THttpClient)
TExecuteStatementReq(org.apache.hive.service.rpc.thrift.TExecuteStatementReq)
HiveOperationType(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType)
HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject)
DefaultHttpClient(org.apache.http.impl.client.DefaultHttpClient)
TCLIService(org.apache.hive.service.rpc.thrift.TCLIService)
Header(org.apache.http.Header)
BasicHeader(org.apache.http.message.BasicHeader)
TTransport(org.apache.thrift.transport.TTransport)
TOpenSessionReq(org.apache.hive.service.rpc.thrift.TOpenSessionReq)
TOpenSessionResp(org.apache.hive.service.rpc.thrift.TOpenSessionResp)
HttpBasicAuthInterceptor(org.apache.hive.jdbc.HttpBasicAuthInterceptor)
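
The HiveAuthzContext captured in this test is built on the server side before the checkPrivileges call. The following is a minimal sketch of constructing such a context directly; the HiveAuthzContext.Builder setter names (setCommandString, setUserIpAddress, setForwardedAddresses) are assumed from current Hive sources, and the addresses are illustrative values only.

import java.util.Arrays;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;

public class HiveAuthzContextBuilderSketch {
    public static void main(String[] args) {
        // Build a context the way the Hive Driver does before calling checkPrivileges().
        // Setter names are assumed from Hive sources; the addresses are made up.
        HiveAuthzContext.Builder ctxBuilder = new HiveAuthzContext.Builder();
        ctxBuilder.setCommandString("show tables");
        // address of the immediate client as seen by HiveServer2
        ctxBuilder.setUserIpAddress("10.0.0.1");
        // X-Forwarded-For chain, which is what verifyForwardedHeaders asserts on
        ctxBuilder.setForwardedAddresses(Arrays.asList("192.168.1.1", "10.0.0.1"));
        HiveAuthzContext context = ctxBuilder.build();

        // These are the accessors the test reads after capturing the context.
        System.out.println(context.getCommandString());
        System.out.println(context.getForwardedAddresses());
        System.out.println(context.getIpAddress());
    }
}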

Example 2 with HiveAuthzContext

Use of org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext in the Apache Hive project.

From the class TestHS2AuthzContext, the method verifyContextContents:

private void verifyContextContents(final String cmd, String ctxCmd) throws Exception, HiveAuthzPluginException, HiveAccessControlException {
    Connection hs2Conn = getConnection("user1");
    Statement stmt = hs2Conn.createStatement();
    stmt.execute(cmd);
    stmt.close();
    hs2Conn.close();
    ArgumentCaptor<HiveAuthzContext> contextCapturer = ArgumentCaptor.forClass(HiveAuthzContext.class);
    verify(mockedAuthorizer).checkPrivileges(any(HiveOperationType.class), Matchers.anyListOf(HivePrivilegeObject.class), Matchers.anyListOf(HivePrivilegeObject.class), contextCapturer.capture());
    HiveAuthzContext context = contextCapturer.getValue();
    assertEquals("Command ", ctxCmd, context.getCommandString());
    assertTrue("ip address pattern check", context.getIpAddress().matches("[.:a-fA-F0-9]+"));
    // ip address size check - check for something better than non zero
    assertTrue("ip address size check", context.getIpAddress().length() > 7);
}
Also used :
Statement(java.sql.Statement)
Connection(java.sql.Connection)
HiveAuthzContext(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext)
HiveOperationType(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType)
HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject)
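
Both examples rely on the same Mockito capture pattern against a mocked HiveAuthorizer (the shared mockedAuthorizer field). The sketch below isolates that pattern by invoking checkPrivileges directly instead of going through HiveServer2; the Builder setters and the HiveOperationType.SHOWTABLES constant are assumptions based on Hive sources, not taken from the examples above.

import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.util.Collections;

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.mockito.ArgumentCaptor;
import org.mockito.Matchers;

public class AuthzContextCaptureSketch {
    public static void main(String[] args) throws Exception {
        // Stand-in for the mockedAuthorizer field that both test classes share.
        HiveAuthorizer mockedAuthorizer = mock(HiveAuthorizer.class);

        // In the real tests this call happens inside HiveServer2 while the statement runs;
        // here it is made directly with a hand-built context (Builder setters assumed).
        HiveAuthzContext.Builder ctxBuilder = new HiveAuthzContext.Builder();
        ctxBuilder.setCommandString("show tables");
        mockedAuthorizer.checkPrivileges(HiveOperationType.SHOWTABLES, // assumed enum constant
                Collections.<HivePrivilegeObject>emptyList(),
                Collections.<HivePrivilegeObject>emptyList(),
                ctxBuilder.build());

        // The same capture-and-verify pattern used in both examples above.
        ArgumentCaptor<HiveAuthzContext> contextCapturer = ArgumentCaptor.forClass(HiveAuthzContext.class);
        verify(mockedAuthorizer).checkPrivileges(any(HiveOperationType.class),
                Matchers.anyListOf(HivePrivilegeObject.class),
                Matchers.anyListOf(HivePrivilegeObject.class),
                contextCapturer.capture());
        System.out.println(contextCapturer.getValue().getCommandString());
    }
}

In the actual tests, the mocked HiveAuthorizer is handed to HiveServer2 through a test HiveAuthorizerFactory configured via hive.security.authorization.manager, which is what lets verify() observe the server-side checkPrivileges invocation.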

Aggregations

HiveAuthzContext (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext) 2
HiveOperationType (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType) 2
HivePrivilegeObject (org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) 2
Connection (java.sql.Connection) 1
Statement (java.sql.Statement) 1
ArrayList (java.util.ArrayList) 1
HttpBasicAuthInterceptor (org.apache.hive.jdbc.HttpBasicAuthInterceptor) 1
TCLIService (org.apache.hive.service.rpc.thrift.TCLIService) 1
TExecuteStatementReq (org.apache.hive.service.rpc.thrift.TExecuteStatementReq) 1
TOpenSessionReq (org.apache.hive.service.rpc.thrift.TOpenSessionReq) 1
TOpenSessionResp (org.apache.hive.service.rpc.thrift.TOpenSessionResp) 1
Header (org.apache.http.Header) 1
RequestDefaultHeaders (org.apache.http.client.protocol.RequestDefaultHeaders) 1
DefaultHttpClient (org.apache.http.impl.client.DefaultHttpClient) 1
BasicHeader (org.apache.http.message.BasicHeader) 1
THttpClient (org.apache.thrift.transport.THttpClient) 1
TTransport (org.apache.thrift.transport.TTransport) 1