Use of org.apache.hive.service.rpc.thrift.TExecuteStatementReq in project hive by apache.
Class ThriftCLIServiceClient, method executeStatementInternal.
private OperationHandle executeStatementInternal(SessionHandle sessionHandle, String statement,
    Map<String, String> confOverlay, boolean isAsync, long queryTimeout) throws HiveSQLException {
  try {
    TExecuteStatementReq req = new TExecuteStatementReq(sessionHandle.toTSessionHandle(), statement);
    req.setConfOverlay(confOverlay);
    req.setRunAsync(isAsync);
    req.setQueryTimeout(queryTimeout);
    TExecuteStatementResp resp = cliService.ExecuteStatement(req);
    checkStatus(resp.getStatus());
    TProtocolVersion protocol = sessionHandle.getProtocolVersion();
    return new OperationHandle(resp.getOperationHandle(), protocol);
  } catch (HiveSQLException e) {
    throw e;
  } catch (Exception e) {
    throw new HiveSQLException(e);
  }
}
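For context, a minimal sketch of the same round trip issued directly against a raw TCLIService.Iface client, assuming imports from org.apache.hive.service.rpc.thrift; the client, tSessionHandle, and overlay names are illustrative and do not appear in the Hive source above:

// Illustrative only: client, tSessionHandle and overlay are placeholder names.
Map<String, String> overlay = new HashMap<String, String>();
overlay.put("hive.exec.parallel", "true"); // hypothetical per-query override

TExecuteStatementReq req = new TExecuteStatementReq(tSessionHandle, "SELECT 1");
req.setConfOverlay(overlay);
req.setRunAsync(false);   // block until the statement finishes
req.setQueryTimeout(60);  // query timeout in seconds, as with JDBC setQueryTimeout

TExecuteStatementResp resp = client.ExecuteStatement(req);
TStatusCode code = resp.getStatus().getStatusCode();
if (code != TStatusCode.SUCCESS_STATUS && code != TStatusCode.SUCCESS_WITH_INFO_STATUS) {
  throw new HiveSQLException("ExecuteStatement failed: " + resp.getStatus().getErrorMessage());
}
TOperationHandle opHandle = resp.getOperationHandle();

In the method above, checkStatus(resp.getStatus()) performs essentially this status check before the raw handle is wrapped into an OperationHandle.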
Use of org.apache.hive.service.rpc.thrift.TExecuteStatementReq in project hive by apache.
Class HiveStatement, method runAsyncOnServer.
private void runAsyncOnServer(String sql) throws SQLException {
  checkConnection("execute");
  closeClientOperation();
  initFlags();
  TExecuteStatementReq execReq = new TExecuteStatementReq(sessHandle, sql);
  /**
   * Run asynchronously whenever possible.
   * Currently only a SQLOperation can be run asynchronously, in a background operation thread.
   * Compilation can run asynchronously or synchronously, and execution runs asynchronously.
   */
  execReq.setRunAsync(true);
  execReq.setConfOverlay(sessConf);
  execReq.setQueryTimeout(queryTimeout);
  try {
    TExecuteStatementResp execResp = client.ExecuteStatement(execReq);
    Utils.verifySuccessWithInfo(execResp.getStatus());
    stmtHandle = execResp.getOperationHandle();
    isExecuteStatementFailed = false;
  } catch (SQLException eS) {
    isExecuteStatementFailed = true;
    throw eS;
  } catch (Exception ex) {
    isExecuteStatementFailed = true;
    throw new SQLException(ex.toString(), "08S01", ex);
  }
}
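Because runAsyncOnServer only submits the statement, the caller still has to wait for the operation to reach a terminal state. A hedged sketch of such a polling loop, using the GetOperationStatus call from the same TCLIService interface (waitForAsyncOperation is a hypothetical helper, not Hive code; HiveStatement implements a similar loop internally):

private void waitForAsyncOperation(TCLIService.Iface client, TOperationHandle stmtHandle)
    throws SQLException, TException, InterruptedException {
  TGetOperationStatusReq statusReq = new TGetOperationStatusReq(stmtHandle);
  while (true) {
    TGetOperationStatusResp statusResp = client.GetOperationStatus(statusReq);
    Utils.verifySuccessWithInfo(statusResp.getStatus());
    TOperationState state = statusResp.getOperationState();
    if (state == TOperationState.FINISHED_STATE) {
      return; // results can now be fetched, e.g. via FetchResults
    }
    if (state == TOperationState.ERROR_STATE || state == TOperationState.CANCELED_STATE
        || state == TOperationState.CLOSED_STATE) {
      throw new SQLException("Statement did not finish, state: " + state, "08S01");
    }
    Thread.sleep(100); // fixed polling interval, kept simple for the sketch
  }
}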
Use of org.apache.hive.service.rpc.thrift.TExecuteStatementReq in project hive by apache.
Class TestThriftHttpCLIServiceFeatures, method verifyForwardedHeaders.
private void verifyForwardedHeaders(ArrayList<String> headerIPs, String cmd) throws Exception {
  TTransport transport;
  DefaultHttpClient hClient = new DefaultHttpClient();
  String httpUrl = getHttpUrl();
  // add an interceptor that adds the X-Forwarded-For header with the given IPs
  if (!headerIPs.isEmpty()) {
    Header xForwardHeader = new BasicHeader("X-Forwarded-For", Joiner.on(",").join(headerIPs));
    RequestDefaultHeaders headerInterceptor = new RequestDefaultHeaders(Arrays.asList(xForwardHeader));
    hClient.addRequestInterceptor(headerInterceptor);
  }
  // interceptor for adding username and password
  HttpBasicAuthInterceptor authInt = new HttpBasicAuthInterceptor(ThriftCLIServiceTest.USERNAME,
      ThriftCLIServiceTest.PASSWORD, null, null, false, null);
  hClient.addRequestInterceptor(authInt);
  transport = new THttpClient(httpUrl, hClient);
  TCLIService.Client httpClient = getClient(transport);
  // create a new open session request object
  TOpenSessionReq openReq = new TOpenSessionReq();
  TOpenSessionResp openResp = httpClient.OpenSession(openReq);
  // execute a query
  TExecuteStatementReq execReq = new TExecuteStatementReq(openResp.getSessionHandle(), "show tables");
  httpClient.ExecuteStatement(execReq);
  // capture arguments to the authorizer impl call and verify the IP addresses passed
  ArgumentCaptor<HiveAuthzContext> contextCapturer = ArgumentCaptor.forClass(HiveAuthzContext.class);
  verify(mockedAuthorizer).checkPrivileges(any(HiveOperationType.class),
      Matchers.anyListOf(HivePrivilegeObject.class), Matchers.anyListOf(HivePrivilegeObject.class),
      contextCapturer.capture());
  HiveAuthzContext context = contextCapturer.getValue();
  System.err.println("Forwarded IP Addresses " + context.getForwardedAddresses());
  List<String> auditIPAddresses = new ArrayList<String>(context.getForwardedAddresses());
  Collections.sort(auditIPAddresses);
  Collections.sort(headerIPs);
  Assert.assertEquals("Checking forwarded IP Address", headerIPs, auditIPAddresses);
}
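The excerpt above leaves the thrift session open. For completeness, a sketch of how such a test could tear down the session and transport afterwards, assuming the standard TCloseSessionReq/TCloseSessionResp pair from the same thrift package (closeReq and closeResp are illustrative names):

// Illustrative cleanup: close the session opened above, then the HTTP transport.
TCloseSessionReq closeReq = new TCloseSessionReq(openResp.getSessionHandle());
TCloseSessionResp closeResp = httpClient.CloseSession(closeReq);
Assert.assertEquals(TStatusCode.SUCCESS_STATUS, closeResp.getStatus().getStatusCode());
transport.close();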