Use of org.apache.hive.service.cli.SessionHandle in project hive by apache: class TestHiveServer2, method testConnection.
/**
 * Open a new session and run a test query.
 * @throws Exception
 */
@Test
public void testConnection() throws Exception {
  String tableName = "TestHiveServer2TestConnection";
  CLIServiceClient serviceClient = miniHS2.getServiceClient();
  SessionHandle sessHandle = serviceClient.openSession("foo", "bar");
  serviceClient.executeStatement(sessHandle, "DROP TABLE IF EXISTS " + tableName, confOverlay);
  serviceClient.executeStatement(sessHandle, "CREATE TABLE " + tableName + " (id INT)", confOverlay);
  OperationHandle opHandle = serviceClient.executeStatement(sessHandle, "SHOW TABLES", confOverlay);
  RowSet rowSet = serviceClient.fetchResults(opHandle);
  assertFalse(rowSet.numRows() == 0);
  serviceClient.executeStatement(sessHandle, "DROP TABLE IF EXISTS " + tableName, confOverlay);
  serviceClient.closeSession(sessHandle);
}
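The test above depends on a miniHS2 instance and a confOverlay map that are initialized elsewhere in the test class. A minimal setup sketch, assuming the MiniHS2 test harness and a default HiveConf; the field names and lifecycle hooks here are illustrative, not copied from the Hive source:

// Illustrative setup sketch only; the real test class may configure MiniHS2 differently.
private static MiniHS2 miniHS2;
private static Map<String, String> confOverlay;

@BeforeClass
public static void setUpBeforeClass() throws Exception {
  HiveConf hiveConf = new HiveConf();
  miniHS2 = new MiniHS2(hiveConf);               // embedded HiveServer2 for tests
  confOverlay = new HashMap<String, String>();   // per-statement configuration overrides
  miniHS2.start(confOverlay);
}

@AfterClass
public static void tearDownAfterClass() throws Exception {
  if (miniHS2 != null) {
    miniHS2.stop();
  }
}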
Use of org.apache.hive.service.cli.SessionHandle in project hive by apache: class TestHs2Metrics, method testMetrics.
@Test
public void testMetrics() throws Exception {
  String tableName = "testMetrics";
  CLIServiceClient serviceClient = miniHS2.getServiceClient();
  SessionHandle sessHandle = serviceClient.openSession("foo", "bar");
  // Block on semantic analysis to check 'active_calls'
  serviceClient.executeStatement(sessHandle, "CREATE TABLE " + tableName + " (id INT)", confOverlay);
  // Check that all calls were recorded.
  CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
  String json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_operation_INITIALIZED", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_operation_PENDING", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_operation_RUNNING", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "hs2_completed_operation_FINISHED", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_sql_operation_PENDING", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_sql_operation_RUNNING", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "hs2_completed_sql_operation_FINISHED", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_Driver.run", 1);
  // But there should be no more active calls.
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_semanticAnalyze", 0);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_compile", 0);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_hs2_operation_RUNNING", 0);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_hs2_sql_operation_RUNNING", 0);
  serviceClient.closeSession(sessHandle);
}
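For the CodahaleMetrics cast above to succeed, HiveServer2 metrics must be enabled before MiniHS2 starts. A hedged configuration sketch, assuming the standard HiveConf variable for the Codahale-backed metrics subsystem; the actual test setup may set additional options:

// Assumed setup sketch: enable the Codahale-backed metrics subsystem so that
// MetricsFactory.getInstance() returns a CodahaleMetrics instance.
HiveConf hiveConf = new HiveConf();
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true);
miniHS2 = new MiniHS2(hiveConf);
miniHS2.start(confOverlay);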
Use of org.apache.hive.service.cli.SessionHandle in project hive by apache: class TestHs2Metrics, method testClosedScopes.
@Test
public void testClosedScopes() throws Exception {
  CLIServiceClient serviceClient = miniHS2.getServiceClient();
  SessionHandle sessHandle = serviceClient.openSession("foo", "bar");
  // This should error at the analyze scope.
  Exception expectedException = null;
  try {
    serviceClient.executeStatement(sessHandle, "select aaa", confOverlay);
  } catch (Exception e) {
    expectedException = e;
  }
  Assert.assertNotNull("Expected semantic exception", expectedException);
  // Verify all scopes were recorded.
  CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
  String json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_parse", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_semanticAnalyze", 1);
  // Verify all scopes are closed.
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_parse", 0);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_semanticAnalyze", 0);
  serviceClient.closeSession(sessHandle);
}
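Because CLIServiceClient.executeStatement declares HiveSQLException, the try/catch above could be tightened to catch that type directly. A hedged sketch of the same check; the original test deliberately keeps the generic Exception:

// Alternative assertion sketch; behavior matches the test above.
try {
  serviceClient.executeStatement(sessHandle, "select aaa", confOverlay);
  Assert.fail("Statement should have failed during semantic analysis");
} catch (HiveSQLException e) {
  // The compile-time error is surfaced to the client as a HiveSQLException.
}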
Use of org.apache.hive.service.cli.SessionHandle in project hive by apache: class ThriftCLIService, method SetClientInfo.
@Override
public TSetClientInfoResp SetClientInfo(TSetClientInfoReq req) throws TException {
  // TODO: We don't do anything for now, just log this for debugging.
  // We may be able to make use of this later, e.g. for workload management.
  TSetClientInfoResp resp = null;
  if (req.isSetConfiguration()) {
    StringBuilder sb = null;
    SessionHandle sh = null;
    for (Map.Entry<String, String> e : req.getConfiguration().entrySet()) {
      if (sb == null) {
        sh = new SessionHandle(req.getSessionHandle());
        sb = new StringBuilder("Client information for ").append(sh).append(": ");
      } else {
        sb.append(", ");
      }
      sb.append(e.getKey()).append(" = ").append(e.getValue());
      if ("ApplicationName".equals(e.getKey())) {
        try {
          cliService.setApplicationName(sh, e.getValue());
        } catch (Exception ex) {
          LOG.warn("Error setting application name", ex);
          resp = new TSetClientInfoResp(HiveSQLException.toTStatus(ex));
        }
      }
    }
    if (sb != null) {
      LOG.info("{}", sb);
    }
  }
  return resp == null ? new TSetClientInfoResp(OK_STATUS) : resp;
}
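For context, a hypothetical caller-side sketch of the request this handler receives, built with the generated TCLIService Thrift types. The client object, session handle, and property values are illustrative assumptions, not code from the Hive repository:

// Hypothetical caller-side sketch; 'client' and 'sessionHandle' come from an already-open connection.
TSetClientInfoReq req = new TSetClientInfoReq(sessionHandle.toTSessionHandle());
Map<String, String> clientInfo = new HashMap<String, String>();
clientInfo.put("ApplicationName", "nightly-etl");   // forwarded to cliService.setApplicationName()
clientInfo.put("ClientHostname", "etl-node-01");    // only logged by the handler above
req.setConfiguration(clientInfo);
TSetClientInfoResp resp = client.SetClientInfo(req);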
Use of org.apache.hive.service.cli.SessionHandle in project hive by apache: class ThriftCLIService, method OpenSession.
@Override
public TOpenSessionResp OpenSession(TOpenSessionReq req) throws TException {
  LOG.info("Client protocol version: " + req.getClient_protocol());
  TOpenSessionResp resp = new TOpenSessionResp();
  try {
    SessionHandle sessionHandle = getSessionHandle(req, resp);
    resp.setSessionHandle(sessionHandle.toTSessionHandle());
    Map<String, String> configurationMap = new HashMap<String, String>();
    // Set the updated fetch size from the server into the configuration map for the client.
    HiveConf sessionConf = cliService.getSessionConf(sessionHandle);
    configurationMap.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE.varname,
        Integer.toString(sessionConf != null
            ? sessionConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE)
            : hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE)));
    resp.setConfiguration(configurationMap);
    resp.setStatus(OK_STATUS);
    ThriftCLIServerContext context = (ThriftCLIServerContext) currentServerContext.get();
    if (context != null) {
      context.setSessionHandle(sessionHandle);
    }
  } catch (Exception e) {
    LOG.warn("Error opening session: ", e);
    resp.setStatus(HiveSQLException.toTStatus(e));
  }
  return resp;
}
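A hedged client-side sketch of the matching call: open a session over the raw Thrift API and read back the fetch size the server advertises in the response configuration map. The transport setup, credentials, and the 'client' variable are illustrative assumptions:

// Illustrative sketch; 'client' is an already-connected TCLIService.Client.
TOpenSessionReq openReq = new TOpenSessionReq();
openReq.setUsername("foo");
openReq.setPassword("bar");
TOpenSessionResp openResp = client.OpenSession(openReq);
TSessionHandle tHandle = openResp.getSessionHandle();
String serverFetchSize = openResp.getConfiguration()
    .get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE.varname);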