Use of org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics in project hive by apache.
Example from class TestHs2Metrics, method testClosedScopes.
@Test
public void testClosedScopes() throws Exception {
  CLIServiceClient serviceClient = miniHS2.getServiceClient();
  SessionHandle sessHandle = serviceClient.openSession("foo", "bar");
  // this should error at the analyze scope
  Exception expectedException = null;
  try {
    serviceClient.executeStatement(sessHandle, "select aaa", confOverlay);
  } catch (Exception e) {
    expectedException = e;
  }
  Assert.assertNotNull("Expected semantic exception", expectedException);
  // verify all scopes were recorded
  CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
  String json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_parse", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_semanticAnalyze", 1);
  // verify all scopes are closed
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_parse", 0);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_semanticAnalyze", 0);
  serviceClient.closeSession(sessHandle);
}
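The verifyMetricsJson calls above boil down to parsing the JSON that CodahaleMetrics.dumpJson() produces and asserting on a metric's count. The helper below is a minimal sketch of that idea, not the actual MetricsTestUtils from Hive; it assumes the standard Dropwizard JSON layout (top-level "timers" and "counters" sections, each entry carrying a "count" field) and uses plain Jackson and JUnit. The class and method names are illustrative only.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;

public class MetricsJsonAssert {
  private static final ObjectMapper MAPPER = new ObjectMapper();

  // metricType is the JSON section name, e.g. "timers" or "counters";
  // metricName is the metric key, e.g. "api_parse" or "active_calls_api_parse".
  static void assertCount(String json, String metricType, String metricName, long expected)
      throws Exception {
    JsonNode metric = MAPPER.readTree(json).path(metricType).path(metricName);
    Assert.assertFalse("metric not found: " + metricName, metric.isMissingNode());
    Assert.assertEquals(expected, metric.path("count").asLong());
  }
}

Called as assertCount(json, "timers", "api_parse", 1), this mirrors the first verification in the test above.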
Use of org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics in project hive by apache.
Example from class TestMetricsQueryLifeTimeHook, method before.
@Before
public void before() throws Exception {
  HiveConf conf = new HiveConf();
  conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, "local");
  conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, CodahaleMetrics.class.getCanonicalName());
  conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name());
  conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, "100000s");
  MetricsFactory.init(conf);
  metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance()).getMetricRegistry();
  hook = new MetricsQueryLifeTimeHook();
  ctx = new QueryLifeTimeHookContextImpl();
}
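The fixture above only wires things together; a test method in the same class could then drive the hook through one compile phase and inspect the Codahale registry directly. The sketch below is not the actual Hive test body: it assumes the QueryLifeTimeHook contract (beforeCompile, then afterCompile with an error flag) and a setCommand setter on QueryLifeTimeHookContextImpl, and it reuses the hook, ctx, and metricRegistry fields set up in before(). Rather than hard-coding the timer name the hook registers, it simply looks for a timer that recorded one sample.

@Test
public void recordsCompileTimer() {
  ctx.setCommand("select 1");      // assumed setter on QueryLifeTimeHookContextImpl
  hook.beforeCompile(ctx);         // start of the compile phase
  hook.afterCompile(ctx, false);   // end of the compile phase, no error
  // The hook should now have registered a compile timer with exactly one sample.
  boolean recorded = metricRegistry.getTimers().values().stream()
      .anyMatch(t -> t.getCount() == 1);
  Assert.assertTrue("expected a timer with one recorded compile sample", recorded);
}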