Use of org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics in project hive by apache.
From the class TestZookeeperLockManager, method testMetrics.
@Test
public void testMetrics() throws Exception {
  conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM, "localhost");
  conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT, String.valueOf(server.getPort()));
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name());
  MetricsFactory.init(conf);
  CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
  HiveLockManagerCtx ctx = new HiveLockManagerCtx(conf);
  ZooKeeperHiveLockManager zMgr = new ZooKeeperHiveLockManager();
  zMgr.setContext(ctx);
  ZooKeeperHiveLock curLock = zMgr.lock(hiveLock, HiveLockMode.SHARED, false);
  String json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.ZOOKEEPER_HIVE_SHAREDLOCKS, 1);
  zMgr.unlock(curLock);
  json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.ZOOKEEPER_HIVE_SHAREDLOCKS, 0);
  zMgr.close();
}
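The test simply checks that acquiring a shared ZooKeeper lock drives the ZOOKEEPER_HIVE_SHAREDLOCKS counter up and that releasing it drives it back down. A minimal sketch of that reporting pattern, assuming the incrementCounter/decrementCounter methods of the common Metrics facade and not the actual ZooKeeperHiveLockManager call sites:

// Hedged sketch: keep the shared-lock counter in step with acquire/release.
// The metric name and facade come from the test above; the wrapper class is illustrative only.
import org.apache.hadoop.hive.common.metrics.common.Metrics;
import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;

final class SharedLockMetricsSketch {
  static void onSharedLockAcquired() {
    Metrics metrics = MetricsFactory.getInstance();
    if (metrics != null) {
      metrics.incrementCounter(MetricsConstant.ZOOKEEPER_HIVE_SHAREDLOCKS);
    }
  }

  static void onSharedLockReleased() {
    Metrics metrics = MetricsFactory.getInstance();
    if (metrics != null) {
      metrics.decrementCounter(MetricsConstant.ZOOKEEPER_HIVE_SHAREDLOCKS);
    }
  }
}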
Use of org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics in project hive by apache.
From the class TestHBaseMetastoreMetrics, method testMetaDataCounts.
@Test
public void testMetaDataCounts() throws Exception {
  // 1 database created
  driver.run("create database testdb1");
  // 4 tables created
  driver.run("create table testtbl1 (key string)");
  driver.run("create table testtblpart (key string) partitioned by (partkey string)");
  driver.run("use testdb1");
  driver.run("create table testtbl2 (key string)");
  driver.run("create table testtblpart2 (key string) partitioned by (partkey string)");
  // 6 partitions created
  driver.run("alter table default.testtblpart add partition (partkey='a')");
  driver.run("alter table default.testtblpart add partition (partkey='b')");
  driver.run("alter table default.testtblpart add partition (partkey='c')");
  driver.run("alter table testdb1.testtblpart2 add partition (partkey='a')");
  driver.run("alter table testdb1.testtblpart2 add partition (partkey='b')");
  driver.run("alter table testdb1.testtblpart2 add partition (partkey='c')");
  // Create and drop some additional metadata to test the drop counts.
  driver.run("create database tempdb");
  driver.run("use tempdb");
  driver.run("create table delete_by_table (key string) partitioned by (partkey string)");
  driver.run("alter table delete_by_table add partition (partkey='temp')");
  driver.run("drop table delete_by_table");
  driver.run("create table delete_by_part (key string) partitioned by (partkey string)");
  driver.run("alter table delete_by_part add partition (partkey='temp')");
  driver.run("alter table delete_by_part drop partition (partkey='temp')");
  driver.run("create table delete_by_db (key string) partitioned by (partkey string)");
  driver.run("alter table delete_by_db add partition (partkey='temp')");
  driver.run("use default");
  driver.run("drop database tempdb cascade");
  CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
  String json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_DATABASES, 2);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_TABLES, 7);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_PARTITIONS, 9);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DELETE_TOTAL_DATABASES, 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DELETE_TOTAL_TABLES, 3);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DELETE_TOTAL_PARTITIONS, 3);
  // Test the initial metadata count metrics.
  conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL, ObjectStore.class.getName());
  HiveMetaStore.HMSHandler baseHandler = new HiveMetaStore.HMSHandler("test", conf, false);
  baseHandler.init();
  baseHandler.updateMetrics();
  // 1 new db + default
  json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_DATABASES, 2);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_TABLES, 4);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_PARTITIONS, 6);
}
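The expected values follow from the DDL above: two databases created (testdb1, tempdb), seven tables, nine partitions, then one database, three tables and three partitions dropped; after the cascade cleanup only default, testdb1, their four tables and six partitions remain, which is what the INIT_* gauges report. The verification itself just reads the JSON emitted by dumpJson(). A minimal lookup sketch, assuming the standard Codahale MetricsModule layout ("counters" -> name -> "count", "gauges" -> name -> "value") rather than the actual MetricsTestUtils implementation:

// Hedged sketch (not the MetricsTestUtils code): pull one counter and one gauge
// out of the JSON string produced by CodahaleMetrics.dumpJson().
import java.io.IOException;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

final class MetricsJsonSketch {
  private static final ObjectMapper MAPPER = new ObjectMapper();

  static long readCounter(String json, String name) throws IOException {
    JsonNode root = MAPPER.readTree(json);
    return root.path("counters").path(name).path("count").asLong();
  }

  static long readGauge(String json, String name) throws IOException {
    JsonNode root = MAPPER.readTree(json);
    return root.path("gauges").path(name).path("value").asLong();
  }
}

With the statements above, readCounter(json, MetricsConstant.CREATE_TOTAL_TABLES) should come back as 7 and readGauge(json, MetricsConstant.INIT_TOTAL_TABLES) as 4, matching the assertions.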
Use of org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics in project hive by apache.
From the class TestObjectStore, method testDirectSqlErrorMetrics.
@Test
public void testDirectSqlErrorMetrics() throws Exception {
  HiveConf conf = new HiveConf();
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true);
  conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name());
  MetricsFactory.init(conf);
  CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
  objectStore.new GetDbHelper("foo", null, true, true) {
    @Override
    protected Database getSqlResult(ObjectStore.GetHelper<Database> ctx) throws MetaException {
      return null;
    }

    @Override
    protected Database getJdoResult(ObjectStore.GetHelper<Database> ctx) throws MetaException, NoSuchObjectException {
      return null;
    }
  }.run(false);
  String json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DIRECTSQL_ERRORS, "");
  objectStore.new GetDbHelper("foo", null, true, true) {
    @Override
    protected Database getSqlResult(ObjectStore.GetHelper<Database> ctx) throws MetaException {
      throw new RuntimeException();
    }

    @Override
    protected Database getJdoResult(ObjectStore.GetHelper<Database> ctx) throws MetaException, NoSuchObjectException {
      return null;
    }
  }.run(false);
  json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DIRECTSQL_ERRORS, 1);
}
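The two anonymous GetDbHelper subclasses exercise the metastore's direct-SQL-with-JDO-fallback path: the first run succeeds on the SQL side, so no DIRECTSQL_ERRORS counter exists yet (hence the empty expected value), while the second throws from getSqlResult, which bumps the error counter before the JDO result is used. A minimal sketch of that fallback pattern, assuming the common Metrics facade and not ObjectStore.GetHelper itself:

// Hedged sketch: try the direct-SQL path first; on failure count a
// DIRECTSQL_ERRORS error and fall back to the JDO path.
import java.util.concurrent.Callable;

import org.apache.hadoop.hive.common.metrics.common.Metrics;
import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;

final class DirectSqlFallbackSketch {
  static <T> T getWithFallback(Callable<T> sqlPath, Callable<T> jdoPath) throws Exception {
    try {
      return sqlPath.call();                                          // like getSqlResult above
    } catch (Exception e) {
      Metrics metrics = MetricsFactory.getInstance();
      if (metrics != null) {
        metrics.incrementCounter(MetricsConstant.DIRECTSQL_ERRORS);   // what the second run asserts
      }
      return jdoPath.call();                                          // like getJdoResult above
    }
  }
}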
Use of org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics in project hive by apache.
From the class TestMetaStoreMetrics, method testMetaDataCounts.
@Test
public void testMetaDataCounts() throws Exception {
  CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
  String json = metrics.dumpJson();
  int initDbCount = (new Integer((MetricsTestUtils.getJsonNode(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_DATABASES)).asText())).intValue();
  int initTblCount = (new Integer((MetricsTestUtils.getJsonNode(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_TABLES)).asText())).intValue();
  int initPartCount = (new Integer((MetricsTestUtils.getJsonNode(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_PARTITIONS)).asText())).intValue();
  // 1 database created
  driver.run("create database testdb1");
  // 4 tables created
  driver.run("create table testtbl1 (key string)");
  driver.run("create table testtblpart (key string) partitioned by (partkey string)");
  driver.run("use testdb1");
  driver.run("create table testtbl2 (key string)");
  driver.run("create table testtblpart2 (key string) partitioned by (partkey string)");
  // 6 partitions created
  driver.run("alter table default.testtblpart add partition (partkey='a')");
  driver.run("alter table default.testtblpart add partition (partkey='b')");
  driver.run("alter table default.testtblpart add partition (partkey='c')");
  driver.run("alter table testdb1.testtblpart2 add partition (partkey='a')");
  driver.run("alter table testdb1.testtblpart2 add partition (partkey='b')");
  driver.run("alter table testdb1.testtblpart2 add partition (partkey='c')");
  // Create and drop some additional metadata to test the drop counts.
  driver.run("create database tempdb");
  driver.run("use tempdb");
  driver.run("create table delete_by_table (key string) partitioned by (partkey string)");
  driver.run("alter table delete_by_table add partition (partkey='temp')");
  driver.run("drop table delete_by_table");
  driver.run("create table delete_by_part (key string) partitioned by (partkey string)");
  driver.run("alter table delete_by_part add partition (partkey='temp')");
  driver.run("alter table delete_by_part drop partition (partkey='temp')");
  driver.run("create table delete_by_db (key string) partitioned by (partkey string)");
  driver.run("alter table delete_by_db add partition (partkey='temp')");
  driver.run("use default");
  driver.run("drop database tempdb cascade");
  json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_DATABASES, 2);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_TABLES, 7);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_PARTITIONS, 9);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DELETE_TOTAL_DATABASES, 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DELETE_TOTAL_TABLES, 3);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DELETE_TOTAL_PARTITIONS, 3);
  // Test the initial metadata count metrics.
  hiveConf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL, ObjectStore.class.getName());
  HiveMetaStore.HMSHandler baseHandler = new HiveMetaStore.HMSHandler("test", hiveConf, false);
  baseHandler.init();
  baseHandler.updateMetrics();
  json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_DATABASES, initDbCount + 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_TABLES, initTblCount + 4);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_PARTITIONS, initPartCount + 6);
}
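Relative to the gauges read at the top of the test, one database (testdb1), four tables and six partitions survive the tempdb cleanup, which is why the final assertions expect initDbCount + 1, initTblCount + 4 and initPartCount + 6. As a side note, the boxing in the three init* reads can be written more directly; an equivalent fragment, still relying on the test's own json variable and MetricsTestUtils helper, would be:

// Hedged, equivalent rewrite of the three init* reads above (Integer.parseInt
// instead of new Integer(...).intValue()); the behaviour is unchanged.
int initDbCount = Integer.parseInt(
    MetricsTestUtils.getJsonNode(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_DATABASES).asText());
int initTblCount = Integer.parseInt(
    MetricsTestUtils.getJsonNode(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_TABLES).asText());
int initPartCount = Integer.parseInt(
    MetricsTestUtils.getJsonNode(json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_PARTITIONS).asText());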
Use of org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics in project hive by apache.
From the class TestHs2Metrics, method testMetrics.
@Test
public void testMetrics() throws Exception {
  String tableName = "testMetrics";
  CLIServiceClient serviceClient = miniHS2.getServiceClient();
  SessionHandle sessHandle = serviceClient.openSession("foo", "bar");
  // Block on semantic analysis to check 'active_calls'
  serviceClient.executeStatement(sessHandle, "CREATE TABLE " + tableName + " (id INT)", confOverlay);
  // Check that all calls were recorded.
  CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
  String json = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_operation_INITIALIZED", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_operation_PENDING", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_operation_RUNNING", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "hs2_completed_operation_FINISHED", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_sql_operation_PENDING", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_sql_operation_RUNNING", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "hs2_completed_sql_operation_FINISHED", 1);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_Driver.run", 1);
  // ...but there should be no more active calls.
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_semanticAnalyze", 0);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_compile", 0);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_hs2_operation_RUNNING", 0);
  MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_hs2_sql_operation_RUNNING", 0);
  serviceClient.closeSession(sessHandle);
}
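The api_* timers and active_calls_* counters asserted above come from the metrics scopes that HiveServer2 opens around each operation phase: opening a scope starts a timer and raises a matching active-calls counter, and closing it records the elapsed time and lowers the counter again, which is why every active_calls_* value is back at 0 once the CREATE TABLE finishes. A minimal sketch of that pattern, assuming the startStoredScope/endStoredScope methods of the common Metrics facade rather than the actual HS2 operation code:

// Hedged sketch: wrap a call in a stored metrics scope so it shows up both as a
// timer entry and, while it is running, as an active-calls counter.
import org.apache.hadoop.hive.common.metrics.common.Metrics;
import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;

final class ScopedCallSketch {
  static void timedCall(String scopeName, Runnable body) {
    Metrics metrics = MetricsFactory.getInstance();
    if (metrics == null) {
      body.run();
      return;
    }
    metrics.startStoredScope(scopeName);   // starts the timer, raises the active-calls counter
    try {
      body.run();
    } finally {
      metrics.endStoredScope(scopeName);   // records the timing, lowers the counter
    }
  }
}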