Use of org.apache.hadoop.hive.common.metrics.common.Metrics in project hive by apache.
From the class OperationManager, the method closeOperation:
public void closeOperation(OperationHandle opHandle) throws HiveSQLException {
  LOG.info("Closing operation: " + opHandle);
  Operation operation = removeOperation(opHandle);
  if (operation == null) {
    throw new HiveSQLException("Operation does not exist: " + opHandle);
  }
  Metrics metrics = MetricsFactory.getInstance();
  if (metrics != null) {
    try {
      metrics.decrementCounter(MetricsConstant.OPEN_OPERATIONS);
    } catch (Exception e) {
      LOG.warn("Error Reporting close operation to Metrics system", e);
    }
  }
  operation.close();
}
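The decrement shown here pairs with an increment taken when an operation is opened, so OPEN_OPERATIONS tracks the number of currently open operations. As a minimal sketch of the mirrored open-path bookkeeping (not OperationManager's actual code; the helper name is hypothetical and LOG is assumed to be the surrounding class's logger):

// Hypothetical helper mirroring the decrement above on the open path.
// Uses only Metrics.incrementCounter, which the snippets on this page already rely on.
private void reportOperationOpened() {
  Metrics metrics = MetricsFactory.getInstance();
  if (metrics != null) {
    try {
      metrics.incrementCounter(MetricsConstant.OPEN_OPERATIONS);
    } catch (Exception e) {
      LOG.warn("Error reporting open operation to Metrics system", e);
    }
  }
}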
Use of org.apache.hadoop.hive.common.metrics.common.Metrics in project hive by apache.
From the class TestMapRedTask, the method mrTask_updates_Metrics:
@Test
public void mrTask_updates_Metrics() throws IOException {
  Metrics mockMetrics = Mockito.mock(Metrics.class);
  MapRedTask mapRedTask = new MapRedTask();
  mapRedTask.updateTaskMetrics(mockMetrics);
  verify(mockMetrics, times(1)).incrementCounter(MetricsConstant.HIVE_MR_TASKS);
  verify(mockMetrics, never()).incrementCounter(MetricsConstant.HIVE_TEZ_TASKS);
  verify(mockMetrics, never()).incrementCounter(MetricsConstant.HIVE_SPARK_TASKS);
}
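The test pins down the contract of MapRedTask.updateTaskMetrics: exactly one increment of HIVE_MR_TASKS and no touches of the Tez or Spark counters. A sketch of an implementation satisfying these verifications, assuming updateTaskMetrics overrides a hook on the task base class (an illustration, not necessarily the code in MapRedTask):

@Override
public void updateTaskMetrics(Metrics metrics) {
  // Count this task as a MapReduce task; the Tez and Spark counters stay untouched,
  // which is what the never() verifications above check.
  metrics.incrementCounter(MetricsConstant.HIVE_MR_TASKS);
}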
Use of org.apache.hadoop.hive.common.metrics.common.Metrics in project hive by apache.
From the class TestSparkTask, the method sparkTask_updates_Metrics:
@Test
public void sparkTask_updates_Metrics() throws IOException {
  Metrics mockMetrics = Mockito.mock(Metrics.class);
  SparkTask sparkTask = new SparkTask();
  sparkTask.updateTaskMetrics(mockMetrics);
  verify(mockMetrics, times(1)).incrementCounter(MetricsConstant.HIVE_SPARK_TASKS);
  verify(mockMetrics, never()).incrementCounter(MetricsConstant.HIVE_TEZ_TASKS);
  verify(mockMetrics, never()).incrementCounter(MetricsConstant.HIVE_MR_TASKS);
}
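The MR and Spark tests differ only in which counter they expect, so the three verify calls could be factored into a shared helper. A sketch under that assumption (the helper name and its placement in a common test utility are hypothetical; it uses only Mockito's verify/times/never and the MetricsConstant names already shown above):

// Hypothetical shared assertion: exactly the expected task counter was incremented once,
// and the other task-type counters were never touched.
private static void verifyOnlyCounterIncremented(Metrics mockMetrics, String expectedCounter) {
  String[] taskCounters = {
      MetricsConstant.HIVE_MR_TASKS, MetricsConstant.HIVE_TEZ_TASKS, MetricsConstant.HIVE_SPARK_TASKS };
  for (String counter : taskCounters) {
    if (counter.equals(expectedCounter)) {
      verify(mockMetrics, times(1)).incrementCounter(counter);
    } else {
      verify(mockMetrics, never()).incrementCounter(counter);
    }
  }
}

With such a helper, sparkTask_updates_Metrics would reduce to a single call: verifyOnlyCounterIncremented(mockMetrics, MetricsConstant.HIVE_SPARK_TASKS).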
Use of org.apache.hadoop.hive.common.metrics.common.Metrics in project hive by apache.
From the class Driver, the method compileInternal:
private void compileInternal(String command, boolean deferClose) throws CommandProcessorResponse {
  Metrics metrics = MetricsFactory.getInstance();
  if (metrics != null) {
    metrics.incrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1);
  }
  PerfLogger perfLogger = SessionState.getPerfLogger();
  perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.WAIT_COMPILE);
  final ReentrantLock compileLock = tryAcquireCompileLock(isParallelEnabled, command);
  perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.WAIT_COMPILE);
  if (metrics != null) {
    metrics.decrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1);
  }
  if (compileLock == null) {
    throw createProcessorResponse(ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode());
  }
  try {
    compile(command, true, deferClose);
  } catch (CommandProcessorResponse cpr) {
    try {
      releaseLocksAndCommitOrRollback(false);
    } catch (LockException e) {
      LOG.warn("Exception in releasing locks. " + org.apache.hadoop.util.StringUtils.stringifyException(e));
    }
    throw cpr;
  } finally {
    compileLock.unlock();
  }
  // Save compile-time PerfLogging for WebUI.
  // Execution-time Perf logs are done by either another thread's PerfLogger
  // or a reset PerfLogger.
  queryDisplay.setPerfLogStarts(QueryDisplay.Phase.COMPILATION, perfLogger.getStartTimes());
  queryDisplay.setPerfLogEnds(QueryDisplay.Phase.COMPILATION, perfLogger.getEndTimes());
}
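compileInternal increments WAITING_COMPILE_OPS before blocking on the compile lock and decrements it once the wait ends, so the counter reports how many queries are currently queued for compilation. If the lock acquisition could throw, the decrement would be skipped; a defensive variant of the same bookkeeping is sketched below as an illustration of the pattern, not the Driver's actual code:

// Sketch: guarantee the waiting-compile counter is decremented even if
// acquiring the compile lock fails with an exception.
Metrics metrics = MetricsFactory.getInstance();
if (metrics != null) {
  metrics.incrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1);
}
final ReentrantLock compileLock;
try {
  compileLock = tryAcquireCompileLock(isParallelEnabled, command);
} finally {
  if (metrics != null) {
    metrics.decrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1);
  }
}
// ... continue as above with the null check, compile(), and unlock in a finally block.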
Use of org.apache.hadoop.hive.common.metrics.common.Metrics in project hive by apache.
From the class SessionManager, the method init:
@Override
public synchronized void init(HiveConf hiveConf) {
  this.hiveConf = hiveConf;
  // Create operation log root directory, if operation logging is enabled
  if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
    initOperationLogRootDir();
  }
  createBackgroundOperationPool();
  addService(operationManager);
  initSessionImplClassName();
  Metrics metrics = MetricsFactory.getInstance();
  if (metrics != null) {
    registerOpenSesssionMetrics(metrics);
    registerActiveSesssionMetrics(metrics);
  }
  userLimit = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER);
  ipAddressLimit = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS);
  userIpAddressLimit = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS);
  LOG.info("Connections limit are user: {} ipaddress: {} user-ipaddress: {}",
      userLimit, ipAddressLimit, userIpAddressLimit);
  super.init(hiveConf);
}
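registerOpenSesssionMetrics and registerActiveSesssionMetrics publish session counts to the metrics system as gauges rather than counters, so the value is read on demand instead of being incremented and decremented. A minimal sketch of what such a registration can look like, assuming the addGauge(String, MetricsVariable) API from the same org.apache.hadoop.hive.common.metrics.common package; the gauge name and the handleToSession map are illustrative stand-ins, not taken from this page:

// Sketch of a gauge registration: the metrics system pulls the current value
// from getValue() whenever it reports, so no explicit increments are needed.
private void registerOpenSesssionMetrics(Metrics metrics) {
  MetricsVariable<Integer> openSessionCount = new MetricsVariable<Integer>() {
    @Override
    public Integer getValue() {
      return handleToSession.size(); // assumed map of session handles to live sessions
    }
  };
  metrics.addGauge("hs2_open_sessions", openSessionCount);
}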