Use of org.apache.hadoop.hive.metastore.datasource.HikariCPDataSourceProvider in the Apache Hive project: class TxnHandler, method setupJdbcConnectionPool.
private static synchronized DataSource setupJdbcConnectionPool(Configuration conf, int maxPoolSize, long getConnectionTimeoutMs) throws SQLException {
  String driverUrl = DataSourceProvider.getMetastoreJdbcDriverUrl(conf);
  String user = DataSourceProvider.getMetastoreJdbcUser(conf);
  String passwd = DataSourceProvider.getMetastoreJdbcPasswd(conf);
  String connectionPooler = MetastoreConf.getVar(conf, ConfVars.CONNECTION_POOLING_TYPE).toLowerCase();

  if ("bonecp".equals(connectionPooler)) {
    // Enable retries to work around a BoneCP bug.
    doRetryOnConnPool = true;
    return new BoneCPDataSourceProvider().create(conf);
  } else if ("dbcp".equals(connectionPooler)) {
    GenericObjectPool objectPool = new GenericObjectPool();
    // https://commons.apache.org/proper/commons-pool/api-1.6/org/apache/commons/pool/impl/GenericObjectPool.html#setMaxActive(int)
    objectPool.setMaxActive(maxPoolSize);
    objectPool.setMaxWait(getConnectionTimeoutMs);
    ConnectionFactory connFactory = new DriverManagerConnectionFactory(driverUrl, user, passwd);
    // The reference itself is never used, but constructing the factory registers it with the pool; see
    // http://svn.apache.org/viewvc/commons/proper/dbcp/branches/DBCP_1_4_x_BRANCH/doc/ManualPoolingDataSourceExample.java?view=markup
    PoolableConnectionFactory poolConnFactory =
        new PoolableConnectionFactory(connFactory, objectPool, null, null, false, true);
    return new PoolingDataSource(objectPool);
  } else if ("hikaricp".equals(connectionPooler)) {
    return new HikariCPDataSourceProvider().create(conf);
  } else if ("none".equals(connectionPooler)) {
    LOG.info("Choosing not to pool JDBC connections");
    return new NoPoolConnectionPool(conf);
  } else {
    throw new RuntimeException("Unknown JDBC connection pooling " + connectionPooler);
  }
}
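
The branch taken by this method is decided entirely by ConfVars.CONNECTION_POOLING_TYPE, and the HikariCP branch simply delegates to HikariCPDataSourceProvider.create(conf). Below is a minimal, self-contained sketch of how caller code could force that branch and obtain the same kind of DataSource; the class and helper names are hypothetical, and it assumes MetastoreConf.setVar(Configuration, ConfVars, String) is available, as in the standalone metastore.

import java.sql.SQLException;
import javax.sql.DataSource;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
import org.apache.hadoop.hive.metastore.datasource.HikariCPDataSourceProvider;

// Hypothetical helper, not part of TxnHandler.
public class HikariPoolSelectionSketch {

  public static DataSource createHikariPool(Configuration conf) throws SQLException {
    // setupJdbcConnectionPool lower-cases this value before comparing,
    // so the casing used here does not matter.
    MetastoreConf.setVar(conf, ConfVars.CONNECTION_POOLING_TYPE, "hikaricp");

    // Same call the "hikaricp" branch above makes; the provider reads the
    // JDBC driver URL, user, and password from the supplied Configuration.
    return new HikariCPDataSourceProvider().create(conf);
  }
}

Setting the value to "bonecp", "dbcp", or "none" instead would select the other branches shown in setupJdbcConnectionPool.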