Use of org.apache.hadoop.hive.metastore.DatabaseProduct in project hive by apache.
The class TxnUtils, method seedTxnSequence.
/**
 * Restarts the txnId sequence with the given seed value.
 * It is the responsibility of the caller to not set the sequence backward.
 * @param conn database connection
 * @param conf configuration, used to determine the database product
 * @param stmt sql statement
 * @param seedTxnId the seed value for the sequence
 * @throws SQLException ex
 */
public static void seedTxnSequence(Connection conn, Configuration conf, Statement stmt, long seedTxnId) throws SQLException {
  String dbProduct = conn.getMetaData().getDatabaseProductName();
  DatabaseProduct databaseProduct = determineDatabaseProduct(dbProduct, conf);
  stmt.execute(databaseProduct.getTxnSeedFn(seedTxnId));
}
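For orientation, here is a minimal caller sketch. The JDBC URL, credentials, the MAX(TXN_ID) query, and the wrapper class are illustrative assumptions rather than Hive code, and the import path for TxnUtils (org.apache.hadoop.hive.metastore.txn) is assumed; only the seedTxnSequence call itself comes from the snippet above.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.txn.TxnUtils;

public class SeedTxnSequenceExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Placeholder connection details for the metastore backing database.
    try (Connection conn = DriverManager.getConnection(
            "jdbc:postgresql://localhost:5432/metastore", "hive", "hive");
         Statement stmt = conn.createStatement()) {
      long maxTxnId = 0L;
      // Assumed high-water-mark query against the metastore TXNS table.
      try (ResultSet rs = stmt.executeQuery("SELECT MAX(TXN_ID) FROM TXNS")) {
        if (rs.next()) {
          maxTxnId = rs.getLong(1);
        }
      }
      // The caller must never move the sequence backward, so seed past
      // the current maximum.
      TxnUtils.seedTxnSequence(conn, conf, stmt, maxTxnId + 1);
    }
  }
}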
Use of org.apache.hadoop.hive.metastore.DatabaseProduct in project hive by apache.
The class HikariCPDataSourceProvider, method create.
@Override
public DataSource create(Configuration hdpConfig) throws SQLException {
  LOG.debug("Creating Hikari connection pool for the MetaStore");
  String driverUrl = DataSourceProvider.getMetastoreJdbcDriverUrl(hdpConfig);
  String user = DataSourceProvider.getMetastoreJdbcUser(hdpConfig);
  String passwd = DataSourceProvider.getMetastoreJdbcPasswd(hdpConfig);
  int maxPoolSize = MetastoreConf.getIntVar(hdpConfig, MetastoreConf.ConfVars.CONNECTION_POOLING_MAX_CONNECTIONS);
  Properties properties = replacePrefix(DataSourceProvider.getPrefixedProperties(hdpConfig, HIKARI));
  long connectionTimeout = hdpConfig.getLong(CONNECTION_TIMEOUT_PROPERTY, 30000L);
  long leakDetectionThreshold = hdpConfig.getLong(LEAK_DETECTION_THRESHOLD, 3600000L);
  HikariConfig config;
  try {
    config = new HikariConfig(properties);
  } catch (Exception e) {
    throw new SQLException("Cannot create HikariCP configuration: ", e);
  }
  config.setMaximumPoolSize(maxPoolSize);
  config.setJdbcUrl(driverUrl);
  config.setUsername(user);
  config.setPassword(passwd);
  config.setLeakDetectionThreshold(leakDetectionThreshold);
  // https://github.com/brettwooldridge/HikariCP
  config.setConnectionTimeout(connectionTimeout);
  DatabaseProduct dbProduct = DatabaseProduct.determineDatabaseProduct(driverUrl, hdpConfig);
  String s = dbProduct.getPrepareTxnStmt();
  if (s != null) {
    config.setConnectionInitSql(s);
  }
  Map<String, String> props = dbProduct.getDataSourceProperties();
  for (Map.Entry<String, String> kv : props.entrySet()) {
    config.addDataSourceProperty(kv.getKey(), kv.getValue());
  }
  return new HikariDataSource(initMetrics(config));
}
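A minimal sketch of how this provider might be consumed. It assumes MetastoreConf.newMetastoreConf() for configuration, a public no-arg constructor, and the package org.apache.hadoop.hive.metastore.datasource for the provider class; the JDBC URL, user, and password are expected to already be set in the configuration.

import java.sql.Connection;
import javax.sql.DataSource;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.datasource.HikariCPDataSourceProvider;

public class HikariPoolExample {
  public static void main(String[] args) throws Exception {
    // Assumes the metastore JDBC connection settings are already configured.
    Configuration conf = MetastoreConf.newMetastoreConf();
    DataSource ds = new HikariCPDataSourceProvider().create(conf);
    try (Connection conn = ds.getConnection()) {
      // Connections come out of the pool already primed with any
      // product-specific init SQL (getPrepareTxnStmt) set in create().
      System.out.println("Connected: " + conn.getMetaData().getDatabaseProductName());
    }
  }
}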
Use of org.apache.hadoop.hive.metastore.DatabaseProduct in project hive by apache.
The class DbCPDataSourceProvider, method create.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public DataSource create(Configuration hdpConfig) throws SQLException {
  LOG.debug("Creating dbcp connection pool for the MetaStore");
  String driverUrl = DataSourceProvider.getMetastoreJdbcDriverUrl(hdpConfig);
  String user = DataSourceProvider.getMetastoreJdbcUser(hdpConfig);
  String passwd = DataSourceProvider.getMetastoreJdbcPasswd(hdpConfig);
  BasicDataSource dbcpDs = new BasicDataSource();
  dbcpDs.setUrl(driverUrl);
  dbcpDs.setUsername(user);
  dbcpDs.setPassword(passwd);
  dbcpDs.setDefaultReadOnly(false);
  dbcpDs.setDefaultAutoCommit(true);
  DatabaseProduct dbProduct = DatabaseProduct.determineDatabaseProduct(driverUrl, hdpConfig);
  Map<String, String> props = dbProduct.getDataSourceProperties();
  for (Map.Entry<String, String> kv : props.entrySet()) {
    dbcpDs.setConnectionProperties(kv.getKey() + "=" + kv.getValue());
  }
  int maxPoolSize = hdpConfig.getInt(MetastoreConf.ConfVars.CONNECTION_POOLING_MAX_CONNECTIONS.getVarname(),
      ((Long) MetastoreConf.ConfVars.CONNECTION_POOLING_MAX_CONNECTIONS.getDefaultVal()).intValue());
  long connectionTimeout = hdpConfig.getLong(CONNECTION_TIMEOUT_PROPERTY, 30000L);
  int connectionMaxIdle = hdpConfig.getInt(CONNECTION_MAX_IDLE_PROPERTY, 8);
  int connectionMinIdle = hdpConfig.getInt(CONNECTION_MIN_IDLE_PROPERTY, 0);
  boolean testOnBorrow = hdpConfig.getBoolean(CONNECTION_TEST_BORROW_PROPERTY, BaseObjectPoolConfig.DEFAULT_TEST_ON_BORROW);
  long evictionTimeMillis = hdpConfig.getLong(CONNECTION_MIN_EVICT_MILLIS_PROPERTY, BaseObjectPoolConfig.DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS);
  boolean testWhileIdle = hdpConfig.getBoolean(CONNECTION_TEST_IDLEPROPERTY, BaseObjectPoolConfig.DEFAULT_TEST_WHILE_IDLE);
  long timeBetweenEvictionRuns = hdpConfig.getLong(CONNECTION_TIME_BETWEEN_EVICTION_RUNS_MILLIS, BaseObjectPoolConfig.DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS);
  int numTestsPerEvictionRun = hdpConfig.getInt(CONNECTION_NUM_TESTS_PER_EVICTION_RUN, BaseObjectPoolConfig.DEFAULT_NUM_TESTS_PER_EVICTION_RUN);
  boolean testOnReturn = hdpConfig.getBoolean(CONNECTION_TEST_ON_RETURN, BaseObjectPoolConfig.DEFAULT_TEST_ON_RETURN);
  long softMinEvictableIdleTimeMillis = hdpConfig.getLong(CONNECTION_SOFT_MIN_EVICTABLE_IDLE_TIME, BaseObjectPoolConfig.DEFAULT_SOFT_MIN_EVICTABLE_IDLE_TIME_MILLIS);
  boolean lifo = hdpConfig.getBoolean(CONNECTION_LIFO, BaseObjectPoolConfig.DEFAULT_LIFO);
  ConnectionFactory connFactory = new DataSourceConnectionFactory(dbcpDs);
  PoolableConnectionFactory poolableConnFactory = new PoolableConnectionFactory(connFactory, null);
  GenericObjectPool objectPool = new GenericObjectPool(poolableConnFactory);
  objectPool.setMaxTotal(maxPoolSize);
  objectPool.setMaxWaitMillis(connectionTimeout);
  objectPool.setMaxIdle(connectionMaxIdle);
  objectPool.setMinIdle(connectionMinIdle);
  objectPool.setTestOnBorrow(testOnBorrow);
  objectPool.setTestWhileIdle(testWhileIdle);
  objectPool.setMinEvictableIdleTimeMillis(evictionTimeMillis);
  objectPool.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRuns);
  objectPool.setNumTestsPerEvictionRun(numTestsPerEvictionRun);
  objectPool.setTestOnReturn(testOnReturn);
  objectPool.setSoftMinEvictableIdleTimeMillis(softMinEvictableIdleTimeMillis);
  objectPool.setLifo(lifo);
  String stmt = dbProduct.getPrepareTxnStmt();
  if (stmt != null) {
    poolableConnFactory.setValidationQuery(stmt);
  }
  return new PoolingDataSource(objectPool);
}
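Both providers above follow the same pattern: resolve the DatabaseProduct from the JDBC driver URL, then forward its product-specific connection properties to whichever pool implementation is in use. A stripped-down sketch of that shared step follows; the MySQL URL and the wrapper class are placeholders, while determineDatabaseProduct and getDataSourceProperties are the calls shown in the snippets above.

import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.DatabaseProduct;

public class DatabaseProductExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Placeholder JDBC URL; determineDatabaseProduct inspects it to select
    // the product-specific behavior (MySQL, Postgres, Oracle, ...).
    String driverUrl = "jdbc:mysql://db-host:3306/metastore";
    DatabaseProduct dbProduct = DatabaseProduct.determineDatabaseProduct(driverUrl, conf);
    // These key/value pairs are what the pool providers forward verbatim,
    // via addDataSourceProperty (HikariCP) or setConnectionProperties (DBCP).
    for (Map.Entry<String, String> kv : dbProduct.getDataSourceProperties().entrySet()) {
      System.out.println(kv.getKey() + "=" + kv.getValue());
    }
  }
}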