Example use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in the cdap project (by caskdata): the getMetaStoreClient method of the BaseHiveExploreService class.
/**
 * Returns a Hive metastore client for the calling thread, lazily creating and caching one
 * per thread on first use.
 *
 * <p>The client is wrapped in a constant {@link Supplier} stored in a thread-local
 * (metastoreClientLocal — presumably a ThreadLocal; confirm against the field declaration),
 * and the supplier is registered as a weak-reference key in metastoreClientReferences so the
 * client can be closed after its owning thread dies. The statement order below matters:
 * the supplier must be installed in the thread-local before (or together with) the weak
 * registration, since GC of the supplier is the death signal.
 *
 * @return the cached or newly created {@link IMetaStoreClient} for this thread
 * @throws ExploreException if the metastore client cannot be created
 */
private IMetaStoreClient getMetaStoreClient() throws ExploreException {
// First use on this thread: build and register a new client.
if (metastoreClientLocal.get() == null) {
try {
IMetaStoreClient client = new HiveMetaStoreClient(createHiveConf());
// Constant supplier: its only job is to be a GC-observable proxy for the thread's client.
Supplier<IMetaStoreClient> supplier = Suppliers.ofInstance(client);
metastoreClientLocal.set(supplier);
// We use GC of the supplier as a signal for us to know that a thread is gone
// The supplier is set into the thread local, which will get GC'ed when the thread is gone.
// Since we use a weak reference key to the supplier that points to the client
// (in the metastoreClientReferences map), it won't block GC of the supplier instance.
// We can use the weak reference, which is retrieved through polling the ReferenceQueue,
// to get back the client and call close() on it.
metastoreClientReferences.put(new WeakReference<>(supplier, metastoreClientReferenceQueue), client);
} catch (MetaException e) {
throw new ExploreException("Error initializing Hive Metastore client", e);
}
}
return metastoreClientLocal.get().get();
}
Example use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in the streamline project (by hortonworks): the newInstance method of the HiveMetadataService class.
/**
 * Creates a {@link HiveMetadataService} that delegates to a {@link HiveMetaStoreClient}
 * built from the given {@link HiveConf}.
 *
 * <p>When the security context is Kerberos-authenticated, Kerberos configuration is applied
 * and the client is instantiated inside the supplied subject's privileged context; otherwise
 * the client is constructed directly.
 *
 * @throws MetaException if the metastore client cannot be created
 * @throws PrivilegedActionException if the privileged client creation fails
 */
public static HiveMetadataService newInstance(HiveConf hiveConf, SecurityContext securityContext,
    Subject subject, Component hiveMetastore, Collection<ComponentProcess> hiveMetastoreProcesses)
    throws MetaException, IOException, EntityNotFoundException, PrivilegedActionException {
  if (!SecurityUtil.isKerberosAuthenticated(securityContext)) {
    // Unsecured cluster: connect with the configuration as-is.
    return new HiveMetadataService(new HiveMetaStoreClient(hiveConf), hiveConf, securityContext,
        subject, hiveMetastore, hiveMetastoreProcesses);
  }
  // Sets Kerberos rules
  UserGroupInformation.setConfiguration(hiveConf);
  // Adds the user principal to this subject
  UserGroupInformation.getUGIFromSubject(subject);
  // Build the client inside the subject's privileged context.
  HiveMetaStoreClient secureClient =
      SecurityUtil.execute(() -> new HiveMetaStoreClient(hiveConf), securityContext, subject);
  return new HiveMetadataService(secureClient, hiveConf, securityContext, subject,
      hiveMetastore, hiveMetastoreProcesses);
}
Example use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in the incubator-gobblin project (by apache): the initHiveMetastoreClient method of the HivePurgerPublisher class.
/**
 * Initializes {@code this.client} with a new {@link HiveMetaStoreClient}.
 *
 * <p>If a superuser keytab location is present in the job state, a Kerberos login is performed
 * from that keytab and the client is created inside a doAs block for the login user; otherwise
 * the client is created with the current credentials.
 *
 * @throws Exception if the Kerberos login or client construction fails
 */
public void initHiveMetastoreClient() throws Exception {
  if (!this.state.contains(ConfigurationKeys.SUPER_USER_KEY_TAB_LOCATION)) {
    // No keytab configured: connect directly.
    this.client = new HiveMetaStoreClient(new HiveConf());
    return;
  }
  String superUser = this.state.getProp(ComplianceConfigurationKeys.GOBBLIN_COMPLIANCE_SUPER_USER);
  String realm = this.state.getProp(ConfigurationKeys.KERBEROS_REALM);
  String keytabLocation = this.state.getProp(ConfigurationKeys.SUPER_USER_KEY_TAB_LOCATION);
  log.info("Establishing MetastoreClient connection using " + keytabLocation);
  // Log in as the superuser principal, then create the client under that identity.
  UserGroupInformation.loginUserFromKeytab(
      HostUtils.getPrincipalUsingHostname(superUser, realm), keytabLocation);
  UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
  // Cast disambiguates the overloaded doAs and lets the lambda throw TException.
  loginUser.doAs((PrivilegedExceptionAction<Void>) () -> {
    this.client = new HiveMetaStoreClient(new HiveConf());
    return null;
  });
}
Example use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in the incubator-gobblin project (by apache): the cancelTokens method of the ProxyUtils class.
/**
 * Cancels all Hive delegation tokens held by the real user behind the current proxy user.
 *
 * <p>Logs in as the configured superuser from its keytab, then walks the real user's
 * credentials and cancels every token of kind {@code HIVE_DELEGATION_KIND}.
 *
 * <p>Fix over the previous version: a single {@link HiveMetaStoreClient} is created lazily
 * (only if a Hive token is actually found), reused for all matching tokens instead of being
 * re-instantiated per token, and always closed in a {@code finally} block so the metastore
 * connection is not leaked.
 *
 * @param state job state; must contain the superuser keytab location, superuser name and realm
 * @throws IOException if the Kerberos login fails
 * @throws TException if token cancellation fails
 */
public static void cancelTokens(State state) throws IOException, InterruptedException, TException {
  Preconditions.checkArgument(state.contains(ConfigurationKeys.SUPER_USER_KEY_TAB_LOCATION),
      "Missing required property " + ConfigurationKeys.SUPER_USER_KEY_TAB_LOCATION);
  Preconditions.checkArgument(state.contains(ComplianceConfigurationKeys.GOBBLIN_COMPLIANCE_SUPER_USER),
      "Missing required property " + ComplianceConfigurationKeys.GOBBLIN_COMPLIANCE_SUPER_USER);
  Preconditions.checkArgument(state.contains(ConfigurationKeys.KERBEROS_REALM),
      "Missing required property " + ConfigurationKeys.KERBEROS_REALM);
  String superUser = state.getProp(ComplianceConfigurationKeys.GOBBLIN_COMPLIANCE_SUPER_USER);
  String keytabLocation = state.getProp(ConfigurationKeys.SUPER_USER_KEY_TAB_LOCATION);
  String realm = state.getProp(ConfigurationKeys.KERBEROS_REALM);
  UserGroupInformation.loginUserFromKeytab(
      HostUtils.getPrincipalUsingHostname(superUser, realm), keytabLocation);
  UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
  UserGroupInformation realUser = currentUser.getRealUser();
  Credentials credentials = realUser.getCredentials();
  // Created lazily so no metastore connection is opened when there is nothing to cancel.
  HiveMetaStoreClient hiveClient = null;
  try {
    for (Token<?> token : credentials.getAllTokens()) {
      if (token.getKind().equals(DelegationTokenIdentifier.HIVE_DELEGATION_KIND)) {
        log.info("Cancelling hive token");
        if (hiveClient == null) {
          hiveClient = new HiveMetaStoreClient(new HiveConf());
        }
        hiveClient.cancelDelegationToken(token.encodeToUrlString());
      }
    }
  } finally {
    // Always release the metastore connection; previously the client was never closed.
    if (hiveClient != null) {
      hiveClient.close();
    }
  }
}
Example use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in the beam project (by apache): the create method of the HCatalogBeamSchema class.
/**
 * Create the schema adapter.
 *
 * <p>Each entry of the config map is applied to a fresh {@link HiveConf}, which is then used
 * to construct the {@link HiveMetaStoreClient} backing the returned schema.
 */
public static HCatalogBeamSchema create(Map<String, String> config) {
  try {
    HiveConf hiveConf = new HiveConf();
    // Apply every caller-supplied setting to the Hive configuration.
    for (Map.Entry<String, String> entry : config.entrySet()) {
      hiveConf.set(entry.getKey(), entry.getValue());
    }
    return new HCatalogBeamSchema(new HiveMetaStoreClient(hiveConf));
  } catch (Exception e) {
    // Boundary method: surface any failure as an unchecked exception with its cause intact.
    throw new RuntimeException(e);
  }
}
Aggregations