Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in the Apache Hive project:
the runInternal method of the GetPrimaryKeysOperation class.
@Override
public void runInternal() throws HiveSQLException {
  setState(OperationState.RUNNING);
  try {
    IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
    PrimaryKeysRequest sqlReq = new PrimaryKeysRequest(schemaName, tableName);
    List<SQLPrimaryKey> pks = metastoreClient.getPrimaryKeys(sqlReq);
    // A null result simply means there are no primary keys to report; the
    // operation must still transition to FINISHED. (The previous early
    // return left the operation stuck in RUNNING forever.)
    if (pks != null) {
      for (SQLPrimaryKey pk : pks) {
        // Row layout mirrors JDBC DatabaseMetaData.getPrimaryKeys:
        // TABLE_CAT, TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, KEY_SEQ, PK_NAME.
        rowSet.addRow(new Object[] { catalogName, pk.getTable_db(), pk.getTable_name(), pk.getColumn_name(), pk.getKey_seq(), pk.getPk_name() });
      }
    }
    setState(OperationState.FINISHED);
  } catch (Exception e) {
    // Any metastore failure marks the operation ERROR and is surfaced to the
    // client wrapped in HiveSQLException, preserving the cause.
    setState(OperationState.ERROR);
    throw new HiveSQLException(e);
  }
}
Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in the Apache Hive project:
the runInternal method of the GetSchemasOperation class.
@Override
public void runInternal() throws HiveSQLException {
  setState(OperationState.RUNNING);
  if (isAuthV2Enabled()) {
    // Build the audit string describing this metadata request for the
    // SQL-standard authorization check.
    authorizeMetaGets(HiveOperationType.GET_SCHEMAS, null,
        "catalog : " + catalogName + ", schemaPattern : " + schemaName);
  }
  try {
    IMetaStoreClient client = getParentSession().getMetaStoreClient();
    // Translate the JDBC-style schema pattern into the metastore's format.
    String pattern = convertSchemaPattern(schemaName);
    // One result row per matching database; Hive exposes a single catalog.
    for (String database : client.getDatabases(pattern)) {
      rowSet.addRow(new Object[] { database, DEFAULT_HIVE_CATALOG });
    }
    setState(OperationState.FINISHED);
  } catch (Exception e) {
    setState(OperationState.ERROR);
    throw new HiveSQLException(e);
  }
}
Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in the Apache Hive project:
the testCacheMiss method of the TestHiveClientCache class.
@Test
public void testCacheMiss() throws IOException, MetaException, LoginException {
  HiveClientCache clientCache = new HiveClientCache(1000);
  IMetaStoreClient first = clientCache.get(hiveConf);
  assertNotNull(first);
  // The metastore URI participates in the cache key, and URIs are compared
  // as raw strings — even a whitespace-only change must produce a cache
  // miss and therefore a distinct client instance.
  hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, " ");
  IMetaStoreClient second = clientCache.get(hiveConf);
  assertNotNull(second);
  assertNotSame(first, second);
}
Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in the Apache Hive project:
the buildHcatDelegationToken method of the SecureProxySupport class.
/**
 * Obtains a metastore delegation token on behalf of the given proxy user.
 *
 * A metastore client is created under the server's own credentials, then the
 * token request itself runs inside a doAs for the proxied user's UGI, so the
 * token is issued to that user.
 *
 * @param user the short name of the user to impersonate
 * @return the serialized delegation token string
 * @throws IOException on UGI/connection failures
 * @throws InterruptedException if the doAs is interrupted
 * @throws TException on metastore thrift errors
 */
private String buildHcatDelegationToken(String user) throws IOException, InterruptedException, TException {
final HiveConf c = new HiveConf();
// NOTE(review): the client is never closed here — presumably
// HCatUtil.getHiveMetastoreClient returns a cached/shared instance, but
// confirm; otherwise this leaks a metastore connection per call.
final IMetaStoreClient client = HCatUtil.getHiveMetastoreClient(c);
LOG.info("user: " + user + " loginUser: " + UserGroupInformation.getLoginUser().getUserName());
final UserGroupInformation ugi = UgiFactory.getUgi(user);
// Run the token request as the proxied user; c and client are captured as
// finals by the anonymous action.
String s = ugi.doAs(new PrivilegedExceptionAction<String>() {

public String run() throws IOException, MetaException, TException {
String u = ugi.getUserName();
// Owner is the server principal (c.getUser()); renewer is the proxied user.
return client.getDelegationToken(c.getUser(), u);
}
});
return s;
}
Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in the Apache Hive project:
the getTable method of the PigHCatUtil class.
/**
 * Resolves the HCatalog table for a Pig load/store location, consulting and
 * populating the per-instance table cache.
 *
 * The job argument is passed so that configuration overrides can be used to
 * initialize the metastore configuration in the special case of an embedded
 * metastore (hive.metastore.uris = "").
 *
 * @param location the Pig location string, e.g. "db.table" or "table"
 * @param hcatServerUri metastore URI; part of the cache key alongside location
 * @param hcatServerPrincipal kerberos principal of the metastore server
 * @param job current job, used for metastore configuration overrides
 * @return the resolved table (never null; absence raises an exception)
 * @throws IOException wrapping any metastore failure; a missing table is
 *         reported as a PigException for a friendlier frontend message
 */
public Table getTable(String location, String hcatServerUri, String hcatServerPrincipal, Job job) throws IOException {
Pair<String, String> cacheKey = new Pair<String, String>(location, hcatServerUri);
Table cached = hcatTableCache.get(cacheKey);
if (cached != null) {
  return cached;
}
Pair<String, String> dbAndTable = PigHCatUtil.getDBTableNames(location);
IMetaStoreClient client = null;
Table resolved = null;
try {
  client = getHiveMetaClient(hcatServerUri, hcatServerPrincipal, PigHCatUtil.class, job);
  resolved = HCatUtil.getTable(client, dbAndTable.first, dbAndTable.second);
} catch (NoSuchObjectException nsoe) {
  // prettier error messages to frontend
  throw new PigException("Table not found : " + nsoe.getMessage(), PIG_EXCEPTION_CODE);
} catch (Exception e) {
  throw new IOException(e);
} finally {
  // Always release the client, even on the exception paths above.
  HCatUtil.closeHiveClientQuietly(client);
}
hcatTableCache.put(cacheKey, resolved);
return resolved;
}
Aggregations