Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
From the class HiveClient, method getDBList:
private List<String> getDBList(String databaseMatching, List<String> dbList) throws HadoopException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HiveClient getDBList databaseMatching : " + databaseMatching + " ExcludedbList :" + dbList);
    }
    List<String> ret = new ArrayList<String>();
    if (con != null) {
        Statement stat = null;
        ResultSet rs = null;
        String sql = "show databases";
        if (databaseMatching != null && !databaseMatching.isEmpty()) {
            sql = sql + " like \"" + databaseMatching + "\"";
        }
        try {
            stat = con.createStatement();
            rs = stat.executeQuery(sql);
            while (rs.next()) {
                String dbName = rs.getString(1);
                if (dbList != null && dbList.contains(dbName)) {
                    continue;
                }
                ret.add(dbName);
            }
        } catch (SQLTimeoutException sqlt) {
            String msgDesc = "Time Out, Unable to execute SQL [" + sql + "].";
            HadoopException hdpException = new HadoopException(msgDesc, sqlt);
            hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HiveClient.getDBList() Error : ", sqlt);
            }
            throw hdpException;
        } catch (SQLException sqle) {
            String msgDesc = "Unable to execute SQL [" + sql + "].";
            HadoopException hdpException = new HadoopException(msgDesc, sqle);
            hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HiveClient.getDBList() Error : ", sqle);
            }
            throw hdpException;
        } finally {
            close(rs);
            close(stat);
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HiveClient.getDBList(): " + ret);
    }
    return ret;
}
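Both catch blocks above wrap the underlying SQL exception the same way: build a HadoopException with a descriptive message, call the five-argument generateResponseDataMap(false, ...) so the lookup failure is carried back in the response data, and rethrow. A minimal sketch of that shared pattern, factored into a hypothetical wrapAsHadoopException helper (not part of the Ranger source):

    // Hypothetical helper illustrating the wrap-and-rethrow pattern shared by the
    // SQLTimeoutException and SQLException branches of getDBList above.
    private HadoopException wrapAsHadoopException(String msgDesc, Exception cause) {
        HadoopException hdpException = new HadoopException(msgDesc, cause);
        // false marks the lookup as failed; message and description are carried in the response data
        hdpException.generateResponseDataMap(false, getMessage(cause), msgDesc + ERR_MSG, null, null);
        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HiveClient.getDBList() Error : ", cause);
        }
        return hdpException;
    }

With such a helper, each catch block would reduce to a single line such as throw wrapAsHadoopException("Unable to execute SQL [" + sql + "].", sqle);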
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
From the class HBaseClient, method getColumnFamilyList:
public List<String> getColumnFamilyList(final String columnFamilyMatching, final List<String> tableList, final List<String> existingColumnFamilies) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HbaseClient.getColumnFamilyList() columnFamilyMatching " + columnFamilyMatching + " ExistingTableList " + tableList + " existingColumnFamilies " + existingColumnFamilies);
    }
    List<String> ret = null;
    final String errMsg = " You can still save the repository and start creating " + "policies, but you would not be able to use autocomplete for " + "resource names. Check ranger_admin.log for more info.";
    subj = getLoginSubject();
    if (subj != null) {
        try {
            ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {

                String tblName = null;

                @Override
                public List<String> run() {
                    List<String> colfList = new ArrayList<String>();
                    HBaseAdmin admin = null;
                    try {
                        LOG.info("getColumnFamilyList: setting config values from client");
                        setClientConfigValues(conf);
                        LOG.info("getColumnFamilyList: checking HbaseAvailability with the new config");
                        HBaseAdmin.checkHBaseAvailable(conf);
                        LOG.info("getColumnFamilyList: no exception: HbaseAvailability true");
                        admin = new HBaseAdmin(conf);
                        if (tableList != null) {
                            for (String tableName : tableList) {
                                tblName = tableName;
                                HTableDescriptor htd = admin.getTableDescriptor(tblName.getBytes());
                                if (htd != null) {
                                    for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
                                        String colf = hcd.getNameAsString();
                                        if (colf.matches(columnFamilyMatching)) {
                                            if (existingColumnFamilies != null && existingColumnFamilies.contains(colf)) {
                                                continue;
                                            } else {
                                                colfList.add(colf);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    } catch (ZooKeeperConnectionException zce) {
                        String msgDesc = "getColumnFamilyList: Unable to connect to `ZooKeeper` " + "using given config parameters.";
                        HadoopException hdpException = new HadoopException(msgDesc, zce);
                        hdpException.generateResponseDataMap(false, getMessage(zce), msgDesc + errMsg, null, null);
                        LOG.error(msgDesc, zce);
                        throw hdpException;
                    } catch (MasterNotRunningException mnre) {
                        String msgDesc = "getColumnFamilyList: Looks like `Master` is not running, " + "so couldn't check that running HBase is available or not, " + "Please try again later.";
                        HadoopException hdpException = new HadoopException(msgDesc, mnre);
                        hdpException.generateResponseDataMap(false, getMessage(mnre), msgDesc + errMsg, null, null);
                        LOG.error(msgDesc, mnre);
                        throw hdpException;
                    } catch (IOException io) {
                        String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for " + "[repository:" + getConfigHolder().getDatasourceName() + ",table:" + tblName + ", table-match:" + columnFamilyMatching + "] ";
                        HadoopException hdpException = new HadoopException(msgDesc, io);
                        hdpException.generateResponseDataMap(false, getMessage(io), msgDesc + errMsg, null, null);
                        LOG.error(msgDesc, io);
                        throw hdpException;
                    } catch (SecurityException se) {
                        String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for " + "[repository:" + getConfigHolder().getDatasourceName() + ",table:" + tblName + ", table-match:" + columnFamilyMatching + "] ";
                        HadoopException hdpException = new HadoopException(msgDesc, se);
                        hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + errMsg, null, null);
                        LOG.error(msgDesc, se);
                        throw hdpException;
                    } catch (Throwable e) {
                        String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for " + "[repository:" + getConfigHolder().getDatasourceName() + ",table:" + tblName + ", table-match:" + columnFamilyMatching + "] ";
                        HadoopException hdpException = new HadoopException(msgDesc, e);
                        hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + errMsg, null, null);
                        LOG.error(msgDesc, e);
                        throw hdpException;
                    } finally {
                        if (admin != null) {
                            try {
                                admin.close();
                            } catch (IOException e) {
                                LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e);
                            }
                        }
                    }
                    return colfList;
                }
            });
        } catch (SecurityException se) {
            String msgDesc = "getColumnFamilyList: Unable to connect to HBase Server instance ";
            HadoopException hdpException = new HadoopException(msgDesc, se);
            hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + errMsg, null, null);
            LOG.error(msgDesc, se);
            throw hdpException;
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HbaseClient.getColumnFamilyList() " + ret);
    }
    return ret;
}
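The lookup work is wrapped in Subject.doAs so the HBase admin calls run with the credentials of the login subject (typically obtained from a Kerberos keytab login). A minimal, self-contained sketch of that pattern, using only the standard javax.security.auth API; the Subject and the returned values are stand-ins, not the real getLoginSubject() result:

    import java.security.PrivilegedAction;
    import java.util.Arrays;
    import java.util.List;
    import javax.security.auth.Subject;

    public class PrivilegedLookupSketch {
        public static void main(String[] args) {
            // Stand-in subject; the real client obtains one via getLoginSubject().
            Subject loginSubject = new Subject();
            List<String> columnFamilies = Subject.doAs(loginSubject, new PrivilegedAction<List<String>>() {
                @Override
                public List<String> run() {
                    // The HBase admin calls above would run here, on a thread
                    // carrying the subject's credentials.
                    return Arrays.asList("cf1", "cf2");
                }
            });
            System.out.println(columnFamilies);
        }
    }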
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
From the class StormClient, method getStormClient:
public static StormClient getStormClient(String serviceName, Map<String, String> configs) {
    StormClient stormClient = null;
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting StormClient for datasource: " + serviceName);
        LOG.debug("configMap: " + configs);
    }
    String errMsg = errMessage;
    if (configs == null || configs.isEmpty()) {
        String msgDesc = "Could not connect as Connection ConfigMap is empty.";
        LOG.error(msgDesc);
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
        throw hdpException;
    } else {
        String stormUrl = configs.get("nimbus.url");
        String stormAdminUser = configs.get("username");
        String stormAdminPassword = configs.get("password");
        String lookupPrincipal = configs.get("lookupprincipal");
        String lookupKeytab = configs.get("lookupkeytab");
        String nameRules = configs.get("namerules");
        stormClient = new StormClient(stormUrl, stormAdminUser, stormAdminPassword, lookupPrincipal, lookupKeytab, nameRules);
    }
    return stormClient;
}
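getStormClient only checks that the config map is non-empty and then reads a fixed set of keys; note that, despite the key name, the "nimbus.url" value is used as the Storm UI base URL by getTopologyList below. A hedged caller sketch follows: only the key names come from the code above, the values are placeholders, and buildStormLookupClient is a hypothetical helper:

    // Hypothetical caller; key names match the ones getStormClient reads, values are placeholders.
    private static StormClient buildStormLookupClient() {
        Map<String, String> configs = new HashMap<String, String>();
        configs.put("nimbus.url", "http://storm-ui-host:8080");                   // Storm UI base URL (placeholder)
        configs.put("username", "admin");                                         // lookup user (placeholder)
        configs.put("password", "admin-password");                                // lookup password (placeholder)
        configs.put("lookupprincipal", "rangerlookup@EXAMPLE.COM");               // optional Kerberos principal (placeholder)
        configs.put("lookupkeytab", "/etc/security/keytabs/rangerlookup.keytab"); // optional keytab path (placeholder)
        configs.put("namerules", "DEFAULT");                                      // auth_to_local name rules (placeholder)
        return StormClient.getStormClient("stormdev", configs);
    }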
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
From the class StormClient, method getTopologyList:
public List<String> getTopologyList(final String topologyNameMatching, final List<String> stormTopologyList) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting Storm topology list for topologyNameMatching : " + topologyNameMatching);
    }
    PrivilegedAction<ArrayList<String>> topologyListGetter = new PrivilegedAction<ArrayList<String>>() {

        @Override
        public ArrayList<String> run() {
            if (stormUIUrl == null || stormUIUrl.trim().isEmpty()) {
                return null;
            }
            String[] stormUIUrls = stormUIUrl.trim().split("[,;]");
            if (stormUIUrls == null || stormUIUrls.length == 0) {
                return null;
            }
            Client client = Client.create();
            ClientResponse response = null;
            for (String currentUrl : stormUIUrls) {
                if (currentUrl == null || currentUrl.trim().isEmpty()) {
                    continue;
                }
                String url = currentUrl.trim() + TOPOLOGY_LIST_API_ENDPOINT;
                try {
                    response = getTopologyResponse(url, client);
                    if (response != null) {
                        if (response.getStatus() == 200) {
                            break;
                        } else {
                            response.close();
                        }
                    }
                } catch (Throwable t) {
                    String msgDesc = "Exception while getting topology list." + " URL : " + url;
                    LOG.error(msgDesc, t);
                }
            }
            ArrayList<String> lret = new ArrayList<String>();
            try {
                if (response != null) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("getTopologyList():response.getStatus()= " + response.getStatus());
                    }
                    if (response.getStatus() == 200) {
                        String jsonString = response.getEntity(String.class);
                        Gson gson = new GsonBuilder().setPrettyPrinting().create();
                        TopologyListResponse topologyListResponse = gson.fromJson(jsonString, TopologyListResponse.class);
                        if (topologyListResponse != null) {
                            if (topologyListResponse.getTopologyList() != null) {
                                for (Topology topology : topologyListResponse.getTopologyList()) {
                                    String topologyName = topology.getName();
                                    if (stormTopologyList != null && stormTopologyList.contains(topologyName)) {
                                        continue;
                                    }
                                    if (LOG.isDebugEnabled()) {
                                        LOG.debug("getTopologyList():Found topology " + topologyName);
                                        LOG.debug("getTopologyList():topology Name=[" + topology.getName() + "], topologyNameMatching=[" + topologyNameMatching + "], existingStormTopologyList=[" + stormTopologyList + "]");
                                    }
                                    if (topologyName != null) {
                                        if (topologyNameMatching == null || topologyNameMatching.isEmpty() || FilenameUtils.wildcardMatch(topology.getName(), topologyNameMatching + "*")) {
                                            if (LOG.isDebugEnabled()) {
                                                LOG.debug("getTopologyList():Adding topology " + topologyName);
                                            }
                                            lret.add(topologyName);
                                        }
                                    }
                                }
                            }
                        }
                    }
                } else {
                    String msgDesc = "Unable to get a valid response for " + "expected mime type : [" + EXPECTED_MIME_TYPE + "] URL : " + stormUIUrl + " - got null response.";
                    LOG.error(msgDesc);
                    HadoopException hdpException = new HadoopException(msgDesc);
                    hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMessage, null, null);
                    throw hdpException;
                }
            } catch (HadoopException he) {
                throw he;
            } catch (Throwable t) {
                String msgDesc = "Exception while getting Storm TopologyList." + " URL : " + stormUIUrl;
                HadoopException hdpException = new HadoopException(msgDesc, t);
                LOG.error(msgDesc, t);
                hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMessage, null, null);
                throw hdpException;
            } finally {
                if (response != null) {
                    response.close();
                }
                if (client != null) {
                    client.destroy();
                }
            }
            return lret;
        }

        private ClientResponse getTopologyResponse(String url, Client client) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("getTopologyResponse():calling " + url);
            }
            WebResource webResource = client.resource(url);
            ClientResponse response = webResource.accept(EXPECTED_MIME_TYPE).get(ClientResponse.class);
            if (response != null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("getTopologyResponse():response.getStatus()= " + response.getStatus());
                }
                if (response.getStatus() != 200) {
                    LOG.info("getTopologyResponse():response.getStatus()= " + response.getStatus() + " for URL " + url + ", failed to get topology list");
                    String jsonString = response.getEntity(String.class);
                    LOG.info(jsonString);
                }
            }
            return response;
        }
    };
    List<String> ret = null;
    try {
        ret = executeUnderKerberos(this.userName, this.password, this.lookupPrincipal, this.lookupKeytab, this.nameRules, topologyListGetter);
    } catch (IOException e) {
        LOG.error("Unable to get Topology list from [" + stormUIUrl + "]", e);
    }
    return ret;
}
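Inside the PrivilegedAction, topology names returned by the Storm UI are filtered with FilenameUtils.wildcardMatch after appending "*" to topologyNameMatching, so the filter effectively behaves as a case-sensitive prefix match. A small, self-contained illustration, assuming commons-io is on the classpath; the topology names used here are made up:

    import org.apache.commons.io.FilenameUtils;

    public class TopologyMatchSketch {
        public static void main(String[] args) {
            String topologyNameMatching = "word";   // hypothetical lookup input
            // "*" is appended as in getTopologyList above, so matching is prefix-style.
            System.out.println(FilenameUtils.wildcardMatch("wordcount", topologyNameMatching + "*"));   // true
            System.out.println(FilenameUtils.wildcardMatch("kafka-spout", topologyNameMatching + "*")); // false
        }
    }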