Example usage of org.apache.ranger.plugin.client.HadoopException in the Apache Ranger project, taken from the StormClient class, method getStormClient:
/**
 * Factory for {@link StormClient} instances built from a service connection config map.
 *
 * @param serviceName name of the Ranger service/datasource (used only for logging)
 * @param configs     connection properties; expected keys: "nimbus.url", "username",
 *                    "password", "lookupprincipal", "lookupkeytab", "namerules"
 * @return a new StormClient wired with the supplied connection properties
 * @throws HadoopException if {@code configs} is null or empty
 */
public static StormClient getStormClient(String serviceName, Map<String, String> configs) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting StormClient for datasource: " + serviceName);
        LOG.debug("configMap: " + configs);
    }

    String errMsg = errMessage;

    // Fail fast: without connection properties there is nothing to connect to.
    if (configs == null || configs.isEmpty()) {
        String msgDesc = "Could not connect as Connection ConfigMap is empty.";
        LOG.error(msgDesc);
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
        throw hdpException;
    }

    // Pull each expected connection property and hand them to the client constructor.
    String nimbusUrl     = configs.get("nimbus.url");
    String adminUser     = configs.get("username");
    String adminPassword = configs.get("password");
    String principal     = configs.get("lookupprincipal");
    String keytab        = configs.get("lookupkeytab");
    String nameRules     = configs.get("namerules");

    return new StormClient(nimbusUrl, adminUser, adminPassword, principal, keytab, nameRules);
}
Example usage of org.apache.ranger.plugin.client.HadoopException in the Apache Ranger project, taken from the SqoopClient class, method getSqoopResourceResponse:
/**
 * Deserializes a Sqoop REST response entity into an instance of {@code classOfT}.
 *
 * @param response the Jersey client response; closed in all cases before returning
 * @param classOfT target type for JSON deserialization
 * @return the deserialized resource, or null if the response body deserialized to null
 * @throws HadoopException if the response is null, has a non-200 status, or any
 *                         other failure occurs while reading/parsing the entity
 */
private <T> T getSqoopResourceResponse(ClientResponse response, Class<T> classOfT) {
    T resource = null;
    try {
        // Guard: no response object at all.
        if (response == null) {
            String msgDesc = "Unable to get a valid response for " + "expected mime type : [" + EXPECTED_MIME_TYPE + "], sqoopUrl: " + sqoopUrl + " - got null response.";
            LOG.error(msgDesc);
            HadoopException hdpException = new HadoopException(msgDesc);
            hdpException.generateResponseDataMap(false, msgDesc, msgDesc + ERROR_MESSAGE, null, null);
            throw hdpException;
        }
        // Guard: server answered, but not with HTTP 200.
        if (response.getStatus() != HttpStatus.SC_OK) {
            String msgDesc = "Unable to get a valid response for " + "expected mime type : [" + EXPECTED_MIME_TYPE + "], sqoopUrl: " + sqoopUrl + " - got http response code " + response.getStatus();
            LOG.error(msgDesc);
            HadoopException hdpException = new HadoopException(msgDesc);
            hdpException.generateResponseDataMap(false, msgDesc, msgDesc + ERROR_MESSAGE, null, null);
            throw hdpException;
        }
        // Happy path: read the JSON body and deserialize it.
        String jsonString = response.getEntity(String.class);
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        resource = gson.fromJson(jsonString, classOfT);
    } catch (HadoopException he) {
        // Already wrapped with response metadata — rethrow untouched.
        throw he;
    } catch (Throwable t) {
        // Wrap anything unexpected (I/O, JSON parse errors, ...) the same way.
        String msgDesc = "Exception while getting sqoop resource response, sqoopUrl: " + sqoopUrl;
        HadoopException hdpException = new HadoopException(msgDesc, t);
        LOG.error(msgDesc, t);
        hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + ERROR_MESSAGE, null, null);
        throw hdpException;
    } finally {
        // Always release the connection.
        if (response != null) {
            response.close();
        }
    }
    return resource;
}
Example usage of org.apache.ranger.plugin.client.HadoopException in the Apache Ranger project, taken from the YarnClient class, method getQueueList:
// Retrieves Yarn queue names (via the scheduler REST API) that start with
// queueNameMatching and are not already present in existingQueueList. The REST
// lookup runs under the configured login subject and is bounded by a 5-second
// timeout (see timedTask at the bottom).
public List<String> getQueueList(final String queueNameMatching, final List<String> existingQueueList) {
if (LOG.isDebugEnabled()) {
LOG.debug("Getting Yarn queue list for queueNameMatching : " + queueNameMatching);
}
final String errMsg = errMessage;
List<String> ret = null;
// The entire lookup is wrapped in a Callable so it can be executed with a timeout.
Callable<List<String>> callableYarnQListGetter = new Callable<List<String>>() {
@Override
public List<String> call() {
List<String> yarnQueueListGetter = null;
// Execute the REST call as the lookup user; if no subject is available,
// the result stays null (no exception is raised here).
Subject subj = getLoginSubject();
if (subj != null) {
yarnQueueListGetter = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
@Override
public List<String> run() {
// No configured Yarn URL -> nothing to query.
if (yarnQUrl == null || yarnQUrl.trim().isEmpty()) {
return null;
}
// yarnQUrl may list several candidate endpoints separated by ',' or ';'.
String[] yarnQUrls = yarnQUrl.trim().split("[,;]");
if (yarnQUrls == null || yarnQUrls.length == 0) {
return null;
}
Client client = Client.create();
ClientResponse response = null;
// Try each endpoint until one returns HTTP 200. Non-200 responses are
// closed and the next URL is tried; per-URL exceptions are logged and
// swallowed so the remaining URLs still get a chance.
for (String currentUrl : yarnQUrls) {
if (currentUrl == null || currentUrl.trim().isEmpty()) {
continue;
}
String url = currentUrl.trim() + YARN_LIST_API_ENDPOINT;
try {
response = getQueueResponse(url, client);
if (response != null) {
if (response.getStatus() == 200) {
break;
} else {
response.close();
}
}
} catch (Throwable t) {
String msgDesc = "Exception while getting Yarn Queue List." + " URL : " + url;
LOG.error(msgDesc, t);
}
}
List<String> lret = new ArrayList<String>();
try {
if (response != null && response.getStatus() == 200) {
// Parse the scheduler response and filter queue names.
String jsonString = response.getEntity(String.class);
Gson gson = new GsonBuilder().setPrettyPrinting().create();
YarnSchedulerResponse yarnQResponse = gson.fromJson(jsonString, YarnSchedulerResponse.class);
if (yarnQResponse != null) {
List<String> yarnQueueList = yarnQResponse.getQueueNames();
if (yarnQueueList != null) {
for (String yarnQueueName : yarnQueueList) {
// Skip queues the caller already knows about.
if (existingQueueList != null && existingQueueList.contains(yarnQueueName)) {
continue;
}
// Empty/null pattern matches everything; otherwise prefix match.
if (queueNameMatching == null || queueNameMatching.isEmpty() || yarnQueueName.startsWith(queueNameMatching)) {
if (LOG.isDebugEnabled()) {
LOG.debug("getQueueList():Adding yarnQueue " + yarnQueueName);
}
lret.add(yarnQueueName);
}
}
}
}
} else {
// NOTE(review): this branch also fires when every endpoint answered with a
// non-200 status (the last non-200 response was closed above but the
// variable is still non-null), so the "got null response" wording can be
// misleading — confirm before relying on the message.
String msgDesc = "Unable to get a valid response for " + "expected mime type : [" + EXPECTED_MIME_TYPE + "] URL : " + yarnQUrl + " - got null response.";
LOG.error(msgDesc);
HadoopException hdpException = new HadoopException(msgDesc);
hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
throw hdpException;
}
} catch (HadoopException he) {
// Already carries response metadata; rethrow as-is.
throw he;
} catch (Throwable t) {
// Wrap parse/IO failures with the same response-data shape.
String msgDesc = "Exception while getting Yarn Queue List." + " URL : " + yarnQUrl;
HadoopException hdpException = new HadoopException(msgDesc, t);
LOG.error(msgDesc, t);
hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
throw hdpException;
} finally {
// Release HTTP resources regardless of outcome.
if (response != null) {
response.close();
}
if (client != null) {
client.destroy();
}
}
return lret;
}
// Helper (scoped to this PrivilegedAction): issues the GET for one endpoint and
// logs the body of non-200 responses for diagnostics. Returns the raw response;
// the caller decides whether to keep or close it.
private ClientResponse getQueueResponse(String url, Client client) {
if (LOG.isDebugEnabled()) {
LOG.debug("getQueueResponse():calling " + url);
}
WebResource webResource = client.resource(url);
ClientResponse response = webResource.accept(EXPECTED_MIME_TYPE).get(ClientResponse.class);
if (response != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("getQueueResponse():response.getStatus()= " + response.getStatus());
}
if (response.getStatus() != 200) {
LOG.info("getQueueResponse():response.getStatus()= " + response.getStatus() + " for URL " + url + ", failed to get queue list");
String jsonString = response.getEntity(String.class);
LOG.info(jsonString);
}
}
return response;
}
});
}
return yarnQueueListGetter;
}
};
// Run the lookup with a 5-second budget; any failure (including timeout) is
// surfaced as a HadoopException with user-facing guidance appended.
try {
ret = timedTask(callableYarnQListGetter, 5, TimeUnit.SECONDS);
} catch (Throwable t) {
LOG.error("Unable to get Yarn Queue list from [" + yarnQUrl + "]", t);
String msgDesc = "Unable to get a valid response for " + "expected mime type : [" + EXPECTED_MIME_TYPE + "] URL : " + yarnQUrl;
HadoopException hdpException = new HadoopException(msgDesc, t);
LOG.error(msgDesc, t);
hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
throw hdpException;
}
return ret;
}
Example usage of org.apache.ranger.plugin.client.HadoopException in the Apache Ranger project, taken from the YarnClient class, method getYarnClient:
/**
 * Factory for {@link YarnClient} instances.
 *
 * @param serviceName name of the Ranger service/datasource
 * @param configs     connection properties for the Yarn service
 * @return a new YarnClient for the given service
 * @throws HadoopException if {@code configs} is null or empty
 */
public static YarnClient getYarnClient(String serviceName, Map<String, String> configs) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting YarnClient for datasource: " + serviceName);
    }

    String errMsg = errMessage;

    // Fail fast when no connection properties were supplied.
    if (configs == null || configs.isEmpty()) {
        String msgDesc = "Could not connect as Connection ConfigMap is empty.";
        LOG.error(msgDesc);
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
        throw hdpException;
    }

    return new YarnClient(serviceName, configs);
}
Example usage of org.apache.ranger.plugin.client.HadoopException in the Apache Ranger project, taken from the KnoxClient class, method getTopologyList:
/**
 * Queries the Knox admin REST API for topology names matching {@code topologyNameMatching},
 * excluding names already present in {@code knoxTopologyList}.
 *
 * Fix: the duplicate-skip check previously tested
 * {@code knoxTopologyList.contains(topologyNameMatching)} — i.e. it compared against the
 * search pattern rather than the discovered topology name — so already-known topologies
 * were never filtered out. It now checks {@code contains(topologyName)}, consistent with
 * the equivalent filtering in YarnClient.getQueueList.
 *
 * @param topologyNameMatching prefix filter; null/blank matches all, "*" matches all
 * @param knoxTopologyList     topology names already known to the caller (may be null)
 * @return matching topology names (possibly empty, never null)
 * @throws HadoopException on a null REST response or any REST/parse failure
 */
public List<String> getTopologyList(String topologyNameMatching, List<String> knoxTopologyList) {
    // sample URI: https://hdp.example.com:8443/gateway/admin/api/v1/topologies
    LOG.debug("Getting Knox topology list for topologyNameMatching : " + topologyNameMatching);
    List<String> topologyList = new ArrayList<String>();
    String errMsg = " You can still save the repository and start creating " + "policies, but you would not be able to use autocomplete for " + "resource names. Check ranger_admin.log for more info.";
    if (topologyNameMatching == null || topologyNameMatching.trim().isEmpty()) {
        topologyNameMatching = "";
    }
    // Best-effort password decryption: on failure, fall back to the raw stored value.
    String decryptedPwd = null;
    try {
        decryptedPwd = PasswordUtils.decryptPassword(password);
    } catch (Exception ex) {
        LOG.info("Password decryption failed; trying knox connection with received password string");
        decryptedPwd = null;
    } finally {
        if (decryptedPwd == null) {
            decryptedPwd = password;
        }
    }
    try {
        Client client = null;
        ClientResponse response = null;
        try {
            client = Client.create();
            client.addFilter(new HTTPBasicAuthFilter(userName, decryptedPwd));
            WebResource webResource = client.resource(knoxUrl);
            response = webResource.accept(EXPECTED_MIME_TYPE).get(ClientResponse.class);
            LOG.debug("Knox topology list response: " + response);
            if (response != null) {
                if (response.getStatus() == 200) {
                    String jsonString = response.getEntity(String.class);
                    LOG.debug("Knox topology list response JSON string: " + jsonString);
                    JsonNode rootNode = JsonUtilsV2.getMapper().readTree(jsonString);
                    JsonNode topologyNode = rootNode.findValue("topology");
                    if (topologyNode == null) {
                        // No "topology" element in the payload — nothing to list.
                        return topologyList;
                    }
                    Iterator<JsonNode> elements = topologyNode.getElements();
                    while (elements.hasNext()) {
                        JsonNode element = elements.next();
                        JsonNode nameElement = element.get("name");
                        if (nameElement != null) {
                            String topologyName = nameElement.getValueAsText();
                            LOG.debug("Found Knox topologyName: " + topologyName);
                            // Skip topologies the caller already knows about.
                            // (Fixed: was contains(topologyNameMatching), which compared
                            // against the search pattern instead of this topology's name.)
                            if (knoxTopologyList != null && topologyName != null && knoxTopologyList.contains(topologyName)) {
                                continue;
                            }
                            // "*" matches everything; otherwise prefix match
                            // (empty pattern also matches everything via startsWith).
                            if (topologyName != null && ("*".equals(topologyNameMatching) || topologyName.startsWith(topologyNameMatching))) {
                                topologyList.add(topologyName);
                            }
                        }
                    }
                } else {
                    // Non-200 is logged but not fatal — an empty list is returned.
                    LOG.error("Got invalid REST response from: " + knoxUrl + ", responseStatus: " + response.getStatus());
                }
            } else {
                String msgDesc = "Unable to get a valid response for " + "getTopologyList() call for KnoxUrl : [" + knoxUrl + "] - got null response.";
                LOG.error(msgDesc);
                HadoopException hdpException = new HadoopException(msgDesc);
                hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
                throw hdpException;
            }
        } finally {
            // Release HTTP resources regardless of outcome.
            if (response != null) {
                response.close();
            }
            if (client != null) {
                client.destroy();
            }
        }
    } catch (HadoopException he) {
        throw he;
    } catch (Throwable t) {
        // Wrap any other REST/parse failure with the standard response-data shape.
        String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + ".";
        HadoopException hdpException = new HadoopException(msgDesc, t);
        LOG.error(msgDesc, t);
        hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
        throw hdpException;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== KnoxClient.getTopologyList() Topology Matching: " + topologyNameMatching + " Result : " + topologyList.toString());
    }
    return topologyList;
}
Aggregations