Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
The class YarnClient, method getYarnResource.
public static List<String> getYarnResource(final YarnClient yarnClient, String yarnQname, List<String> existingQueueName) {
    List<String> resultList = new ArrayList<String>();
    String errMsg = errMessage;
    try {
        if (yarnClient == null) {
            String msgDesc = "Unable to get Yarn Queue : YarnClient is null.";
            LOG.error(msgDesc);
            HadoopException hdpException = new HadoopException(msgDesc);
            hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
            throw hdpException;
        }
        if (yarnQname != null) {
            String finalyarnQueueName = yarnQname.trim();
            resultList = yarnClient.getQueueList(finalyarnQueueName, existingQueueName);
            if (resultList != null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Returning list of " + resultList.size() + " Yarn Queues");
                }
            }
        }
    } catch (HadoopException he) {
        throw he;
    } catch (Throwable t) {
        String msgDesc = "getYarnResource: Unable to get Yarn resources.";
        LOG.error(msgDesc, t);
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
        throw hdpException;
    }
    return resultList;
}
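
A hedged sketch of how a caller might consume this helper. The caller name lookupQueuesSafely, the LOG field, and the empty existing-queue list are invented for illustration, and the "message" key is inferred from the generateResponseDataMap(...) call above rather than confirmed (java.util imports assumed).

    // Hypothetical caller: fall back to the structured error payload that
    // getYarnResource packs into the HadoopException it throws.
    public static List<String> lookupQueuesSafely(YarnClient client, String match) {
        try {
            return YarnClient.getYarnResource(client, match, Collections.<String>emptyList());
        } catch (HadoopException he) {
            Map<String, Object> data = he.getResponseData();
            // Assumption: the second argument of generateResponseDataMap is
            // stored under a "message" key; adjust to the actual map layout.
            Object detail = (data != null) ? data.get("message") : he.getMessage();
            LOG.warn("Yarn queue lookup failed: " + detail);
            return Collections.emptyList();
        }
    }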
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
The class TimedExecutor, method generateHadoopException.
private HadoopException generateHadoopException(Exception e) {
    String msgDesc = "Unable to retrieve any files using given parameters, " + "You can still save the repository and start creating policies, " + "but you would not be able to use autocomplete for resource names. " + "Check ranger_admin.log for more info. ";
    HadoopException hpe = new HadoopException(e.getMessage(), e);
    hpe.generateResponseDataMap(false, hpe.getMessage(e), msgDesc, null, null);
    return hpe;
}
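
For context, a minimal sketch of the call site this helper implies, assuming HadoopException is unchecked (it is thrown without a throws clause throughout these snippets). The method name timedTaskSketch and the single-use executor are invented; a real TimedExecutor would reuse a managed thread pool (java.util.concurrent imports assumed).

    // Hypothetical timed wrapper: cancel an overdue task and rewrap the
    // failure so callers receive the friendly responseData payload built
    // by generateHadoopException above.
    private <T> T timedTaskSketch(Callable<T> callable, long timeout, TimeUnit unit) {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        try {
            Future<T> future = pool.submit(callable);
            return future.get(timeout, unit);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw generateHadoopException(e);
        } catch (ExecutionException | TimeoutException e) {
            throw generateHadoopException(e);
        } finally {
            pool.shutdownNow();
        }
    }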
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
The class ServiceMgr, method validateConfig.
public VXResponse validateConfig(RangerService service, ServiceStore svcStore) throws Exception {
    VXResponse ret = new VXResponse();
    rangerBizUtil.blockAuditorRoleUser();
    String authType = PropertiesUtil.getProperty(AUTHENTICATION_TYPE);
    String lookupPrincipal = SecureClientLogin.getPrincipal(PropertiesUtil.getProperty(LOOKUP_PRINCIPAL), PropertiesUtil.getProperty(HOST_NAME));
    String lookupKeytab = PropertiesUtil.getProperty(LOOKUP_KEYTAB);
    String nameRules = PropertiesUtil.getProperty(NAME_RULES);
    String rangerPrincipal = SecureClientLogin.getPrincipal(PropertiesUtil.getProperty(ADMIN_USER_PRINCIPAL), PropertiesUtil.getProperty(HOST_NAME));
    String rangerkeytab = PropertiesUtil.getProperty(ADMIN_USER_KEYTAB);
    if (!StringUtils.isEmpty(authType) && KERBEROS_TYPE.equalsIgnoreCase(authType.trim()) && SecureClientLogin.isKerberosCredentialExists(lookupPrincipal, lookupKeytab)) {
        if (service != null && service.getConfigs() != null) {
            service.getConfigs().put(HadoopConfigHolder.RANGER_LOOKUP_PRINCIPAL, lookupPrincipal);
            service.getConfigs().put(HadoopConfigHolder.RANGER_LOOKUP_KEYTAB, lookupKeytab);
            service.getConfigs().put(HadoopConfigHolder.RANGER_NAME_RULES, nameRules);
            service.getConfigs().put(HadoopConfigHolder.RANGER_AUTH_TYPE, authType);
        }
    }
    if (!StringUtils.isEmpty(authType) && KERBEROS_TYPE.equalsIgnoreCase(authType.trim()) && SecureClientLogin.isKerberosCredentialExists(rangerPrincipal, rangerkeytab)) {
        if (service != null && service.getConfigs() != null) {
            service.getConfigs().put(HadoopConfigHolder.RANGER_PRINCIPAL, rangerPrincipal);
            service.getConfigs().put(HadoopConfigHolder.RANGER_KEYTAB, rangerkeytab);
            service.getConfigs().put(HadoopConfigHolder.RANGER_NAME_RULES, nameRules);
            service.getConfigs().put(HadoopConfigHolder.RANGER_AUTH_TYPE, authType);
        }
    }
    RangerBaseService svc = null;
    if (service != null) {
        Map<String, String> newConfigs = rangerSvcService.getConfigsWithDecryptedPassword(service);
        service.setConfigs(newConfigs);
        svc = getRangerServiceByService(service, svcStore);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> ServiceMgr.validateConfig for Service: (" + svc + ")");
    }
    if (svc != null) {
        try {
            // Timeout value used during validate config is 10 times that used during lookup
            long time = getTimeoutValueForValidateConfigInMilliSeconds(svc);
            ValidateCallable callable = new ValidateCallable(svc);
            Map<String, Object> responseData = timedExecutor.timedTask(callable, time, TimeUnit.MILLISECONDS);
            ret = generateResponseForTestConn(responseData, "");
        } catch (Exception e) {
            String msg = "Unable to connect repository with given config for " + svc.getServiceName();
            HashMap<String, Object> respData = new HashMap<String, Object>();
            if (e instanceof HadoopException) {
                respData = ((HadoopException) e).getResponseData();
            }
            ret = generateResponseForTestConn(respData, msg);
            LOG.error("==> ServiceMgr.validateConfig Error:" + e);
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> ServiceMgr.validateConfig for Response: (" + ret + ")");
    }
    return ret;
}
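
A hedged caller sketch for the method above; serviceMgr, svcStore, service, and LOG are assumed to be in scope, and the VXResponse accessors (getStatusCode(), getMsgDesc(), STATUS_SUCCESS) are assumptions based on how such responses are read elsewhere in Ranger admin, not confirmed by this snippet.

    // Hypothetical usage: run the connection test before persisting a service.
    VXResponse resp = serviceMgr.validateConfig(service, svcStore);
    // On failure, the message ultimately comes from the HadoopException
    // responseData captured in the catch block above.
    if (resp.getStatusCode() != VXResponse.STATUS_SUCCESS) {
        LOG.warn("Connection test failed for " + service.getName() + ": " + resp.getMsgDesc());
    }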
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
The class KnoxClient, method getTopologyList.
public List<String> getTopologyList(String topologyNameMatching, List<String> knoxTopologyList) {
    // sample URI: https://hdp.example.com:8443/gateway/admin/api/v1/topologies
    LOG.debug("Getting Knox topology list for topologyNameMatching : " + topologyNameMatching);
    List<String> topologyList = new ArrayList<String>();
    String errMsg = " You can still save the repository and start creating " + "policies, but you would not be able to use autocomplete for " + "resource names. Check ranger_admin.log for more info.";
    if (topologyNameMatching == null || topologyNameMatching.trim().isEmpty()) {
        topologyNameMatching = "";
    }
    String decryptedPwd = null;
    try {
        decryptedPwd = PasswordUtils.decryptPassword(password);
    } catch (Exception ex) {
        LOG.info("Password decryption failed; trying knox connection with received password string");
        decryptedPwd = null;
    } finally {
        if (decryptedPwd == null) {
            decryptedPwd = password;
        }
    }
    try {
        Client client = null;
        ClientResponse response = null;
        try {
            client = Client.create();
            client.addFilter(new HTTPBasicAuthFilter(userName, decryptedPwd));
            WebResource webResource = client.resource(knoxUrl);
            response = webResource.accept(EXPECTED_MIME_TYPE).get(ClientResponse.class);
            LOG.debug("Knox topology list response: " + response);
            if (response != null) {
                if (response.getStatus() == 200) {
                    String jsonString = response.getEntity(String.class);
                    LOG.debug("Knox topology list response JSON string: " + jsonString);
                    ObjectMapper objectMapper = new ObjectMapper();
                    JsonNode rootNode = objectMapper.readTree(jsonString);
                    JsonNode topologyNode = rootNode.findValue("topology");
                    if (topologyNode == null) {
                        return topologyList;
                    }
                    Iterator<JsonNode> elements = topologyNode.getElements();
                    while (elements.hasNext()) {
                        JsonNode element = elements.next();
                        JsonNode nameElement = element.get("name");
                        if (nameElement != null) {
                            String topologyName = nameElement.getValueAsText();
                            LOG.debug("Found Knox topologyName: " + topologyName);
                            // Skip topologies already present in the caller's existing list.
                            if (knoxTopologyList != null && topologyName != null && knoxTopologyList.contains(topologyName)) {
                                continue;
                            }
                            if (topologyName != null && ("*".equals(topologyNameMatching) || topologyName.startsWith(topologyNameMatching))) {
                                topologyList.add(topologyName);
                            }
                        }
                    }
                } else {
                    LOG.error("Got invalid REST response from: " + knoxUrl + ", responseStatus: " + response.getStatus());
                }
            } else {
                String msgDesc = "Unable to get a valid response for " + "getTopologyList() call for KnoxUrl : [" + knoxUrl + "] - got null response.";
                LOG.error(msgDesc);
                HadoopException hdpException = new HadoopException(msgDesc);
                hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
                throw hdpException;
            }
        } finally {
            if (response != null) {
                response.close();
            }
            if (client != null) {
                client.destroy();
            }
        }
    } catch (HadoopException he) {
        throw he;
    } catch (Throwable t) {
        String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + ".";
        HadoopException hdpException = new HadoopException(msgDesc, t);
        LOG.error(msgDesc, t);
        hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
        throw hdpException;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== KnoxClient.getTopologyList() Topology Matching: " + topologyNameMatching + " Result : " + topologyList.toString());
    }
    return topologyList;
}
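
The acceptance rule buried in the JSON loop is easy to miss, so here it is isolated as a hypothetical helper; the standalone method is invented, but its semantics mirror the loop above (skip names already in the caller's list, then accept on a "*" or prefix match).

    // Hypothetical extraction of the topology-name filter used above.
    static boolean acceptsTopology(String topologyName, String pattern, List<String> existing) {
        if (topologyName == null) {
            return false;
        }
        if (existing != null && existing.contains(topologyName)) {
            // Already known to the caller; nothing new to suggest.
            return false;
        }
        return "*".equals(pattern) || topologyName.startsWith(pattern);
    }

For example, acceptsTopology("admin", "ad", existing) is true only when "admin" is not already in existing, matching the autocomplete behavior of getTopologyList.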
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
The class KMSClient, method getKmsKey.
public static List<String> getKmsKey(final KMSClient kmsClient, String keyName, List<String> existingKeyName) {
    List<String> resultList = new ArrayList<String>();
    String errMsg = errMessage;
    try {
        if (kmsClient == null) {
            String msgDesc = "Unable to get Kms Key : KmsClient is null.";
            LOG.error(msgDesc);
            HadoopException hdpException = new HadoopException(msgDesc);
            hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
            throw hdpException;
        }
        if (keyName != null) {
            String finalkmsKeyName = keyName.trim();
            resultList = kmsClient.getKeyList(finalkmsKeyName, existingKeyName);
            if (resultList != null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Returning list of " + resultList.size() + " Kms Keys");
                }
            }
        }
    } catch (HadoopException he) {
        resultList = null;
        throw he;
    } catch (Exception e) {
        String msgDesc = "Unable to get a valid response from the provider : " + e.getMessage();
        LOG.error(msgDesc, e);
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
        resultList = null;
        throw hdpException;
    }
    return resultList;
}
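
All five snippets repeat the same rethrow-or-wrap shape, so a reusable guard is a natural refactoring. This is a sketch only: wrapLookup, its parameters, and the LOG field are invented, while the wrapping itself mirrors the catch blocks above (java.util.concurrent.Callable assumed).

    // Hypothetical generic guard: pass HadoopException through untouched,
    // wrap anything else with the structured responseData payload that the
    // Ranger admin UI reads.
    static <T> T wrapLookup(String what, String errMsg, Callable<T> lookup) {
        try {
            return lookup.call();
        } catch (HadoopException he) {
            // Already carries a responseData payload; rethrow as-is.
            throw he;
        } catch (Exception e) {
            String msgDesc = "Unable to get " + what + " : " + e.getMessage();
            LOG.error(msgDesc, e);
            HadoopException hdpException = new HadoopException(msgDesc);
            hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
            throw hdpException;
        }
    }

With such a guard, the body of getKmsKey would reduce to the null check plus wrapLookup("Kms Key", errMsg, () -> kmsClient.getKeyList(keyName.trim(), existingKeyName)).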