Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
The class KMSClient, method getKeyList.
public List<String> getKeyList(final String keyNameMatching, final List<String> existingKeyList) {
    String[] providers = null;
    try {
        providers = createProvider(provider);
    } catch (IOException | URISyntaxException e) {
        return null;
    }
    final String errMsg = errMessage;
    List<String> lret = null;
    for (int i = 0; i < providers.length; i++) {
        lret = new ArrayList<String>();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Getting Kms Key list for keyNameMatching : " + keyNameMatching);
        }
        String uri = providers[i] + (providers[i].endsWith("/") ? KMS_LIST_API_ENDPOINT : ("/" + KMS_LIST_API_ENDPOINT));
        Client client = null;
        ClientResponse response = null;
        boolean isKerberos = false;
        try {
            ClientConfig cc = new DefaultClientConfig();
            cc.getProperties().put(ClientConfig.PROPERTY_FOLLOW_REDIRECTS, true);
            client = Client.create(cc);
            if (authType != null && authType.equalsIgnoreCase(AUTH_TYPE_KERBEROS)) {
                isKerberos = true;
            }
            Subject sub = new Subject();
            if (!isKerberos) {
                // Simple auth: identify the caller via the user.name query parameter.
                uri = uri.concat("?user.name=" + username);
                WebResource webResource = client.resource(uri);
                response = webResource.accept(EXPECTED_MIME_TYPE).get(ClientResponse.class);
                LOG.info("Init Login: security not enabled, using username");
                sub = SecureClientLogin.login(username);
            } else {
                if (!StringUtils.isEmpty(rangerPrincipal) && !StringUtils.isEmpty(rangerKeytab)) {
                    LOG.info("Init Lookup Login: security enabled, using rangerPrincipal/rangerKeytab");
                    if (StringUtils.isEmpty(nameRules)) {
                        nameRules = "DEFAULT";
                    }
                    String shortName = new HadoopKerberosName(rangerPrincipal).getShortName();
                    uri = uri.concat("?doAs=" + shortName);
                    sub = SecureClientLogin.loginUserFromKeytab(rangerPrincipal, rangerKeytab, nameRules);
                } else {
                    LOG.info("Init Login: using username/password");
                    String shortName = new HadoopKerberosName(username).getShortName();
                    uri = uri.concat("?doAs=" + shortName);
                    String decryptedPwd = PasswordUtils.decryptPassword(password);
                    sub = SecureClientLogin.loginUserWithPassword(username, decryptedPwd);
                }
            }
            final WebResource webResource = client.resource(uri);
            // Execute the GET as the logged-in subject so its credentials apply.
            response = Subject.doAs(sub, new PrivilegedAction<ClientResponse>() {
                @Override
                public ClientResponse run() {
                    return webResource.accept(EXPECTED_MIME_TYPE).get(ClientResponse.class);
                }
            });
            if (LOG.isDebugEnabled()) {
                LOG.debug("getKeyList():calling " + uri);
            }
            if (response != null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("getKeyList():response.getStatus()= " + response.getStatus());
                }
                if (response.getStatus() == 200) {
                    String jsonString = response.getEntity(String.class);
                    Gson gson = new GsonBuilder().setPrettyPrinting().create();
                    @SuppressWarnings("unchecked")
                    List<String> keys = gson.fromJson(jsonString, List.class);
                    if (keys != null) {
                        for (String key : keys) {
                            // Skip keys the caller already has, then apply the name prefix filter.
                            if (existingKeyList != null && existingKeyList.contains(key)) {
                                continue;
                            }
                            if (keyNameMatching == null || keyNameMatching.isEmpty() || key.startsWith(keyNameMatching)) {
                                if (LOG.isDebugEnabled()) {
                                    LOG.debug("getKeyList():Adding kmsKey " + key);
                                }
                                lret.add(key);
                            }
                        }
                        return lret;
                    }
                } else if (response.getStatus() == 401 || response.getStatus() == 403) {
                    // Authentication/authorization failure: surface it as a HadoopException.
                    LOG.info("getKeyList():response.getStatus()= " + response.getStatus() + " for URL " + uri + ", so returning null list");
                    String msgDesc = response.getEntity(String.class);
                    HadoopException hdpException = new HadoopException(msgDesc);
                    hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
                    lret = null;
                    throw hdpException;
                } else {
                    LOG.info("getKeyList():response.getStatus()= " + response.getStatus() + " for URL " + uri + ", so returning null list");
                    String jsonString = response.getEntity(String.class);
                    LOG.info(jsonString);
                    lret = null;
                }
            } else {
                String msgDesc = "Unable to get a valid response for " + "expected mime type : [" + EXPECTED_MIME_TYPE + "] URL : " + uri + " - got null response.";
                LOG.error(msgDesc);
                HadoopException hdpException = new HadoopException(msgDesc);
                hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
                lret = null;
                throw hdpException;
            }
        } catch (HadoopException he) {
            lret = null;
            throw he;
        } catch (Throwable t) {
            String msgDesc = "Exception while getting Kms Key List. URL : " + uri;
            HadoopException hdpException = new HadoopException(msgDesc, t);
            LOG.error(msgDesc, t);
            hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
            lret = null;
            throw hdpException;
        } finally {
            if (response != null) {
                response.close();
            }
            if (client != null) {
                client.destroy();
            }
            if (lret == null) {
                // If this provider failed and another remains, the continue in this
                // finally block discards the pending exception and tries the next one.
                if (i != providers.length - 1) {
                    continue;
                }
            }
        }
    }
    return lret;
}
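The non-obvious part of getKeyList is that the HTTP call runs inside Subject.doAs, so the JAAS credentials acquired by SecureClientLogin are in force for the request. That pattern can be lifted into a small helper; below is a minimal sketch against the same Jersey 1.x API, where fetchAsSubject is a hypothetical name, not a method from the Ranger source.

import java.security.PrivilegedAction;
import javax.security.auth.Subject;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;

// Hypothetical helper: run the GET under the given JAAS subject, mirroring
// the Subject.doAs(...) call in getKeyList above.
static ClientResponse fetchAsSubject(Subject sub, Client client, String uri, final String mimeType) {
    final WebResource webResource = client.resource(uri);
    return Subject.doAs(sub, new PrivilegedAction<ClientResponse>() {
        @Override
        public ClientResponse run() {
            return webResource.accept(mimeType).get(ClientResponse.class);
        }
    });
}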
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
The class KMSClient, method getKmsClient.
public static KMSClient getKmsClient(String serviceName, Map<String, String> configs) {
    KMSClient kmsClient = null;
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting KmsClient for datasource: " + serviceName);
        LOG.debug("configMap: " + configs);
    }
    String errMsg = errMessage;
    if (configs == null || configs.isEmpty()) {
        String msgDesc = "Could not connect as Connection ConfigMap is empty.";
        LOG.error(msgDesc);
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
        throw hdpException;
    } else {
        String kmsUrl = configs.get("provider");
        String kmsUserName = configs.get("username");
        String kmsPassWord = configs.get("password");
        String rangerPrincipal = configs.get("rangerprincipal");
        String rangerKeytab = configs.get("rangerkeytab");
        String nameRules = configs.get("namerules");
        String authType = configs.get("authtype");
        kmsClient = new KMSClient(kmsUrl, kmsUserName, kmsPassWord, rangerPrincipal, rangerKeytab, nameRules, authType);
    }
    return kmsClient;
}
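As a usage sketch, the factory reads its connection properties from the fixed map keys shown above; the provider URI, service name, and credentials below are placeholders, not values from the Ranger source.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

Map<String, String> configs = new HashMap<String, String>();
configs.put("provider", "kms://http@kms-host:9292/kms");  // placeholder provider URI
configs.put("username", "keyadmin");                      // placeholder credentials
configs.put("password", "keyadmin");
configs.put("authtype", "simple");
// For Kerberos, set authtype=kerberos plus rangerprincipal, rangerkeytab, namerules.
KMSClient kmsClient = KMSClient.getKmsClient("kmsdev", configs);
List<String> keys = kmsClient.getKeyList(null, null);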
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
The class KylinClient, method getKylinProjectResponse.
private List<KylinProjectResponse> getKylinProjectResponse(ClientResponse response) {
    List<KylinProjectResponse> projectResponses = null;
    try {
        if (response != null && response.getStatus() == HttpStatus.SC_OK) {
            String jsonString = response.getEntity(String.class);
            Gson gson = new GsonBuilder().setPrettyPrinting().create();
            projectResponses = gson.fromJson(jsonString, new TypeToken<List<KylinProjectResponse>>() {
            }.getType());
        } else {
            String msgDesc = "Unable to get a valid response for " + "expected mime type : [" + EXPECTED_MIME_TYPE + "], kylinUrl: " + kylinUrl + " - got null response.";
            LOG.error(msgDesc);
            HadoopException hdpException = new HadoopException(msgDesc);
            hdpException.generateResponseDataMap(false, msgDesc, msgDesc + ERROR_MESSAGE, null, null);
            throw hdpException;
        }
    } catch (HadoopException he) {
        throw he;
    } catch (Throwable t) {
        String msgDesc = "Exception while getting kylin project response, kylinUrl: " + kylinUrl;
        HadoopException hdpException = new HadoopException(msgDesc, t);
        LOG.error(msgDesc, t);
        hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + ERROR_MESSAGE, null, null);
        throw hdpException;
    } finally {
        if (response != null) {
            response.close();
        }
    }
    return projectResponses;
}
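The TypeToken is what lets Gson recover the element type despite erasure; passing List.class instead would yield a list of generic maps rather than of KylinProjectResponse. A self-contained illustration of the same parsing step, with Project standing in for KylinProjectResponse and the field and sample data invented for the demo:

import java.util.List;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

public class TypeTokenDemo {
    // Stand-in for KylinProjectResponse; the field is chosen for the demo only.
    static class Project {
        String name;
    }

    public static void main(String[] args) {
        String json = "[{\"name\":\"learn_kylin\"},{\"name\":\"sales\"}]";
        // The anonymous TypeToken subclass carries List<Project> through erasure.
        List<Project> projects = new Gson().fromJson(json, new TypeToken<List<Project>>() {
        }.getType());
        System.out.println(projects.size() + " projects, first: " + projects.get(0).name);
    }
}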
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
The class SqoopClient, method getSqoopResourceResponse.
private <T> T getSqoopResourceResponse(ClientResponse response, Class<T> classOfT) {
    T resource = null;
    try {
        if (response != null && response.getStatus() == HttpStatus.SC_OK) {
            String jsonString = response.getEntity(String.class);
            Gson gson = new GsonBuilder().setPrettyPrinting().create();
            resource = gson.fromJson(jsonString, classOfT);
        } else {
            String msgDesc = "Unable to get a valid response for " + "expected mime type : [" + EXPECTED_MIME_TYPE + "], sqoopUrl: " + sqoopUrl + " - got null response.";
            LOG.error(msgDesc);
            HadoopException hdpException = new HadoopException(msgDesc);
            hdpException.generateResponseDataMap(false, msgDesc, msgDesc + ERROR_MESSAGE, null, null);
            throw hdpException;
        }
    } catch (HadoopException he) {
        throw he;
    } catch (Throwable t) {
        String msgDesc = "Exception while getting sqoop resource response, sqoopUrl: " + sqoopUrl;
        HadoopException hdpException = new HadoopException(msgDesc, t);
        LOG.error(msgDesc, t);
        hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + ERROR_MESSAGE, null, null);
        throw hdpException;
    } finally {
        if (response != null) {
            response.close();
        }
    }
    return resource;
}
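Note that every snippet on this page handles failure identically: an already-built HadoopException is rethrown untouched, while anything else is wrapped and given a response data map via generateResponseDataMap before rethrowing. That idiom could be factored into one helper; below is a sketch under the assumption that Ranger's HadoopException and BaseClient are on the classpath, with wrapAndThrow as a hypothetical name, not a method in the Ranger codebase.

import org.apache.ranger.plugin.client.BaseClient;
import org.apache.ranger.plugin.client.HadoopException;

// Hypothetical helper for the rethrow-or-wrap idiom seen in the snippets above.
static HadoopException wrapAndThrow(String msgDesc, String errMsg, Throwable t) {
    if (t instanceof HadoopException) {
        throw (HadoopException) t;  // keep the response data already attached
    }
    HadoopException hdpException = new HadoopException(msgDesc, t);
    hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
    // The non-void return type only exists so callers can write: throw wrapAndThrow(...);
    throw hdpException;
}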
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
The class YarnClient, method getQueueList.
public List<String> getQueueList(final String queueNameMatching, final List<String> existingQueueList) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting Yarn queue list for queueNameMatching : " + queueNameMatching);
    }
    final String errMsg = errMessage;
    List<String> ret = null;
    Callable<List<String>> callableYarnQListGetter = new Callable<List<String>>() {
        @Override
        public List<String> call() {
            List<String> yarnQueueListGetter = null;
            Subject subj = getLoginSubject();
            if (subj != null) {
                yarnQueueListGetter = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
                    @Override
                    public List<String> run() {
                        if (yarnQUrl == null || yarnQUrl.trim().isEmpty()) {
                            return null;
                        }
                        // Multiple ResourceManager URLs may be configured, separated by , or ;
                        String[] yarnQUrls = yarnQUrl.trim().split("[,;]");
                        if (yarnQUrls == null || yarnQUrls.length == 0) {
                            return null;
                        }
                        Client client = Client.create();
                        ClientResponse response = null;
                        for (String currentUrl : yarnQUrls) {
                            if (currentUrl == null || currentUrl.trim().isEmpty()) {
                                continue;
                            }
                            String url = currentUrl.trim() + YARN_LIST_API_ENDPOINT;
                            try {
                                response = getQueueResponse(url, client);
                                if (response != null) {
                                    if (response.getStatus() == 200) {
                                        // First URL that answers with 200 wins.
                                        break;
                                    } else {
                                        response.close();
                                    }
                                }
                            } catch (Throwable t) {
                                String msgDesc = "Exception while getting Yarn Queue List." + " URL : " + url;
                                LOG.error(msgDesc, t);
                            }
                        }
                        List<String> lret = new ArrayList<String>();
                        try {
                            if (response != null && response.getStatus() == 200) {
                                String jsonString = response.getEntity(String.class);
                                Gson gson = new GsonBuilder().setPrettyPrinting().create();
                                YarnSchedulerResponse yarnQResponse = gson.fromJson(jsonString, YarnSchedulerResponse.class);
                                if (yarnQResponse != null) {
                                    List<String> yarnQueueList = yarnQResponse.getQueueNames();
                                    if (yarnQueueList != null) {
                                        for (String yarnQueueName : yarnQueueList) {
                                            // Skip queues the caller already has, then apply the name prefix filter.
                                            if (existingQueueList != null && existingQueueList.contains(yarnQueueName)) {
                                                continue;
                                            }
                                            if (queueNameMatching == null || queueNameMatching.isEmpty() || yarnQueueName.startsWith(queueNameMatching)) {
                                                if (LOG.isDebugEnabled()) {
                                                    LOG.debug("getQueueList():Adding yarnQueue " + yarnQueueName);
                                                }
                                                lret.add(yarnQueueName);
                                            }
                                        }
                                    }
                                }
                            } else {
                                String msgDesc = "Unable to get a valid response for " + "expected mime type : [" + EXPECTED_MIME_TYPE + "] URL : " + yarnQUrl + " - got null response.";
                                LOG.error(msgDesc);
                                HadoopException hdpException = new HadoopException(msgDesc);
                                hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
                                throw hdpException;
                            }
                        } catch (HadoopException he) {
                            throw he;
                        } catch (Throwable t) {
                            String msgDesc = "Exception while getting Yarn Queue List." + " URL : " + yarnQUrl;
                            HadoopException hdpException = new HadoopException(msgDesc, t);
                            LOG.error(msgDesc, t);
                            hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
                            throw hdpException;
                        } finally {
                            if (response != null) {
                                response.close();
                            }
                            if (client != null) {
                                client.destroy();
                            }
                        }
                        return lret;
                    }

                    private ClientResponse getQueueResponse(String url, Client client) {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("getQueueResponse():calling " + url);
                        }
                        WebResource webResource = client.resource(url);
                        ClientResponse response = webResource.accept(EXPECTED_MIME_TYPE).get(ClientResponse.class);
                        if (response != null) {
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("getQueueResponse():response.getStatus()= " + response.getStatus());
                            }
                            if (response.getStatus() != 200) {
                                LOG.info("getQueueResponse():response.getStatus()= " + response.getStatus() + " for URL " + url + ", failed to get queue list");
                                String jsonString = response.getEntity(String.class);
                                LOG.info(jsonString);
                            }
                        }
                        return response;
                    }
                });
            }
            return yarnQueueListGetter;
        }
    };
    try {
        // Bound the lookup so a slow or unreachable ResourceManager cannot block the caller.
        ret = timedTask(callableYarnQListGetter, 5, TimeUnit.SECONDS);
    } catch (Throwable t) {
        LOG.error("Unable to get Yarn Queue list from [" + yarnQUrl + "]", t);
        String msgDesc = "Unable to get a valid response for " + "expected mime type : [" + EXPECTED_MIME_TYPE + "] URL : " + yarnQUrl;
        HadoopException hdpException = new HadoopException(msgDesc, t);
        LOG.error(msgDesc, t);
        hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
        throw hdpException;
    }
    return ret;
}
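timedTask, a helper in Ranger's client base class, bounds the whole lookup at five seconds so a hung ResourceManager cannot stall resource lookups indefinitely. Its actual implementation is not shown on this page; a generic sketch of the same idea using only the standard library would be:

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

// Sketch, not Ranger's actual timedTask: run the callable on a worker thread
// and give up after the timeout, interrupting the worker on the way out.
static <T> T timedTaskSketch(Callable<T> task, long timeout, TimeUnit unit) throws Exception {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    try {
        Future<T> future = executor.submit(task);
        return future.get(timeout, unit);  // throws TimeoutException past the deadline
    } finally {
        executor.shutdownNow();
    }
}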