Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
In the class KylinClient, the method getKylinClient:
public static KylinClient getKylinClient(String serviceName, Map<String, String> configs) {
    KylinClient kylinClient = null;
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting KylinClient for datasource: " + serviceName);
    }
    if (MapUtils.isEmpty(configs)) {
        String msgDesc = "Could not connect kylin as connection configMap is empty.";
        LOG.error(msgDesc);
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + ERROR_MESSAGE, null, null);
        throw hdpException;
    } else {
        kylinClient = new KylinClient(serviceName, configs);
    }
    return kylinClient;
}
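A minimal caller sketch for this factory method; the config keys and values below are illustrative assumptions, only the serviceName/configs contract comes from the snippet above:
// Hypothetical wiring; the config keys are assumptions, not taken from the snippet.
Map<String, String> configs = new HashMap<>();
configs.put("kylin.url", "http://kylin-host:7070/kylin");   // assumed key and placeholder value
configs.put("username", "admin");                           // assumed key
configs.put("password", "secret");                          // assumed key
try {
    KylinClient client = KylinClient.getKylinClient("kylinDev", configs);
    // use client for resource lookups ...
} catch (HadoopException he) {
    // generateResponseDataMap(...) has already attached the error details to the exception
    LOG.error("Failed to create KylinClient", he);
}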
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
In the class PrestoClient, the method getColumns:
private List<String> getColumns(String needle, List<String> catalogs, List<String> schemas, List<String> tables, List<String> columns) throws HadoopException {
    List<String> ret = new ArrayList<>();
    if (con != null) {
        String regex = null;
        ResultSet rs = null;
        String sql = null;
        Statement stat = null;
        if (needle != null && !needle.isEmpty()) {
            regex = needle;
        }
        if (catalogs != null && !catalogs.isEmpty() && schemas != null && !schemas.isEmpty() && tables != null && !tables.isEmpty()) {
            try {
                for (String catalog : catalogs) {
                    for (String schema : schemas) {
                        for (String table : tables) {
                            sql = "SHOW COLUMNS FROM \"" + StringEscapeUtils.escapeSql(catalog) + "\"." + "\"" + StringEscapeUtils.escapeSql(schema) + "\"." + "\"" + StringEscapeUtils.escapeSql(table) + "\"";
                            try {
                                stat = con.createStatement();
                                rs = stat.executeQuery(sql);
                                while (rs.next()) {
                                    String column = rs.getString(1);
                                    if (columns != null && columns.contains(column)) {
                                        continue;
                                    }
                                    if (regex == null) {
                                        ret.add(column);
                                    } else if (FilenameUtils.wildcardMatch(column, regex)) {
                                        ret.add(column);
                                    }
                                }
                            } finally {
                                close(rs);
                                close(stat);
                                stat = null;
                                rs = null;
                            }
                        }
                    }
                }
            } catch (SQLTimeoutException sqlt) {
                String msgDesc = "Time Out, Unable to execute SQL [" + sql + "].";
                HadoopException hdpException = new HadoopException(msgDesc, sqlt);
                hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("<== PrestoClient.getColumns() Error : ", sqlt);
                }
                throw hdpException;
            } catch (SQLException sqle) {
                String msgDesc = "Unable to execute SQL [" + sql + "].";
                HadoopException hdpException = new HadoopException(msgDesc, sqle);
                hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("<== PrestoClient.getColumns() Error : ", sqle);
                }
                throw hdpException;
            }
        }
    }
    return ret;
}
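The column filtering above relies on commons-io's FilenameUtils.wildcardMatch to match column names against the needle pattern. A small standalone sketch of that matching behaviour; the column names and pattern are made up for illustration:
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.io.FilenameUtils;

public class WildcardFilterDemo {
    public static void main(String[] args) {
        List<String> available = Arrays.asList("order_id", "order_date", "customer_id");
        List<String> alreadyUsed = Arrays.asList("order_id");   // analogous to the 'columns' argument
        String needle = "order*";                                // analogous to 'regex'

        List<String> ret = new ArrayList<>();
        for (String column : available) {
            if (alreadyUsed.contains(column)) {
                continue;                                        // skip columns that are already selected
            }
            if (FilenameUtils.wildcardMatch(column, needle)) {
                ret.add(column);                                 // "order_date" matches, "customer_id" does not
            }
        }
        System.out.println(ret);                                 // [order_date]
    }
}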
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
In the class TimedExecutor, the method generateHadoopException:
private HadoopException generateHadoopException(Exception e) {
    String msgDesc = "Unable to retrieve any files using given parameters, " + "You can still save the repository and start creating policies, " + "but you would not be able to use autocomplete for resource names. " + "Check ranger_admin.log for more info. ";
    HadoopException hpe = new HadoopException(e.getMessage(), e);
    hpe.generateResponseDataMap(false, hpe.getMessage(e), msgDesc, null, null);
    return hpe;
}
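A sketch of where a helper like this would typically be called from, assuming the executor submits a lookup as a Callable and bounds it with a timeout; the timedTask signature and the executorService field are assumptions, only generateHadoopException(e) comes from the snippet above:
// Hypothetical call site inside the same class (generateHadoopException is private);
// the field and method signature are assumptions for illustration.
public <T> T timedTask(Callable<T> callable, long timeout, TimeUnit unit) {
    Future<T> future = executorService.submit(callable);   // assumed ExecutorService field
    try {
        return future.get(timeout, unit);
    } catch (Exception e) {
        future.cancel(true);                                // stop the slow lookup
        throw generateHadoopException(e);                   // attach a user-friendly message and response map
    }
}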
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
In the class KnoxClient, the method getServiceList:
public List<String> getServiceList(List<String> knoxTopologyList, String serviceNameMatching, List<String> knoxServiceList) {
    // sample URI: .../admin/api/v1/topologies/<topologyName>
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> KnoxClient.getServiceList() Service Name: " + serviceNameMatching);
    }
    List<String> serviceList = new ArrayList<String>();
    String errMsg = " You can still save the repository and start creating " + "policies, but you would not be able to use autocomplete for " + "resource names. Check ranger_admin.log for more info.";
    if (serviceNameMatching == null || serviceNameMatching.trim().isEmpty()) {
        serviceNameMatching = "";
    }
    String decryptedPwd = null;
    try {
        decryptedPwd = PasswordUtils.decryptPassword(password);
    } catch (Exception ex) {
        LOG.info("Password decryption failed; trying knox connection with received password string");
        decryptedPwd = null;
    } finally {
        if (decryptedPwd == null) {
            decryptedPwd = password;
        }
    }
    try {
        Client client = null;
        ClientResponse response = null;
        try {
            client = Client.create();
            client.addFilter(new HTTPBasicAuthFilter(userName, decryptedPwd));
            for (String topologyName : knoxTopologyList) {
                WebResource webResource = client.resource(knoxUrl + "/" + topologyName);
                response = webResource.accept(EXPECTED_MIME_TYPE).get(ClientResponse.class);
                LOG.debug("Knox service lookup response: " + response);
                if (response != null) {
                    if (response.getStatus() == 200) {
                        String jsonString = response.getEntity(String.class);
                        LOG.debug("Knox service lookup response JSON string: " + jsonString);
                        JsonNode rootNode = JsonUtilsV2.getMapper().readTree(jsonString);
                        JsonNode topologyNode = rootNode.findValue("topology");
                        if (topologyNode != null) {
                            JsonNode servicesNode = topologyNode.get("service");
                            if (servicesNode != null) {
                                Iterator<JsonNode> services = servicesNode.getElements();
                                while (services.hasNext()) {
                                    JsonNode service = services.next();
                                    JsonNode serviceElement = service.get("role");
                                    if (serviceElement != null) {
                                        String serviceName = serviceElement.getValueAsText();
                                        LOG.debug("Knox serviceName: " + serviceName);
                                        if (serviceName == null || (knoxServiceList != null && knoxServiceList.contains(serviceName))) {
                                            continue;
                                        }
                                        if (serviceName.startsWith(serviceNameMatching) || "*".equals(serviceNameMatching)) {
                                            serviceList.add(serviceName);
                                        }
                                    }
                                }
                            }
                        }
                    } else {
                        LOG.error("Got invalid REST response from: " + knoxUrl + ", responsStatus: " + response.getStatus());
                    }
                } else {
                    String msgDesc = "Unable to get a valid response for " + "getServiceList() call for KnoxUrl : [" + knoxUrl + "] - got null response.";
                    LOG.error(msgDesc);
                    HadoopException hdpException = new HadoopException(msgDesc);
                    hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
                    throw hdpException;
                }
            }
        } finally {
            if (response != null) {
                response.close();
            }
            if (client != null) {
                client.destroy();
            }
        }
    } catch (HadoopException he) {
        throw he;
    } catch (Throwable t) {
        String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + ".";
        HadoopException hdpException = new HadoopException(msgDesc, t);
        LOG.error(msgDesc, t);
        hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
        throw hdpException;
    }
    return serviceList;
}
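The loop above walks a Knox topology document of roughly the shape topology → service[] → role. A standalone sketch of the same traversal, written with Jackson 2 method names (elements()/asText()) rather than the older getElements()/getValueAsText() seen in the snippet; the sample JSON is invented:
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class KnoxTopologyParseDemo {
    public static void main(String[] args) throws Exception {
        // Invented sample payload with the same topology -> service -> role structure.
        String jsonString = "{\"topology\": {\"service\": ["
                + "{\"role\": \"WEBHDFS\"}, {\"role\": \"HIVE\"}]}}";
        JsonNode rootNode = new ObjectMapper().readTree(jsonString);
        JsonNode topologyNode = rootNode.findValue("topology");
        List<String> serviceList = new ArrayList<>();
        if (topologyNode != null) {
            JsonNode servicesNode = topologyNode.get("service");
            if (servicesNode != null) {
                Iterator<JsonNode> services = servicesNode.elements();
                while (services.hasNext()) {
                    JsonNode serviceElement = services.next().get("role");
                    if (serviceElement != null) {
                        serviceList.add(serviceElement.asText());   // e.g. WEBHDFS, HIVE
                    }
                }
            }
        }
        System.out.println(serviceList);                            // [WEBHDFS, HIVE]
    }
}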
Use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.
In the class KnoxClient, the method getKnoxClient:
public static KnoxClient getKnoxClient(String serviceName, Map<String, String> configs) {
    KnoxClient knoxClient = null;
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting knoxClient for ServiceName: " + serviceName);
        LOG.debug("configMap: " + configs);
    }
    String errMsg = " You can still save the repository and start creating " + "policies, but you would not be able to use autocomplete for " + "resource names. Check ranger_admin.log for more info.";
    if (configs != null && !configs.isEmpty()) {
        String knoxUrl = configs.get("knox.url");
        String knoxAdminUser = configs.get("username");
        String knoxAdminPassword = configs.get("password");
        knoxClient = new KnoxClient(knoxUrl, knoxAdminUser, knoxAdminPassword);
    } else {
        String msgDesc = "Could not connect as Connection ConfigMap is empty.";
        LOG.error(msgDesc);
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
        throw hdpException;
    }
    return knoxClient;
}
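A short caller sketch; the three config keys ("knox.url", "username", "password") are the ones read in the snippet above, while the values are placeholders:
// Placeholder values; only the key names come from the snippet above.
Map<String, String> configs = new HashMap<>();
configs.put("knox.url", "https://knox-host:8443/gateway/admin/api/v1/topologies");  // placeholder value
configs.put("username", "admin");                                                    // placeholder value
configs.put("password", "admin-password");                                           // placeholder value
try {
    KnoxClient knoxClient = KnoxClient.getKnoxClient("knoxDev", configs);
    // knoxClient can then be used for topology and service lookups
} catch (HadoopException he) {
    // generateResponseDataMap(...) has already attached the error details
    LOG.error("Failed to create KnoxClient", he);
}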