Search in sources :

Example 6 with HadoopException

use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.

In the class HBaseClient, the method getHBaseStatus:

/**
 * Checks whether the configured HBase cluster is reachable with the current client
 * configuration, performing the check under the login {@link Subject} so secure
 * (Kerberized) clusters work.
 *
 * @return {@code true} if {@code HBaseAdmin.checkHBaseAvailable} succeeds; {@code false}
 *         when no login subject is available (secure login not done)
 * @throws HadoopException wrapping any connection/availability failure, with a
 *         response-data map populated for the admin UI
 */
public boolean getHBaseStatus() throws HadoopException {
    boolean hbaseStatus = false;
    subj = getLoginSubject();
    final String errMsg = " You can still save the repository and start creating " + "policies, but you would not be able to use autocomplete for " + "resource names. Check ranger_admin.log for more info.";
    if (subj != null) {
        try {
            hbaseStatus = Subject.doAs(subj, new PrivilegedAction<Boolean>() {

                @Override
                public Boolean run() {
                    Boolean hbaseStatus1 = false;
                    try {
                        LOG.info("getHBaseStatus: creating default Hbase configuration");
                        LOG.info("getHBaseStatus: setting config values from client");
                        setClientConfigValues(conf);
                        LOG.info("getHBaseStatus: checking HbaseAvailability with the new config");
                        HBaseAdmin.checkHBaseAvailable(conf);
                        LOG.info("getHBaseStatus: no exception: HbaseAvailability true");
                        hbaseStatus1 = true;
                    } catch (ZooKeeperConnectionException zce) {
                        String msgDesc = "getHBaseStatus: Unable to connect to `ZooKeeper` " + "using given config parameters.";
                        HadoopException hdpException = new HadoopException(msgDesc, zce);
                        hdpException.generateResponseDataMap(false, getMessage(zce), msgDesc + errMsg, null, null);
                        // Pass the throwable as a separate argument so the full stack trace is logged,
                        // not just the exception's toString().
                        LOG.error(msgDesc, zce);
                        throw hdpException;
                    } catch (MasterNotRunningException mnre) {
                        String msgDesc = "getHBaseStatus: Looks like `Master` is not running, " + "so couldn't check that running HBase is available or not, " + "Please try again later.";
                        HadoopException hdpException = new HadoopException(msgDesc, mnre);
                        hdpException.generateResponseDataMap(false, getMessage(mnre), msgDesc + errMsg, null, null);
                        LOG.error(msgDesc, mnre);
                        throw hdpException;
                    } catch (ServiceException se) {
                        String msgDesc = "getHBaseStatus: Unable to check availability of " + "Hbase environment [" + getConfigHolder().getDatasourceName() + "].";
                        HadoopException hdpException = new HadoopException(msgDesc, se);
                        hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + errMsg, null, null);
                        LOG.error(msgDesc, se);
                        throw hdpException;
                    } catch (IOException io) {
                        String msgDesc = "getHBaseStatus: Unable to check availability of" + " Hbase environment [" + getConfigHolder().getDatasourceName() + "].";
                        HadoopException hdpException = new HadoopException(msgDesc, io);
                        hdpException.generateResponseDataMap(false, getMessage(io), msgDesc + errMsg, null, null);
                        LOG.error(msgDesc, io);
                        throw hdpException;
                    } catch (Throwable e) {
                        // Last-resort catch: wrap anything unexpected so the caller always gets
                        // a HadoopException with a populated response-data map.
                        String msgDesc = "getHBaseStatus: Unable to check availability of" + " Hbase environment [" + getConfigHolder().getDatasourceName() + "].";
                        LOG.error(msgDesc, e);
                        hbaseStatus1 = false;
                        HadoopException hdpException = new HadoopException(msgDesc, e);
                        hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + errMsg, null, null);
                        throw hdpException;
                    }
                    return hbaseStatus1;
                }
            });
        } catch (SecurityException se) {
            String msgDesc = "getHBaseStatus: Unable to connect to HBase Server instance ";
            HadoopException hdpException = new HadoopException(msgDesc, se);
            hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + errMsg, null, null);
            LOG.error(msgDesc, se);
            throw hdpException;
        }
    } else {
        LOG.error("getHBaseStatus: secure login not done, subject is null");
    }
    return hbaseStatus;
}
Also used : ServiceException(com.google.protobuf.ServiceException) PrivilegedAction(java.security.PrivilegedAction) MasterNotRunningException(org.apache.hadoop.hbase.MasterNotRunningException) ZooKeeperConnectionException(org.apache.hadoop.hbase.ZooKeeperConnectionException) HadoopException(org.apache.ranger.plugin.client.HadoopException) IOException(java.io.IOException)

Example 7 with HadoopException

use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.

In the class HBaseClient, the method getTableList:

/**
 * Lists HBase table names matching the given pattern, excluding tables the caller
 * already knows about. Runs under the login {@link Subject} so secure clusters work.
 *
 * @param tableNameMatching regex/pattern passed to {@code HBaseAdmin.listTables}
 * @param existingTableList table names to exclude from the result; may be {@code null}
 * @return matching table names, or {@code null} when no login subject is available
 * @throws HadoopException wrapping any connection or listing failure, with a
 *         response-data map populated for the admin UI
 */
public List<String> getTableList(final String tableNameMatching, final List<String> existingTableList) throws HadoopException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HbaseClient.getTableList()  tableNameMatching " + tableNameMatching + " ExisitingTableList " + existingTableList);
    }
    List<String> ret = null;
    final String errMsg = " You can still save the repository and start creating " + "policies, but you would not be able to use autocomplete for " + "resource names. Check ranger_admin.log for more info.";
    subj = getLoginSubject();
    if (subj != null) {
        ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {

            @Override
            public List<String> run() {
                List<String> tableList = new ArrayList<String>();
                HBaseAdmin admin = null;
                try {
                    LOG.info("getTableList: setting config values from client");
                    setClientConfigValues(conf);
                    LOG.info("getTableList: checking HbaseAvailability with the new config");
                    HBaseAdmin.checkHBaseAvailable(conf);
                    LOG.info("getTableList: no exception: HbaseAvailability true");
                    admin = new HBaseAdmin(conf);
                    HTableDescriptor[] htds = admin.listTables(tableNameMatching);
                    if (htds != null) {
                        for (HTableDescriptor htd : htds) {
                            String tableName = htd.getNameAsString();
                            // Skip tables already present in the caller-supplied list.
                            if (existingTableList != null && existingTableList.contains(tableName)) {
                                continue;
                            } else {
                                tableList.add(htd.getNameAsString());
                            }
                        }
                    } else {
                        LOG.error("getTableList: null HTableDescription received from HBaseAdmin.listTables");
                    }
                } catch (ZooKeeperConnectionException zce) {
                    String msgDesc = "getTableList: Unable to connect to `ZooKeeper` " + "using given config parameters.";
                    HadoopException hdpException = new HadoopException(msgDesc, zce);
                    hdpException.generateResponseDataMap(false, getMessage(zce), msgDesc + errMsg, null, null);
                    // Pass the throwable as a separate argument so the full stack trace is
                    // logged, not just the exception's toString().
                    LOG.error(msgDesc, zce);
                    throw hdpException;
                } catch (MasterNotRunningException mnre) {
                    String msgDesc = "getTableList: Looks like `Master` is not running, " + "so couldn't check that running HBase is available or not, " + "Please try again later.";
                    HadoopException hdpException = new HadoopException(msgDesc, mnre);
                    hdpException.generateResponseDataMap(false, getMessage(mnre), msgDesc + errMsg, null, null);
                    LOG.error(msgDesc, mnre);
                    throw hdpException;
                } catch (IOException io) {
                    String msgDesc = "getTableList: Unable to get HBase table List for [repository:" + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching + "].";
                    HadoopException hdpException = new HadoopException(msgDesc, io);
                    hdpException.generateResponseDataMap(false, getMessage(io), msgDesc + errMsg, null, null);
                    LOG.error(msgDesc, io);
                    throw hdpException;
                } catch (Throwable e) {
                    // Last-resort catch: wrap anything unexpected so the caller always gets
                    // a HadoopException with a populated response-data map.
                    String msgDesc = "getTableList : Unable to get HBase table List for [repository:" + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching + "].";
                    LOG.error(msgDesc, e);
                    HadoopException hdpException = new HadoopException(msgDesc, e);
                    hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + errMsg, null, null);
                    throw hdpException;
                } finally {
                    // Always release the admin connection, even when an exception is thrown.
                    if (admin != null) {
                        try {
                            admin.close();
                        } catch (IOException e) {
                            LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e);
                        }
                    }
                }
                return tableList;
            }
        });
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HbaseClient.getTableList() " + ret);
    }
    return ret;
}
Also used : HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) PrivilegedAction(java.security.PrivilegedAction) MasterNotRunningException(org.apache.hadoop.hbase.MasterNotRunningException) ZooKeeperConnectionException(org.apache.hadoop.hbase.ZooKeeperConnectionException) HadoopException(org.apache.ranger.plugin.client.HadoopException) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)

Example 8 with HadoopException

use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.

In the class HiveClient, the method getClmListFromHM:

/**
 * Collects column names from the Hive metastore for every (database, table) pair,
 * skipping columns already present in {@code colList} and — when a non-empty
 * wildcard pattern is given — keeping only columns that match it.
 *
 * @param columnNameMatching wildcard pattern; {@code null} or empty means "match all"
 * @param dbList databases to scan; no work is done when {@code null}/empty
 * @param tblList tables to scan in each database; no work is done when {@code null}/empty
 * @param colList column names to exclude; may be {@code null}
 * @return matching column names (never {@code null})
 * @throws HadoopException wrapping any metastore (Thrift) failure
 */
private List<String> getClmListFromHM(String columnNameMatching, List<String> dbList, List<String> tblList, List<String> colList) throws HadoopException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HiveClient.getClmListFromHM() columnNameMatching: " + columnNameMatching + " dbList :" + dbList + " tblList: " + tblList + " colList: " + colList);
    }
    List<String> ret = new ArrayList<String>();
    // An empty pattern is treated exactly like no pattern at all.
    final String wildcardPattern = (columnNameMatching == null || columnNameMatching.isEmpty()) ? null : columnNameMatching;
    boolean inputsUsable = hiveClient != null && dbList != null && !dbList.isEmpty() && tblList != null && !tblList.isEmpty();
    if (inputsUsable) {
        for (String database : dbList) {
            for (String table : tblList) {
                try {
                    List<FieldSchema> schema = hiveClient.getFields(database, table);
                    if (schema == null) {
                        continue;
                    }
                    for (FieldSchema field : schema) {
                        String columnName = field.getName();
                        // Skip columns the caller already knows about.
                        if (colList != null && colList.contains(columnName)) {
                            continue;
                        }
                        if (wildcardPattern == null || FilenameUtils.wildcardMatch(columnName, wildcardPattern)) {
                            ret.add(columnName);
                        }
                    }
                } catch (TException e) {
                    String msgDesc = "Unable to get Columns.";
                    HadoopException hdpException = new HadoopException(msgDesc, e);
                    hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("<== HiveClient.getClmListFromHM() Error : ", e);
                    }
                    throw hdpException;
                }
            }
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HiveClient.getClmListFromHM() " + ret);
    }
    return ret;
}
Also used : TException(org.apache.thrift.TException) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) HadoopException(org.apache.ranger.plugin.client.HadoopException)

Example 9 with HadoopException

use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.

In the class KylinClient, the method getKylinClient:

/**
 * Factory for {@link KylinClient} instances for the given datasource.
 *
 * @param serviceName the Ranger service (datasource) name
 * @param configs connection configuration; must be non-empty
 * @return a new {@code KylinClient} built from the supplied configuration
 * @throws HadoopException when {@code configs} is null or empty, with a
 *         response-data map populated for the admin UI
 */
public static KylinClient getKylinClient(String serviceName, Map<String, String> configs) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting KylinClient for datasource: " + serviceName);
    }
    if (!MapUtils.isEmpty(configs)) {
        return new KylinClient(serviceName, configs);
    }
    // No usable configuration: fail fast with a UI-friendly response map.
    String msgDesc = "Could not connect kylin as connection configMap is empty.";
    LOG.error(msgDesc);
    HadoopException hdpException = new HadoopException(msgDesc);
    hdpException.generateResponseDataMap(false, msgDesc, msgDesc + ERROR_MESSAGE, null, null);
    throw hdpException;
}
Also used : HadoopException(org.apache.ranger.plugin.client.HadoopException)

Example 10 with HadoopException

use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.

In the class SqoopClient, the method getSqoopClient:

/**
 * Factory for {@link SqoopClient} instances for the given datasource.
 *
 * @param serviceName the Ranger service (datasource) name
 * @param configs connection configuration; must be non-empty
 * @return a new {@code SqoopClient} built from the supplied configuration
 * @throws HadoopException when {@code configs} is null or empty, with a
 *         response-data map populated for the admin UI
 */
public static SqoopClient getSqoopClient(String serviceName, Map<String, String> configs) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting SqoopClient for datasource: " + serviceName);
    }
    if (!MapUtils.isEmpty(configs)) {
        return new SqoopClient(serviceName, configs);
    }
    // No usable configuration: fail fast with a UI-friendly response map.
    String msgDesc = "Could not connect sqoop as Connection ConfigMap is empty.";
    LOG.error(msgDesc);
    HadoopException hdpException = new HadoopException(msgDesc);
    hdpException.generateResponseDataMap(false, msgDesc, msgDesc + ERROR_MESSAGE, null, null);
    throw hdpException;
}
Also used : HadoopException(org.apache.ranger.plugin.client.HadoopException)

Aggregations

HadoopException (org.apache.ranger.plugin.client.HadoopException)29 ArrayList (java.util.ArrayList)13 IOException (java.io.IOException)9 PrivilegedAction (java.security.PrivilegedAction)6 Gson (com.google.gson.Gson)5 GsonBuilder (com.google.gson.GsonBuilder)5 Client (com.sun.jersey.api.client.Client)5 ClientResponse (com.sun.jersey.api.client.ClientResponse)5 WebResource (com.sun.jersey.api.client.WebResource)5 BaseClient (org.apache.ranger.plugin.client.BaseClient)5 SQLException (java.sql.SQLException)4 SQLTimeoutException (java.sql.SQLTimeoutException)4 ResultSet (java.sql.ResultSet)3 Statement (java.sql.Statement)3 List (java.util.List)3 Subject (javax.security.auth.Subject)3 MasterNotRunningException (org.apache.hadoop.hbase.MasterNotRunningException)3 ZooKeeperConnectionException (org.apache.hadoop.hbase.ZooKeeperConnectionException)3 HTTPBasicAuthFilter (com.sun.jersey.api.client.filter.HTTPBasicAuthFilter)2 MalformedURLException (java.net.MalformedURLException)2