Example 21 with HadoopException

use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.

the class YarnClient method getYarnClient.

public static YarnClient getYarnClient(String serviceName, Map<String, String> configs) {
    YarnClient yarnClient = null;
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting YarnClient for datasource: " + serviceName);
    }
    String errMsg = errMessage;
    if (configs == null || configs.isEmpty()) {
        String msgDesc = "Could not connect as Connection ConfigMap is empty.";
        LOG.error(msgDesc);
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
        throw hdpException;
    } else {
        yarnClient = new YarnClient(serviceName, configs);
    }
    return yarnClient;
}
Also used : HadoopException(org.apache.ranger.plugin.client.HadoopException)
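
A caller for this factory might look like the following minimal sketch; it is not part of the Ranger source. It builds the connection ConfigMap that getYarnClient() guards against and handles the HadoopException whose response data the method populates via generateResponseDataMap(). The config keys, service name, and host are illustrative assumptions.

import java.util.HashMap;
import java.util.Map;

import org.apache.ranger.plugin.client.HadoopException;

public class YarnLookupSketch {
    public static void main(String[] args) {
        Map<String, String> configs = new HashMap<>();
        // Hypothetical connection properties; the real keys come from the YARN service definition.
        configs.put("yarn.url", "http://yarn-rm.example.com:8088");
        configs.put("username", "rangerlookup");
        try {
            // YarnClient is the class shown above in the Ranger tree.
            YarnClient client = YarnClient.getYarnClient("yarndev", configs);
            // ... use the client for resource lookup ...
        } catch (HadoopException he) {
            // Thrown before any client is constructed when configs is null or empty.
            System.err.println("YARN lookup failed: " + he.getMessage());
        }
    }
}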

Example 22 with HadoopException

use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.

the class HdfsClient method listFilesInternal.

private List<String> listFilesInternal(String baseDir, String fileMatching, final List<String> pathList) throws HadoopException {
    List<String> fileList = new ArrayList<String>();
    String errMsg = " You can still save the repository and start creating " + "policies, but you would not be able to use autocomplete for " + "resource names. Check ranger_admin.log for more info.";
    try {
        String dirPrefix = (baseDir.endsWith("/") ? baseDir : (baseDir + "/"));
        String filterRegEx = null;
        if (fileMatching != null && fileMatching.trim().length() > 0) {
            filterRegEx = fileMatching.trim();
        }
        UserGroupInformation.setConfiguration(conf);
        FileSystem fs = null;
        try {
            fs = FileSystem.get(conf);
            Path basePath = new Path(baseDir);
            FileStatus[] fileStatus = fs.listStatus(basePath);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HdfsClient fileStatus : " + fileStatus.length + " PathList :" + pathList);
            }
            if (fileStatus != null) {
                if (fs.exists(basePath) && ArrayUtils.isEmpty(fileStatus)) {
                    fileList.add(basePath.toString());
                } else {
                    for (FileStatus stat : fileStatus) {
                        Path path = stat.getPath();
                        String pathComponent = path.getName();
                        String prefixedPath = dirPrefix + pathComponent;
                        if (pathList != null && pathList.contains(prefixedPath)) {
                            continue;
                        }
                        if (filterRegEx == null) {
                            fileList.add(prefixedPath);
                        } else if (FilenameUtils.wildcardMatch(pathComponent, fileMatching)) {
                            fileList.add(prefixedPath);
                        }
                    }
                }
            }
        } catch (UnknownHostException uhe) {
            String msgDesc = "listFilesInternal: Unable to connect using given config parameters" + " of Hadoop environment [" + getSerivceName() + "].";
            HadoopException hdpException = new HadoopException(msgDesc, uhe);
            hdpException.generateResponseDataMap(false, getMessage(uhe), msgDesc + errMsg, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HdfsClient listFilesInternal Error : " + uhe);
            }
            throw hdpException;
        } catch (FileNotFoundException fne) {
            String msgDesc = "listFilesInternal: Unable to locate files using given config parameters " + "of Hadoop environment [" + getSerivceName() + "].";
            HadoopException hdpException = new HadoopException(msgDesc, fne);
            hdpException.generateResponseDataMap(false, getMessage(fne), msgDesc + errMsg, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HdfsClient listFilesInternal Error : " + fne);
            }
            throw hdpException;
        }
    } catch (IOException ioe) {
        String msgDesc = "listFilesInternal: Unable to get listing of files for directory [" + baseDir + "] file matching [" + fileMatching + "] from Hadoop environment [" + getSerivceName() + "].";
        HadoopException hdpException = new HadoopException(msgDesc, ioe);
        hdpException.generateResponseDataMap(false, getMessage(ioe), msgDesc + errMsg, null, null);
        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HdfsClient listFilesInternal Error : " + ioe);
        }
        throw hdpException;
    } catch (IllegalArgumentException iae) {
        String msgDesc = "Unable to get listing of files for directory [" + baseDir + "] from Hadoop environment [" + getSerivceName() + "].";
        HadoopException hdpException = new HadoopException(msgDesc, iae);
        hdpException.generateResponseDataMap(false, getMessage(iae), msgDesc + errMsg, null, null);
        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HdfsClient listFilesInternal Error : " + iae);
        }
        throw hdpException;
    }
    return fileList;
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) UnknownHostException(java.net.UnknownHostException) FileSystem(org.apache.hadoop.fs.FileSystem) FileNotFoundException(java.io.FileNotFoundException) HadoopException(org.apache.ranger.plugin.client.HadoopException) IOException(java.io.IOException)
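
Despite the variable name filterRegEx, the matching in listFilesInternal() is Commons IO glob matching (? and * wildcards), not java.util.regex. A standalone check, assuming commons-io is on the classpath:

import org.apache.commons.io.FilenameUtils;

public class WildcardMatchDemo {
    public static void main(String[] args) {
        // wildcardMatch uses glob-style ? and *, not regular expression syntax.
        System.out.println(FilenameUtils.wildcardMatch("app-logs", "app*")); // true
        System.out.println(FilenameUtils.wildcardMatch("tmp", "app*"));      // false
        System.out.println(FilenameUtils.wildcardMatch("user1", "user?"));   // true
    }
}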

Example 23 with HadoopException

use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.

the class HiveClient method initConnection.

private void initConnection(String userName, String password) throws HadoopException {
    if (enableHiveMetastoreLookup) {
        try {
            HiveConf conf = new HiveConf();
            if (!StringUtils.isEmpty(hiveSiteFilePath)) {
                File f = new File(hiveSiteFilePath);
                if (f.exists()) {
                    conf.addResource(f.toURI().toURL());
                } else {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Hive site conf file path " + hiveSiteFilePath + " does not exist for Hive Metastore lookup");
                    }
                }
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Hive site conf file path property not found for Hive Metastore lookup");
                }
            }
            hiveClient = new HiveMetaStoreClient(conf);
        } catch (HadoopException he) {
            String msgDesc = "initConnection: Class or its nullary constructor might not be accessible, " + "so unable to initiate connection to Hive Thrift Server instance.";
            HadoopException hdpException = new HadoopException(msgDesc, he);
            hdpException.generateResponseDataMap(false, getMessage(he), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        } catch (MalformedURLException e) {
            String msgDesc = "initConnection: URL might be malformed, " + "so unable to initiate connection to Hive Thrift Server instance.";
            HadoopException hdpException = new HadoopException(msgDesc, e);
            hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        } catch (MetaException e) {
            String msgDesc = "initConnection: Metastore information is invalid, " + "so unable to initiate connection to Hive Thrift Server instance.";
            HadoopException hdpException = new HadoopException(msgDesc, e);
            hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        } catch (Throwable t) {
            String msgDesc = "Unable to connect to Hive Thrift Server instance";
            HadoopException hdpException = new HadoopException(msgDesc, t);
            hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        }
    } else {
        Properties prop = getConfigHolder().getRangerSection();
        String driverClassName = prop.getProperty("jdbc.driverClassName");
        String url = prop.getProperty("jdbc.url");
        if (driverClassName != null) {
            try {
                Driver driver = (Driver) Class.forName(driverClassName).newInstance();
                DriverManager.registerDriver(driver);
            } catch (SQLException e) {
                String msgDesc = "initConnection: Caught SQLException while registering " + "Hive driver, so unable to connect to Hive Thrift Server instance.";
                HadoopException hdpException = new HadoopException(msgDesc, e);
                hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            } catch (IllegalAccessException ilae) {
                String msgDesc = "initConnection: Class or its nullary constructor might not be accessible, " + "so unable to initiate connection to Hive Thrift Server instance.";
                HadoopException hdpException = new HadoopException(msgDesc, ilae);
                hdpException.generateResponseDataMap(false, getMessage(ilae), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            } catch (InstantiationException ie) {
                String msgDesc = "initConnection: Class may not have a nullary constructor, or the instantiation failed for some other reason, " + "so unable to initiate connection to Hive Thrift Server instance.";
                HadoopException hdpException = new HadoopException(msgDesc, ie);
                hdpException.generateResponseDataMap(false, getMessage(ie), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            } catch (ExceptionInInitializerError eie) {
                String msgDesc = "initConnection: Got ExceptionInInitializerError; the initialization provoked by this method failed, " + "so unable to initiate connection to Hive Thrift Server instance.";
                HadoopException hdpException = new HadoopException(msgDesc, eie);
                hdpException.generateResponseDataMap(false, getMessage(eie), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            } catch (SecurityException se) {
                String msgDesc = "initConnection: Unable to initiate connection to Hive Thrift Server instance; " + "the caller's class loader is not the same as, or an ancestor of, " + "the class loader for the current class, and invocation of " + "s.checkPackageAccess() denies access to the package of this class.";
                HadoopException hdpException = new HadoopException(msgDesc, se);
                hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            } catch (Throwable t) {
                String msgDesc = "initConnection: Unable to connect to Hive Thrift Server instance; " + "please provide a valid value for the field {jdbc.driverClassName}.";
                HadoopException hdpException = new HadoopException(msgDesc, t);
                hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + ERR_MSG, null, "jdbc.driverClassName");
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            }
        }
        try {
            if (userName == null && password == null) {
                con = DriverManager.getConnection(url);
            } else {
                String decryptedPwd = null;
                try {
                    decryptedPwd = PasswordUtils.decryptPassword(password);
                } catch (Exception ex) {
                    LOG.info("Password decryption failed; trying Hive connection with received password string");
                    decryptedPwd = null;
                } finally {
                    if (decryptedPwd == null) {
                        decryptedPwd = password;
                    }
                }
                con = DriverManager.getConnection(url, userName, decryptedPwd);
            }
        } catch (SQLException e) {
            String msgDesc = "Unable to connect to Hive Thrift Server instance.";
            HadoopException hdpException = new HadoopException(msgDesc, e);
            hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        } catch (SecurityException se) {
            String msgDesc = "Unable to connect to Hive Thrift Server instance.";
            HadoopException hdpException = new HadoopException(msgDesc, se);
            hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        } catch (Throwable t) {
            String msgDesc = "Unable to connect to Hive Thrift Server instance";
            HadoopException hdpException = new HadoopException(msgDesc, t);
            hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + ERR_MSG, null, url);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        }
    }
}
Also used : MalformedURLException(java.net.MalformedURLException) HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) SQLException(java.sql.SQLException) SQLTimeoutException(java.sql.SQLTimeoutException) Driver(java.sql.Driver) HadoopException(org.apache.ranger.plugin.client.HadoopException) Properties(java.util.Properties) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) TException(org.apache.thrift.TException) HiveConf(org.apache.hadoop.hive.conf.HiveConf) File(java.io.File)
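
initConnection() takes one of two paths: a HiveMetaStoreClient when metastore lookup is enabled, or a plain JDBC connection otherwise. The JDBC branch reduces to the sketch below; the URL and credentials are placeholders, and org.apache.hive.jdbc.HiveDriver is the driver class typically supplied as jdbc.driverClassName.

import java.sql.Connection;
import java.sql.DriverManager;

public class HiveJdbcConnectSketch {
    public static void main(String[] args) throws Exception {
        // Class.forName alone registers a JDBC 4 driver; the explicit
        // DriverManager.registerDriver call in the code above predates that.
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        String url = "jdbc:hive2://hive.example.com:10000/default"; // placeholder host
        try (Connection con = DriverManager.getConnection(url, "rangerlookup", "secret")) {
            System.out.println("Connected to HiveServer2: " + !con.isClosed());
        }
    }
}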

Example 24 with HadoopException

use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.

the class HiveClient method getClmList.

private List<String> getClmList(String columnNameMatching, List<String> dbList, List<String> tblList, List<String> colList) throws HadoopException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HiveClient.getClmList() columnNameMatching: " + columnNameMatching + " dbList :" + dbList + " tblList: " + tblList + " colList: " + colList);
    }
    List<String> ret = new ArrayList<String>();
    if (con != null) {
        String columnNameMatchingRegEx = null;
        if (columnNameMatching != null && !columnNameMatching.isEmpty()) {
            columnNameMatchingRegEx = columnNameMatching;
        }
        Statement stat = null;
        ResultSet rs = null;
        String sql = null;
        if (dbList != null && !dbList.isEmpty() && tblList != null && !tblList.isEmpty()) {
            for (String db : dbList) {
                for (String tbl : tblList) {
                    try {
                        sql = "use " + db;
                        try {
                            stat = con.createStatement();
                            stat.execute(sql);
                        } finally {
                            close(stat);
                        }
                        sql = "describe " + tbl;
                        stat = con.createStatement();
                        rs = stat.executeQuery(sql);
                        while (rs.next()) {
                            String columnName = rs.getString(1);
                            if (colList != null && colList.contains(columnName)) {
                                continue;
                            }
                            if (columnNameMatchingRegEx == null) {
                                ret.add(columnName);
                            } else if (FilenameUtils.wildcardMatch(columnName, columnNameMatchingRegEx)) {
                                ret.add(columnName);
                            }
                        }
                    } catch (SQLTimeoutException sqlt) {
                        String msgDesc = "Timed out: unable to execute SQL [" + sql + "].";
                        HadoopException hdpException = new HadoopException(msgDesc, sqlt);
                        hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + ERR_MSG, null, null);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("<== HiveClient.getClmList() Error : ", sqlt);
                        }
                        throw hdpException;
                    } catch (SQLException sqle) {
                        String msgDesc = "Unable to execute SQL [" + sql + "].";
                        HadoopException hdpException = new HadoopException(msgDesc, sqle);
                        hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + ERR_MSG, null, null);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("<== HiveClient.getClmList() Error : ", sqle);
                        }
                        throw hdpException;
                    } finally {
                        close(rs);
                        close(stat);
                    }
                }
            }
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HiveClient.getClmList() " + ret);
    }
    return ret;
}
Also used : SQLException(java.sql.SQLException) Statement(java.sql.Statement) ArrayList(java.util.ArrayList) ResultSet(java.sql.ResultSet) SQLTimeoutException(java.sql.SQLTimeoutException) HadoopException(org.apache.ranger.plugin.client.HadoopException)
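
The per-table loop above issues "use <db>" followed by "describe <table>" and reads the first result column. Condensed into try-with-resources (a sketch, not the Ranger code, which uses explicit close() helpers):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

public class DescribeColumnsSketch {
    // Returns the column names of db.tbl over an already-open Hive JDBC connection.
    static List<String> columnsOf(Connection con, String db, String tbl) throws SQLException {
        List<String> cols = new ArrayList<>();
        try (Statement use = con.createStatement()) {
            use.execute("use " + db);
        }
        try (Statement stat = con.createStatement();
             ResultSet rs = stat.executeQuery("describe " + tbl)) {
            while (rs.next()) {
                cols.add(rs.getString(1)); // first column of "describe" output is the column name
            }
        }
        return cols;
    }
}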

Example 25 with HadoopException

use of org.apache.ranger.plugin.client.HadoopException in project ranger by apache.

the class HiveClient method getTblList.

private List<String> getTblList(String tableNameMatching, List<String> dbList, List<String> tblList) throws HadoopException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HiveClient getTblList() tableNameMatching: " + tableNameMatching + " excludeDbList: " + dbList + " excludeTableList: " + tblList);
    }
    List<String> ret = new ArrayList<String>();
    if (con != null) {
        Statement stat = null;
        ResultSet rs = null;
        String sql = null;
        try {
            if (dbList != null && !dbList.isEmpty()) {
                for (String db : dbList) {
                    sql = "use " + db;
                    try {
                        stat = con.createStatement();
                        stat.execute(sql);
                    } finally {
                        close(stat);
                        stat = null;
                    }
                    sql = "show tables";
                    if (tableNameMatching != null && !tableNameMatching.isEmpty()) {
                        sql = sql + " like \"" + tableNameMatching + "\"";
                    }
                    try {
                        stat = con.createStatement();
                        rs = stat.executeQuery(sql);
                        while (rs.next()) {
                            String tblName = rs.getString(1);
                            if (tblList != null && tblList.contains(tblName)) {
                                continue;
                            }
                            ret.add(tblName);
                        }
                    } finally {
                        close(rs);
                        close(stat);
                        rs = null;
                        stat = null;
                    }
                }
            }
        } catch (SQLTimeoutException sqlt) {
            String msgDesc = "Timed out: unable to execute SQL [" + sql + "].";
            HadoopException hdpException = new HadoopException(msgDesc, sqlt);
            hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HiveClient.getTblList() Error : ", sqlt);
            }
            throw hdpException;
        } catch (SQLException sqle) {
            String msgDesc = "Unable to execute SQL [" + sql + "].";
            HadoopException hdpException = new HadoopException(msgDesc, sqle);
            hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HiveClient.getTblList() Error : ", sqle);
            }
            throw hdpException;
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HiveClient getTblList() " + ret);
    }
    return ret;
}
Also used : SQLException(java.sql.SQLException) Statement(java.sql.Statement) ArrayList(java.util.ArrayList) ResultSet(java.sql.ResultSet) SQLTimeoutException(java.sql.SQLTimeoutException) HadoopException(org.apache.ranger.plugin.client.HadoopException)
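
The table listing relies on Hive's own "show tables like" pattern syntax, with the matching string passed through unescaped. A condensed sketch under the same assumption of an open JDBC connection:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

public class ShowTablesSketch {
    // Lists tables in db, optionally narrowed by a Hive wildcard pattern such as "emp*".
    static List<String> tablesLike(Connection con, String db, String pattern) throws SQLException {
        List<String> tables = new ArrayList<>();
        try (Statement use = con.createStatement()) {
            use.execute("use " + db);
        }
        String sql = "show tables" + (pattern == null || pattern.isEmpty() ? "" : " like \"" + pattern + "\"");
        try (Statement stat = con.createStatement();
             ResultSet rs = stat.executeQuery(sql)) {
            while (rs.next()) {
                tables.add(rs.getString(1));
            }
        }
        return tables;
    }
}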

Aggregations

HadoopException (org.apache.ranger.plugin.client.HadoopException) 29
ArrayList (java.util.ArrayList) 13
IOException (java.io.IOException) 9
PrivilegedAction (java.security.PrivilegedAction) 6
Gson (com.google.gson.Gson) 5
GsonBuilder (com.google.gson.GsonBuilder) 5
Client (com.sun.jersey.api.client.Client) 5
ClientResponse (com.sun.jersey.api.client.ClientResponse) 5
WebResource (com.sun.jersey.api.client.WebResource) 5
BaseClient (org.apache.ranger.plugin.client.BaseClient) 5
SQLException (java.sql.SQLException) 4
SQLTimeoutException (java.sql.SQLTimeoutException) 4
ResultSet (java.sql.ResultSet) 3
Statement (java.sql.Statement) 3
List (java.util.List) 3
Subject (javax.security.auth.Subject) 3
MasterNotRunningException (org.apache.hadoop.hbase.MasterNotRunningException) 3
ZooKeeperConnectionException (org.apache.hadoop.hbase.ZooKeeperConnectionException) 3
HTTPBasicAuthFilter (com.sun.jersey.api.client.filter.HTTPBasicAuthFilter) 2
MalformedURLException (java.net.MalformedURLException) 2