Use of org.apache.hadoop.hbase.security.UserProvider in project hbase by apache.
The class ConnectionFactory, method createConnection.
/**
 * Create a new Connection instance using the passed <code>conf</code> instance. Connection
 * encapsulates all housekeeping for a connection to the cluster. All tables and interfaces
 * created from returned connection share zookeeper connection, meta cache, and connections to
 * region servers and masters. <br>
 * The caller is responsible for calling {@link Connection#close()} on the returned connection
 * instance. Typical usage:
 *
 * <pre>
 * Connection connection = ConnectionFactory.createConnection(conf);
 * Table table = connection.getTable(TableName.valueOf("table1"));
 * try {
 *   table.get(...);
 *   ...
 * } finally {
 *   table.close();
 *   connection.close();
 * }
 * </pre>
 *
 * @param conf configuration
 * @param pool the thread pool to use for batch operations
 * @param user the user the connection is for
 * @return Connection object for <code>conf</code>
 */
public static Connection createConnection(Configuration conf, ExecutorService pool, User user)
    throws IOException {
  if (user == null) {
    UserProvider provider = UserProvider.instantiate(conf);
    user = provider.getCurrent();
  }
  String className = conf.get(ClusterConnection.HBASE_CLIENT_CONNECTION_IMPL,
      ConnectionImplementation.class.getName());
  Class<?> clazz;
  try {
    clazz = Class.forName(className);
  } catch (ClassNotFoundException e) {
    throw new IOException(e);
  }
  try {
    // Default HCM#HCI is not accessible; make it so before invoking.
    Constructor<?> constructor =
        clazz.getDeclaredConstructor(Configuration.class, ExecutorService.class, User.class);
    constructor.setAccessible(true);
    return (Connection) constructor.newInstance(conf, pool, user);
  } catch (Exception e) {
    throw new IOException(e);
  }
}
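For reference, a minimal caller sketch for this overload (the table name "table1", the row key "row1", and the pool size are placeholder values): it resolves the current user the same way createConnection does when passed null, supplies its own batch pool, and closes everything via try-with-resources.

import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateConnectionExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Same resolution createConnection performs internally when user == null.
    User user = UserProvider.instantiate(conf).getCurrent();
    ExecutorService pool = Executors.newFixedThreadPool(4);
    try (Connection connection = ConnectionFactory.createConnection(conf, pool, user);
        Table table = connection.getTable(TableName.valueOf("table1"))) {
      table.get(new Get(Bytes.toBytes("row1")));
    } finally {
      // The caller owns the pool it passes in, so it must shut it down too.
      pool.shutdown();
    }
  }
}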
Use of org.apache.hadoop.hbase.security.UserProvider in project hbase by apache.
The class AuthUtil, method getAuthChore.
/**
 * Checks if security is enabled and, if so, launches a chore for refreshing the Kerberos ticket.
 * @param conf the hbase service configuration
 * @return a ScheduledChore for renewals, if needed, and null otherwise.
 */
public static ScheduledChore getAuthChore(Configuration conf) throws IOException {
  UserProvider userProvider = UserProvider.instantiate(conf);
  // login the principal (if using secure Hadoop)
  boolean securityEnabled =
      userProvider.isHadoopSecurityEnabled() && userProvider.isHBaseSecurityEnabled();
  if (!securityEnabled) {
    return null;
  }
  String host = null;
  try {
    host = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
        conf.get("hbase.client.dns.interface", "default"),
        conf.get("hbase.client.dns.nameserver", "default")));
    userProvider.login("hbase.client.keytab.file", "hbase.client.kerberos.principal", host);
  } catch (UnknownHostException e) {
    LOG.error("Error resolving host name: " + e.getMessage(), e);
    throw e;
  } catch (IOException e) {
    LOG.error("Error while trying to perform the initial login: " + e.getMessage(), e);
    throw e;
  }
  final UserGroupInformation ugi = userProvider.getCurrent().getUGI();
  Stoppable stoppable = new Stoppable() {
    private volatile boolean isStopped = false;

    @Override
    public void stop(String why) {
      isStopped = true;
    }

    @Override
    public boolean isStopped() {
      return isStopped;
    }
  };
  // In debug mode this interval is useful to avoid being spammed by getTGT().
  // It can be increased, keeping in mind that the default refresh window is 0.8:
  // e.g. a 5 min TGT * 0.8 = 4 min refresh, so the interval had better be well
  // under 1 min. Here: 30 sec.
  final int CHECK_TGT_INTERVAL = 30 * 1000;
  ScheduledChore refreshCredentials =
      new ScheduledChore("RefreshCredentials", stoppable, CHECK_TGT_INTERVAL) {
        @Override
        protected void chore() {
          try {
            ugi.checkTGTAndReloginFromKeytab();
          } catch (IOException e) {
            LOG.error("Got exception while trying to refresh credentials: " + e.getMessage(), e);
          }
        }
      };
  return refreshCredentials;
}
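A hedged usage sketch: the returned chore is handed to a ChoreService, which drives chore() at the 30-second interval baked in above. The service name "AuthChoreService" is a made-up example.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ScheduledChore;

public class AuthChoreExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Null when Hadoop/HBase security is disabled, so guard before scheduling.
    ScheduledChore authChore = AuthUtil.getAuthChore(conf);
    if (authChore != null) {
      ChoreService choreService = new ChoreService("AuthChoreService");
      choreService.scheduleChore(authChore);
      // ... application runs; call choreService.shutdown() on exit.
    }
  }
}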
Use of org.apache.hadoop.hbase.security.UserProvider in project hbase by apache.
The class TableAuthManager, method initGlobal.
/**
* Returns a new {@code PermissionCache} initialized with permission assignments
* from the {@code hbase.superuser} configuration key.
*/
private PermissionCache<Permission> initGlobal(Configuration conf) throws IOException {
  UserProvider userProvider = UserProvider.instantiate(conf);
  User user = userProvider.getCurrent();
  if (user == null) {
    throw new IOException("Unable to obtain the current user, "
        + "authorization checks for internal operations will not work correctly!");
  }
  PermissionCache<Permission> newCache = new PermissionCache<>();
  String currentUser = user.getShortName();
  // the system user is always included
  List<String> superusers = Lists.asList(currentUser,
      conf.getStrings(Superusers.SUPERUSER_CONF_KEY, new String[0]));
  if (superusers != null) {
    for (String name : superusers) {
      if (AuthUtil.isGroupPrincipal(name)) {
        newCache.putGroup(AuthUtil.getGroupName(name),
            new Permission(Permission.Action.values()));
      } else {
        newCache.putUser(name, new Permission(Permission.Action.values()));
      }
    }
  }
  return newCache;
}
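The isGroupPrincipal branch relies on HBase's convention that an entry in hbase.superuser beginning with '@' names a group rather than a user. A minimal sketch of that convention (the entries are made-up examples):

import org.apache.hadoop.hbase.AuthUtil;

public class GroupEntrySketch {
  public static void main(String[] args) {
    // hbase.superuser may mix users and groups; '@' marks a group entry.
    for (String entry : new String[] { "hbase", "@admins" }) {
      if (AuthUtil.isGroupPrincipal(entry)) {
        // Strips the '@' prefix, e.g. "@admins" -> "admins".
        System.out.println("group: " + AuthUtil.getGroupName(entry));
      } else {
        System.out.println("user: " + entry);
      }
    }
  }
}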
Use of org.apache.hadoop.hbase.security.UserProvider in project hbase by apache.
The class HBaseFsck, method preCheckPermission.
private void preCheckPermission() throws IOException, AccessDeniedException {
  if (shouldIgnorePreCheckPermission()) {
    return;
  }
  Path hbaseDir = FSUtils.getRootDir(getConf());
  FileSystem fs = hbaseDir.getFileSystem(getConf());
  UserProvider userProvider = UserProvider.instantiate(getConf());
  UserGroupInformation ugi = userProvider.getCurrent().getUGI();
  FileStatus[] files = fs.listStatus(hbaseDir);
  for (FileStatus file : files) {
    try {
      FSUtils.checkAccess(ugi, file, FsAction.WRITE);
    } catch (AccessDeniedException ace) {
      LOG.warn("Got AccessDeniedException when preCheckPermission ", ace);
      errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + ugi.getUserName()
          + " does not have write perms to " + file.getPath()
          + ". Please rerun hbck as hdfs user " + file.getOwner());
      throw ace;
    }
  }
}
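As a standalone sketch (assuming a reachable HDFS and an hbase-site.xml on the classpath), the same write-access probe can be run outside hbck to verify permissions before invoking it:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.security.UserGroupInformation;

public class WritePermissionProbe {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    Path rootDir = FSUtils.getRootDir(conf);
    FileSystem fs = rootDir.getFileSystem(conf);
    // Resolve the caller exactly as preCheckPermission() does.
    UserGroupInformation ugi = UserProvider.instantiate(conf).getCurrent().getUGI();
    for (FileStatus status : fs.listStatus(rootDir)) {
      // Throws AccessDeniedException when the user lacks write access;
      // hbck reports this as WRONG_USAGE and aborts.
      FSUtils.checkAccess(ugi, status, FsAction.WRITE);
    }
    System.out.println("User " + ugi.getUserName() + " can write under " + rootDir);
  }
}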
Use of org.apache.hadoop.hbase.security.UserProvider in project hbase by apache.
The class RESTServer, method main.
/**
* The main method for the HBase rest server.
* @param args command-line arguments
* @throws Exception exception
*/
public static void main(String[] args) throws Exception {
  LOG.info("***** STARTING service '" + RESTServer.class.getSimpleName() + "' *****");
  VersionInfo.logVersion();
  Configuration conf = HBaseConfiguration.create();
  UserProvider userProvider = UserProvider.instantiate(conf);
  Pair<FilterHolder, Class<? extends ServletContainer>> pair =
      loginServerPrincipal(userProvider, conf);
  FilterHolder authFilter = pair.getFirst();
  RESTServlet servlet = RESTServlet.getInstance(conf, userProvider);
  parseCommandLine(args, servlet);
  // set up the Jersey servlet container for Jetty
  ResourceConfig application = new ResourceConfig()
      .packages("org.apache.hadoop.hbase.rest").register(Jackson1Feature.class);
  ServletHolder sh = new ServletHolder(new ServletContainer(application));
  // Set the default max thread number to 100 to limit the number of concurrent
  // requests so that the REST server doesn't OOM easily. Jetty sets the default
  // max thread number to 250 if we don't set it.
  //
  // Our default min thread number of 2 is the same as that used by Jetty.
  int maxThreads = servlet.getConfiguration().getInt(REST_THREAD_POOL_THREADS_MAX, 100);
  int minThreads = servlet.getConfiguration().getInt(REST_THREAD_POOL_THREADS_MIN, 2);
  // Use the default queue (unbounded with Jetty 9.3) if the queue size is negative,
  // otherwise use a bounded {@link ArrayBlockingQueue} with the given size.
  int queueSize = servlet.getConfiguration().getInt(REST_THREAD_POOL_TASK_QUEUE_SIZE, -1);
  int idleTimeout = servlet.getConfiguration().getInt(REST_THREAD_POOL_THREAD_IDLE_TIMEOUT, 60000);
  QueuedThreadPool threadPool = queueSize > 0
      ? new QueuedThreadPool(maxThreads, minThreads, idleTimeout,
          new ArrayBlockingQueue<>(queueSize))
      : new QueuedThreadPool(maxThreads, minThreads, idleTimeout);
  Server server = new Server(threadPool);
  // Setup JMX
  MBeanContainer mbContainer = new MBeanContainer(ManagementFactory.getPlatformMBeanServer());
  server.addEventListener(mbContainer);
  server.addBean(mbContainer);
  String host = servlet.getConfiguration().get("hbase.rest.host", "0.0.0.0");
  int servicePort = servlet.getConfiguration().getInt("hbase.rest.port", 8080);
  HttpConfiguration httpConfig = new HttpConfiguration();
  httpConfig.setSecureScheme("https");
  httpConfig.setSecurePort(servicePort);
  httpConfig.setSendServerVersion(false);
  httpConfig.setSendDateHeader(false);
  ServerConnector serverConnector;
  if (conf.getBoolean(REST_SSL_ENABLED, false)) {
    HttpConfiguration httpsConfig = new HttpConfiguration(httpConfig);
    httpsConfig.addCustomizer(new SecureRequestCustomizer());
    SslContextFactory sslCtxFactory = new SslContextFactory();
    String keystore = conf.get(REST_SSL_KEYSTORE_STORE);
    String password = HBaseConfiguration.getPassword(conf, REST_SSL_KEYSTORE_PASSWORD, null);
    String keyPassword =
        HBaseConfiguration.getPassword(conf, REST_SSL_KEYSTORE_KEYPASSWORD, password);
    sslCtxFactory.setKeyStorePath(keystore);
    sslCtxFactory.setKeyStorePassword(password);
    sslCtxFactory.setKeyManagerPassword(keyPassword);
    String[] excludeCiphers = servlet.getConfiguration()
        .getStrings(REST_SSL_EXCLUDE_CIPHER_SUITES, ArrayUtils.EMPTY_STRING_ARRAY);
    if (excludeCiphers.length != 0) {
      sslCtxFactory.setExcludeCipherSuites(excludeCiphers);
    }
    String[] includeCiphers = servlet.getConfiguration()
        .getStrings(REST_SSL_INCLUDE_CIPHER_SUITES, ArrayUtils.EMPTY_STRING_ARRAY);
    if (includeCiphers.length != 0) {
      sslCtxFactory.setIncludeCipherSuites(includeCiphers);
    }
    String[] excludeProtocols = servlet.getConfiguration()
        .getStrings(REST_SSL_EXCLUDE_PROTOCOLS, ArrayUtils.EMPTY_STRING_ARRAY);
    if (excludeProtocols.length != 0) {
      sslCtxFactory.setExcludeProtocols(excludeProtocols);
    }
    String[] includeProtocols = servlet.getConfiguration()
        .getStrings(REST_SSL_INCLUDE_PROTOCOLS, ArrayUtils.EMPTY_STRING_ARRAY);
    if (includeProtocols.length != 0) {
      sslCtxFactory.setIncludeProtocols(includeProtocols);
    }
    serverConnector = new ServerConnector(server,
        new SslConnectionFactory(sslCtxFactory, HttpVersion.HTTP_1_1.toString()),
        new HttpConnectionFactory(httpsConfig));
  } else {
    serverConnector = new ServerConnector(server, new HttpConnectionFactory(httpConfig));
  }
  int acceptQueueSize = servlet.getConfiguration().getInt(REST_CONNECTOR_ACCEPT_QUEUE_SIZE, -1);
  if (acceptQueueSize >= 0) {
    serverConnector.setAcceptQueueSize(acceptQueueSize);
  }
  serverConnector.setPort(servicePort);
  serverConnector.setHost(host);
  server.addConnector(serverConnector);
  server.setStopAtShutdown(true);
  // set up context
  ServletContextHandler ctxHandler =
      new ServletContextHandler(server, "/", ServletContextHandler.SESSIONS);
  ctxHandler.addServlet(sh, PATH_SPEC_ANY);
  if (authFilter != null) {
    ctxHandler.addFilter(authFilter, PATH_SPEC_ANY, EnumSet.of(DispatcherType.REQUEST));
  }
  // Load filters from configuration.
  String[] filterClasses =
      servlet.getConfiguration().getStrings(FILTER_CLASSES, ArrayUtils.EMPTY_STRING_ARRAY);
  for (String filter : filterClasses) {
    filter = filter.trim();
    ctxHandler.addFilter(filter, PATH_SPEC_ANY, EnumSet.of(DispatcherType.REQUEST));
  }
  addCSRFFilter(ctxHandler, conf);
  HttpServerUtil.constrainHttpMethods(ctxHandler);
  // Put up info server.
  int port = conf.getInt("hbase.rest.info.port", 8085);
  if (port >= 0) {
    conf.setLong("startcode", System.currentTimeMillis());
    String a = conf.get("hbase.rest.info.bindAddress", "0.0.0.0");
    InfoServer infoServer = new InfoServer("rest", a, port, false, conf);
    infoServer.setAttribute("hbase.conf", conf);
    infoServer.start();
  }
  // start server
  server.start();
  server.join();
  LOG.info("***** STOPPING service '" + RESTServer.class.getSimpleName() + "' *****");
}
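The server behavior above is driven entirely by configuration. As a sketch of the relevant knobs (assuming the constants referenced in main() resolve via org.apache.hadoop.hbase.rest.Constants; in a real deployment they would be set in hbase-site.xml before launch rather than programmatically, and all values and paths here are examples):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.rest.Constants;

public class RestConfigSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Thread-pool knobs read by RESTServer.main(); a positive queue size
    // switches Jetty to a bounded ArrayBlockingQueue.
    conf.setInt(Constants.REST_THREAD_POOL_THREADS_MAX, 100);
    conf.setInt(Constants.REST_THREAD_POOL_THREADS_MIN, 2);
    conf.setInt(Constants.REST_THREAD_POOL_TASK_QUEUE_SIZE, 1024);
    // Keys that select and configure the HTTPS connector branch.
    conf.setBoolean(Constants.REST_SSL_ENABLED, true);
    conf.set(Constants.REST_SSL_KEYSTORE_STORE, "/path/to/keystore.jks");
    conf.set(Constants.REST_SSL_KEYSTORE_PASSWORD, "keystore-password");
    conf.set(Constants.REST_SSL_KEYSTORE_KEYPASSWORD, "key-password");
  }
}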