
Example 1 with ConfigurationFactory

Use of org.apache.phoenix.query.ConfigurationFactory in project phoenix by apache.

From the class SecureUserConnectionsTest, method setupKdc:

@BeforeClass
public static void setupKdc() throws Exception {
    ensureIsEmptyDirectory(KDC_DIR);
    ensureIsEmptyDirectory(KEYTAB_DIR);
    // Create and start the KDC. MiniKDC appears to have a race condition in how it does
    // port allocation (with apache-ds). See PHOENIX-3287.
    boolean started = false;
    for (int i = 0; !started && i < KDC_START_ATTEMPTS; i++) {
        Properties kdcConf = MiniKdc.createConf();
        kdcConf.put(MiniKdc.DEBUG, true);
        KDC = new MiniKdc(kdcConf, KDC_DIR);
        try {
            KDC.start();
            started = true;
        } catch (Exception e) {
            LOG.warn("PHOENIX-3287: Failed to start KDC, retrying...", e);
        }
    }
    assertTrue("The embedded KDC failed to start successfully after " + KDC_START_ATTEMPTS + " attempts.", started);
    createUsers(NUM_USERS);
    createServiceUsers(NUM_USERS);
    final Configuration conf = new Configuration(false);
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
    conf.setBoolean(User.HBASE_SECURITY_AUTHORIZATION_CONF_KEY, true);
    UserGroupInformation.setConfiguration(conf);
    // Clear the cached singletons so we can inject our own.
    InstanceResolver.clearSingletons();
    // Make sure the ConnectionInfo doesn't try to pull a default Configuration
    InstanceResolver.getSingleton(ConfigurationFactory.class, new ConfigurationFactory() {

        @Override
        public Configuration getConfiguration() {
            return conf;
        }

        @Override
        public Configuration getConfiguration(Configuration confToClone) {
            Configuration copy = new Configuration(conf);
            copy.addResource(confToClone);
            return copy;
        }
    });
    updateDefaultRealm();
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), MiniKdc (org.apache.hadoop.minikdc.MiniKdc), ConfigurationFactory (org.apache.phoenix.query.ConfigurationFactory), Properties (java.util.Properties), IOException (java.io.IOException), BeforeClass (org.junit.BeforeClass)
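
The injection above works because Phoenix resolves ConfigurationFactory through InstanceResolver rather than constructing one directly, so the anonymous factory becomes the cached singleton. As a hedged sketch (not part of the test above), here is how a check of the override might look, assuming Phoenix's HBaseFactoryProvider accessor resolves the factory the same way the connection code does:

import static org.junit.Assert.assertEquals;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.phoenix.query.HBaseFactoryProvider;

public class InjectedConfigurationCheck {
    public static void main(String[] args) {
        // Resolve the factory through the same indirection Phoenix uses internally.
        Configuration resolved = HBaseFactoryProvider.getConfigurationFactory().getConfiguration();
        // With the anonymous factory registered in setupKdc(), this returns the
        // kerberos-enabled conf rather than a freshly loaded default Configuration.
        assertEquals("kerberos", resolved.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION));
    }
}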

Example 2 with ConfigurationFactory

Use of org.apache.phoenix.query.ConfigurationFactory in project phoenix by apache.

From the class HttpParamImpersonationQueryServerIT, method setUp:

/**
 * Set up and start Kerberos and HBase
 */
@BeforeClass
public static void setUp() throws Exception {
    final Configuration conf = UTIL.getConfiguration();
    // Ensure the dirs we need are created/empty
    ensureIsEmptyDirectory(TEMP_DIR);
    ensureIsEmptyDirectory(KEYTAB_DIR);
    KEYTAB = new File(KEYTAB_DIR, "test.keytab");
    // Start a MiniKDC
    KDC = UTIL.setupMiniKdc(KEYTAB);
    // Create a service principal and spnego principal in one keytab
    // NB. Due to some apparent limitations between HDFS and HBase in the same JVM, trying to
    // use separate identities for HBase and HDFS results in a GSS initiate error. The quick
    // solution is to just use a single "service" principal instead of "hbase" and "hdfs"
    // (or "dn" and "nn") per usual.
    KDC.createPrincipal(KEYTAB, SPNEGO_PRINCIPAL, PQS_PRINCIPAL, SERVICE_PRINCIPAL);
    // Start ZK by hand
    UTIL.startMiniZKCluster();
    // Create a number of unprivileged users
    createUsers(2);
    // Set configuration for HBase
    HBaseKerberosUtils.setPrincipalForTesting(SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    HBaseKerberosUtils.setSecuredConfiguration(conf);
    setHdfsSecuredConfiguration(conf);
    UserGroupInformation.setConfiguration(conf);
    conf.setInt(HConstants.MASTER_PORT, 0);
    conf.setInt(HConstants.MASTER_INFO_PORT, 0);
    conf.setInt(HConstants.REGIONSERVER_PORT, 0);
    conf.setInt(HConstants.REGIONSERVER_INFO_PORT, 0);
    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
    conf.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, AccessController.class.getName(), TokenProvider.class.getName());
    // Secure Phoenix setup
    conf.set("phoenix.queryserver.kerberos.http.principal", SPNEGO_PRINCIPAL + "@" + KDC.getRealm());
    conf.set("phoenix.queryserver.http.keytab.file", KEYTAB.getAbsolutePath());
    conf.set("phoenix.queryserver.kerberos.principal", PQS_PRINCIPAL + "@" + KDC.getRealm());
    conf.set("phoenix.queryserver.keytab.file", KEYTAB.getAbsolutePath());
    conf.setBoolean(QueryServices.QUERY_SERVER_DISABLE_KERBEROS_LOGIN, true);
    conf.setInt(QueryServices.QUERY_SERVER_HTTP_PORT_ATTRIB, 0);
    // Required so that PQS can impersonate the end-users to HBase
    conf.set("hadoop.proxyuser.phoenixqs.groups", "*");
    conf.set("hadoop.proxyuser.phoenixqs.hosts", "*");
    // user1 is allowed to impersonate others, user2 is not
    conf.set("hadoop.proxyuser.user1.groups", "*");
    conf.set("hadoop.proxyuser.user1.hosts", "*");
    conf.setBoolean(QueryServices.QUERY_SERVER_WITH_REMOTEUSEREXTRACTOR_ATTRIB, true);
    // Clear the cached singletons so we can inject our own.
    InstanceResolver.clearSingletons();
    // Make sure the ConnectionInfo doesn't try to pull a default Configuration
    InstanceResolver.getSingleton(ConfigurationFactory.class, new ConfigurationFactory() {

        @Override
        public Configuration getConfiguration() {
            return conf;
        }

        @Override
        public Configuration getConfiguration(Configuration confToClone) {
            Configuration copy = new Configuration(conf);
            copy.addResource(confToClone);
            return copy;
        }
    });
    updateDefaultRealm();
    // Start HDFS
    UTIL.startMiniDFSCluster(1);
    // Use LocalHBaseCluster to keep HBaseTestingUtility from doing something wrong
    // NB. I'm not actually sure what HTU does incorrectly, but this was pulled from some test
    // classes in HBase itself. I couldn't get HTU to work myself (2017/07/06)
    Path rootdir = UTIL.getDataTestDirOnTestFS(HttpParamImpersonationQueryServerIT.class.getSimpleName());
    FSUtils.setRootDir(conf, rootdir);
    HBASE_CLUSTER = new LocalHBaseCluster(conf, 1);
    HBASE_CLUSTER.startup();
    // Then fork a thread with PQS in it.
    startQueryServer();
}
Also used: Path (org.apache.hadoop.fs.Path), TokenProvider (org.apache.hadoop.hbase.security.token.TokenProvider), AccessController (org.apache.hadoop.hbase.security.access.AccessController), Configuration (org.apache.hadoop.conf.Configuration), ConfigurationFactory (org.apache.phoenix.query.ConfigurationFactory), LocalHBaseCluster (org.apache.hadoop.hbase.LocalHBaseCluster), File (java.io.File), BeforeClass (org.junit.BeforeClass)
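
The hadoop.proxyuser keys above are what allow user1 to impersonate end users through PQS while user2 is refused. A minimal standalone sketch of the Hadoop-side check those keys drive, using Hadoop's ProxyUsers utility (the grants mirror the test's; the impersonated name "target" and the remote address are placeholders):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.ProxyUsers;

public class ProxyUserCheck {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(false);
        // Same grants as in setUp(): user1 may impersonate anyone, user2 gets nothing.
        conf.set("hadoop.proxyuser.user1.groups", "*");
        conf.set("hadoop.proxyuser.user1.hosts", "*");
        ProxyUsers.refreshSuperUserGroupsConfiguration(conf);

        UserGroupInformation user1 = UserGroupInformation.createRemoteUser("user1");
        // Passes: user1 is a configured proxy user for all groups and hosts.
        ProxyUsers.authorize(UserGroupInformation.createProxyUser("target", user1), "127.0.0.1");
        System.out.println("user1 impersonation authorized");

        UserGroupInformation user2 = UserGroupInformation.createRemoteUser("user2");
        try {
            ProxyUsers.authorize(UserGroupInformation.createProxyUser("target", user2), "127.0.0.1");
        } catch (AuthorizationException expected) {
            // user2 has no hadoop.proxyuser.* grants, so the check fails.
            System.out.println("user2 impersonation denied, as configured");
        }
    }
}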

Example 3 with ConfigurationFactory

Use of org.apache.phoenix.query.ConfigurationFactory in project phoenix by apache.

From the class SecureQueryServerIT, method setUp:

/**
 * Set up and start Kerberos and HBase
 */
@BeforeClass
public static void setUp() throws Exception {
    final Configuration conf = UTIL.getConfiguration();
    // Ensure the dirs we need are created/empty
    ensureIsEmptyDirectory(TEMP_DIR);
    ensureIsEmptyDirectory(KEYTAB_DIR);
    KEYTAB = new File(KEYTAB_DIR, "test.keytab");
    // Start a MiniKDC
    KDC = UTIL.setupMiniKdc(KEYTAB);
    // Create a service principal and spnego principal in one keytab
    // NB. Due to some apparent limitations between HDFS and HBase in the same JVM, trying to
    // use separate identities for HBase and HDFS results in a GSS initiate error. The quick
    // solution is to just use a single "service" principal instead of "hbase" and "hdfs"
    // (or "dn" and "nn") per usual.
    KDC.createPrincipal(KEYTAB, SPNEGO_PRINCIPAL, PQS_PRINCIPAL, SERVICE_PRINCIPAL);
    // Start ZK by hand
    UTIL.startMiniZKCluster();
    // Create a number of unprivileged users
    createUsers(3);
    // Set configuration for HBase
    HBaseKerberosUtils.setPrincipalForTesting(SERVICE_PRINCIPAL + "@" + KDC.getRealm());
    HBaseKerberosUtils.setSecuredConfiguration(conf);
    setHdfsSecuredConfiguration(conf);
    UserGroupInformation.setConfiguration(conf);
    conf.setInt(HConstants.MASTER_PORT, 0);
    conf.setInt(HConstants.MASTER_INFO_PORT, 0);
    conf.setInt(HConstants.REGIONSERVER_PORT, 0);
    conf.setInt(HConstants.REGIONSERVER_INFO_PORT, 0);
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, TokenProvider.class.getName());
    // Secure Phoenix setup
    conf.set("phoenix.queryserver.kerberos.http.principal", SPNEGO_PRINCIPAL + "@" + KDC.getRealm());
    conf.set("phoenix.queryserver.http.keytab.file", KEYTAB.getAbsolutePath());
    conf.set("phoenix.queryserver.kerberos.principal", PQS_PRINCIPAL + "@" + KDC.getRealm());
    conf.set("phoenix.queryserver.keytab.file", KEYTAB.getAbsolutePath());
    conf.setBoolean(QueryServices.QUERY_SERVER_DISABLE_KERBEROS_LOGIN, true);
    conf.setInt(QueryServices.QUERY_SERVER_HTTP_PORT_ATTRIB, 0);
    // Required so that PQS can impersonate the end-users to HBase
    conf.set("hadoop.proxyuser.phoenixqs.groups", "*");
    conf.set("hadoop.proxyuser.phoenixqs.hosts", "*");
    // Clear the cached singletons so we can inject our own.
    InstanceResolver.clearSingletons();
    // Make sure the ConnectionInfo doesn't try to pull a default Configuration
    InstanceResolver.getSingleton(ConfigurationFactory.class, new ConfigurationFactory() {

        @Override
        public Configuration getConfiguration() {
            return conf;
        }

        @Override
        public Configuration getConfiguration(Configuration confToClone) {
            Configuration copy = new Configuration(conf);
            copy.addResource(confToClone);
            return copy;
        }
    });
    updateDefaultRealm();
    // Start HDFS
    UTIL.startMiniDFSCluster(1);
    // Use LocalHBaseCluster to keep HBaseTestingUtility from doing something wrong
    // NB. I'm not actually sure what HTU does incorrectly, but this was pulled from some test
    // classes in HBase itself. I couldn't get HTU to work myself (2017/07/06)
    Path rootdir = UTIL.getDataTestDirOnTestFS(SecureQueryServerIT.class.getSimpleName());
    FSUtils.setRootDir(conf, rootdir);
    HBASE_CLUSTER = new LocalHBaseCluster(conf, 1);
    HBASE_CLUSTER.startup();
    // Then fork a thread with PQS in it.
    startQueryServer();
}
Also used: Path (org.apache.hadoop.fs.Path), TokenProvider (org.apache.hadoop.hbase.security.token.TokenProvider), Configuration (org.apache.hadoop.conf.Configuration), ConfigurationFactory (org.apache.phoenix.query.ConfigurationFactory), LocalHBaseCluster (org.apache.hadoop.hbase.LocalHBaseCluster), File (java.io.File), BeforeClass (org.junit.BeforeClass)
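
All three examples implement the two-argument getConfiguration the same way: the injected conf is the base, and the caller's conf is layered on top with addResource. A small self-contained sketch of that overlay behavior (the keys here are illustrative, not taken from the tests):

import org.apache.hadoop.conf.Configuration;
import org.apache.phoenix.query.ConfigurationFactory;

public class OverlayDemo {
    public static void main(String[] args) {
        final Configuration base = new Configuration(false);
        base.set("hbase.security.authentication", "kerberos"); // stands in for the secured test conf

        ConfigurationFactory factory = new ConfigurationFactory() {
            @Override
            public Configuration getConfiguration() {
                return base;
            }

            @Override
            public Configuration getConfiguration(Configuration confToClone) {
                // Copy the base, then add the caller's conf as an extra resource.
                Configuration copy = new Configuration(base);
                copy.addResource(confToClone);
                return copy;
            }
        };

        Configuration overlay = new Configuration(false);
        overlay.set("example.client.key", "client-value"); // illustrative key

        Configuration merged = factory.getConfiguration(overlay);
        System.out.println(merged.get("hbase.security.authentication")); // kerberos (from base)
        System.out.println(merged.get("example.client.key")); // client-value (from overlay)
    }
}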

Aggregations

Configuration (org.apache.hadoop.conf.Configuration): 3 uses
ConfigurationFactory (org.apache.phoenix.query.ConfigurationFactory): 3 uses
BeforeClass (org.junit.BeforeClass): 3 uses
File (java.io.File): 2 uses
Path (org.apache.hadoop.fs.Path): 2 uses
LocalHBaseCluster (org.apache.hadoop.hbase.LocalHBaseCluster): 2 uses
TokenProvider (org.apache.hadoop.hbase.security.token.TokenProvider): 2 uses
IOException (java.io.IOException): 1 use
Properties (java.util.Properties): 1 use
AccessController (org.apache.hadoop.hbase.security.access.AccessController): 1 use
MiniKdc (org.apache.hadoop.minikdc.MiniKdc): 1 use