Search in sources:

Example 1 with KerberosToken

Use of org.apache.accumulo.core.client.security.tokens.KerberosToken in the Apache Accumulo project.

The class DeleteIT, method test.

/**
 * Exercises deletes against a freshly created table, choosing the credential
 * style based on the configured admin token: keytab-based for Kerberos
 * deployments, password-based otherwise.
 *
 * <p>Fix: previously a token of any other type caused the test to silently
 * pass without exercising anything; it now fails loudly in that case.
 */
@Test
public void test() throws Exception {
    Connector c = getConnector();
    String tableName = getUniqueNames(1)[0];
    c.tableOperations().create(tableName);
    AuthenticationToken token = getAdminToken();
    if (token instanceof KerberosToken) {
        // Kerberos: authenticate via the admin keytab; no password is used.
        deleteTest(c, getCluster(), getAdminPrincipal(), null, tableName, getAdminUser().getKeytab().getAbsolutePath());
    } else if (token instanceof PasswordToken) {
        // Password auth: pass the password; no keytab is used.
        PasswordToken passwdToken = (PasswordToken) token;
        deleteTest(c, getCluster(), getAdminPrincipal(), new String(passwdToken.getPassword(), UTF_8), tableName, null);
    } else {
        // Unknown token type -- fail instead of vacuously passing.
        throw new IllegalStateException("Unrecognized admin token type: " + token.getClass().getName());
    }
}
Also used : Connector(org.apache.accumulo.core.client.Connector) PasswordToken(org.apache.accumulo.core.client.security.tokens.PasswordToken) AuthenticationToken(org.apache.accumulo.core.client.security.tokens.AuthenticationToken) KerberosToken(org.apache.accumulo.core.client.security.tokens.KerberosToken) Test(org.junit.Test)

Example 2 with KerberosToken

Use of org.apache.accumulo.core.client.security.tokens.KerberosToken in the Apache Accumulo project.

The class KerberosTokenEmbeddedKDCTest, method test.

/**
 * Verifies that the no-arg {@code KerberosToken} constructor picks up the
 * currently logged-in principal, and that the explicit-principal constructor
 * yields an equal token with an equal hash code.
 */
@Test
public void test() throws Exception {
    // Provision a fresh principal named after this test method.
    final String principalName = testName.getMethodName();
    final File keytab = new File(kdc.getKeytabDir(), principalName + ".keytab");
    // Remove any stale keytab left over from a previous run.
    if (keytab.exists() && !keytab.delete()) {
        log.warn("Unable to delete {}", keytab);
    }
    kdc.createPrincipal(keytab, principalName);
    final String qualifiedPrincipal = kdc.qualifyUser(principalName);
    UserGroupInformation.loginUserFromKeytab(qualifiedPrincipal, keytab.getAbsolutePath());
    // No-arg constructor should reflect the logged-in principal.
    final KerberosToken implicitToken = new KerberosToken();
    assertEquals(qualifiedPrincipal, implicitToken.getPrincipal());
    // Use the long-hand constructor, should be equivalent to short-hand.
    final KerberosToken explicitToken = new KerberosToken(qualifiedPrincipal);
    assertEquals(implicitToken, explicitToken);
    assertEquals(implicitToken.hashCode(), explicitToken.hashCode());
}
Also used : KerberosToken(org.apache.accumulo.core.client.security.tokens.KerberosToken) File(java.io.File) Test(org.junit.Test)

Example 3 with KerberosToken

Use of org.apache.accumulo.core.client.security.tokens.KerberosToken in the Apache Accumulo project.

The class KerberosTokenEmbeddedKDCTest, method testDestroy.

/**
 * Verifies that destroying a {@code KerberosToken} marks it destroyed and
 * clears its principal.
 */
@Test
public void testDestroy() throws Exception {
    String principal = testName.getMethodName();
    File keytab = new File(kdc.getKeytabDir(), principal + ".keytab");
    // Remove any stale keytab left over from a previous run.
    if (keytab.exists() && !keytab.delete()) {
        log.warn("Unable to delete {}", keytab);
    }
    kdc.createPrincipal(keytab, principal);
    principal = kdc.qualifyUser(principal);
    UserGroupInformation.loginUserFromKeytab(principal, keytab.getAbsolutePath());
    KerberosToken token = new KerberosToken();
    assertEquals(principal, token.getPrincipal());
    // After destroy(), the token must report destroyed and expose no principal.
    token.destroy();
    assertTrue(token.isDestroyed());
    assertNull(token.getPrincipal());
}
Also used : KerberosToken(org.apache.accumulo.core.client.security.tokens.KerberosToken) File(java.io.File) Test(org.junit.Test)

Example 4 with KerberosToken

Use of org.apache.accumulo.core.client.security.tokens.KerberosToken in the Apache Accumulo project.

The class Proxy, method createProxyServer.

/**
 * Creates, configures, and starts the Accumulo Thrift proxy server on the
 * given address, wiring in SSL or SASL (Kerberos) transport when requested.
 *
 * @param address host and port the Thrift server binds to
 * @param protocolFactory Thrift protocol factory for the server
 * @param properties proxy configuration (thread pool size, frame size, server type, Kerberos credentials)
 * @param clientConf Accumulo client configuration, consulted for SSL/SASL settings
 * @return the address and handle of the started Thrift server
 * @throws Exception if Kerberos login fails or the server cannot be started
 */
public static ServerAddress createProxyServer(HostAndPort address, TProtocolFactory protocolFactory, Properties properties, ClientConfiguration clientConf) throws Exception {
    final int numThreads = Integer.parseInt(properties.getProperty(THRIFT_THREAD_POOL_SIZE_KEY, THRIFT_THREAD_POOL_SIZE_DEFAULT));
    final long maxFrameSize = ConfigurationTypeHelper.getFixedMemoryAsBytes(properties.getProperty(THRIFT_MAX_FRAME_SIZE_KEY, THRIFT_MAX_FRAME_SIZE_DEFAULT));
    final int simpleTimerThreadpoolSize = Integer.parseInt(Property.GENERAL_SIMPLETIMER_THREADPOOL_SIZE.getDefaultValue());
    // How frequently to try to resize the thread pool: every 5 seconds.
    // Use uppercase 'L' suffix -- lowercase 'l' is easily misread as the digit '1'.
    final long threadpoolResizeInterval = 1000L * 5;
    // No timeout on the server socket
    final long serverSocketTimeout = 0L;
    // Use the new hadoop metrics2 support
    final MetricsFactory metricsFactory = new MetricsFactory(false);
    final String serverName = "Proxy", threadName = "Accumulo Thrift Proxy";
    // create the implementation of the proxy interface
    ProxyServer impl = new ProxyServer(properties);
    // Wrap the implementation -- translate some exceptions
    AccumuloProxy.Iface wrappedImpl = RpcWrapper.service(impl);
    // Create the processor from the implementation
    TProcessor processor = new AccumuloProxy.Processor<>(wrappedImpl);
    // Get the type of thrift server to instantiate
    final String serverTypeStr = properties.getProperty(THRIFT_SERVER_TYPE, THRIFT_SERVER_TYPE_DEFAULT);
    ThriftServerType serverType = DEFAULT_SERVER_TYPE;
    if (!THRIFT_SERVER_TYPE_DEFAULT.equals(serverTypeStr)) {
        serverType = ThriftServerType.get(serverTypeStr);
    }
    SslConnectionParams sslParams = null;
    SaslServerConnectionParams saslParams = null;
    switch (serverType) {
        case SSL:
            sslParams = SslConnectionParams.forClient(ClientContext.convertClientConfig(clientConf));
            break;
        case SASL:
            if (!clientConf.hasSasl()) {
                // ACCUMULO-3651 Changed level to error and added FATAL to message for slf4j capability
                log.error("FATAL: SASL thrift server was requested but it is disabled in client configuration");
                throw new RuntimeException("SASL is not enabled in configuration");
            }
            // Kerberos needs to be enabled to use it
            if (!UserGroupInformation.isSecurityEnabled()) {
                // ACCUMULO-3651 Changed level to error and added FATAL to message for slf4j capability
                log.error("FATAL: Hadoop security is not enabled");
                // Fixed: carry a message so the failure is diagnosable from the stack trace alone
                throw new RuntimeException("Hadoop security is not enabled");
            }
            // Login via principal and keytab
            final String kerberosPrincipal = properties.getProperty(KERBEROS_PRINCIPAL, ""), kerberosKeytab = properties.getProperty(KERBEROS_KEYTAB, "");
            if (StringUtils.isBlank(kerberosPrincipal) || StringUtils.isBlank(kerberosKeytab)) {
                // ACCUMULO-3651 Changed level to error and added FATAL to message for slf4j capability
                log.error("FATAL: Kerberos principal and keytab must be provided");
                // Fixed: carry a message so the failure is diagnosable from the stack trace alone
                throw new RuntimeException("Kerberos principal and keytab must be provided");
            }
            UserGroupInformation.loginUserFromKeytab(kerberosPrincipal, kerberosKeytab);
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            log.info("Logged in as {}", ugi.getUserName());
            // The kerberosPrimary set in the SASL server needs to match the principal we're logged in as.
            final String shortName = ugi.getShortUserName();
            log.info("Setting server primary to {}", shortName);
            clientConf.setProperty(ClientProperty.KERBEROS_SERVER_PRIMARY, shortName);
            KerberosToken token = new KerberosToken();
            saslParams = new SaslServerConnectionParams(clientConf, token, null);
            processor = new UGIAssumingProcessor(processor);
            break;
        default:
            // nothing to do -- no extra configuration necessary
            break;
    }
    // Hook up support for tracing for thrift calls
    TimedProcessor timedProcessor = new TimedProcessor(metricsFactory, processor, serverName, threadName);
    // Create the thrift server with our processor and properties
    return TServerUtils.startTServer(serverType, timedProcessor, protocolFactory, serverName, threadName, numThreads, simpleTimerThreadpoolSize, threadpoolResizeInterval, maxFrameSize, sslParams, saslParams, serverSocketTimeout, address);
}
Also used : SaslServerConnectionParams(org.apache.accumulo.server.rpc.SaslServerConnectionParams) AccumuloProxy(org.apache.accumulo.proxy.thrift.AccumuloProxy) UGIAssumingProcessor(org.apache.accumulo.server.rpc.UGIAssumingProcessor) TimedProcessor(org.apache.accumulo.server.rpc.TimedProcessor) TProcessor(org.apache.thrift.TProcessor) UGIAssumingProcessor(org.apache.accumulo.server.rpc.UGIAssumingProcessor) KerberosToken(org.apache.accumulo.core.client.security.tokens.KerberosToken) ServerAddress(org.apache.accumulo.server.rpc.ServerAddress) SslConnectionParams(org.apache.accumulo.core.rpc.SslConnectionParams) ThriftServerType(org.apache.accumulo.server.rpc.ThriftServerType) TProcessor(org.apache.thrift.TProcessor) MetricsFactory(org.apache.accumulo.server.metrics.MetricsFactory) TimedProcessor(org.apache.accumulo.server.rpc.TimedProcessor) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)

Example 5 with KerberosToken

Use of org.apache.accumulo.core.client.security.tokens.KerberosToken in the Apache Accumulo project.

The class KerberosIT, method testUserPrivilegesForTable.

/**
 * Verifies that table-level permissions granted to an otherwise unprivileged
 * Kerberos user let that user write, compact, alter, and read a table, while
 * the user still holds no system-level permissions.
 */
@Test
public void testUserPrivilegesForTable() throws Exception {
    String user1 = testName.getMethodName();
    final File user1Keytab = new File(kdc.getKeytabDir(), user1 + ".keytab");
    // Remove any stale keytab left over from a previous run
    if (user1Keytab.exists() && !user1Keytab.delete()) {
        log.warn("Unable to delete {}", user1Keytab);
    }
    // Create some new users -- cannot contain realm
    kdc.createPrincipal(user1Keytab, user1);
    final String qualifiedUser1 = kdc.qualifyUser(user1);
    // Log in as user1
    UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(qualifiedUser1, user1Keytab.getAbsolutePath());
    log.info("Logged in as {}", user1);
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
            // Indirectly creates this user when we use it
            Connector conn = mac.getConnector(qualifiedUser1, new KerberosToken());
            log.info("Created connector as {}", qualifiedUser1);
            // The new user should have no system permissions
            for (SystemPermission perm : SystemPermission.values()) {
                assertFalse(conn.securityOperations().hasSystemPermission(qualifiedUser1, perm));
            }
            return null;
        }
    });
    final String table = testName.getMethodName() + "_user_table";
    final String viz = "viz";
    // As root: create the table and grant user1 the table permissions exercised below
    ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(rootUser.getPrincipal(), rootUser.getKeytab().getAbsolutePath());
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
            Connector conn = mac.getConnector(rootUser.getPrincipal(), new KerberosToken());
            conn.tableOperations().create(table);
            // Give our unprivileged user permission on the table we made for them
            conn.securityOperations().grantTablePermission(qualifiedUser1, table, TablePermission.READ);
            conn.securityOperations().grantTablePermission(qualifiedUser1, table, TablePermission.WRITE);
            conn.securityOperations().grantTablePermission(qualifiedUser1, table, TablePermission.ALTER_TABLE);
            conn.securityOperations().grantTablePermission(qualifiedUser1, table, TablePermission.DROP_TABLE);
            conn.securityOperations().changeUserAuthorizations(qualifiedUser1, new Authorizations(viz));
            return null;
        }
    });
    // Switch back to the original user
    ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(qualifiedUser1, user1Keytab.getAbsolutePath());
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
            Connector conn = mac.getConnector(qualifiedUser1, new KerberosToken());
            // Make sure we can actually use the table we made
            // Write data
            // Use uppercase 'L' suffix -- lowercase 'l' is easily misread as the digit '1'
            final long ts = 1000L;
            BatchWriter bw = conn.createBatchWriter(table, new BatchWriterConfig());
            Mutation m = new Mutation("a");
            // Fixed: pass the visibility String directly instead of viz.getBytes(),
            // which implicitly used the platform default charset
            m.put("b", "c", new ColumnVisibility(viz), ts, "d");
            bw.addMutation(m);
            bw.close();
            // Compact
            conn.tableOperations().compact(table, new CompactionConfig().setWait(true).setFlush(true));
            // Alter
            conn.tableOperations().setProperty(table, Property.TABLE_BLOOM_ENABLED.getKey(), "true");
            // Read (and proper authorizations)
            try (Scanner s = conn.createScanner(table, new Authorizations(viz))) {
                Iterator<Entry<Key, Value>> iter = s.iterator();
                assertTrue("No results from iterator", iter.hasNext());
                Entry<Key, Value> entry = iter.next();
                assertEquals(new Key("a", "b", "c", viz, ts), entry.getKey());
                // "d" is pure ASCII, so getBytes() is charset-safe here
                assertEquals(new Value("d".getBytes()), entry.getValue());
                assertFalse("Had more results from iterator", iter.hasNext());
                return null;
            }
        }
    });
}
Also used : Connector(org.apache.accumulo.core.client.Connector) BatchScanner(org.apache.accumulo.core.client.BatchScanner) Scanner(org.apache.accumulo.core.client.Scanner) Authorizations(org.apache.accumulo.core.security.Authorizations) KerberosToken(org.apache.accumulo.core.client.security.tokens.KerberosToken) TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) TableExistsException(org.apache.accumulo.core.client.TableExistsException) AccumuloSecurityException(org.apache.accumulo.core.client.AccumuloSecurityException) UndeclaredThrowableException(java.lang.reflect.UndeclaredThrowableException) AccumuloException(org.apache.accumulo.core.client.AccumuloException) SystemPermission(org.apache.accumulo.core.security.SystemPermission) Entry(java.util.Map.Entry) CompactionConfig(org.apache.accumulo.core.client.admin.CompactionConfig) Iterator(java.util.Iterator) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility) File(java.io.File) Key(org.apache.accumulo.core.data.Key) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) Test(org.junit.Test)

Aggregations

KerberosToken (org.apache.accumulo.core.client.security.tokens.KerberosToken)46 Connector (org.apache.accumulo.core.client.Connector)28 Test (org.junit.Test)28 AuthenticationToken (org.apache.accumulo.core.client.security.tokens.AuthenticationToken)22 PasswordToken (org.apache.accumulo.core.client.security.tokens.PasswordToken)19 UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)19 AccumuloSecurityException (org.apache.accumulo.core.client.AccumuloSecurityException)18 IOException (java.io.IOException)15 AccumuloException (org.apache.accumulo.core.client.AccumuloException)15 TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException)14 ClientConfiguration (org.apache.accumulo.core.client.ClientConfiguration)12 TableExistsException (org.apache.accumulo.core.client.TableExistsException)12 DelegationTokenConfig (org.apache.accumulo.core.client.admin.DelegationTokenConfig)12 File (java.io.File)10 UndeclaredThrowableException (java.lang.reflect.UndeclaredThrowableException)10 ClusterUser (org.apache.accumulo.cluster.ClusterUser)8 DelegationTokenImpl (org.apache.accumulo.core.client.impl.DelegationTokenImpl)8 Instance (org.apache.accumulo.core.client.Instance)6 AuthenticationTokenIdentifier (org.apache.accumulo.core.client.impl.AuthenticationTokenIdentifier)6 ClusterControl (org.apache.accumulo.cluster.ClusterControl)4