Search in sources:

Example 1 with DataEncryptionKey

Use of org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey in project hadoop by apache.

The class TestSaslDataTransfer, method TestPeerFromSocketAndKeyReadTimeout.

/**
   * Verifies that peerFromSocketAndKey honors socket read timeouts.
   */
@Test(timeout = 60000)
public void TestPeerFromSocketAndKeyReadTimeout() throws Exception {
    HdfsConfiguration conf = createSecureConfig("authentication,integrity,privacy");
    AtomicBoolean fallbackToSimpleAuth = new AtomicBoolean(false);
    SaslDataTransferClient saslClient = new SaslDataTransferClient(conf, DataTransferSaslUtil.getSaslPropertiesResolver(conf), TrustedChannelResolver.getInstance(conf), fallbackToSimpleAuth);
    DatanodeID fakeDatanodeId = new DatanodeID("127.0.0.1", "localhost", "beefbeef-beef-beef-beef-beefbeefbeef", 1, 2, 3, 4);
    DataEncryptionKeyFactory dataEncKeyFactory = new DataEncryptionKeyFactory() {

        @Override
        public DataEncryptionKey newDataEncryptionKey() {
            return new DataEncryptionKey(123, "456", new byte[8], new byte[8], 1234567, "fakeAlgorithm");
        }
    };
    ServerSocket serverSocket = null;
    Socket socket = null;
    try {
        serverSocket = new ServerSocket(0, -1);
        socket = new Socket(serverSocket.getInetAddress(), serverSocket.getLocalPort());
        Peer peer = DFSUtilClient.peerFromSocketAndKey(saslClient, socket, dataEncKeyFactory, new Token<>(), fakeDatanodeId, 1);
        peer.close();
        Assert.fail("Expected DFSClient#peerFromSocketAndKey to time out.");
    } catch (SocketTimeoutException e) {
        GenericTestUtils.assertExceptionContains("Read timed out", e);
    } finally {
        IOUtils.cleanup(null, socket, serverSocket);
    }
}
Also used: AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) DatanodeID(org.apache.hadoop.hdfs.protocol.DatanodeID) DataEncryptionKey(org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey) SocketTimeoutException(java.net.SocketTimeoutException) Peer(org.apache.hadoop.hdfs.net.Peer) ServerSocket(java.net.ServerSocket) Token(org.apache.hadoop.security.token.Token) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) Socket(java.net.Socket) Test(org.junit.Test)
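
For readability, the positional arguments passed to the DataEncryptionKey constructor in the anonymous factory above can be given names. A minimal sketch with the same placeholder values as the test, assuming the argument order (keyId, blockPoolId, nonce, encryptionKey, expiryDate, encryptionAlgorithm); the variable names are illustrative only:

int keyId = 123;                      // identifier of the block key
String blockPoolId = "456";           // block pool the key belongs to
byte[] nonce = new byte[8];           // nonce exchanged during the SASL handshake
byte[] keyBytes = new byte[8];        // the shared secret itself
long expiryDate = 1234567L;           // expiry timestamp, in milliseconds
String algorithm = "fakeAlgorithm";   // cipher suite name to negotiate
DataEncryptionKey key = new DataEncryptionKey(keyId, blockPoolId, nonce, keyBytes, expiryDate, algorithm);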

Example 2 with DataEncryptionKey

Use of org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey in project hadoop by apache.

The class ClientNamenodeProtocolServerSideTranslatorPB, method getDataEncryptionKey.

@Override
public GetDataEncryptionKeyResponseProto getDataEncryptionKey(RpcController controller, GetDataEncryptionKeyRequestProto request) throws ServiceException {
    try {
        GetDataEncryptionKeyResponseProto.Builder builder = GetDataEncryptionKeyResponseProto.newBuilder();
        DataEncryptionKey encryptionKey = server.getDataEncryptionKey();
        if (encryptionKey != null) {
            builder.setDataEncryptionKey(PBHelperClient.convert(encryptionKey));
        }
        return builder.build();
    } catch (IOException e) {
        throw new ServiceException(e);
    }
}
Also used: DataEncryptionKey(org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey) ServiceException(com.google.protobuf.ServiceException) GetDataEncryptionKeyResponseProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDataEncryptionKeyResponseProto) IOException(java.io.IOException)
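
The translator above deliberately leaves the key field unset when encryption is disabled, so a client-side consumer should check for its presence before converting back. A minimal sketch, assuming PBHelperClient also offers the proto-to-object conversion in the other direction:

static DataEncryptionKey fromResponse(GetDataEncryptionKeyResponseProto resp) {
    // The field is optional: absent means the namenode issued no key.
    return resp.hasDataEncryptionKey()
        ? PBHelperClient.convert(resp.getDataEncryptionKey())
        : null;
}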

Example 3 with DataEncryptionKey

Use of org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey in project hbase by apache.

The class FanOutOneBlockAsyncDFSOutputSaslHelper, method trySaslNegotiate.

static void trySaslNegotiate(Configuration conf, Channel channel, DatanodeInfo dnInfo, int timeoutMs, DFSClient client, Token<BlockTokenIdentifier> accessToken, Promise<Void> saslPromise) throws IOException {
    SaslDataTransferClient saslClient = client.getSaslDataTransferClient();
    SaslPropertiesResolver saslPropsResolver = SASL_ADAPTOR.getSaslPropsResolver(saslClient);
    TrustedChannelResolver trustedChannelResolver = SASL_ADAPTOR.getTrustedChannelResolver(saslClient);
    AtomicBoolean fallbackToSimpleAuth = SASL_ADAPTOR.getFallbackToSimpleAuth(saslClient);
    InetAddress addr = ((InetSocketAddress) channel.remoteAddress()).getAddress();
    if (trustedChannelResolver.isTrusted() || trustedChannelResolver.isTrusted(addr)) {
        saslPromise.trySuccess(null);
        return;
    }
    DataEncryptionKey encryptionKey = client.newDataEncryptionKey();
    if (encryptionKey != null) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("SASL client doing encrypted handshake for addr = " + addr + ", datanodeId = " + dnInfo);
        }
        doSaslNegotiation(conf, channel, timeoutMs, getUserNameFromEncryptionKey(encryptionKey), encryptionKeyToPassword(encryptionKey.encryptionKey), createSaslPropertiesForEncryption(encryptionKey.encryptionAlgorithm), saslPromise);
    } else if (!UserGroupInformation.isSecurityEnabled()) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("SASL client skipping handshake in unsecured configuration for addr = " + addr + ", datanodeId = " + dnInfo);
        }
        saslPromise.trySuccess(null);
    } else if (dnInfo.getXferPort() < 1024) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("SASL client skipping handshake in secured configuration with " + "privileged port for addr = " + addr + ", datanodeId = " + dnInfo);
        }
        saslPromise.trySuccess(null);
    } else if (fallbackToSimpleAuth != null && fallbackToSimpleAuth.get()) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("SASL client skipping handshake in secured configuration with " + "unsecured cluster for addr = " + addr + ", datanodeId = " + dnInfo);
        }
        saslPromise.trySuccess(null);
    } else if (saslPropsResolver != null) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("SASL client doing general handshake for addr = " + addr + ", datanodeId = " + dnInfo);
        }
        doSaslNegotiation(conf, channel, timeoutMs, buildUsername(accessToken), buildClientPassword(accessToken), saslPropsResolver.getClientProperties(addr), saslPromise);
    } else {
        // edge case.
        if (LOG.isDebugEnabled()) {
            LOG.debug("SASL client skipping handshake in secured configuration with no SASL " + "protection configured for addr = " + addr + ", datanodeId = " + dnInfo);
        }
        saslPromise.trySuccess(null);
    }
}
Also used: AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) DataEncryptionKey(org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey) InetSocketAddress(java.net.InetSocketAddress) TrustedChannelResolver(org.apache.hadoop.hdfs.protocol.datatransfer.TrustedChannelResolver) SaslPropertiesResolver(org.apache.hadoop.security.SaslPropertiesResolver) InetAddress(java.net.InetAddress) SaslDataTransferClient(org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient)
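
The branch ordering in trySaslNegotiate encodes a precedence: a trusted channel short-circuits everything, an encryption key forces an encrypted handshake, and only then do the skip conditions and the token-based handshake apply. A condensed sketch of that decision ladder, using a hypothetical enum and helper purely for illustration:

enum SaslAction { SKIP, ENCRYPTED_HANDSHAKE, GENERAL_HANDSHAKE }

static SaslAction decide(boolean trusted, DataEncryptionKey encryptionKey, boolean securityEnabled,
        int xferPort, AtomicBoolean fallbackToSimpleAuth, SaslPropertiesResolver saslPropsResolver) {
    if (trusted) {
        return SaslAction.SKIP;                 // trusted channel: nothing to negotiate
    }
    if (encryptionKey != null) {
        return SaslAction.ENCRYPTED_HANDSHAKE;  // an encryption key always wins
    }
    if (!securityEnabled) {
        return SaslAction.SKIP;                 // unsecured configuration
    }
    if (xferPort < 1024) {
        return SaslAction.SKIP;                 // privileged port marks a trusted datanode
    }
    if (fallbackToSimpleAuth != null && fallbackToSimpleAuth.get()) {
        return SaslAction.SKIP;                 // secured client talking to an unsecured cluster
    }
    if (saslPropsResolver != null) {
        return SaslAction.GENERAL_HANDSHAKE;    // token-based SASL negotiation
    }
    return SaslAction.SKIP;                     // edge case: no SASL protection configured
}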

Example 4 with DataEncryptionKey

Use of org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey in project hadoop by apache.

The class SaslDataTransferClient, method newSocketSend.

/**
   * Sends client SASL negotiation for a newly allocated socket if required.
   *
   * @param socket connection socket
   * @param underlyingOut connection output stream
   * @param underlyingIn connection input stream
   * @param encryptionKeyFactory for creation of an encryption key
   * @param accessToken connection block access token
   * @param datanodeId ID of destination DataNode
   * @return new pair of streams, wrapped after SASL negotiation
   * @throws IOException for any error
   */
public IOStreamPair newSocketSend(Socket socket, OutputStream underlyingOut, InputStream underlyingIn, DataEncryptionKeyFactory encryptionKeyFactory, Token<BlockTokenIdentifier> accessToken, DatanodeID datanodeId) throws IOException {
    // The encryption key factory only returns a key if encryption is enabled.
    DataEncryptionKey encryptionKey = !trustedChannelResolver.isTrusted() ? encryptionKeyFactory.newDataEncryptionKey() : null;
    IOStreamPair ios = send(socket.getInetAddress(), underlyingOut, underlyingIn, encryptionKey, accessToken, datanodeId);
    return ios != null ? ios : new IOStreamPair(underlyingIn, underlyingOut);
}
Also used: DataEncryptionKey(org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey) IOStreamPair(org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair)
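
A caller normally passes the socket's own streams and then performs all subsequent I/O through the returned pair, which may be SASL-wrapped. A minimal usage sketch, assuming a connected socket plus a key factory, access token, and datanode ID are already in scope; datanodeHost and xferPort are hypothetical:

Socket socket = new Socket(datanodeHost, xferPort);
IOStreamPair ios = saslClient.newSocketSend(socket, socket.getOutputStream(),
    socket.getInputStream(), encryptionKeyFactory, accessToken, datanodeId);
OutputStream out = ios.out;  // wrapped after negotiation, or the raw stream if SASL was skipped
InputStream in = ios.in;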

Example 5 with DataEncryptionKey

Use of org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey in project hadoop by apache.

The class TestEncryptedTransfer, method testLongLivedClientPipelineRecovery.

@Test
public void testLongLivedClientPipelineRecovery() throws IOException, InterruptedException, TimeoutException {
    if (resolverClazz != null) {
        // TestTrustedChannelResolver does not use encryption keys.
        return;
    }
    // use 4 datanodes to make sure that after 1 data node is stopped,
    // client only retries establishing pipeline with the 4th node.
    int numDataNodes = 4;
    // do not consider load factor when selecting a data node
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_CONSIDERLOAD_KEY, false);
    setEncryptionConfigKeys();
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDataNodes).build();
    fs = getFileSystem(conf);
    DFSClient client = DFSClientAdapter.getDFSClient((DistributedFileSystem) fs);
    DFSClient spyClient = Mockito.spy(client);
    DFSClientAdapter.setDFSClient((DistributedFileSystem) fs, spyClient);
    writeTestDataToFile(fs);
    BlockTokenSecretManager btsm = cluster.getNamesystem().getBlockManager().getBlockTokenSecretManager();
    // Reduce key update interval and token life for testing.
    btsm.setKeyUpdateIntervalForTesting(2 * 1000);
    btsm.setTokenLifetime(2 * 1000);
    btsm.clearAllKeysForTesting();
    // Wait until the encryption key becomes invalid.
    LOG.info("Wait until encryption keys become invalid...");
    DataEncryptionKey encryptionKey = spyClient.getEncryptionKey();
    List<DataNode> dataNodes = cluster.getDataNodes();
    for (DataNode dn : dataNodes) {
        GenericTestUtils.waitFor(new Supplier<Boolean>() {

            @Override
            public Boolean get() {
                return !dn.getBlockPoolTokenSecretManager().get(encryptionKey.blockPoolId).hasKey(encryptionKey.keyId);
            }
        }, 100, 30 * 1000);
    }
    LOG.info("The encryption key is invalid on all nodes now.");
    try (FSDataOutputStream out = fs.append(TEST_PATH)) {
        DFSOutputStream dfstream = (DFSOutputStream) out.getWrappedStream();
        // shut down the first datanode in the pipeline.
        DatanodeInfo[] targets = dfstream.getPipeline();
        cluster.stopDataNode(targets[0].getXferAddr());
        // write data to induce pipeline recovery
        out.write(PLAIN_TEXT.getBytes());
        out.hflush();
        assertFalse("The first datanode in the pipeline was not replaced.", Arrays.asList(dfstream.getPipeline()).contains(targets[0]));
    }
    // verify that InvalidEncryptionKeyException is handled properly
    Mockito.verify(spyClient, times(1)).clearDataEncryptionKey();
}
Also used: DataEncryptionKey(org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey) DatanodeInfo(org.apache.hadoop.hdfs.protocol.DatanodeInfo) DataNode(org.apache.hadoop.hdfs.server.datanode.DataNode) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) BlockTokenSecretManager(org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager) Test(org.junit.Test)
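
The final Mockito verification pins down the recovery contract: after the stale key causes a failure, the client clears its cached key exactly once and fetches a fresh one on retry. A simplified sketch of that pattern; setupPipeline is a hypothetical stand-in, not the actual DataStreamer code:

try {
    setupPipeline();  // first attempt uses the cached, now-expired encryption key
} catch (InvalidEncryptionKeyException e) {
    client.clearDataEncryptionKey();  // drop the stale key from the client cache
    setupPipeline();                  // the retry fetches a fresh key from the namenode
}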

Aggregations

DataEncryptionKey (org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey): 5
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 2
Test (org.junit.Test): 2
ServiceException (com.google.protobuf.ServiceException): 1
IOException (java.io.IOException): 1
InetAddress (java.net.InetAddress): 1
InetSocketAddress (java.net.InetSocketAddress): 1
ServerSocket (java.net.ServerSocket): 1
Socket (java.net.Socket): 1
SocketTimeoutException (java.net.SocketTimeoutException): 1
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 1
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 1
Peer (org.apache.hadoop.hdfs.net.Peer): 1
DatanodeID (org.apache.hadoop.hdfs.protocol.DatanodeID): 1
DatanodeInfo (org.apache.hadoop.hdfs.protocol.DatanodeInfo): 1
IOStreamPair (org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair): 1
TrustedChannelResolver (org.apache.hadoop.hdfs.protocol.datatransfer.TrustedChannelResolver): 1
SaslDataTransferClient (org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient): 1
GetDataEncryptionKeyResponseProto (org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDataEncryptionKeyResponseProto): 1
BlockTokenSecretManager (org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager): 1