
Example 96 with BlockID

Use of org.apache.hadoop.hdds.client.BlockID in project ozone by apache, from the class TestContainerStateMachineIdempotency, method testContainerStateMachineIdempotency.

@Test
public void testContainerStateMachineIdempotency() throws Exception {
    ContainerWithPipeline container = storageContainerLocationClient.allocateContainer(HddsProtos.ReplicationType.RATIS, HddsProtos.ReplicationFactor.ONE, OzoneConsts.OZONE);
    long containerID = container.getContainerInfo().getContainerID();
    Pipeline pipeline = container.getPipeline();
    XceiverClientSpi client = xceiverClientManager.acquireClient(pipeline);
    try {
        // create the container
        ContainerProtocolCalls.createContainer(client, containerID, null);
        // call createContainer again; the duplicate request should be handled idempotently
        ContainerProtocolCalls.createContainer(client, containerID, null);
        BlockID blockID = ContainerTestHelper.getTestBlockID(containerID);
        byte[] data = RandomStringUtils.random(RandomUtils.nextInt(0, 1024)).getBytes(UTF_8);
        ContainerProtos.ContainerCommandRequestProto writeChunkRequest = ContainerTestHelper.getWriteChunkRequest(container.getPipeline(), blockID, data.length, null);
        client.sendCommand(writeChunkRequest);
        // Make the write chunk request again without requesting for overWrite
        client.sendCommand(writeChunkRequest);
        // Now, explicitly make a putBlock request for the block.
        ContainerProtos.ContainerCommandRequestProto putBlockRequest = ContainerTestHelper.getPutBlockRequest(pipeline, writeChunkRequest.getWriteChunk());
        client.sendCommand(putBlockRequest).getPutBlock();
        // send the putBlock again; the duplicate should also succeed
        client.sendCommand(putBlockRequest);
        // close container call
        ContainerProtocolCalls.closeContainer(client, containerID, null);
        ContainerProtocolCalls.closeContainer(client, containerID, null);
    } catch (IOException ioe) {
        Assert.fail("Container operation failed" + ioe);
    }
    xceiverClientManager.releaseClient(client, false);
}
Also used : ContainerProtos(org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos) BlockID(org.apache.hadoop.hdds.client.BlockID) IOException(java.io.IOException) XceiverClientSpi(org.apache.hadoop.hdds.scm.XceiverClientSpi) ContainerWithPipeline(org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline) Pipeline(org.apache.hadoop.hdds.scm.pipeline.Pipeline) Test(org.junit.Test)
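
ContainerTestHelper.getTestBlockID is not reproduced in this example. As a rough, hypothetical sketch (not the project's actual helper), a test BlockID for a freshly allocated container can be built with the two-argument BlockID(containerID, localID) constructor that the later examples use directly:

// Illustrative stand-in for ContainerTestHelper.getTestBlockID; the real helper
// may derive local IDs differently. Assumes java.util.concurrent.ThreadLocalRandom.
private static BlockID newTestBlockID(long containerID) {
    long localID = ThreadLocalRandom.current().nextLong(Long.MAX_VALUE);
    return new BlockID(containerID, localID);
}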

Example 97 with BlockID

Use of org.apache.hadoop.hdds.client.BlockID in project ozone by apache, from the class TestOzoneBlockTokenSecretManager, method testGenerateToken.

@Test
public void testGenerateToken() throws Exception {
    BlockID blockID = new BlockID(101, 0);
    Token<OzoneBlockTokenIdentifier> token = secretManager.generateToken(blockID, EnumSet.allOf(AccessModeProto.class), 100);
    OzoneBlockTokenIdentifier identifier = OzoneBlockTokenIdentifier.readFieldsProtobuf(new DataInputStream(new ByteArrayInputStream(token.getIdentifier())));
    // Check basic details.
    Assert.assertEquals(OzoneBlockTokenIdentifier.getTokenService(blockID), identifier.getService());
    Assert.assertEquals(EnumSet.allOf(AccessModeProto.class), identifier.getAccessModes());
    Assert.assertEquals(omCertSerialId, identifier.getCertSerialId());
    validateHash(token.getPassword(), token.getIdentifier());
}
Also used : AccessModeProto(org.apache.hadoop.hdds.protocol.proto.HddsProtos.BlockTokenSecretProto.AccessModeProto) ByteArrayInputStream(java.io.ByteArrayInputStream) BlockID(org.apache.hadoop.hdds.client.BlockID) DataInputStream(java.io.DataInputStream) OzoneBlockTokenIdentifier(org.apache.hadoop.hdds.security.token.OzoneBlockTokenIdentifier) Test(org.junit.Test)
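
Once generated, a block token is normally attached to container commands in its URL-safe encoded form. A minimal sketch, assuming the getWriteChunkRequest(pipeline, blockID, length, encodedToken) test helper signature that appears in the write-access-mode example below:

// Sketch only: encode the token and embed it in a chunk request so a verifier
// (as in the later examples) can check it against the command.
String encodedToken = token.encodeToUrlString();
ContainerCommandRequestProto chunkRequest =
    getWriteChunkRequest(pipeline, blockID, 100, encodedToken);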

Example 98 with BlockID

Use of org.apache.hadoop.hdds.client.BlockID in project ozone by apache, from the class TestOzoneBlockTokenSecretManager, method testCreateIdentifierSuccess.

@Test
public void testCreateIdentifierSuccess() throws Exception {
    BlockID blockID = new BlockID(101, 0);
    OzoneBlockTokenIdentifier btIdentifier = secretManager.createIdentifier("testUser", blockID, EnumSet.allOf(AccessModeProto.class), 100);
    // Check basic details.
    Assert.assertEquals("testUser", btIdentifier.getOwnerId());
    Assert.assertEquals(BlockTokenVerifier.getTokenService(blockID), btIdentifier.getService());
    Assert.assertEquals(EnumSet.allOf(AccessModeProto.class), btIdentifier.getAccessModes());
    Assert.assertEquals(omCertSerialId, btIdentifier.getCertSerialId());
    byte[] hash = secretManager.createPassword(btIdentifier);
    validateHash(hash, btIdentifier.getBytes());
}
Also used : AccessModeProto(org.apache.hadoop.hdds.protocol.proto.HddsProtos.BlockTokenSecretProto.AccessModeProto) BlockID(org.apache.hadoop.hdds.client.BlockID) OzoneBlockTokenIdentifier(org.apache.hadoop.hdds.security.token.OzoneBlockTokenIdentifier) Test(org.junit.Test)
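
validateHash is a private helper of the test class and is not reproduced on this page. A plausible sketch of the check, assuming it deserializes the identifier and recomputes the password with the same secret manager; the actual helper may instead verify the token signature against the OM certificate:

// Assumed shape of the helper; name and logic are illustrative only.
private void validateHash(byte[] hash, byte[] identifierBytes) throws IOException {
    OzoneBlockTokenIdentifier identifier = OzoneBlockTokenIdentifier.readFieldsProtobuf(
        new DataInputStream(new ByteArrayInputStream(identifierBytes)));
    // Recompute the password for the deserialized identifier and compare with the stored hash.
    byte[] expected = secretManager.createPassword(identifier);
    Assert.assertArrayEquals(expected, hash);
}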

Example 99 with BlockID

Use of org.apache.hadoop.hdds.client.BlockID in project ozone by apache, from the class TestOzoneBlockTokenSecretManager, method testBlockTokenWriteAccessMode.

@Test
public void testBlockTokenWriteAccessMode() throws Exception {
    final String testUser2 = "testUser2";
    BlockID blockID = new BlockID(102, 0);
    Token<OzoneBlockTokenIdentifier> token = secretManager.generateToken(testUser2, blockID, EnumSet.of(AccessModeProto.WRITE), 100);
    String encodedToken = token.encodeToUrlString();
    ContainerCommandRequestProto writeChunkRequest = getWriteChunkRequest(pipeline, blockID, 100, encodedToken);
    ContainerCommandRequestProto readChunkRequest = getReadChunkRequest(pipeline, writeChunkRequest.getWriteChunk());
    tokenVerifier.verify(testUser2, token, writeChunkRequest);
    BlockTokenException e = assertThrows(BlockTokenException.class, () -> tokenVerifier.verify(testUser2, token, readChunkRequest));
    String msg = e.getMessage();
    assertTrue(msg, msg.contains("doesn't have READ permission"));
}
Also used : BlockID(org.apache.hadoop.hdds.client.BlockID) ContainerCommandRequestProto(org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) BlockTokenException(org.apache.hadoop.hdds.security.token.BlockTokenException) OzoneBlockTokenIdentifier(org.apache.hadoop.hdds.security.token.OzoneBlockTokenIdentifier) Test(org.junit.Test)
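
The mirror-image case follows the same pattern. As a sketch extending the body above (not part of the original test), a READ-only token should likewise be rejected when presented with a write-chunk request:

// Hypothetical continuation using the same helpers as the test above.
Token<OzoneBlockTokenIdentifier> readToken = secretManager.generateToken(
    testUser2, blockID, EnumSet.of(AccessModeProto.READ), 100);
ContainerCommandRequestProto writeWithReadToken = getWriteChunkRequest(
    pipeline, blockID, 100, readToken.encodeToUrlString());
assertThrows(BlockTokenException.class,
    () -> tokenVerifier.verify(testUser2, readToken, writeWithReadToken));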

Example 100 with BlockID

Use of org.apache.hadoop.hdds.client.BlockID in project ozone by apache, from the class TestOzoneBlockTokenSecretManager, method testExpiredCertificate.

@Test
public void testExpiredCertificate() throws Exception {
    String user = "testUser2";
    BlockID blockID = new BlockID(102, 0);
    Token<OzoneBlockTokenIdentifier> token = secretManager.generateToken(user, blockID, EnumSet.allOf(AccessModeProto.class), 100);
    ContainerCommandRequestProto writeChunkRequest = getWriteChunkRequest(pipeline, blockID, 100, token.encodeToUrlString());
    tokenVerifier.verify("testUser", token, writeChunkRequest);
    // Mock client with an expired cert
    X509Certificate expiredCert = generateExpiredCert("CN=OzoneMaster", keyPair, ALGORITHM);
    when(client.getCertificate(anyString())).thenReturn(expiredCert);
    BlockTokenException e = assertThrows(BlockTokenException.class, () -> tokenVerifier.verify(user, token, writeChunkRequest));
    String msg = e.getMessage();
    assertTrue(msg, msg.contains("Token can't be verified due to" + " expired certificate"));
}
Also used : AccessModeProto(org.apache.hadoop.hdds.protocol.proto.HddsProtos.BlockTokenSecretProto.AccessModeProto) BlockID(org.apache.hadoop.hdds.client.BlockID) ContainerCommandRequestProto(org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) BlockTokenException(org.apache.hadoop.hdds.security.token.BlockTokenException) OzoneBlockTokenIdentifier(org.apache.hadoop.hdds.security.token.OzoneBlockTokenIdentifier) X509Certificate(java.security.cert.X509Certificate) Test(org.junit.Test)
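
The expired-certificate rejection ultimately rests on an X.509 validity check. A minimal, self-contained sketch of that kind of check using only the standard java.security API (the actual BlockTokenVerifier code is not shown in these examples):

import java.security.cert.CertificateExpiredException;
import java.security.cert.CertificateNotYetValidException;
import java.security.cert.X509Certificate;

final class CertValidityCheck {
    // Returns true when the certificate is currently inside its validity window.
    static boolean isCurrentlyValid(X509Certificate cert) {
        try {
            cert.checkValidity(); // throws if the certificate is expired or not yet valid
            return true;
        } catch (CertificateExpiredException | CertificateNotYetValidException e) {
            return false;
        }
    }
}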

Aggregations

BlockID (org.apache.hadoop.hdds.client.BlockID): 112
Test (org.junit.Test): 64
ArrayList (java.util.ArrayList): 41
Pipeline (org.apache.hadoop.hdds.scm.pipeline.Pipeline): 37
ChunkInfo (org.apache.hadoop.ozone.container.common.helpers.ChunkInfo): 31
ContainerProtos (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos): 25
OmKeyLocationInfo (org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo): 25
BlockData (org.apache.hadoop.ozone.container.common.helpers.BlockData): 21
IOException (java.io.IOException): 20
OmKeyInfo (org.apache.hadoop.ozone.om.helpers.OmKeyInfo): 20
KeyValueContainer (org.apache.hadoop.ozone.container.keyvalue.KeyValueContainer): 19
XceiverClientSpi (org.apache.hadoop.hdds.scm.XceiverClientSpi): 18
ContainerWithPipeline (org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline): 18
StorageContainerException (org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException): 18
DatanodeDetails (org.apache.hadoop.hdds.protocol.DatanodeDetails): 15
ChunkBuffer (org.apache.hadoop.ozone.common.ChunkBuffer): 14
OmKeyLocationInfoGroup (org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfoGroup): 14
ContainerBlockID (org.apache.hadoop.hdds.client.ContainerBlockID): 12
ContainerCommandRequestProto (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto): 11
MockPipeline (org.apache.hadoop.hdds.scm.pipeline.MockPipeline): 11