
Example 6 with MockId

Use of com.github.ambry.store.MockId in project ambry by linkedin.

From class MessageFormatSendTest, method sendWriteTestWithBadId:

@Test
public void sendWriteTestWithBadId() throws IOException, MessageFormatException {
    // add header,system metadata, user metadata and data to the buffers
    ByteBuffer buf1 = ByteBuffer.allocate(1010);
    // fill header
    // version
    buf1.putShort((short) 1);
    // total size
    buf1.putLong(950);
    // put relative offsets
    // blob property relative offset
    buf1.putInt(60);
    // delete relative offset
    buf1.putInt(-1);
    // user metadata relative offset
    buf1.putInt(81);
    // data relative offset
    buf1.putInt(191);
    Crc32 crc = new Crc32();
    crc.update(buf1.array(), 0, buf1.position());
    // crc
    buf1.putLong(crc.getValue());
    // blob id
    String id = "012345678910123456789012";
    buf1.putShort((short) id.length());
    buf1.put(id.getBytes());
    // blob property version
    buf1.putShort((short) 1);
    String attribute1 = "ttl";
    String attribute2 = "del";
    // ttl name
    buf1.put(attribute1.getBytes());
    // ttl value
    buf1.putLong(12345);
    // delete name
    buf1.put(attribute2.getBytes());
    byte b = 1;
    // delete flag
    buf1.put(b);
    // crc
    buf1.putInt(456);
    // user metadata version
    buf1.putShort((short) 1);
    buf1.putInt(100);
    byte[] usermetadata = new byte[100];
    new Random().nextBytes(usermetadata);
    buf1.put(usermetadata);
    buf1.putInt(123);
    // blob version
    buf1.putShort((short) 0);
    // blob size
    buf1.putLong(805);
    // blob
    byte[] data = new byte[805];
    new Random().nextBytes(data);
    buf1.put(data);
    // blob crc
    buf1.putInt(123);
    buf1.flip();
    ArrayList<ByteBuffer> listbuf = new ArrayList<ByteBuffer>();
    listbuf.add(buf1);
    ArrayList<StoreKey> storeKeys = new ArrayList<StoreKey>();
    storeKeys.add(new MockId("012345678910123223233456789012"));
    MessageReadSet readSet = new MockMessageReadSet(listbuf, storeKeys);
    MetricRegistry registry = new MetricRegistry();
    MessageFormatMetrics metrics = new MessageFormatMetrics(registry);
    // get all
    MessageFormatSend send = new MessageFormatSend(readSet, MessageFormatFlags.All, metrics, new MockIdFactory());
    Assert.assertEquals(send.sizeInBytes(), 1010);
    ByteBuffer bufresult = ByteBuffer.allocate(1010);
    WritableByteChannel channel1 = Channels.newChannel(new ByteBufferOutputStream(bufresult));
    while (!send.isSendComplete()) {
        send.writeTo(channel1);
    }
    Assert.assertArrayEquals(buf1.array(), bufresult.array());
    try {
        // get blob
        MessageFormatSend send1 = new MessageFormatSend(readSet, MessageFormatFlags.Blob, metrics, new MockIdFactory());
        Assert.fail("Exception is expected");
    } catch (MessageFormatException e) {
        Assert.assertTrue(e.getErrorCode() == MessageFormatErrorCodes.Store_Key_Id_MisMatch);
    }
}
Also used : MessageReadSet(com.github.ambry.store.MessageReadSet) MetricRegistry(com.codahale.metrics.MetricRegistry) ArrayList(java.util.ArrayList) WritableByteChannel(java.nio.channels.WritableByteChannel) MockId(com.github.ambry.store.MockId) ByteBuffer(java.nio.ByteBuffer) StoreKey(com.github.ambry.store.StoreKey) Random(java.util.Random) MockIdFactory(com.github.ambry.store.MockIdFactory) ByteBufferOutputStream(com.github.ambry.utils.ByteBufferOutputStream) Crc32(com.github.ambry.utils.Crc32) Test(org.junit.Test)
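
A note on the CRC line above: crc.update(buf1.array(), 0, buf1.position()) checksums exactly the header bytes written before the CRC field itself. A minimal standalone sketch of the same pattern, using the JDK's java.util.zip.CRC32 in place of ambry's Crc32 (assuming the two compute the same checksum, which is not verified here):

import java.nio.ByteBuffer;
import java.util.zip.CRC32;

public class HeaderCrcSketch {
    public static void main(String[] args) {
        ByteBuffer header = ByteBuffer.allocate(64);
        header.putShort((short) 1); // version
        header.putLong(950);        // total size
        header.putInt(60);          // blob property relative offset
        header.putInt(-1);          // delete relative offset
        header.putInt(81);          // user metadata relative offset
        header.putInt(191);         // data relative offset
        // The CRC covers exactly the bytes written so far, i.e. the range [0, position).
        CRC32 crc = new CRC32();
        crc.update(header.array(), 0, header.position());
        header.putLong(crc.getValue());
        System.out.println("bytes covered by the CRC: " + (header.position() - Long.BYTES));
    }
}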

Example 7 with MockId

Use of com.github.ambry.store.MockId in project ambry by linkedin.

From class ValidatingKeyConvertingTransformer, method testDeprecatedMsg:

private void testDeprecatedMsg(short blobVersion, BlobType blobType, short headerVersionToUse) throws Exception {
    MessageFormatRecord.headerVersionToUse = headerVersionToUse;
    // MessageSievingInputStream contains put records for 2 valid blobs and 1 deprecated blob
    // id1(put record for valid blob), id2(deprecated) and id3(put record for valid blob)
    // create message stream for blob 1
    StoreKey key1 = new MockId("id1");
    short accountId1 = Utils.getRandomShort(RANDOM);
    short containerId1 = Utils.getRandomShort(RANDOM);
    BlobProperties prop1 = new BlobProperties(10, "servid1", accountId1, containerId1, false);
    byte[] encryptionKey1 = new byte[100];
    RANDOM.nextBytes(encryptionKey1);
    byte[] usermetadata1 = new byte[1000];
    RANDOM.nextBytes(usermetadata1);
    int blobContentSize = 2000;
    byte[] data1 = new byte[blobContentSize];
    RANDOM.nextBytes(data1);
    long blobSize = -1;
    if (blobType == BlobType.DataBlob) {
        blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
    } else {
        ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
        data1 = byteBufferBlob.array();
        blobContentSize = data1.length;
        blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
    }
    ByteBufferInputStream stream1 = new ByteBufferInputStream(ByteBuffer.wrap(data1));
    MessageFormatInputStream messageFormatStream1 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key1, ByteBuffer.wrap(encryptionKey1), prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key1, prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType);
    MessageInfo msgInfo1 = new MessageInfo(key1, messageFormatStream1.getSize(), accountId1, containerId1, prop1.getCreationTimeInMs());
    // create message stream for blob 2
    StoreKey key2 = new MockId("id2");
    short accountId2 = Utils.getRandomShort(RANDOM);
    short containerId2 = Utils.getRandomShort(RANDOM);
    BlobProperties prop2 = new BlobProperties(10, "servid2", accountId2, containerId2, false);
    byte[] encryptionKey2 = new byte[100];
    RANDOM.nextBytes(encryptionKey2);
    byte[] usermetadata2 = new byte[1000];
    RANDOM.nextBytes(usermetadata2);
    blobContentSize = 2000;
    byte[] data2 = new byte[blobContentSize];
    RANDOM.nextBytes(data2);
    if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
        ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
        data2 = byteBufferBlob.array();
        blobContentSize = data2.length;
    }
    ByteBufferInputStream stream2 = new ByteBufferInputStream(ByteBuffer.wrap(data2));
    MessageFormatInputStream messageFormatStream2 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key2, ByteBuffer.wrap(encryptionKey2), prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key2, prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType);
    MessageInfo msgInfo2 = new MessageInfo(key2, messageFormatStream2.getSize(), accountId2, containerId2, prop2.getCreationTimeInMs());
    // Add the key for the second message to the discardable ones.
    randomKeyConverter.addInvalids(Collections.singletonList(key2));
    // create message stream for blob 3
    StoreKey key3 = new MockId("id3");
    short accountId3 = Utils.getRandomShort(RANDOM);
    short containerId3 = Utils.getRandomShort(RANDOM);
    BlobProperties prop3 = new BlobProperties(10, "servid3", accountId3, containerId3, false);
    byte[] encryptionKey3 = new byte[100];
    RANDOM.nextBytes(encryptionKey3);
    byte[] usermetadata3 = new byte[1000];
    RANDOM.nextBytes(usermetadata3);
    blobContentSize = 2000;
    byte[] data3 = new byte[blobContentSize];
    RANDOM.nextBytes(data3);
    if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
        ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
        data3 = byteBufferBlob.array();
        blobContentSize = data3.length;
    }
    ByteBufferInputStream stream3 = new ByteBufferInputStream(ByteBuffer.wrap(data3));
    MessageFormatInputStream messageFormatStream3 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key3, ByteBuffer.wrap(encryptionKey3), prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key3, prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType);
    MessageInfo msgInfo3 = new MessageInfo(key3, messageFormatStream3.getSize(), accountId3, containerId3, prop3.getCreationTimeInMs());
    // create input stream for all blob messages together
    byte[] totalMessageStreamContent = new byte[(int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize()];
    messageFormatStream1.read(totalMessageStreamContent, 0, (int) messageFormatStream1.getSize());
    messageFormatStream2.read(totalMessageStreamContent, (int) messageFormatStream1.getSize(), (int) messageFormatStream2.getSize());
    messageFormatStream3.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(), (int) messageFormatStream3.getSize());
    InputStream inputStream = new ByteBufferInputStream(ByteBuffer.wrap(totalMessageStreamContent));
    List<MessageInfo> msgInfoList = new ArrayList<>();
    msgInfoList.add(msgInfo1);
    msgInfoList.add(msgInfo2);
    msgInfoList.add(msgInfo3);
    MessageSievingInputStream sievedStream = new MessageSievingInputStream(inputStream, msgInfoList, transformers, new MetricRegistry());
    Map<StoreKey, StoreKey> convertedMap = randomKeyConverter.convert(Arrays.asList(key1, key2, key3));
    int headerSize = MessageFormatRecord.getHeaderSizeForVersion(headerVersionToUse);
    int blobPropertiesRecordSize = BlobProperties_Format_V1.getBlobPropertiesRecordSize(prop1);
    int userMetadataSize = UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(usermetadata1));
    int totalHeadSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * headerSize;
    int totalBlobPropertiesSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * blobPropertiesRecordSize;
    int totalUserMetadataSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * userMetadataSize;
    int totalBlobSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * (int) blobSize;
    int totalKeySize = options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key1).sizeInBytes() + convertedMap.get(key3).sizeInBytes() : key1.sizeInBytes() + key2.sizeInBytes() + key3.sizeInBytes();
    int totalEncryptionRecordSize = blobVersion > Blob_Version_V1 ? BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey1)) + (options.contains(TransformerOptions.KeyConvert) ? 0 : BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey2))) + BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey3)) : 0;
    if (!options.isEmpty()) {
        if (options.contains(TransformerOptions.KeyConvert)) {
            Assert.assertTrue(sievedStream.hasDeprecatedMessages());
        }
        Assert.assertEquals((int) sievedStream.getValidMessageInfoList().stream().mapToLong(MessageInfo::getSize).sum(), sievedStream.getSize());
        Assert.assertEquals(options.isEmpty() ? totalMessageStreamContent.length : totalHeadSize + totalBlobPropertiesSize + totalUserMetadataSize + totalBlobSize + totalKeySize + totalEncryptionRecordSize, sievedStream.getSize());
        verifySievedTransformedMessage(sievedStream, options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key1) : key1, "servid1", accountId1, containerId1, blobVersion > Blob_Version_V1 ? encryptionKey1 : null, usermetadata1, data1, blobVersion, blobType);
        if (!options.contains(TransformerOptions.KeyConvert)) {
            verifySievedTransformedMessage(sievedStream, key2, "servid2", accountId2, containerId2, blobVersion > Blob_Version_V1 ? encryptionKey2 : null, usermetadata2, data2, blobVersion, blobType);
        }
        verifySievedTransformedMessage(sievedStream, options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key3) : key3, "servid3", accountId3, containerId3, blobVersion > Blob_Version_V1 ? encryptionKey3 : null, usermetadata3, data3, blobVersion, blobType);
    } else {
        Assert.assertEquals(totalMessageStreamContent.length, sievedStream.getSize());
        byte[] sievedBytes = Utils.readBytesFromStream(sievedStream, sievedStream.getSize());
        Assert.assertArrayEquals(totalMessageStreamContent, sievedBytes);
    }
    Assert.assertEquals(-1, sievedStream.read());
}
Also used : DataInputStream(java.io.DataInputStream) ByteBufInputStream(io.netty.buffer.ByteBufInputStream) ByteBufferInputStream(com.github.ambry.utils.ByteBufferInputStream) InputStream(java.io.InputStream) MetricRegistry(com.codahale.metrics.MetricRegistry) ByteBufferInputStream(com.github.ambry.utils.ByteBufferInputStream) ArrayList(java.util.ArrayList) MockId(com.github.ambry.store.MockId) StoreKey(com.github.ambry.store.StoreKey) ByteBuffer(java.nio.ByteBuffer) MessageInfo(com.github.ambry.store.MessageInfo)
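
One detail worth calling out in the stream-concatenation step above: each messageFormatStreamN.read(totalMessageStreamContent, offset, size) call is assumed to fill the full size in one go. That holds for these in-memory streams, but InputStream.read makes no such guarantee in general. A self-contained sketch of a read-fully loop that makes the same copy robust (the helper name is illustrative, not an ambry API):

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

final class StreamCopyUtil {
    // Reads exactly len bytes from in into dest starting at off, looping until filled.
    static void readFully(InputStream in, byte[] dest, int off, int len) throws IOException {
        int copied = 0;
        while (copied < len) {
            int n = in.read(dest, off + copied, len - copied);
            if (n < 0) {
                throw new EOFException("stream ended after " + copied + " of " + len + " bytes");
            }
            copied += n;
        }
    }
}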

Example 8 with MockId

Use of com.github.ambry.store.MockId in project ambry by linkedin.

From class ValidatingKeyConvertingTransformer, method testInValidDeletedAndExpiredBlobs:

private void testInValidDeletedAndExpiredBlobs(short blobVersion, BlobType blobType, short headerVersionToUse) throws Exception {
    MessageFormatRecord.headerVersionToUse = headerVersionToUse;
    // MessageSievingInputStream contains put records for 2 valid blobs, 1 corrupt blob, 1 deleted blob and 1 expired.
    // id1(put record for valid blob), id2(corrupt), id3(put record of deleted blob), id4(put record of expired blob)
    // and id3(put record for valid blob)
    // create message stream for blob 1
    StoreKey key1 = new MockId("id1");
    short accountId1 = Utils.getRandomShort(RANDOM);
    short containerId1 = Utils.getRandomShort(RANDOM);
    BlobProperties prop1 = new BlobProperties(10, "servid1", accountId1, containerId1, false);
    byte[] encryptionKey1 = new byte[100];
    RANDOM.nextBytes(encryptionKey1);
    byte[] usermetadata1 = new byte[1000];
    RANDOM.nextBytes(usermetadata1);
    int blobContentSize = 2000;
    byte[] data1 = new byte[blobContentSize];
    RANDOM.nextBytes(data1);
    long blobSize = -1;
    if (blobType == BlobType.DataBlob) {
        blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
    } else {
        ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
        data1 = byteBufferBlob.array();
        blobContentSize = data1.length;
        blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
    }
    ByteBufferInputStream stream1 = new ByteBufferInputStream(ByteBuffer.wrap(data1));
    MessageFormatInputStream messageFormatStream1 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key1, ByteBuffer.wrap(encryptionKey1), prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key1, prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType);
    MessageInfo msgInfo1 = new MessageInfo(key1, messageFormatStream1.getSize(), accountId1, containerId1, prop1.getCreationTimeInMs());
    // create message stream for blob 2
    StoreKey key2 = new MockId("id2");
    short accountId2 = Utils.getRandomShort(RANDOM);
    short containerId2 = Utils.getRandomShort(RANDOM);
    BlobProperties prop2 = new BlobProperties(10, "servid2", accountId2, containerId2, false);
    byte[] encryptionKey2 = new byte[100];
    RANDOM.nextBytes(encryptionKey2);
    byte[] usermetadata2 = new byte[1000];
    RANDOM.nextBytes(usermetadata2);
    blobContentSize = 2000;
    byte[] data2 = new byte[blobContentSize];
    RANDOM.nextBytes(data2);
    if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
        ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
        data2 = byteBufferBlob.array();
        blobContentSize = data2.length;
    }
    ByteBufferInputStream stream2 = new ByteBufferInputStream(ByteBuffer.wrap(data2));
    MessageFormatInputStream messageFormatStream2 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key2, ByteBuffer.wrap(encryptionKey2), prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key2, prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType);
    MessageInfo msgInfo2 = new MessageInfo(key2, messageFormatStream2.getSize(), accountId2, containerId2, prop2.getCreationTimeInMs());
    // corrupt the message stream but make sure this header version is still valid
    byte[] corruptMessageStream2 = new byte[(int) messageFormatStream2.getSize()];
    RANDOM.nextBytes(corruptMessageStream2);
    corruptMessageStream2[0] = (byte) 0;
    corruptMessageStream2[1] = (byte) headerVersionToUse;
    InputStream corruptStream2 = new ByteBufferInputStream(ByteBuffer.wrap(corruptMessageStream2));
    // create message stream for blob 3 that is deleted.
    StoreKey key3 = new MockId("id3");
    short accountId3 = Utils.getRandomShort(RANDOM);
    short containerId3 = Utils.getRandomShort(RANDOM);
    BlobProperties prop3 = new BlobProperties(10, "servid3", accountId3, containerId3, false);
    byte[] usermetadata3 = new byte[1000];
    RANDOM.nextBytes(usermetadata3);
    blobContentSize = 2000;
    byte[] data3 = new byte[blobContentSize];
    RANDOM.nextBytes(data3);
    if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
        ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
        data3 = byteBufferBlob.array();
        blobContentSize = data3.length;
    }
    ByteBufferInputStream stream3 = new ByteBufferInputStream(ByteBuffer.wrap(data3));
    MessageFormatInputStream messageFormatStream3 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key3, null, prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key3, prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType);
    // MessageInfo marks this blob as deleted.
    MessageInfo msgInfo3 = new MessageInfo(key3, messageFormatStream3.getSize(), true, false, Utils.Infinite_Time, accountId3, containerId3, prop3.getCreationTimeInMs());
    // create message stream for blob 4 that is expired.
    StoreKey key4 = new MockId("id4");
    short accountId4 = Utils.getRandomShort(RANDOM);
    short containerId4 = Utils.getRandomShort(RANDOM);
    BlobProperties prop4 = new BlobProperties(10, "servid4", accountId4, containerId4, false);
    byte[] usermetadata4 = new byte[1000];
    RANDOM.nextBytes(usermetadata4);
    blobContentSize = 2000;
    byte[] data4 = new byte[blobContentSize];
    RANDOM.nextBytes(data4);
    if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
        ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
        data4 = byteBufferBlob.array();
        blobContentSize = data4.length;
    }
    ByteBufferInputStream stream4 = new ByteBufferInputStream(ByteBuffer.wrap(data4));
    MessageFormatInputStream messageFormatStream4 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key4, null, prop4, ByteBuffer.wrap(usermetadata4), stream4, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key4, prop4, ByteBuffer.wrap(usermetadata4), stream4, blobContentSize, blobType);
    // MessageInfo marks this as already expired (the third field is an absolute expiresAt time).
    MessageInfo msgInfo4 = new MessageInfo(key4, messageFormatStream4.getSize(), 1, accountId4, containerId4, prop4.getCreationTimeInMs());
    // create message stream for blob 5
    StoreKey key5 = new MockId("id5");
    short accountId5 = Utils.getRandomShort(RANDOM);
    short containerId5 = Utils.getRandomShort(RANDOM);
    BlobProperties prop5 = new BlobProperties(10, "servid5", accountId5, containerId5, false);
    byte[] encryptionKey5 = new byte[100];
    RANDOM.nextBytes(encryptionKey5);
    byte[] usermetadata5 = new byte[1000];
    RANDOM.nextBytes(usermetadata5);
    blobContentSize = 2000;
    byte[] data5 = new byte[blobContentSize];
    RANDOM.nextBytes(data5);
    if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
        ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
        data5 = byteBufferBlob.array();
        blobContentSize = data5.length;
    }
    ByteBufferInputStream stream5 = new ByteBufferInputStream(ByteBuffer.wrap(data5));
    MessageFormatInputStream messageFormatStream5 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key5, ByteBuffer.wrap(encryptionKey5), prop5, ByteBuffer.wrap(usermetadata5), stream5, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key5, prop5, ByteBuffer.wrap(usermetadata5), stream5, blobContentSize, blobType);
    MessageInfo msgInfo5 = new MessageInfo(key5, messageFormatStream5.getSize(), accountId5, containerId5, prop5.getCreationTimeInMs());
    // create message stream for blob 6
    StoreKey key6 = new MockId("id6");
    short accountId6 = Utils.getRandomShort(RANDOM);
    short containerId6 = Utils.getRandomShort(RANDOM);
    BlobProperties prop6 = new BlobProperties(10, "servid6", accountId6, containerId6, false);
    byte[] encryptionKey6 = new byte[100];
    RANDOM.nextBytes(encryptionKey6);
    byte[] usermetadata6 = new byte[1000];
    RANDOM.nextBytes(usermetadata6);
    blobContentSize = 2000;
    byte[] data6 = new byte[blobContentSize];
    RANDOM.nextBytes(data6);
    if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
        ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
        data6 = byteBufferBlob.array();
        blobContentSize = data6.length;
    }
    ByteBufferInputStream stream6 = new ByteBufferInputStream(ByteBuffer.wrap(data6));
    MessageFormatInputStream messageFormatStream6 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key6, ByteBuffer.wrap(encryptionKey6), prop6, ByteBuffer.wrap(usermetadata6), stream6, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key6, prop6, ByteBuffer.wrap(usermetadata6), stream6, blobContentSize, blobType);
    MessageInfo msgInfo6 = new MessageInfo(key6, messageFormatStream6.getSize(), accountId6, containerId6, prop6.getCreationTimeInMs());
    // corrupt the message stream but make sure this header version is not valid
    byte[] corruptMessageStream6 = new byte[(int) messageFormatStream6.getSize()];
    RANDOM.nextBytes(corruptMessageStream6);
    corruptMessageStream6[1] = (byte) 100;
    InputStream corruptStream6 = new ByteBufferInputStream(ByteBuffer.wrap(corruptMessageStream6));
    // create input stream for all blob messages together
    byte[] totalMessageStreamContent = new byte[(int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize() + (int) messageFormatStream4.getSize() + (int) messageFormatStream5.getSize() + (int) messageFormatStream6.getSize()];
    messageFormatStream1.read(totalMessageStreamContent, 0, (int) messageFormatStream1.getSize());
    corruptStream2.read(totalMessageStreamContent, (int) messageFormatStream1.getSize(), (int) messageFormatStream2.getSize());
    messageFormatStream3.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(), (int) messageFormatStream3.getSize());
    messageFormatStream4.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize(), (int) messageFormatStream4.getSize());
    messageFormatStream5.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize() + (int) messageFormatStream4.getSize(), (int) messageFormatStream5.getSize());
    corruptStream6.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize() + (int) messageFormatStream4.getSize() + (int) messageFormatStream5.getSize(), (int) messageFormatStream6.getSize());
    InputStream inputStream = new ByteBufferInputStream(ByteBuffer.wrap(totalMessageStreamContent));
    List<MessageInfo> msgInfoList = new ArrayList<>();
    msgInfoList.add(msgInfo1);
    msgInfoList.add(msgInfo2);
    msgInfoList.add(msgInfo3);
    msgInfoList.add(msgInfo4);
    msgInfoList.add(msgInfo5);
    msgInfoList.add(msgInfo6);
    MessageSievingInputStream sievedStream = new MessageSievingInputStream(inputStream, msgInfoList, transformers, new MetricRegistry());
    Map<StoreKey, StoreKey> convertedMap = randomKeyConverter.convert(Arrays.asList(key1, key2, key3, key4, key5, key6));
    int headerSize = getHeaderSizeForVersion(headerVersionToUse);
    int blobPropertiesRecordSize = BlobProperties_Format_V1.getBlobPropertiesRecordSize(prop1);
    int userMetadataSize = UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(usermetadata1));
    int totalHeadSize = 2 * headerSize;
    int totalBlobPropertiesSize = 2 * blobPropertiesRecordSize;
    int totalUserMetadataSize = 2 * userMetadataSize;
    int totalBlobSize = 2 * (int) blobSize;
    int totalKeySize = options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key1).sizeInBytes() + convertedMap.get(key5).sizeInBytes() : key1.sizeInBytes() + key5.sizeInBytes();
    int totalEncryptionRecordSize = blobVersion > Blob_Version_V1 ? BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey1)) + BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey5)) : 0;
    if (!options.isEmpty()) {
        Assert.assertTrue(sievedStream.hasInvalidMessages());
        Assert.assertEquals((int) sievedStream.getValidMessageInfoList().stream().mapToLong(MessageInfo::getSize).sum(), sievedStream.getSize());
        Assert.assertEquals(totalHeadSize + totalBlobPropertiesSize + totalUserMetadataSize + totalBlobSize + totalKeySize + totalEncryptionRecordSize, sievedStream.getSize());
        verifySievedTransformedMessage(sievedStream, options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key1) : key1, "servid1", accountId1, containerId1, blobVersion > Blob_Version_V1 ? encryptionKey1 : null, usermetadata1, data1, blobVersion, blobType);
        verifySievedTransformedMessage(sievedStream, options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key5) : key5, "servid5", accountId5, containerId5, blobVersion > Blob_Version_V1 ? encryptionKey5 : null, usermetadata5, data5, blobVersion, blobType);
    } else {
        // even if there are no transformers, deleted and expired messages should be dropped by the MessageSievingInputStream.
        byte[] expectedBytes = new byte[(int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream5.getSize() + (int) messageFormatStream6.getSize()];
        System.arraycopy(totalMessageStreamContent, 0, expectedBytes, 0, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize());
        System.arraycopy(totalMessageStreamContent, totalMessageStreamContent.length - (int) messageFormatStream5.getSize() - (int) messageFormatStream6.getSize(), expectedBytes, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(), (int) messageFormatStream5.getSize() + (int) messageFormatStream6.getSize());
        Assert.assertEquals(expectedBytes.length, sievedStream.getSize());
        byte[] sievedBytes = Utils.readBytesFromStream(sievedStream, sievedStream.getSize());
        Assert.assertArrayEquals(expectedBytes, sievedBytes);
    }
    Assert.assertEquals(-1, sievedStream.read());
}
Also used : DataInputStream(java.io.DataInputStream) ByteBufInputStream(io.netty.buffer.ByteBufInputStream) ByteBufferInputStream(com.github.ambry.utils.ByteBufferInputStream) InputStream(java.io.InputStream) MetricRegistry(com.codahale.metrics.MetricRegistry) ByteBufferInputStream(com.github.ambry.utils.ByteBufferInputStream) ArrayList(java.util.ArrayList) MockId(com.github.ambry.store.MockId) StoreKey(com.github.ambry.store.StoreKey) ByteBuffer(java.nio.ByteBuffer) MessageInfo(com.github.ambry.store.MessageInfo)
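
The corruption setup above (corruptMessageStream2[0] = 0; corruptMessageStream2[1] = (byte) headerVersionToUse) relies on the header version being stored as a big-endian short in the first two bytes of the record. A small sketch of that byte layout, assuming the version value fits in the low byte as it does in the test:

import java.nio.ByteBuffer;

public class HeaderVersionByteSketch {
    public static void main(String[] args) {
        short headerVersion = 2;
        byte[] record = new byte[16];
        // ByteBuffer writes are big-endian by default, so putShort places the high byte first ...
        ByteBuffer.wrap(record).putShort(headerVersion);
        // ... which is equivalent to setting the two leading bytes directly, as the test does.
        System.out.printf("record[0]=%d record[1]=%d%n", record[0], record[1]); // prints 0 and 2
    }
}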

Example 9 with MockId

Use of com.github.ambry.store.MockId in project ambry by linkedin.

From class ValidatingKeyConvertingTransformer, method testDeletedRecords:

private void testDeletedRecords(short blobVersion, BlobType blobType) throws Exception {
    // MessageSievingInputStream contains put records for 2 valid blobs and 1 delete record
    // id1(put record for valid blob), id2(delete record) and id3(put record for valid blob)
    ArrayList<Short> versions = new ArrayList<>();
    versions.add(Message_Header_Version_V1);
    if (blobVersion != Blob_Version_V1) {
        versions.add(Message_Header_Version_V2);
        versions.add(Message_Header_Version_V3);
    }
    try {
        for (short version : versions) {
            headerVersionToUse = version;
            // create message stream for blob 1
            StoreKey key1 = new MockId("id1");
            short accountId = Utils.getRandomShort(RANDOM);
            short containerId = Utils.getRandomShort(RANDOM);
            BlobProperties prop1 = new BlobProperties(10, "servid1", accountId, containerId, false);
            byte[] encryptionKey1 = new byte[100];
            RANDOM.nextBytes(encryptionKey1);
            byte[] usermetadata1 = new byte[1000];
            RANDOM.nextBytes(usermetadata1);
            int blobContentSize = 2000;
            byte[] data1 = new byte[blobContentSize];
            RANDOM.nextBytes(data1);
            if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
                ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
                data1 = byteBufferBlob.array();
                blobContentSize = data1.length;
            }
            ByteBufferInputStream stream1 = new ByteBufferInputStream(ByteBuffer.wrap(data1));
            MessageFormatInputStream messageFormatStream1 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key1, ByteBuffer.wrap(encryptionKey1), prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key1, prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType);
            MessageInfo msgInfo1 = new MessageInfo(key1, messageFormatStream1.getSize(), accountId, containerId, prop1.getCreationTimeInMs());
            // create message stream for blob 2 and mark it as deleted
            StoreKey key2 = new MockId("id2");
            accountId = Utils.getRandomShort(RANDOM);
            containerId = Utils.getRandomShort(RANDOM);
            long deletionTimeMs = SystemTime.getInstance().milliseconds() + RANDOM.nextInt();
            MessageFormatInputStream messageFormatStream2 = new DeleteMessageFormatInputStream(key2, accountId, containerId, deletionTimeMs);
            MessageInfo msgInfo2 = new MessageInfo(key2, messageFormatStream2.getSize(), accountId, containerId, deletionTimeMs);
            // create message stream for blob 3
            StoreKey key3 = new MockId("id3");
            accountId = Utils.getRandomShort(RANDOM);
            containerId = Utils.getRandomShort(RANDOM);
            BlobProperties prop3 = new BlobProperties(10, "servid3", accountId, containerId, false);
            byte[] encryptionKey3 = new byte[100];
            RANDOM.nextBytes(encryptionKey3);
            byte[] usermetadata3 = new byte[1000];
            RANDOM.nextBytes(usermetadata3);
            blobContentSize = 2000;
            byte[] data3 = new byte[blobContentSize];
            RANDOM.nextBytes(data3);
            if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
                ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
                data3 = byteBufferBlob.array();
                blobContentSize = data3.length;
            }
            ByteBufferInputStream stream3 = new ByteBufferInputStream(ByteBuffer.wrap(data3));
            MessageFormatInputStream messageFormatStream3 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key3, ByteBuffer.wrap(encryptionKey3), prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key3, prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType);
            MessageInfo msgInfo3 = new MessageInfo(key3, messageFormatStream3.getSize(), accountId, containerId, prop3.getCreationTimeInMs());
            // create input stream for all blob messages together
            byte[] totalMessageContent = new byte[(int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize()];
            messageFormatStream1.read(totalMessageContent, 0, (int) messageFormatStream1.getSize());
            messageFormatStream2.read(totalMessageContent, (int) messageFormatStream1.getSize(), (int) messageFormatStream2.getSize());
            messageFormatStream3.read(totalMessageContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(), (int) messageFormatStream3.getSize());
            InputStream inputStream = new ByteBufferInputStream(ByteBuffer.wrap(totalMessageContent));
            List<MessageInfo> msgInfoList = new ArrayList<MessageInfo>();
            msgInfoList.add(msgInfo1);
            msgInfoList.add(msgInfo2);
            msgInfoList.add(msgInfo3);
            new MessageSievingInputStream(inputStream, msgInfoList, transformers, new MetricRegistry());
            if (!options.isEmpty()) {
                Assert.fail("IOException should have been thrown due to delete record ");
            }
        }
    } catch (IOException e) {
        if (options.isEmpty()) {
            Assert.fail("No exceptions should have occurred");
        }
    }
    headerVersionToUse = Message_Header_Version_V1;
}
Also used : DataInputStream(java.io.DataInputStream) ByteBufInputStream(io.netty.buffer.ByteBufInputStream) ByteBufferInputStream(com.github.ambry.utils.ByteBufferInputStream) InputStream(java.io.InputStream) MetricRegistry(com.codahale.metrics.MetricRegistry) ArrayList(java.util.ArrayList) ByteBufferInputStream(com.github.ambry.utils.ByteBufferInputStream) MockId(com.github.ambry.store.MockId) IOException(java.io.IOException) StoreKey(com.github.ambry.store.StoreKey) ByteBuffer(java.nio.ByteBuffer) MessageInfo(com.github.ambry.store.MessageInfo)
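
The try/Assert.fail/catch structure above encodes the rule that the MessageSievingInputStream constructor must throw an IOException exactly when transformers are configured (options is non-empty). A self-contained sketch of that conditional-expectation pattern as a generic helper (my own utility, not an ambry or JUnit API):

public final class ConditionalThrowSketch {
    @FunctionalInterface
    interface ThrowingAction {
        void run() throws Exception;
    }

    // Asserts that action throws an exception of the given type iff shouldThrow is true.
    static void assertThrowsIff(boolean shouldThrow, Class<? extends Exception> type, ThrowingAction action)
            throws Exception {
        try {
            action.run();
            if (shouldThrow) {
                throw new AssertionError("expected " + type.getSimpleName() + " but nothing was thrown");
            }
        } catch (Exception e) {
            if (!shouldThrow || !type.isInstance(e)) {
                // Unexpected exception, or an exception when none was expected: rethrow.
                throw e;
            }
        }
    }

    public static void main(String[] args) throws Exception {
        assertThrowsIff(true, IllegalStateException.class, () -> { throw new IllegalStateException("boom"); });
        assertThrowsIff(false, IllegalStateException.class, () -> { /* no-op */ });
        System.out.println("both cases behaved as expected");
    }
}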

Example 10 with MockId

Use of com.github.ambry.store.MockId in project ambry by linkedin.

From class CompositeBlobInfoTest, method testV2CompositeBlobInfoCtor:

/**
 * Tests various kinds of inputs for the V2 {@link CompositeBlobInfo}
 */
@Test
public void testV2CompositeBlobInfoCtor() {
    List<StoreKey> keys = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        keys.add(new MockId(TestUtils.getRandomString(60)));
    }
    // creates 1 10 byte blob, so too many keys
    invalidV2CompositeBlobInfoCtor(100, 10, keys);
    // creates 9 100 byte blobs, so too many keys
    invalidV2CompositeBlobInfoCtor(100, 900, keys);
    // creates 9 100 byte blobs and 1 1 byte blob
    validV2CompositeBlobInfoCtor(100, 901, keys);
    // creates 10 100 byte blobs
    validV2CompositeBlobInfoCtor(100, 1000, keys);
    // creates 10 100 byte blobs and 1 1 byte blob, so not enough keys
    invalidV2CompositeBlobInfoCtor(100, 1001, keys);
    // rest are a mix of negative and 0 values for chunkSize and totalSize, all invalid
    invalidV2CompositeBlobInfoCtor(0, 0, keys);
    invalidV2CompositeBlobInfoCtor(1, 0, keys);
    invalidV2CompositeBlobInfoCtor(0, 1, keys);
    invalidV2CompositeBlobInfoCtor(-1, 1, keys);
    invalidV2CompositeBlobInfoCtor(-1, -1, keys);
    invalidV2CompositeBlobInfoCtor(1, -1, keys);
}
Also used : ArrayList(java.util.ArrayList) MockId(com.github.ambry.store.MockId) StoreKey(com.github.ambry.store.StoreKey) Test(org.junit.Test)
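
The valid/invalid cases above follow from simple chunk-count arithmetic: a V2 composite blob of totalSize bytes split into chunks of at most chunkSize bytes needs ceil(totalSize / chunkSize) keys, and the constructor is expected to reject any other key count (as well as non-positive sizes). A tiny standalone sketch of that arithmetic (my own helper, not the CompositeBlobInfo API):

public class ChunkCountSketch {
    // Number of chunks needed to cover totalSize with chunks of at most chunkSize bytes.
    static long chunksNeeded(long chunkSize, long totalSize) {
        if (chunkSize <= 0 || totalSize <= 0) {
            throw new IllegalArgumentException("chunkSize and totalSize must be positive");
        }
        return (totalSize + chunkSize - 1) / chunkSize;
    }

    public static void main(String[] args) {
        System.out.println(chunksNeeded(100, 10));   // 1  chunk  -> 10 keys is too many
        System.out.println(chunksNeeded(100, 900));  // 9  chunks -> 10 keys is too many
        System.out.println(chunksNeeded(100, 901));  // 10 chunks -> matches the 10 keys
        System.out.println(chunksNeeded(100, 1000)); // 10 chunks -> matches the 10 keys
        System.out.println(chunksNeeded(100, 1001)); // 11 chunks -> 10 keys is not enough
    }
}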

Aggregations

MockId (com.github.ambry.store.MockId): 21
StoreKey (com.github.ambry.store.StoreKey): 17
ArrayList (java.util.ArrayList): 14
ByteBuffer (java.nio.ByteBuffer): 13
Test (org.junit.Test): 12
MetricRegistry (com.codahale.metrics.MetricRegistry): 8
ByteBufferInputStream (com.github.ambry.utils.ByteBufferInputStream): 8
Random (java.util.Random): 8
MessageInfo (com.github.ambry.store.MessageInfo): 7
DataInputStream (java.io.DataInputStream): 6
MockIdFactory (com.github.ambry.store.MockIdFactory): 4
ByteBufInputStream (io.netty.buffer.ByteBufInputStream): 4
InputStream (java.io.InputStream): 4
MessageReadSet (com.github.ambry.store.MessageReadSet): 3
Crc32 (com.github.ambry.utils.Crc32): 3
MockClusterMap (com.github.ambry.clustermap.MockClusterMap): 2
MockHelixParticipant (com.github.ambry.clustermap.MockHelixParticipant): 2
MockPartitionId (com.github.ambry.clustermap.MockPartitionId): 2
MockReplicaId (com.github.ambry.clustermap.MockReplicaId): 2
PartitionId (com.github.ambry.clustermap.PartitionId): 2