Use of com.github.ambry.store.MessageInfo in project ambry by linkedin.
Class ValidatingKeyConvertingTransformer, method testInValidDeletedAndExpiredBlobs.
private void testInValidDeletedAndExpiredBlobs(short blobVersion, BlobType blobType, short headerVersionToUse) throws Exception {
MessageFormatRecord.headerVersionToUse = headerVersionToUse;
// MessageSievingInputStream contains put records for 2 valid blobs, 2 corrupt blobs, 1 deleted blob and 1 expired blob.
// id1 (put record for valid blob), id2 (corrupt, valid header version), id3 (put record of deleted blob),
// id4 (put record of expired blob), id5 (put record for valid blob) and id6 (corrupt, invalid header version)
// create message stream for blob 1
StoreKey key1 = new MockId("id1");
short accountId1 = Utils.getRandomShort(RANDOM);
short containerId1 = Utils.getRandomShort(RANDOM);
BlobProperties prop1 = new BlobProperties(10, "servid1", accountId1, containerId1, false);
byte[] encryptionKey1 = new byte[100];
RANDOM.nextBytes(encryptionKey1);
byte[] usermetadata1 = new byte[1000];
RANDOM.nextBytes(usermetadata1);
int blobContentSize = 2000;
byte[] data1 = new byte[blobContentSize];
RANDOM.nextBytes(data1);
long blobSize = -1;
if (blobType == BlobType.DataBlob) {
blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
} else {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data1 = byteBufferBlob.array();
blobContentSize = data1.length;
blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
}
ByteBufferInputStream stream1 = new ByteBufferInputStream(ByteBuffer.wrap(data1));
MessageFormatInputStream messageFormatStream1 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key1, ByteBuffer.wrap(encryptionKey1), prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key1, prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType);
MessageInfo msgInfo1 = new MessageInfo(key1, messageFormatStream1.getSize(), accountId1, containerId1, prop1.getCreationTimeInMs());
// create message stream for blob 2
StoreKey key2 = new MockId("id2");
short accountId2 = Utils.getRandomShort(RANDOM);
short containerId2 = Utils.getRandomShort(RANDOM);
BlobProperties prop2 = new BlobProperties(10, "servid2", accountId2, containerId2, false);
byte[] encryptionKey2 = new byte[100];
RANDOM.nextBytes(encryptionKey2);
byte[] usermetadata2 = new byte[1000];
RANDOM.nextBytes(usermetadata2);
blobContentSize = 2000;
byte[] data2 = new byte[blobContentSize];
RANDOM.nextBytes(data2);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data2 = byteBufferBlob.array();
blobContentSize = data2.length;
}
ByteBufferInputStream stream2 = new ByteBufferInputStream(ByteBuffer.wrap(data2));
MessageFormatInputStream messageFormatStream2 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key2, ByteBuffer.wrap(encryptionKey2), prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key2, prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType);
MessageInfo msgInfo2 = new MessageInfo(key2, messageFormatStream2.getSize(), accountId2, containerId2, prop2.getCreationTimeInMs());
// corrupt the message stream, but keep the first two bytes (the big-endian header version short) valid so the header version is still recognized
byte[] corruptMessageStream2 = new byte[(int) messageFormatStream2.getSize()];
RANDOM.nextBytes(corruptMessageStream2);
corruptMessageStream2[0] = (byte) 0;
corruptMessageStream2[1] = (byte) headerVersionToUse;
InputStream corruptStream2 = new ByteBufferInputStream(ByteBuffer.wrap(corruptMessageStream2));
// create message stream for blob 3 that is deleted.
StoreKey key3 = new MockId("id3");
short accountId3 = Utils.getRandomShort(RANDOM);
short containerId3 = Utils.getRandomShort(RANDOM);
BlobProperties prop3 = new BlobProperties(10, "servid3", accountId3, containerId3, false);
byte[] usermetadata3 = new byte[1000];
RANDOM.nextBytes(usermetadata3);
blobContentSize = 2000;
byte[] data3 = new byte[blobContentSize];
RANDOM.nextBytes(data3);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data3 = byteBufferBlob.array();
blobContentSize = data3.length;
}
ByteBufferInputStream stream3 = new ByteBufferInputStream(ByteBuffer.wrap(data3));
MessageFormatInputStream messageFormatStream3 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key3, null, prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key3, prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType);
// MessageInfo marks this blob as deleted.
MessageInfo msgInfo3 = new MessageInfo(key3, messageFormatStream3.getSize(), true, false, Utils.Infinite_Time, accountId3, containerId3, prop3.getCreationTimeInMs());
// create message stream for blob 4 that is expired.
StoreKey key4 = new MockId("id4");
short accountId4 = Utils.getRandomShort(RANDOM);
short containerId4 = Utils.getRandomShort(RANDOM);
BlobProperties prop4 = new BlobProperties(10, "servid4", accountId4, containerId4, false);
byte[] usermetadata4 = new byte[1000];
RANDOM.nextBytes(usermetadata4);
blobContentSize = 2000;
byte[] data4 = new byte[blobContentSize];
RANDOM.nextBytes(data4);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data4 = byteBufferBlob.array();
blobContentSize = data4.length;
}
ByteBufferInputStream stream4 = new ByteBufferInputStream(ByteBuffer.wrap(data4));
MessageFormatInputStream messageFormatStream4 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key4, null, prop4, ByteBuffer.wrap(usermetadata4), stream4, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key4, prop4, ByteBuffer.wrap(usermetadata4), stream4, blobContentSize, blobType);
// MessageInfo marks this as already expired (the third field is an absolute expiresAt time).
MessageInfo msgInfo4 = new MessageInfo(key4, messageFormatStream4.getSize(), 1, accountId4, containerId4, prop4.getCreationTimeInMs());
// create message stream for blob 5
StoreKey key5 = new MockId("id5");
short accountId5 = Utils.getRandomShort(RANDOM);
short containerId5 = Utils.getRandomShort(RANDOM);
BlobProperties prop5 = new BlobProperties(10, "servid5", accountId5, containerId5, false);
byte[] encryptionKey5 = new byte[100];
RANDOM.nextBytes(encryptionKey5);
byte[] usermetadata5 = new byte[1000];
RANDOM.nextBytes(usermetadata5);
blobContentSize = 2000;
byte[] data5 = new byte[blobContentSize];
RANDOM.nextBytes(data5);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data5 = byteBufferBlob.array();
blobContentSize = data5.length;
}
ByteBufferInputStream stream5 = new ByteBufferInputStream(ByteBuffer.wrap(data5));
MessageFormatInputStream messageFormatStream5 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key5, ByteBuffer.wrap(encryptionKey5), prop5, ByteBuffer.wrap(usermetadata5), stream5, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key5, prop5, ByteBuffer.wrap(usermetadata5), stream5, blobContentSize, blobType);
MessageInfo msgInfo5 = new MessageInfo(key5, messageFormatStream5.getSize(), accountId5, containerId5, prop5.getCreationTimeInMs());
// create message stream for blob 6
StoreKey key6 = new MockId("id6");
short accountId6 = Utils.getRandomShort(RANDOM);
short containerId6 = Utils.getRandomShort(RANDOM);
BlobProperties prop6 = new BlobProperties(10, "servid6", accountId6, containerId6, false);
byte[] encryptionKey6 = new byte[100];
RANDOM.nextBytes(encryptionKey6);
byte[] usermetadata6 = new byte[1000];
RANDOM.nextBytes(usermetadata6);
blobContentSize = 2000;
byte[] data6 = new byte[blobContentSize];
RANDOM.nextBytes(data6);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data6 = byteBufferBlob.array();
blobContentSize = data6.length;
}
ByteBufferInputStream stream6 = new ByteBufferInputStream(ByteBuffer.wrap(data6));
MessageFormatInputStream messageFormatStream6 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key6, ByteBuffer.wrap(encryptionKey6), prop6, ByteBuffer.wrap(usermetadata6), stream6, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key6, prop6, ByteBuffer.wrap(usermetadata6), stream6, blobContentSize, blobType);
MessageInfo msgInfo6 = new MessageInfo(key6, messageFormatStream6.getSize(), accountId6, containerId6, prop6.getCreationTimeInMs());
// corrupt the message stream but make sure this header version is not valid
byte[] corruptMessageStream6 = new byte[(int) messageFormatStream6.getSize()];
RANDOM.nextBytes(corruptMessageStream6);
corruptMessageStream6[1] = (byte) 100;
InputStream corruptStream6 = new ByteBufferInputStream(ByteBuffer.wrap(corruptMessageStream6));
// create input stream for all blob messages together
byte[] totalMessageStreamContent = new byte[(int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize() + (int) messageFormatStream4.getSize() + (int) messageFormatStream5.getSize() + (int) messageFormatStream6.getSize()];
messageFormatStream1.read(totalMessageStreamContent, 0, (int) messageFormatStream1.getSize());
corruptStream2.read(totalMessageStreamContent, (int) messageFormatStream1.getSize(), (int) messageFormatStream2.getSize());
messageFormatStream3.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(), (int) messageFormatStream3.getSize());
messageFormatStream4.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize(), (int) messageFormatStream4.getSize());
messageFormatStream5.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize() + (int) messageFormatStream4.getSize(), (int) messageFormatStream5.getSize());
corruptStream6.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize() + (int) messageFormatStream4.getSize() + (int) messageFormatStream5.getSize(), (int) messageFormatStream6.getSize());
InputStream inputStream = new ByteBufferInputStream(ByteBuffer.wrap(totalMessageStreamContent));
List<MessageInfo> msgInfoList = new ArrayList<>();
msgInfoList.add(msgInfo1);
msgInfoList.add(msgInfo2);
msgInfoList.add(msgInfo3);
msgInfoList.add(msgInfo4);
msgInfoList.add(msgInfo5);
msgInfoList.add(msgInfo6);
MessageSievingInputStream sievedStream = new MessageSievingInputStream(inputStream, msgInfoList, transformers, new MetricRegistry());
Map<StoreKey, StoreKey> convertedMap = randomKeyConverter.convert(Arrays.asList(key1, key2, key3, key4, key5, key6));
int headerSize = getHeaderSizeForVersion(headerVersionToUse);
int blobPropertiesRecordSize = BlobProperties_Format_V1.getBlobPropertiesRecordSize(prop1);
int userMetadataSize = UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(usermetadata1));
int totalHeadSize = 2 * headerSize;
int totalBlobPropertiesSize = 2 * blobPropertiesRecordSize;
int totalUserMetadataSize = 2 * userMetadataSize;
int totalBlobSize = 2 * (int) blobSize;
int totalKeySize = options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key1).sizeInBytes() + convertedMap.get(key5).sizeInBytes() : key1.sizeInBytes() + key5.sizeInBytes();
int totalEncryptionRecordSize = blobVersion > Blob_Version_V1 ? BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey1)) + BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey5)) : 0;
if (!options.isEmpty()) {
Assert.assertTrue(sievedStream.hasInvalidMessages());
Assert.assertEquals((int) sievedStream.getValidMessageInfoList().stream().mapToLong(MessageInfo::getSize).sum(), sievedStream.getSize());
Assert.assertEquals(totalHeadSize + totalBlobPropertiesSize + totalUserMetadataSize + totalBlobSize + totalKeySize + totalEncryptionRecordSize, sievedStream.getSize());
verifySievedTransformedMessage(sievedStream, options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key1) : key1, "servid1", accountId1, containerId1, blobVersion > Blob_Version_V1 ? encryptionKey1 : null, usermetadata1, data1, blobVersion, blobType);
verifySievedTransformedMessage(sievedStream, options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key5) : key5, "servid5", accountId5, containerId5, blobVersion > Blob_Version_V1 ? encryptionKey5 : null, usermetadata5, data5, blobVersion, blobType);
} else {
// Even with no transformers, deleted and expired messages are dropped by the MessageSievingInputStream, while the corrupt messages (id2 and id6) pass through untouched.
byte[] expectedBytes = new byte[(int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream5.getSize() + (int) messageFormatStream6.getSize()];
System.arraycopy(totalMessageStreamContent, 0, expectedBytes, 0, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize());
System.arraycopy(totalMessageStreamContent, totalMessageStreamContent.length - (int) messageFormatStream5.getSize() - (int) messageFormatStream6.getSize(), expectedBytes, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(), (int) messageFormatStream5.getSize() + (int) messageFormatStream6.getSize());
Assert.assertEquals(expectedBytes.length, sievedStream.getSize());
byte[] sievedBytes = Utils.readBytesFromStream(sievedStream, sievedStream.getSize());
Assert.assertArrayEquals(expectedBytes, sievedBytes);
}
Assert.assertEquals(-1, sievedStream.read());
}
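Note the three MessageInfo constructor shapes the test above relies on. The following annotated restatement is a hedged sketch: the argument roles are inferred from the calls above, not taken from the MessageInfo source.
// Hedged restatement of the MessageInfo overloads used above (argument roles are inferred, not authoritative).
// Plain put record: key, size, accountId, containerId, operationTimeMs.
MessageInfo putInfo = new MessageInfo(key1, messageFormatStream1.getSize(), accountId1, containerId1, prop1.getCreationTimeInMs());
// Deleted record: key, size, deleted flag, ttlUpdated flag, expiresAtMs, accountId, containerId, operationTimeMs.
MessageInfo deletedInfo = new MessageInfo(key3, messageFormatStream3.getSize(), true, false, Utils.Infinite_Time, accountId3, containerId3, prop3.getCreationTimeInMs());
// Expired record: key, size, expiresAtMs (an absolute timestamp already in the past), accountId, containerId, operationTimeMs.
MessageInfo expiredInfo = new MessageInfo(key4, messageFormatStream4.getSize(), 1, accountId4, containerId4, prop4.getCreationTimeInMs());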
Use of com.github.ambry.store.MessageInfo in project ambry by linkedin.
Class ValidatingKeyConvertingTransformer, method testDeletedRecords.
private void testDeletedRecords(short blobVersion, BlobType blobType) throws Exception {
// MessageSievingInputStream contains put records for 2 valid blobs and 1 delete record
// id1(put record for valid blob), id2(delete record) and id3(put record for valid blob)
ArrayList<Short> versions = new ArrayList<>();
versions.add(Message_Header_Version_V1);
if (blobVersion != Blob_Version_V1) {
versions.add(Message_Header_Version_V2);
versions.add(Message_Header_Version_V3);
}
try {
for (short version : versions) {
headerVersionToUse = version;
// create message stream for blob 1
StoreKey key1 = new MockId("id1");
short accountId = Utils.getRandomShort(RANDOM);
short containerId = Utils.getRandomShort(RANDOM);
BlobProperties prop1 = new BlobProperties(10, "servid1", accountId, containerId, false);
byte[] encryptionKey1 = new byte[100];
RANDOM.nextBytes(encryptionKey1);
byte[] usermetadata1 = new byte[1000];
RANDOM.nextBytes(usermetadata1);
int blobContentSize = 2000;
byte[] data1 = new byte[blobContentSize];
RANDOM.nextBytes(data1);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data1 = byteBufferBlob.array();
blobContentSize = data1.length;
}
ByteBufferInputStream stream1 = new ByteBufferInputStream(ByteBuffer.wrap(data1));
MessageFormatInputStream messageFormatStream1 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key1, ByteBuffer.wrap(encryptionKey1), prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key1, prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType);
MessageInfo msgInfo1 = new MessageInfo(key1, messageFormatStream1.getSize(), accountId, containerId, prop1.getCreationTimeInMs());
// create message stream for blob 2 and mark it as deleted
StoreKey key2 = new MockId("id2");
accountId = Utils.getRandomShort(RANDOM);
containerId = Utils.getRandomShort(RANDOM);
long deletionTimeMs = SystemTime.getInstance().milliseconds() + RANDOM.nextInt();
MessageFormatInputStream messageFormatStream2 = new DeleteMessageFormatInputStream(key2, accountId, containerId, deletionTimeMs);
MessageInfo msgInfo2 = new MessageInfo(key2, messageFormatStream2.getSize(), accountId, containerId, deletionTimeMs);
// create message stream for blob 3
StoreKey key3 = new MockId("id3");
accountId = Utils.getRandomShort(RANDOM);
containerId = Utils.getRandomShort(RANDOM);
BlobProperties prop3 = new BlobProperties(10, "servid3", accountId, containerId, false);
byte[] encryptionKey3 = new byte[100];
RANDOM.nextBytes(encryptionKey3);
byte[] usermetadata3 = new byte[1000];
RANDOM.nextBytes(usermetadata3);
blobContentSize = 2000;
byte[] data3 = new byte[blobContentSize];
RANDOM.nextBytes(data3);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data3 = byteBufferBlob.array();
blobContentSize = data3.length;
}
ByteBufferInputStream stream3 = new ByteBufferInputStream(ByteBuffer.wrap(data3));
MessageFormatInputStream messageFormatStream3 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key3, ByteBuffer.wrap(encryptionKey3), prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key3, prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType);
MessageInfo msgInfo3 = new MessageInfo(key3, messageFormatStream3.getSize(), accountId, containerId, prop3.getCreationTimeInMs());
// create input stream for all blob messages together
byte[] totalMessageContent = new byte[(int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize()];
messageFormatStream1.read(totalMessageContent, 0, (int) messageFormatStream1.getSize());
messageFormatStream2.read(totalMessageContent, (int) messageFormatStream1.getSize(), (int) messageFormatStream2.getSize());
messageFormatStream3.read(totalMessageContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(), (int) messageFormatStream3.getSize());
InputStream inputStream = new ByteBufferInputStream(ByteBuffer.wrap(totalMessageContent));
List<MessageInfo> msgInfoList = new ArrayList<MessageInfo>();
msgInfoList.add(msgInfo1);
msgInfoList.add(msgInfo2);
msgInfoList.add(msgInfo3);
new MessageSievingInputStream(inputStream, msgInfoList, transformers, new MetricRegistry());
if (!options.isEmpty()) {
Assert.fail("IOException should have been thrown due to delete record ");
}
}
} catch (IOException e) {
if (options.isEmpty()) {
Assert.fail("No exceptions should have occurred");
}
}
headerVersionToUse = Message_Header_Version_V1;
}
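The try/fail/catch idiom above can be expressed more directly. This is a hedged sketch of the same expectation, assuming JUnit 4.13+ (which provides Assert.assertThrows) is available to the test; the variables reuse those defined inside the loop above.
// Hedged alternative to the try/fail/catch pattern above; assumes JUnit 4.13+ for Assert.assertThrows.
if (!options.isEmpty()) {
// With transformers configured, a delete record in the sieved stream must be rejected.
Assert.assertThrows(IOException.class, () -> new MessageSievingInputStream(inputStream, msgInfoList, transformers, new MetricRegistry()));
} else {
// Without transformers, the stream is passed through and no exception is expected.
new MessageSievingInputStream(inputStream, msgInfoList, transformers, new MetricRegistry());
}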
Use of com.github.ambry.store.MessageInfo in project ambry by linkedin.
Class ValidatingKeyConvertingTransformer, method testDeprecatedMsg.
private void testDeprecatedMsg(short blobVersion, BlobType blobType, short headerVersionToUse) throws Exception {
MessageFormatRecord.headerVersionToUse = headerVersionToUse;
// MessageSievingInputStream contains put records for 2 valid blobs and 1 deprecated blob
// id1(put record for valid blob), id2(deprecated) and id3(put record for valid blob)
// create message stream for blob 1
StoreKey key1 = new MockId("id1");
short accountId1 = Utils.getRandomShort(RANDOM);
short containerId1 = Utils.getRandomShort(RANDOM);
BlobProperties prop1 = new BlobProperties(10, "servid1", accountId1, containerId1, false);
byte[] encryptionKey1 = new byte[100];
RANDOM.nextBytes(encryptionKey1);
byte[] usermetadata1 = new byte[1000];
RANDOM.nextBytes(usermetadata1);
int blobContentSize = 2000;
byte[] data1 = new byte[blobContentSize];
RANDOM.nextBytes(data1);
long blobSize = -1;
if (blobType == BlobType.DataBlob) {
blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
} else {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data1 = byteBufferBlob.array();
blobContentSize = data1.length;
blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
}
ByteBufferInputStream stream1 = new ByteBufferInputStream(ByteBuffer.wrap(data1));
MessageFormatInputStream messageFormatStream1 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key1, ByteBuffer.wrap(encryptionKey1), prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key1, prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType);
MessageInfo msgInfo1 = new MessageInfo(key1, messageFormatStream1.getSize(), accountId1, containerId1, prop1.getCreationTimeInMs());
// create message stream for blob 2
StoreKey key2 = new MockId("id2");
short accountId2 = Utils.getRandomShort(RANDOM);
short containerId2 = Utils.getRandomShort(RANDOM);
BlobProperties prop2 = new BlobProperties(10, "servid2", accountId2, containerId2, false);
byte[] encryptionKey2 = new byte[100];
RANDOM.nextBytes(encryptionKey2);
byte[] usermetadata2 = new byte[1000];
RANDOM.nextBytes(usermetadata2);
blobContentSize = 2000;
byte[] data2 = new byte[blobContentSize];
RANDOM.nextBytes(data2);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data2 = byteBufferBlob.array();
blobContentSize = data2.length;
}
ByteBufferInputStream stream2 = new ByteBufferInputStream(ByteBuffer.wrap(data2));
MessageFormatInputStream messageFormatStream2 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key2, ByteBuffer.wrap(encryptionKey2), prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key2, prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType);
MessageInfo msgInfo2 = new MessageInfo(key2, messageFormatStream2.getSize(), accountId2, containerId2, prop2.getCreationTimeInMs());
// Add the key for the second message to the discardable ones.
randomKeyConverter.addInvalids(Collections.singletonList(key2));
// create message stream for blob 3
StoreKey key3 = new MockId("id3");
short accountId3 = Utils.getRandomShort(RANDOM);
short containerId3 = Utils.getRandomShort(RANDOM);
BlobProperties prop3 = new BlobProperties(10, "servid3", accountId3, containerId3, false);
byte[] encryptionKey3 = new byte[100];
RANDOM.nextBytes(encryptionKey3);
byte[] usermetadata3 = new byte[1000];
RANDOM.nextBytes(usermetadata3);
blobContentSize = 2000;
byte[] data3 = new byte[blobContentSize];
RANDOM.nextBytes(data3);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data3 = byteBufferBlob.array();
blobContentSize = data3.length;
}
ByteBufferInputStream stream3 = new ByteBufferInputStream(ByteBuffer.wrap(data3));
MessageFormatInputStream messageFormatStream3 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key3, ByteBuffer.wrap(encryptionKey3), prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key3, prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType);
MessageInfo msgInfo3 = new MessageInfo(key3, messageFormatStream3.getSize(), accountId3, containerId3, prop3.getCreationTimeInMs());
// create input stream for all blob messages together
byte[] totalMessageStreamContent = new byte[(int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize()];
messageFormatStream1.read(totalMessageStreamContent, 0, (int) messageFormatStream1.getSize());
messageFormatStream2.read(totalMessageStreamContent, (int) messageFormatStream1.getSize(), (int) messageFormatStream2.getSize());
messageFormatStream3.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(), (int) messageFormatStream3.getSize());
InputStream inputStream = new ByteBufferInputStream(ByteBuffer.wrap(totalMessageStreamContent));
List<MessageInfo> msgInfoList = new ArrayList<>();
msgInfoList.add(msgInfo1);
msgInfoList.add(msgInfo2);
msgInfoList.add(msgInfo3);
MessageSievingInputStream sievedStream = new MessageSievingInputStream(inputStream, msgInfoList, transformers, new MetricRegistry());
Map<StoreKey, StoreKey> convertedMap = randomKeyConverter.convert(Arrays.asList(key1, key2, key3));
int headerSize = MessageFormatRecord.getHeaderSizeForVersion(headerVersionToUse);
int blobPropertiesRecordSize = BlobProperties_Format_V1.getBlobPropertiesRecordSize(prop1);
int userMetadataSize = UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(usermetadata1));
int totalHeadSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * headerSize;
int totalBlobPropertiesSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * blobPropertiesRecordSize;
int totalUserMetadataSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * userMetadataSize;
int totalBlobSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * (int) blobSize;
int totalKeySize = options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key1).sizeInBytes() + convertedMap.get(key3).sizeInBytes() : key1.sizeInBytes() + key2.sizeInBytes() + key3.sizeInBytes();
int totalEncryptionRecordSize = blobVersion > Blob_Version_V1 ? BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey1)) + (options.contains(TransformerOptions.KeyConvert) ? 0 : BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey2))) + BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey3)) : 0;
if (!options.isEmpty()) {
if (options.contains(TransformerOptions.KeyConvert)) {
Assert.assertTrue(sievedStream.hasDeprecatedMessages());
}
Assert.assertEquals((int) sievedStream.getValidMessageInfoList().stream().mapToLong(MessageInfo::getSize).sum(), sievedStream.getSize());
Assert.assertEquals(options.isEmpty() ? totalMessageStreamContent.length : totalHeadSize + totalBlobPropertiesSize + totalUserMetadataSize + totalBlobSize + totalKeySize + totalEncryptionRecordSize, sievedStream.getSize());
verifySievedTransformedMessage(sievedStream, options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key1) : key1, "servid1", accountId1, containerId1, blobVersion > Blob_Version_V1 ? encryptionKey1 : null, usermetadata1, data1, blobVersion, blobType);
if (!options.contains(TransformerOptions.KeyConvert)) {
verifySievedTransformedMessage(sievedStream, key2, "servid2", accountId2, containerId2, blobVersion > Blob_Version_V1 ? encryptionKey2 : null, usermetadata2, data2, blobVersion, blobType);
}
verifySievedTransformedMessage(sievedStream, options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key3) : key3, "servid3", accountId3, containerId3, blobVersion > Blob_Version_V1 ? encryptionKey3 : null, usermetadata3, data3, blobVersion, blobType);
} else {
Assert.assertEquals(totalMessageStreamContent.length, sievedStream.getSize());
byte[] sievedBytes = Utils.readBytesFromStream(sievedStream, sievedStream.getSize());
Assert.assertArrayEquals(totalMessageStreamContent, sievedBytes);
}
Assert.assertEquals(-1, sievedStream.read());
}
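The expected-size expressions above pack a lot into single lines. A hedged per-message restatement, reusing the variables already defined in the test, may make the accounting easier to follow.
// Hedged restatement of the size accounting above: each surviving message contributes a header,
// a blob properties record, a user metadata record and a blob record, plus its store key and,
// for Blob V2 and above, an encryption key record.
int survivors = options.contains(TransformerOptions.KeyConvert) ? 2 : 3; // key2 survives only when keys are not converted
int perMessageFixedSize = headerSize + blobPropertiesRecordSize + userMetadataSize + (int) blobSize;
int expectedSievedSize = survivors * perMessageFixedSize + totalKeySize + totalEncryptionRecordSize;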
Use of com.github.ambry.store.MessageInfo in project ambry by linkedin.
Class BlobStoreRecoveryTest, method recoveryTest.
@Test
public void recoveryTest() throws MessageFormatException, IOException {
MessageStoreRecovery recovery = new BlobStoreRecovery();
// create log and write to it
ReadImp readrecovery = new ReadImp();
readrecovery.initialize();
List<MessageInfo> recoveredMessages = recovery.recover(readrecovery, 0, readrecovery.getSize(), new MockIdFactory());
Assert.assertEquals(recoveredMessages.size(), 6);
verifyInfo(recoveredMessages.get(0), readrecovery.keys[0], readrecovery.sizes.get(0), readrecovery.expectedExpirationTimeMs, false, false);
verifyInfo(recoveredMessages.get(1), readrecovery.keys[1], readrecovery.sizes.get(1), Utils.Infinite_Time, false, false);
verifyInfo(recoveredMessages.get(2), readrecovery.keys[2], readrecovery.sizes.get(2), Utils.Infinite_Time, false, false);
verifyInfo(recoveredMessages.get(4), readrecovery.keys[1], readrecovery.sizes.get(4), Utils.Infinite_Time, true, false);
if (MessageFormatRecord.headerVersionToUse >= MessageFormatRecord.Message_Header_Version_V2) {
verifyInfo(recoveredMessages.get(3), readrecovery.keys[1], readrecovery.sizes.get(3), Utils.Infinite_Time, false, true);
verifyInfo(recoveredMessages.get(5), readrecovery.keys[0], readrecovery.sizes.get(5), Utils.Infinite_Time, false, true);
} else {
verifyInfo(recoveredMessages.get(3), readrecovery.keys[3], readrecovery.sizes.get(3), Utils.Infinite_Time, false, false);
verifyInfo(recoveredMessages.get(5), readrecovery.keys[4], readrecovery.sizes.get(5), Utils.Infinite_Time, false, false);
}
}
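The verifyInfo helper is not part of this excerpt; judging from the call sites, its trailing booleans appear to correspond to the deleted and TTL-updated flags on the recovered MessageInfo. A hedged sketch of what such a helper might look like follows; the MessageInfo accessors used are assumptions, not confirmed by this excerpt.
// Hedged sketch of a verifyInfo-style check; the MessageInfo accessors used here are assumptions.
private void verifyInfo(MessageInfo info, StoreKey expectedKey, long expectedSize, long expectedExpiresAtMs, boolean expectedDeleted, boolean expectedTtlUpdated) {
Assert.assertEquals(expectedKey, info.getStoreKey());
Assert.assertEquals(expectedSize, info.getSize());
Assert.assertEquals(expectedExpiresAtMs, info.getExpirationTimeInMs());
Assert.assertEquals(expectedDeleted, info.isDeleted());
Assert.assertEquals(expectedTtlUpdated, info.isTtlUpdated());
}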
Use of com.github.ambry.store.MessageInfo in project ambry by linkedin.
Class MessageSievingInputStream, method validateAndTransform.
/**
* Validates and potentially transforms the given input stream consisting of message data. It does so using the list
* of {@link Transformer}s associated with this instance, which typically check for message corruption and
* acceptable formats.
* @param inMsg the original {@link Message} that needs to be validated and possibly transformed.
* @param msgStreamList the output list to which the sieved stream output is to be added.
* @param msgOffset the offset of the message in the stream.
* @throws IOException if an exception was encountered reading or writing bytes to/from streams.
*/
private void validateAndTransform(Message inMsg, List<InputStream> msgStreamList, int msgOffset) throws IOException {
if (transformers == null || transformers.isEmpty()) {
// Write the message without any transformations.
sievedMessageInfoList.add(inMsg.getMessageInfo());
msgStreamList.add(inMsg.getStream());
} else {
long sieveStartTime = SystemTime.getInstance().milliseconds();
Message msg = inMsg;
TransformationOutput output = null;
for (Transformer transformer : transformers) {
output = transformer.transform(msg);
if (output.getException() != null || output.getMsg() == null) {
break;
} else {
msg = output.getMsg();
}
}
if (output.getException() != null) {
if (output.getException() instanceof MessageFormatException) {
logger.error("Error validating/transforming the message at {} with messageInfo {} and hence skipping the message", msgOffset, inMsg.getMessageInfo(), output.getException());
hasInvalidMessages = true;
messageSievingCorruptMessagesDiscardedCount.inc();
} else {
throw new IOException("Encountered exception during transformation", output.getException());
}
} else if (output.getMsg() == null) {
logger.trace("Transformation is on, and the message with id {} does not have a replacement and was discarded.", inMsg.getMessageInfo().getStoreKey());
hasDeprecatedMessages = true;
messageSievingDeprecatedMessagesDiscardedCount.inc();
} else {
MessageInfo tfmMsgInfo = output.getMsg().getMessageInfo();
sievedMessageInfoList.add(tfmMsgInfo);
msgStreamList.add(output.getMsg().getStream());
logger.trace("Original message length {}, transformed bytes read {}", inMsg.getMessageInfo().getSize(), tfmMsgInfo.getSize());
}
singleMessageSieveTime.update(SystemTime.getInstance().milliseconds() - sieveStartTime);
}
}
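The loop above establishes the contract for each Transformer: return a TransformationOutput carrying either a replacement Message, a null Message (the input is deprecated and gets dropped), or an exception (a MessageFormatException marks the message invalid, while any other exception aborts sieving). Below is a hedged sketch of a pass-through Transformer honoring that contract; the TransformationOutput constructors are assumptions, and any additional methods declared on the Transformer interface in a given Ambry version would also need implementing.
// Hedged sketch of a Transformer that satisfies the contract enforced by validateAndTransform above.
// The TransformationOutput constructors shown here are assumptions for illustration.
class PassThroughTransformer implements Transformer {
@Override
public TransformationOutput transform(Message msg) {
try {
// A real transformer would validate or rewrite the message here; corrupt input should surface
// as a MessageFormatException so the sieve counts it as invalid and keeps going.
return new TransformationOutput(msg);
} catch (Exception e) {
return new TransformationOutput(e);
}
}
}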