Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class MessageSievingInputStream, method checkForMessageValidity.
/**
* Ensures blob validity in the given input stream. For now, blobs are checked for message corruption.
* @param byteArrayInputStream the stream against which validation has to be done
* @param currentOffset the offset at which the message starts in the original stream (used for logging)
* @param size the total size of the message expected
* @param storeKeyFactory the StoreKeyFactory used to deserialize the store key
* @param msgInfo the MessageInfo against which the parsed store key is checked
* @return true if the message is valid, false otherwise
* @throws IOException if the stream cannot be read
*/
private boolean checkForMessageValidity(ByteArrayInputStream byteArrayInputStream, int currentOffset, long size, StoreKeyFactory storeKeyFactory, MessageInfo msgInfo) throws IOException {
boolean isValid = false;
ByteBuffer encryptionKey;
BlobProperties props;
ByteBuffer metadata;
BlobData blobData;
long startTime = SystemTime.getInstance().milliseconds();
try {
int availableBeforeParsing = byteArrayInputStream.available();
byte[] headerVersionInBytes = new byte[Version_Field_Size_In_Bytes];
byteArrayInputStream.read(headerVersionInBytes, 0, Version_Field_Size_In_Bytes);
ByteBuffer headerVersion = ByteBuffer.wrap(headerVersionInBytes);
short version = headerVersion.getShort();
if (!isValidHeaderVersion(version)) {
throw new MessageFormatException("Header version not supported " + version, MessageFormatErrorCodes.Data_Corrupt);
}
ByteBuffer headerBuffer = ByteBuffer.allocate(getHeaderSizeForVersion(version));
headerBuffer.putShort(version);
byteArrayInputStream.read(headerBuffer.array(), 2, headerBuffer.capacity() - 2);
headerBuffer.position(headerBuffer.capacity());
headerBuffer.flip();
MessageHeader_Format header = getMessageHeader(version, headerBuffer);
StoreKey storeKey = storeKeyFactory.getStoreKey(new DataInputStream(byteArrayInputStream));
if (header.isPutRecord()) {
encryptionKey = header.hasEncryptionKeyRecord() ? deserializeBlobEncryptionKey(byteArrayInputStream) : null;
props = deserializeBlobProperties(byteArrayInputStream);
metadata = deserializeUserMetadata(byteArrayInputStream);
blobData = deserializeBlob(byteArrayInputStream);
} else {
throw new IllegalStateException("Message cannot be a deleted record ");
}
if (byteArrayInputStream.available() != 0) {
logger.error("Parsed message size {} is not equivalent to the size in message info {}", (availableBeforeParsing + byteArrayInputStream.available()), availableBeforeParsing);
} else {
logger.trace("Message Successfully read");
logger.trace("Header - version {} Message Size {} Starting offset of the blob {} BlobEncryptionKeyRecord {} BlobPropertiesRelativeOffset {}" + " UserMetadataRelativeOffset {} DataRelativeOffset {} DeleteRecordRelativeOffset {} Crc {}", header.getVersion(), header.getMessageSize(), currentOffset, header.getBlobEncryptionKeyRecordRelativeOffset(), header.getBlobPropertiesRecordRelativeOffset(), header.getUserMetadataRecordRelativeOffset(), header.getBlobRecordRelativeOffset(), header.getDeleteRecordRelativeOffset(), header.getCrc());
logger.trace("Id {} Encryption Key -size {} Blob Properties - blobSize {} Metadata - size {} Blob - size {} ", storeKey.getID(), encryptionKey == null ? 0 : encryptionKey.capacity(), props.getBlobSize(), metadata.capacity(), blobData.getSize());
if (msgInfo.getStoreKey().equals(storeKey)) {
isValid = true;
} else {
logger.error("StoreKey in log " + storeKey + " failed to match store key from Index " + msgInfo.getStoreKey());
}
}
} catch (MessageFormatException e) {
logger.error("MessageFormat exception thrown for a blob starting at offset " + currentOffset + " with exception: ", e);
} finally {
messageFormatValidationTime.update(SystemTime.getInstance().milliseconds() - startTime);
}
return isValid;
}
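checkForMessageValidity is private, so callers exercise it indirectly by constructing a MessageSievingInputStream over serialized put messages. The sketch below shows one plausible way to do that for a single message, assembled only from constructors that appear in the tests later in this section and assuming the same imports as the surrounding test code; the variable names and payload sizes are illustrative, not taken from the project.
// Sketch (not project code): build one valid put message and let the sieving stream
// validate it internally via checkForMessageValidity(). Assumes the constructors used
// in the tests below; "payload", "serialized" and similar names are illustrative.
StoreKey key = new MockId("id1");
short accountId = Utils.getRandomShort(TestUtils.RANDOM);
short containerId = Utils.getRandomShort(TestUtils.RANDOM);
BlobProperties props = new BlobProperties(10, "servid", accountId, containerId, false);
byte[] userMetadata = new byte[1000];
TestUtils.RANDOM.nextBytes(userMetadata);
byte[] payload = new byte[2000];
TestUtils.RANDOM.nextBytes(payload);
MessageFormatInputStream putStream = new PutMessageFormatBlobV1InputStream(key, props, ByteBuffer.wrap(userMetadata), new ByteBufferInputStream(ByteBuffer.wrap(payload)), payload.length, BlobType.DataBlob);
MessageInfo info = new MessageInfo(key, putStream.getSize(), accountId, containerId, props.getCreationTimeInMs());
byte[] serialized = new byte[(int) putStream.getSize()];
putStream.read(serialized, 0, serialized.length);
// The constructor walks every message, validates it against its MessageInfo and keeps only the valid ones.
MessageSievingInputStream sieved = new MessageSievingInputStream(new ByteBufferInputStream(ByteBuffer.wrap(serialized)), Collections.singletonList(info), new MockIdFactory(), new MetricRegistry());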
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class MessageFormatInputStreamTest, method messageFormatRecordsTest.
private void messageFormatRecordsTest(short blobVersion, BlobType blobType, boolean useV2Header) throws IOException, MessageFormatException {
StoreKey key = new MockId("id1");
StoreKeyFactory keyFactory = new MockIdFactory();
short accountId = Utils.getRandomShort(TestUtils.RANDOM);
short containerId = Utils.getRandomShort(TestUtils.RANDOM);
BlobProperties prop = new BlobProperties(10, "servid", accountId, containerId, false);
byte[] encryptionKey = new byte[100];
new Random().nextBytes(encryptionKey);
byte[] usermetadata = new byte[1000];
new Random().nextBytes(usermetadata);
int blobContentSize = 2000;
byte[] data = new byte[blobContentSize];
new Random().nextBytes(data);
long blobSize = -1;
MessageFormatRecord.headerVersionToUse = useV2Header ? MessageFormatRecord.Message_Header_Version_V2 : MessageFormatRecord.Message_Header_Version_V1;
if (blobVersion == MessageFormatRecord.Blob_Version_V1) {
blobSize = MessageFormatRecord.Blob_Format_V1.getBlobRecordSize(blobContentSize);
} else if (blobVersion == MessageFormatRecord.Blob_Version_V2 && blobType == BlobType.DataBlob) {
blobSize = (int) MessageFormatRecord.Blob_Format_V2.getBlobRecordSize(blobContentSize);
} else if (blobVersion == MessageFormatRecord.Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data = byteBufferBlob.array();
blobContentSize = data.length;
blobSize = (int) MessageFormatRecord.Blob_Format_V2.getBlobRecordSize(blobContentSize);
}
ByteBufferInputStream stream = new ByteBufferInputStream(ByteBuffer.wrap(data));
MessageFormatInputStream messageFormatStream = (blobVersion == MessageFormatRecord.Blob_Version_V2) ? new PutMessageFormatInputStream(key, ByteBuffer.wrap(encryptionKey), prop, ByteBuffer.wrap(usermetadata), stream, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key, prop, ByteBuffer.wrap(usermetadata), stream, blobContentSize, blobType);
int headerSize = MessageFormatRecord.getHeaderSizeForVersion(useV2Header ? MessageFormatRecord.Message_Header_Version_V2 : MessageFormatRecord.Message_Header_Version_V1);
int blobEncryptionKeySize = useV2Header ? MessageFormatRecord.BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey)) : 0;
int blobPropertiesRecordSize = MessageFormatRecord.BlobProperties_Format_V1.getBlobPropertiesRecordSize(prop);
int userMetadataSize = MessageFormatRecord.UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(usermetadata));
Assert.assertEquals(messageFormatStream.getSize(), headerSize + blobEncryptionKeySize + blobPropertiesRecordSize + userMetadataSize + blobSize + key.sizeInBytes());
// verify header
byte[] headerOutput = new byte[headerSize];
messageFormatStream.read(headerOutput);
ByteBuffer headerBuf = ByteBuffer.wrap(headerOutput);
Assert.assertEquals(useV2Header ? MessageFormatRecord.Message_Header_Version_V2 : MessageFormatRecord.Message_Header_Version_V1, headerBuf.getShort());
Assert.assertEquals(blobEncryptionKeySize + blobPropertiesRecordSize + userMetadataSize + blobSize, headerBuf.getLong());
if (useV2Header) {
Assert.assertEquals(headerSize + key.sizeInBytes(), headerBuf.getInt());
Assert.assertEquals(headerSize + key.sizeInBytes() + blobEncryptionKeySize, headerBuf.getInt());
Assert.assertEquals(MessageFormatRecord.Message_Header_Invalid_Relative_Offset, headerBuf.getInt());
Assert.assertEquals(headerSize + key.sizeInBytes() + blobEncryptionKeySize + blobPropertiesRecordSize, headerBuf.getInt());
Assert.assertEquals(headerSize + key.sizeInBytes() + blobEncryptionKeySize + blobPropertiesRecordSize + userMetadataSize, headerBuf.getInt());
} else {
Assert.assertEquals(headerSize + key.sizeInBytes(), headerBuf.getInt());
Assert.assertEquals(MessageFormatRecord.Message_Header_Invalid_Relative_Offset, headerBuf.getInt());
Assert.assertEquals(headerSize + key.sizeInBytes() + blobPropertiesRecordSize, headerBuf.getInt());
Assert.assertEquals(headerSize + key.sizeInBytes() + blobPropertiesRecordSize + userMetadataSize, headerBuf.getInt());
}
Crc32 crc = new Crc32();
crc.update(headerOutput, 0, headerSize - MessageFormatRecord.Crc_Size);
Assert.assertEquals(crc.getValue(), headerBuf.getLong());
// verify handle
byte[] handleOutput = new byte[key.sizeInBytes()];
ByteBuffer handleOutputBuf = ByteBuffer.wrap(handleOutput);
messageFormatStream.read(handleOutput);
byte[] dest = new byte[key.sizeInBytes()];
handleOutputBuf.get(dest);
Assert.assertArrayEquals(dest, key.toBytes());
// verify encryption key
if (useV2Header) {
byte[] blobEncryptionKeyOutput = new byte[blobEncryptionKeySize];
ByteBuffer blobEncryptionKeyBuf = ByteBuffer.wrap(blobEncryptionKeyOutput);
messageFormatStream.read(blobEncryptionKeyOutput);
Assert.assertEquals(blobEncryptionKeyBuf.getShort(), MessageFormatRecord.Blob_Encryption_Key_V1);
Assert.assertEquals(blobEncryptionKeyBuf.getInt(), 100);
dest = new byte[100];
blobEncryptionKeyBuf.get(dest);
Assert.assertArrayEquals(dest, encryptionKey);
crc = new Crc32();
crc.update(blobEncryptionKeyOutput, 0, blobEncryptionKeySize - MessageFormatRecord.Crc_Size);
Assert.assertEquals(crc.getValue(), blobEncryptionKeyBuf.getLong());
}
// verify blob properties
byte[] blobPropertiesOutput = new byte[blobPropertiesRecordSize];
ByteBuffer blobPropertiesBuf = ByteBuffer.wrap(blobPropertiesOutput);
messageFormatStream.read(blobPropertiesOutput);
Assert.assertEquals(blobPropertiesBuf.getShort(), 1);
BlobProperties propOutput = BlobPropertiesSerDe.getBlobPropertiesFromStream(new DataInputStream(new ByteBufferInputStream(blobPropertiesBuf)));
Assert.assertEquals(10, propOutput.getBlobSize());
Assert.assertEquals("servid", propOutput.getServiceId());
Assert.assertEquals("AccountId mismatch", accountId, propOutput.getAccountId());
Assert.assertEquals("ContainerId mismatch", containerId, propOutput.getContainerId());
crc = new Crc32();
crc.update(blobPropertiesOutput, 0, blobPropertiesRecordSize - MessageFormatRecord.Crc_Size);
Assert.assertEquals(crc.getValue(), blobPropertiesBuf.getLong());
// verify user metadata
byte[] userMetadataOutput = new byte[userMetadataSize];
ByteBuffer userMetadataBuf = ByteBuffer.wrap(userMetadataOutput);
messageFormatStream.read(userMetadataOutput);
Assert.assertEquals(userMetadataBuf.getShort(), 1);
Assert.assertEquals(userMetadataBuf.getInt(), 1000);
dest = new byte[1000];
userMetadataBuf.get(dest);
Assert.assertArrayEquals(dest, usermetadata);
crc = new Crc32();
crc.update(userMetadataOutput, 0, userMetadataSize - MessageFormatRecord.Crc_Size);
Assert.assertEquals(crc.getValue(), userMetadataBuf.getLong());
// verify blob
CrcInputStream crcstream = new CrcInputStream(messageFormatStream);
DataInputStream streamData = new DataInputStream(crcstream);
Assert.assertEquals(streamData.readShort(), blobVersion);
if (blobVersion == MessageFormatRecord.Blob_Version_V2) {
Assert.assertEquals(streamData.readShort(), blobType.ordinal());
}
Assert.assertEquals(streamData.readLong(), blobContentSize);
for (int i = 0; i < blobContentSize; i++) {
Assert.assertEquals((byte) streamData.read(), data[i]);
}
long crcVal = crcstream.getValue();
Assert.assertEquals(crcVal, streamData.readLong());
// Verify Blob All
stream = new ByteBufferInputStream(ByteBuffer.wrap(data));
messageFormatStream = (blobVersion == MessageFormatRecord.Blob_Version_V2) ? new PutMessageFormatInputStream(key, ByteBuffer.wrap(encryptionKey), prop, ByteBuffer.wrap(usermetadata), stream, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key, prop, ByteBuffer.wrap(usermetadata), stream, blobContentSize, blobType);
int totalSize;
if (useV2Header) {
totalSize = headerSize + key.sizeInBytes() + blobEncryptionKeySize + blobPropertiesRecordSize + userMetadataSize + (int) blobSize;
} else {
totalSize = headerSize + key.sizeInBytes() + blobPropertiesRecordSize + userMetadataSize + (int) blobSize;
}
ByteBuffer allBuf = ByteBuffer.allocate(totalSize);
messageFormatStream.read(allBuf.array());
BlobAll blobAll = MessageFormatRecord.deserializeBlobAll(new ByteBufferInputStream(allBuf), keyFactory);
Assert.assertEquals(key, blobAll.getStoreKey());
Assert.assertArrayEquals(usermetadata, blobAll.getBlobInfo().getUserMetadata());
Assert.assertEquals(blobContentSize, blobAll.getBlobData().getSize());
Assert.assertEquals(blobType, blobAll.getBlobData().getBlobType());
if (useV2Header) {
Assert.assertEquals(ByteBuffer.wrap(encryptionKey), blobAll.getBlobEncryptionKey());
} else {
Assert.assertEquals(null, blobAll.getBlobEncryptionKey());
}
Assert.assertEquals(ByteBuffer.wrap(data), blobAll.getBlobData().getStream().getByteBuffer());
}
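messageFormatRecordsTest is a private helper parameterized by blob version, blob type and header version; the @Test entry point that drives it is not part of this excerpt. A hedged sketch of such a driver, with an illustrative (not necessarily the project's) set of combinations, might be:
// Hypothetical driver; the real @Test methods of MessageFormatInputStreamTest are not shown here.
@Test
public void messageFormatRecordsTestDriver() throws IOException, MessageFormatException {
  messageFormatRecordsTest(MessageFormatRecord.Blob_Version_V1, BlobType.DataBlob, false);
  messageFormatRecordsTest(MessageFormatRecord.Blob_Version_V2, BlobType.DataBlob, true);
  messageFormatRecordsTest(MessageFormatRecord.Blob_Version_V2, BlobType.MetadataBlob, true);
}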
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class MessageSievingInputStreamTest, method testValidBlobs.
public void testValidBlobs(short blobVersion, BlobType blobType) throws IOException, MessageFormatException {
// MessageSievingInputStream contains put records for valid blobs only:
// id1, id2 and id3 (header V1), plus, when blobVersion is Blob_Version_V2, id4 (header V2, with encryption key) and id5 (header V2, without encryption key)
headerVersionToUse = Message_Header_Version_V1;
byte[] encryptionKey = new byte[100];
TestUtils.RANDOM.nextBytes(encryptionKey);
// create message stream for blob 1
StoreKey key1 = new MockId("id1");
short accountId1 = Utils.getRandomShort(TestUtils.RANDOM);
short containerId1 = Utils.getRandomShort(TestUtils.RANDOM);
BlobProperties prop1 = new BlobProperties(10, "servid1", accountId1, containerId1, false);
byte[] usermetadata1 = new byte[1000];
TestUtils.RANDOM.nextBytes(usermetadata1);
int blobContentSize = 2000;
byte[] data1 = new byte[blobContentSize];
TestUtils.RANDOM.nextBytes(data1);
long blobSize = -1;
if (blobVersion == Blob_Version_V1) {
blobSize = Blob_Format_V1.getBlobRecordSize(blobContentSize);
} else if (blobVersion == Blob_Version_V2 && blobType == BlobType.DataBlob) {
blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
} else if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data1 = byteBufferBlob.array();
blobContentSize = data1.length;
blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
}
ByteBufferInputStream stream1 = new ByteBufferInputStream(ByteBuffer.wrap(data1));
MessageFormatInputStream messageFormatStream1 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key1, ByteBuffer.wrap(encryptionKey), prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key1, prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType);
MessageInfo msgInfo1 = new MessageInfo(key1, messageFormatStream1.getSize(), accountId1, containerId1, prop1.getCreationTimeInMs());
// create message stream for blob 2
StoreKey key2 = new MockId("id2");
short accountId2 = Utils.getRandomShort(TestUtils.RANDOM);
short containerId2 = Utils.getRandomShort(TestUtils.RANDOM);
BlobProperties prop2 = new BlobProperties(10, "servid2", accountId2, containerId2, false);
byte[] usermetadata2 = new byte[1000];
TestUtils.RANDOM.nextBytes(usermetadata2);
blobContentSize = 2000;
byte[] data2 = new byte[blobContentSize];
TestUtils.RANDOM.nextBytes(data2);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data2 = byteBufferBlob.array();
blobContentSize = data2.length;
}
ByteBufferInputStream stream2 = new ByteBufferInputStream(ByteBuffer.wrap(data2));
MessageFormatInputStream messageFormatStream2 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key2, ByteBuffer.wrap(encryptionKey), prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key2, prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType);
MessageInfo msgInfo2 = new MessageInfo(key2, messageFormatStream2.getSize(), accountId2, containerId2, prop2.getCreationTimeInMs());
// create message stream for blob 3
StoreKey key3 = new MockId("id3");
short accountId3 = Utils.getRandomShort(TestUtils.RANDOM);
short containerId3 = Utils.getRandomShort(TestUtils.RANDOM);
BlobProperties prop3 = new BlobProperties(10, "servid3", accountId3, containerId3, false);
byte[] usermetadata3 = new byte[1000];
TestUtils.RANDOM.nextBytes(usermetadata3);
blobContentSize = 2000;
byte[] data3 = new byte[blobContentSize];
TestUtils.RANDOM.nextBytes(data3);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data3 = byteBufferBlob.array();
blobContentSize = data3.length;
}
ByteBufferInputStream stream3 = new ByteBufferInputStream(ByteBuffer.wrap(data3));
MessageFormatInputStream messageFormatStream3 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key3, ByteBuffer.wrap(encryptionKey), prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key3, prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType);
MessageInfo msgInfo3 = new MessageInfo(key3, messageFormatStream3.getSize(), accountId3, containerId3, prop3.getCreationTimeInMs());
MessageInfo msgInfo4 = null;
MessageFormatInputStream messageFormatStream4 = null;
MessageInfo msgInfo5 = null;
MessageFormatInputStream messageFormatStream5 = null;
// create message stream for blob 4. Header version 2, with encryption key.
StoreKey key4 = new MockId("id4");
short accountId4 = Utils.getRandomShort(TestUtils.RANDOM);
short containerId4 = Utils.getRandomShort(TestUtils.RANDOM);
BlobProperties prop4 = new BlobProperties(10, "servid4", accountId4, containerId4, false);
byte[] usermetadata4 = new byte[1000];
TestUtils.RANDOM.nextBytes(usermetadata4);
blobContentSize = 2000;
byte[] data4 = new byte[blobContentSize];
TestUtils.RANDOM.nextBytes(data4);
if (blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data4 = byteBufferBlob.array();
blobContentSize = data4.length;
}
if (blobVersion == Blob_Version_V2) {
ByteBufferInputStream stream4 = new ByteBufferInputStream(ByteBuffer.wrap(data4));
headerVersionToUse = Message_Header_Version_V2;
messageFormatStream4 = new PutMessageFormatInputStream(key4, ByteBuffer.wrap(encryptionKey), prop4, ByteBuffer.wrap(usermetadata4), stream4, blobContentSize, blobType);
msgInfo4 = new MessageInfo(key4, messageFormatStream4.getSize(), accountId4, containerId4, prop4.getCreationTimeInMs());
}
// create message stream for blob 5. Header version 2, without encryption key.
StoreKey key5 = new MockId("id5");
short accountId5 = Utils.getRandomShort(TestUtils.RANDOM);
short containerId5 = Utils.getRandomShort(TestUtils.RANDOM);
BlobProperties prop5 = new BlobProperties(10, "servid5", accountId5, containerId5, false);
byte[] usermetadata5 = new byte[1000];
TestUtils.RANDOM.nextBytes(usermetadata5);
blobContentSize = 2000;
byte[] data5 = new byte[blobContentSize];
TestUtils.RANDOM.nextBytes(data5);
if (blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data5 = byteBufferBlob.array();
blobContentSize = data5.length;
}
if (blobVersion == Blob_Version_V2) {
ByteBufferInputStream stream5 = new ByteBufferInputStream(ByteBuffer.wrap(data5));
headerVersionToUse = Message_Header_Version_V2;
messageFormatStream5 = new PutMessageFormatInputStream(key5, null, prop5, ByteBuffer.wrap(usermetadata5), stream5, blobContentSize, blobType);
msgInfo5 = new MessageInfo(key5, messageFormatStream5.getSize(), accountId5, containerId5, prop5.getCreationTimeInMs());
}
// create input stream for all blob messages together
byte[] totalMessageStreamContent = new byte[(int) (messageFormatStream1.getSize() + messageFormatStream2.getSize() + messageFormatStream3.getSize() + (blobVersion == Blob_Version_V2 ? messageFormatStream4.getSize() + messageFormatStream5.getSize() : 0))];
messageFormatStream1.read(totalMessageStreamContent, 0, (int) messageFormatStream1.getSize());
messageFormatStream2.read(totalMessageStreamContent, (int) messageFormatStream1.getSize(), (int) messageFormatStream2.getSize());
messageFormatStream3.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(), (int) messageFormatStream3.getSize());
if (blobVersion == Blob_Version_V2) {
messageFormatStream4.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize(), (int) messageFormatStream4.getSize());
messageFormatStream5.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize() + (int) messageFormatStream4.getSize(), (int) messageFormatStream5.getSize());
}
InputStream inputStream = new ByteBufferInputStream(ByteBuffer.wrap(totalMessageStreamContent));
List<MessageInfo> msgInfoList = new ArrayList<MessageInfo>();
msgInfoList.add(msgInfo1);
msgInfoList.add(msgInfo2);
msgInfoList.add(msgInfo3);
if (blobVersion == Blob_Version_V2) {
msgInfoList.add(msgInfo4);
msgInfoList.add(msgInfo5);
}
MessageSievingInputStream validMessageDetectionInputStream = new MessageSievingInputStream(inputStream, msgInfoList, new MockIdFactory(), new MetricRegistry());
int headerSizeV1 = MessageHeader_Format_V1.getHeaderSize();
int headerSizeV2 = MessageHeader_Format_V2.getHeaderSize();
int blobPropertiesRecordSize = BlobProperties_Format_V1.getBlobPropertiesRecordSize(prop1);
int userMetadataSize = UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(usermetadata1));
int totalHeadSize = 3 * headerSizeV1;
int totalEncryptionRecordSize = 0;
int totalBlobPropertiesSize = 3 * blobPropertiesRecordSize;
int totalUserMetadataSize = 3 * userMetadataSize;
int totalBlobSize = 3 * (int) blobSize;
int totalKeySize = key1.sizeInBytes() + key2.sizeInBytes() + key3.sizeInBytes();
int encryptionRecordSize = BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey));
if (blobVersion == Blob_Version_V2) {
totalHeadSize += 2 * headerSizeV2;
totalEncryptionRecordSize += encryptionRecordSize;
totalBlobPropertiesSize += 2 * blobPropertiesRecordSize;
totalUserMetadataSize += 2 * userMetadataSize;
totalBlobSize += 2 * (int) blobSize;
totalKeySize += key4.sizeInBytes() + key5.sizeInBytes();
}
Assert.assertEquals(validMessageDetectionInputStream.getSize(), totalHeadSize + totalEncryptionRecordSize + totalBlobPropertiesSize + totalUserMetadataSize + totalBlobSize + totalKeySize);
Assert.assertEquals(validMessageDetectionInputStream.getSize(), messageFormatStream1.getSize() + messageFormatStream2.getSize() + messageFormatStream3.getSize() + (blobVersion == Blob_Version_V2 ? messageFormatStream4.getSize() + messageFormatStream5.getSize() : 0));
Assert.assertEquals(true, verifyBlob(validMessageDetectionInputStream, Message_Header_Version_V1, headerSizeV1, 0, blobPropertiesRecordSize, userMetadataSize, (int) blobSize, key1, 10, "servid1", accountId1, containerId1, null, usermetadata1, data1, blobVersion, blobType));
Assert.assertEquals(true, verifyBlob(validMessageDetectionInputStream, Message_Header_Version_V1, headerSizeV1, 0, blobPropertiesRecordSize, userMetadataSize, (int) blobSize, key2, 10, "servid2", accountId2, containerId2, null, usermetadata2, data2, blobVersion, blobType));
Assert.assertEquals(true, verifyBlob(validMessageDetectionInputStream, Message_Header_Version_V1, headerSizeV1, 0, blobPropertiesRecordSize, userMetadataSize, (int) blobSize, key3, 10, "servid3", accountId3, containerId3, null, usermetadata3, data3, blobVersion, blobType));
if (blobVersion == Blob_Version_V2) {
Assert.assertEquals(true, verifyBlob(validMessageDetectionInputStream, Message_Header_Version_V2, headerSizeV2, encryptionRecordSize, blobPropertiesRecordSize, userMetadataSize, (int) blobSize, key4, 10, "servid4", accountId4, containerId4, encryptionKey, usermetadata4, data4, blobVersion, blobType));
Assert.assertEquals(true, verifyBlob(validMessageDetectionInputStream, Message_Header_Version_V2, headerSizeV2, 0, blobPropertiesRecordSize, userMetadataSize, (int) blobSize, key5, 10, "servid5", accountId5, containerId5, null, usermetadata5, data5, blobVersion, blobType));
}
}
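testValidBlobs is itself parameterized by blob version and blob type, so it also needs a JUnit entry point. A sketch under the same caveat (the combinations are illustrative and the real entry point is not shown in this excerpt):
// Hypothetical driver for the parameterized helper above.
@Test
public void validBlobsTest() throws IOException, MessageFormatException {
  testValidBlobs(Blob_Version_V1, BlobType.DataBlob);
  testValidBlobs(Blob_Version_V2, BlobType.DataBlob);
  testValidBlobs(Blob_Version_V2, BlobType.MetadataBlob);
}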
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class MessageSievingInputStreamTest, method testInValidBlobs.
private void testInValidBlobs(short blobVersion, BlobType blobType) throws IOException, MessageFormatException {
headerVersionToUse = Message_Header_Version_V1;
// MessageSievingInputStream contains put records for 2 valid blobs and 1 corrupt blob
// id1(put record for valid blob), id2(corrupt) and id3(put record for valid blob)
// create message stream for blob 1
StoreKey key1 = new MockId("id1");
short accountId1 = Utils.getRandomShort(TestUtils.RANDOM);
short containerId1 = Utils.getRandomShort(TestUtils.RANDOM);
BlobProperties prop1 = new BlobProperties(10, "servid1", accountId1, containerId1, false);
byte[] encryptionKey1 = new byte[100];
TestUtils.RANDOM.nextBytes(encryptionKey1);
byte[] usermetadata1 = new byte[1000];
TestUtils.RANDOM.nextBytes(usermetadata1);
int blobContentSize = 2000;
byte[] data1 = new byte[blobContentSize];
TestUtils.RANDOM.nextBytes(data1);
long blobSize = -1;
if (blobVersion == Blob_Version_V1) {
blobSize = Blob_Format_V1.getBlobRecordSize(blobContentSize);
} else if (blobVersion == Blob_Version_V2 && blobType == BlobType.DataBlob) {
blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
} else if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data1 = byteBufferBlob.array();
blobContentSize = data1.length;
blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
} else {
Assert.fail("Illegal blob version " + blobVersion + " and type " + blobType);
}
ByteBufferInputStream stream1 = new ByteBufferInputStream(ByteBuffer.wrap(data1));
MessageFormatInputStream messageFormatStream1 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key1, ByteBuffer.wrap(encryptionKey1), prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key1, prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType);
MessageInfo msgInfo1 = new MessageInfo(key1, messageFormatStream1.getSize(), accountId1, containerId1, prop1.getCreationTimeInMs());
// create message stream for blob 2
StoreKey key2 = new MockId("id2");
short accountId2 = Utils.getRandomShort(TestUtils.RANDOM);
short containerId2 = Utils.getRandomShort(TestUtils.RANDOM);
BlobProperties prop2 = new BlobProperties(10, "servid2", accountId2, containerId2, false);
byte[] encryptionKey2 = new byte[100];
TestUtils.RANDOM.nextBytes(encryptionKey2);
byte[] usermetadata2 = new byte[1000];
TestUtils.RANDOM.nextBytes(usermetadata2);
blobContentSize = 2000;
byte[] data2 = new byte[blobContentSize];
TestUtils.RANDOM.nextBytes(data2);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data2 = byteBufferBlob.array();
blobContentSize = data2.length;
}
ByteBufferInputStream stream2 = new ByteBufferInputStream(ByteBuffer.wrap(data2));
MessageFormatInputStream messageFormatStream2 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key2, ByteBuffer.wrap(encryptionKey2), prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key2, prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType);
MessageInfo msgInfo2 = new MessageInfo(key2, messageFormatStream2.getSize(), accountId2, containerId2, prop2.getCreationTimeInMs());
// corrupt the message stream
byte[] corruptMessageStream = new byte[(int) messageFormatStream2.getSize()];
TestUtils.RANDOM.nextBytes(corruptMessageStream);
InputStream corruptStream = new ByteBufferInputStream(ByteBuffer.wrap(corruptMessageStream));
// create message stream for blob 3
StoreKey key3 = new MockId("id3");
short accountId3 = Utils.getRandomShort(TestUtils.RANDOM);
short containerId3 = Utils.getRandomShort(TestUtils.RANDOM);
BlobProperties prop3 = new BlobProperties(10, "servid3", accountId3, containerId3, false);
byte[] encryptionKey3 = new byte[100];
TestUtils.RANDOM.nextBytes(encryptionKey3);
byte[] usermetadata3 = new byte[1000];
TestUtils.RANDOM.nextBytes(usermetadata3);
blobContentSize = 2000;
byte[] data3 = new byte[blobContentSize];
TestUtils.RANDOM.nextBytes(data3);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data3 = byteBufferBlob.array();
blobContentSize = data3.length;
}
ByteBufferInputStream stream3 = new ByteBufferInputStream(ByteBuffer.wrap(data3));
MessageFormatInputStream messageFormatStream3 = (blobVersion == Blob_Version_V2) ? new PutMessageFormatInputStream(key3, ByteBuffer.wrap(encryptionKey3), prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType) : new PutMessageFormatBlobV1InputStream(key3, prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType);
MessageInfo msgInfo3 = new MessageInfo(key3, messageFormatStream3.getSize(), accountId3, containerId3, prop3.getCreationTimeInMs());
// create input stream for all blob messages together
byte[] totalMessageStreamContent = new byte[(int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize()];
messageFormatStream1.read(totalMessageStreamContent, 0, (int) messageFormatStream1.getSize());
corruptStream.read(totalMessageStreamContent, (int) messageFormatStream1.getSize(), (int) messageFormatStream2.getSize());
messageFormatStream3.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(), (int) messageFormatStream3.getSize());
InputStream inputStream = new ByteBufferInputStream(ByteBuffer.wrap(totalMessageStreamContent));
List<MessageInfo> msgInfoList = new ArrayList<MessageInfo>();
msgInfoList.add(msgInfo1);
msgInfoList.add(msgInfo2);
msgInfoList.add(msgInfo3);
MessageSievingInputStream validMessageDetectionInputStream = new MessageSievingInputStream(inputStream, msgInfoList, new MockIdFactory(), new MetricRegistry());
int headerSize = MessageHeader_Format_V1.getHeaderSize();
int blobPropertiesRecordSize = BlobProperties_Format_V1.getBlobPropertiesRecordSize(prop1);
int userMetadataSize = UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(usermetadata1));
int totalHeadSize = 2 * headerSize;
int totalBlobPropertiesSize = 2 * blobPropertiesRecordSize;
int totalUserMetadataSize = 2 * userMetadataSize;
int totalBlobSize = 2 * (int) blobSize;
int totalKeySize = key1.sizeInBytes() + key3.sizeInBytes();
Assert.assertEquals(validMessageDetectionInputStream.getSize(), totalHeadSize + totalBlobPropertiesSize + totalUserMetadataSize + totalBlobSize + totalKeySize);
Assert.assertEquals(validMessageDetectionInputStream.getSize(), messageFormatStream1.getSize() + messageFormatStream3.getSize());
Assert.assertEquals(true, verifyBlob(validMessageDetectionInputStream, Message_Header_Version_V1, headerSize, 0, blobPropertiesRecordSize, userMetadataSize, (int) blobSize, key1, 10, "servid1", accountId1, containerId1, null, usermetadata1, data1, blobVersion, blobType));
Assert.assertEquals(true, verifyBlob(validMessageDetectionInputStream, Message_Header_Version_V1, headerSize, 0, blobPropertiesRecordSize, userMetadataSize, (int) blobSize, key3, 10, "servid3", accountId3, containerId3, null, usermetadata3, data3, blobVersion, blobType));
}
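Beyond the size assertions and the byte-level verifyBlob checks, the same outcome (the corrupt id2 being dropped) could be confirmed by deserializing the sieved stream message by message, as in the sketch below. This assumes MessageFormatRecord.deserializeBlobAll consumes exactly one full message per call, as it does for the single-message buffer in messageFormatRecordsTest above; it is illustrative, not project code.
// Sketch: only id1 and id3 should survive sieving; the corrupt id2 is dropped.
BlobAll first = MessageFormatRecord.deserializeBlobAll(validMessageDetectionInputStream, new MockIdFactory());
Assert.assertEquals(key1, first.getStoreKey());
BlobAll second = MessageFormatRecord.deserializeBlobAll(validMessageDetectionInputStream, new MockIdFactory());
Assert.assertEquals(key3, second.getStoreKey());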
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class MessageReadSetIndexInputStream, method calculateOffsets.
/**
* Calculates the offsets from the MessageReadSet that need to be sent over the network,
* based on the type of data requested as indicated by the flags.
*/
private void calculateOffsets() throws IOException, MessageFormatException {
try {
// get size
int messageCount = readSet.count();
// for each message, determine the offset and size that needs to be sent based on the flag
sendInfoList = new ArrayList<>(messageCount);
messageMetadataList = new ArrayList<>(messageCount);
logger.trace("Calculate offsets of messages for one partition, MessageFormatFlag : {} number of messages : {}", flag, messageCount);
for (int i = 0; i < messageCount; i++) {
if (flag == MessageFormatFlags.All) {
// just copy over the total size and use a relative offset of 0.
// We do not have to check any version in this case as we don't
// have to read any data to deserialize anything.
sendInfoList.add(i, new SendInfo(0, readSet.sizeInBytes(i)));
messageMetadataList.add(i, null);
totalSizeToWrite += readSet.sizeInBytes(i);
} else {
// read header version
long startTime = SystemTime.getInstance().milliseconds();
ByteBuffer headerVersion = ByteBuffer.allocate(Version_Field_Size_In_Bytes);
readSet.writeTo(i, Channels.newChannel(new ByteBufferOutputStream(headerVersion)), 0, Version_Field_Size_In_Bytes);
logger.trace("Calculate offsets, read header version time: {}", SystemTime.getInstance().milliseconds() - startTime);
headerVersion.flip();
short version = headerVersion.getShort();
if (!isValidHeaderVersion(version)) {
throw new MessageFormatException("Version not known while reading message - version " + version + ", StoreKey " + readSet.getKeyAt(i), MessageFormatErrorCodes.Unknown_Format_Version);
}
ByteBuffer header = ByteBuffer.allocate(getHeaderSizeForVersion(version));
// read the header
startTime = SystemTime.getInstance().milliseconds();
headerVersion.clear();
header.putShort(headerVersion.getShort());
readSet.writeTo(i, Channels.newChannel(new ByteBufferOutputStream(header)), Version_Field_Size_In_Bytes, header.capacity() - Version_Field_Size_In_Bytes);
logger.trace("Calculate offsets, read header time: {}", SystemTime.getInstance().milliseconds() - startTime);
startTime = SystemTime.getInstance().milliseconds();
header.flip();
MessageHeader_Format headerFormat = getMessageHeader(version, header);
headerFormat.verifyHeader();
int storeKeyRelativeOffset = header.capacity();
StoreKey storeKey = storeKeyFactory.getStoreKey(new DataInputStream(new MessageReadSetIndexInputStream(readSet, i, storeKeyRelativeOffset)));
if (storeKey.compareTo(readSet.getKeyAt(i)) != 0) {
throw new MessageFormatException("Id mismatch between metadata and store - metadataId " + readSet.getKeyAt(i) + " storeId " + storeKey, MessageFormatErrorCodes.Store_Key_Id_MisMatch);
}
logger.trace("Calculate offsets, verify header time: {}", SystemTime.getInstance().milliseconds() - startTime);
startTime = SystemTime.getInstance().milliseconds();
if (flag == MessageFormatFlags.BlobProperties) {
sendInfoList.add(i, new SendInfo(headerFormat.getBlobPropertiesRecordRelativeOffset(), headerFormat.getBlobPropertiesRecordSize()));
messageMetadataList.add(null);
totalSizeToWrite += headerFormat.getBlobPropertiesRecordSize();
logger.trace("Calculate offsets, get total size of blob properties time: {}", SystemTime.getInstance().milliseconds() - startTime);
logger.trace("Sending blob properties for message relativeOffset : {} size : {}", sendInfoList.get(i).relativeOffset(), sendInfoList.get(i).sizetoSend());
} else if (flag == MessageFormatFlags.BlobUserMetadata) {
messageMetadataList.add(headerFormat.hasEncryptionKeyRecord() ? new MessageMetadata(extractEncryptionKey(i, headerFormat.getBlobEncryptionKeyRecordRelativeOffset(), headerFormat.getBlobEncryptionKeyRecordSize())) : null);
sendInfoList.add(i, new SendInfo(headerFormat.getUserMetadataRecordRelativeOffset(), headerFormat.getUserMetadataRecordSize()));
totalSizeToWrite += headerFormat.getUserMetadataRecordSize();
logger.trace("Calculate offsets, get total size of user metadata time: {}", SystemTime.getInstance().milliseconds() - startTime);
logger.trace("Sending user metadata for message relativeOffset : {} size : {}", sendInfoList.get(i).relativeOffset(), sendInfoList.get(i).sizetoSend());
} else if (flag == MessageFormatFlags.BlobInfo) {
messageMetadataList.add(headerFormat.hasEncryptionKeyRecord() ? new MessageMetadata(extractEncryptionKey(i, headerFormat.getBlobEncryptionKeyRecordRelativeOffset(), headerFormat.getBlobEncryptionKeyRecordSize())) : null);
sendInfoList.add(i, new SendInfo(headerFormat.getBlobPropertiesRecordRelativeOffset(), headerFormat.getBlobPropertiesRecordSize() + headerFormat.getUserMetadataRecordSize()));
totalSizeToWrite += headerFormat.getBlobPropertiesRecordSize() + headerFormat.getUserMetadataRecordSize();
logger.trace("Calculate offsets, get total size of blob info time: {}", SystemTime.getInstance().milliseconds() - startTime);
logger.trace("Sending blob info (blob properties + user metadata) for message relativeOffset : {} " + "size : {}", sendInfoList.get(i).relativeOffset(), sendInfoList.get(i).sizetoSend());
} else if (flag == MessageFormatFlags.Blob) {
messageMetadataList.add(headerFormat.hasEncryptionKeyRecord() ? new MessageMetadata(extractEncryptionKey(i, headerFormat.getBlobEncryptionKeyRecordRelativeOffset(), headerFormat.getBlobEncryptionKeyRecordSize())) : null);
sendInfoList.add(i, new SendInfo(headerFormat.getBlobRecordRelativeOffset(), headerFormat.getBlobRecordSize()));
totalSizeToWrite += headerFormat.getBlobRecordSize();
logger.trace("Calculate offsets, get total size of blob time: {}", SystemTime.getInstance().milliseconds() - startTime);
logger.trace("Sending data for message relativeOffset : {} size : {}", sendInfoList.get(i).relativeOffset(), sendInfoList.get(i).sizetoSend());
} else {
throw new MessageFormatException("Unknown flag in request " + flag, MessageFormatErrorCodes.IO_Error);
}
}
}
} catch (IOException e) {
logger.trace("IOError when calculating offsets");
throw new MessageFormatException("IOError when calculating offsets ", e, MessageFormatErrorCodes.IO_Error);
}
}
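Every non-All branch above reduces to picking a single {relativeOffset, size} span out of the parsed header. The hypothetical helper below condenses that selection using only the MessageHeader_Format getters already used above; it is not part of the project (the real method also records MessageMetadata and accumulates totalSizeToWrite), and the MessageFormatFlags.All case is omitted because it is handled before the header is even read.
// Hypothetical condensation of the flag handling above; returns {relativeOffset, size}.
private static long[] spanFor(MessageFormatFlags flag, MessageHeader_Format headerFormat) throws MessageFormatException {
  if (flag == MessageFormatFlags.BlobProperties) {
    return new long[]{headerFormat.getBlobPropertiesRecordRelativeOffset(), headerFormat.getBlobPropertiesRecordSize()};
  } else if (flag == MessageFormatFlags.BlobUserMetadata) {
    return new long[]{headerFormat.getUserMetadataRecordRelativeOffset(), headerFormat.getUserMetadataRecordSize()};
  } else if (flag == MessageFormatFlags.BlobInfo) {
    // Blob properties and user metadata are contiguous in the put record, so one span covers both.
    return new long[]{headerFormat.getBlobPropertiesRecordRelativeOffset(), headerFormat.getBlobPropertiesRecordSize() + headerFormat.getUserMetadataRecordSize()};
  } else if (flag == MessageFormatFlags.Blob) {
    return new long[]{headerFormat.getBlobRecordRelativeOffset(), headerFormat.getBlobRecordSize()};
  } else {
    throw new MessageFormatException("Unknown flag in request " + flag, MessageFormatErrorCodes.IO_Error);
  }
}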