Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class MessageFormatSendTest, method messageReadSetIndexInputStreamTestException.
/**
* Tests exception cases for {@link MessageReadSetIndexInputStream}.
* IndexOutOfBoundsException should be thrown if offset or length is invalid.
* 0 is expected if length requested is 0.
* -1 is expected if no more data available.
*/
@Test
public void messageReadSetIndexInputStreamTestException() throws Exception {
ArrayList<ByteBuffer> listBuf = new ArrayList<ByteBuffer>();
byte[] buf = new byte[1024];
new Random().nextBytes(buf);
listBuf.add(ByteBuffer.wrap(buf));
ArrayList<StoreKey> storeKeys = new ArrayList<StoreKey>();
storeKeys.add(new MockId("012345678910123223233456789012"));
MessageReadSet readSet = new MockMessageReadSet(listBuf, storeKeys);
MessageReadSetIndexInputStream stream = new MessageReadSetIndexInputStream(readSet, 0, 0);
byte[] bufOutput = new byte[1024];
Assert.assertEquals("Should return 0 if length requested is 0", 0, stream.read(bufOutput, 0, 0));
Assert.assertEquals("Should return 0 if length requested is 0", 0, stream.read(bufOutput, 1, 0));
try {
stream.read(bufOutput, -1, 10);
Assert.fail("IndexOutOfBoundsException is expected.");
} catch (IndexOutOfBoundsException e) {
// expected
}
try {
stream.read(bufOutput, 0, -1);
Assert.fail("IndexOutOfBoundsException is expected.");
} catch (IndexOutOfBoundsException e) {
// expected
}
try {
stream.read(bufOutput, 1, 1024);
Assert.fail("IndexOutOfBoundsException is expected.");
} catch (IndexOutOfBoundsException e) {
// expected
}
stream.read(bufOutput, 0, 1024);
Assert.assertArrayEquals("Output doesn't match", buf, bufOutput);
Assert.assertEquals("Should return -1 if no more data", -1, stream.read());
Assert.assertEquals("Should return -1 if no more data", -1, stream.read(bufOutput, 0, 1));
}
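The behaviour asserted above follows the general java.io.InputStream contract for read(byte[], int, int). The following is a minimal sketch of that validation logic, not Ambry's actual implementation; bytesRemaining() and copyFromReadSet() are hypothetical helpers used only to keep the sketch short.
// Sketch of the argument checks an InputStream subclass is expected to perform; hypothetical
// helpers, shown only to make the contract exercised by the test explicit.
public int read(byte[] b, int off, int len) throws IOException {
  if (b == null) {
    throw new NullPointerException();
  }
  if (off < 0 || len < 0 || len > b.length - off) {
    // covers read(buf, -1, 10), read(buf, 0, -1) and read(buf, 1, 1024) above
    throw new IndexOutOfBoundsException();
  }
  if (len == 0) {
    // zero-length request: nothing is consumed
    return 0;
  }
  if (bytesRemaining() == 0) {
    // end of the message: signal EOF
    return -1;
  }
  // read at most the number of bytes still available in the message
  return copyFromReadSet(b, off, Math.min(len, bytesRemaining()));
}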
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class MessageFormatSendTest, method messageReadSetIndexInputStreamTest.
/**
* Test {@link MessageReadSetIndexInputStream} with different offsets and lengths.
*/
@Test
public void messageReadSetIndexInputStreamTest() throws Exception {
ArrayList<ByteBuffer> listbuf = new ArrayList<ByteBuffer>();
byte[] buf1 = new byte[1024];
byte[] buf2 = new byte[2048];
byte[] buf3 = new byte[4096];
new Random().nextBytes(buf1);
new Random().nextBytes(buf2);
new Random().nextBytes(buf3);
listbuf.add(ByteBuffer.wrap(buf1));
listbuf.add(ByteBuffer.wrap(buf2));
listbuf.add(ByteBuffer.wrap(buf3));
ArrayList<StoreKey> storeKeys = new ArrayList<StoreKey>();
storeKeys.add(new MockId("012345678910123223233456789012"));
storeKeys.add(new MockId("012345678910123223233456789013"));
storeKeys.add(new MockId("012345678910123223233456789014"));
MessageReadSet readSet = new MockMessageReadSet(listbuf, storeKeys);
MessageReadSetIndexInputStream stream1 = new MessageReadSetIndexInputStream(readSet, 0, 0);
byte[] buf1Output = new byte[1024];
Assert.assertEquals("Number of bytes read doesn't match", 1024, stream1.read(buf1Output, 0, 1024));
Assert.assertArrayEquals(buf1, buf1Output);
MessageReadSetIndexInputStream stream2 = new MessageReadSetIndexInputStream(readSet, 1, 1024);
byte[] buf2Output = new byte[1025];
Assert.assertEquals("Number of bytes read doesn't match", 512, stream2.read(buf2Output, 0, 512));
Assert.assertEquals("Number of bytes read doesn't match", 512, stream2.read(buf2Output, 512, 513));
for (int i = 0; i < 1024; i++) {
Assert.assertEquals(buf2[i + 1024], buf2Output[i]);
}
MessageReadSetIndexInputStream stream3 = new MessageReadSetIndexInputStream(readSet, 2, 2048);
byte[] buf3Output = new byte[2048];
for (int i = 0; i < 2048; i++) {
Assert.assertEquals(buf3[i + 2048], (byte) stream3.read());
}
Assert.assertEquals("Should return -1 if no more data available", -1, stream3.read(buf3Output, 0, 1));
Assert.assertEquals("Should return -1 if no more data available", -1, stream3.read());
}
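Conceptually, MessageReadSetIndexInputStream reads from a single message in the read set, identified by its index, starting at a relative offset. The following simplified model, a hypothetical helper class rather than the real implementation, shows the mapping the assertions above rely on, e.g. index 1 with offset 1024 yielding buf2[1024..2047].
import java.nio.ByteBuffer;
import java.util.List;

// Simplified, hypothetical model of reading one message of a read set by (index, startOffset).
class IndexedBufferReader {
  private final List<ByteBuffer> messages;
  private final int index;
  private int position;

  IndexedBufferReader(List<ByteBuffer> messages, int index, int startOffset) {
    this.messages = messages;
    this.index = index;
    this.position = startOffset;
  }

  // Returns the next byte of the message at `index`, or -1 once that message is exhausted.
  int read() {
    ByteBuffer message = messages.get(index);
    if (position >= message.limit()) {
      return -1;
    }
    return message.get(position++) & 0xFF;
  }
}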
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class MessageFormatSendTest, method sendWriteTestWithBadId.
@Test
public void sendWriteTestWithBadId() throws IOException, MessageFormatException {
// add header, system metadata, user metadata and data to the buffers
ByteBuffer buf1 = ByteBuffer.allocate(1010);
// fill header
// version
buf1.putShort((short) 1);
// total size
buf1.putLong(950);
// put relative offsets
// blob property relative offset
buf1.putInt(60);
// delete relative offset
buf1.putInt(-1);
// user metadata relative offset
buf1.putInt(81);
// data relative offset
buf1.putInt(191);
Crc32 crc = new Crc32();
crc.update(buf1.array(), 0, buf1.position());
// crc
buf1.putLong(crc.getValue());
// blob id
String id = "012345678910123456789012";
buf1.putShort((short) id.length());
buf1.put(id.getBytes());
// blob property version
buf1.putShort((short) 1);
String attribute1 = "ttl";
String attribute2 = "del";
// ttl name
buf1.put(attribute1.getBytes());
// ttl value
buf1.putLong(12345);
// delete name
buf1.put(attribute2.getBytes());
byte b = 1;
// delete flag
buf1.put(b);
// blob property record crc (dummy value)
buf1.putInt(456);
// user metadata version
buf1.putShort((short) 1);
// user metadata size
buf1.putInt(100);
byte[] usermetadata = new byte[100];
new Random().nextBytes(usermetadata);
buf1.put(usermetadata);
// user metadata record crc (dummy value)
buf1.putInt(123);
// blob version
buf1.putShort((short) 0);
// blob size
buf1.putLong(805);
// blob
byte[] data = new byte[805];
new Random().nextBytes(data);
buf1.put(data);
// blob crc
buf1.putInt(123);
buf1.flip();
ArrayList<ByteBuffer> listbuf = new ArrayList<ByteBuffer>();
listbuf.add(buf1);
ArrayList<StoreKey> storeKeys = new ArrayList<StoreKey>();
storeKeys.add(new MockId("012345678910123223233456789012"));
MessageReadSet readSet = new MockMessageReadSet(listbuf, storeKeys);
MetricRegistry registry = new MetricRegistry();
MessageFormatMetrics metrics = new MessageFormatMetrics(registry);
// get all
MessageFormatSend send = new MessageFormatSend(readSet, MessageFormatFlags.All, metrics, new MockIdFactory());
Assert.assertEquals(1010, send.sizeInBytes());
ByteBuffer bufresult = ByteBuffer.allocate(1010);
WritableByteChannel channel1 = Channels.newChannel(new ByteBufferOutputStream(bufresult));
while (!send.isSendComplete()) {
send.writeTo(channel1);
}
Assert.assertArrayEquals(buf1.array(), bufresult.array());
try {
// get blob: expected to fail because the id serialized in the record ("012345678910123456789012")
// does not match the store key registered in the read set ("012345678910123223233456789012")
new MessageFormatSend(readSet, MessageFormatFlags.Blob, metrics, new MockIdFactory());
Assert.fail("Exception is expected");
} catch (MessageFormatException e) {
Assert.assertEquals(MessageFormatErrorCodes.Store_Key_Id_MisMatch, e.getErrorCode());
}
}
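The magic numbers written into the header above can be reconstructed from the field widths used in this test. The following is a plain-arithmetic breakdown; the constants are derived from the put calls in the test itself, not from Ambry's format classes.
// Worked breakdown of the offsets and sizes used in sendWriteTestWithBadId.
static void explainHeaderNumbers() {
  int header = 2 + 8 + 4 * 4 + 8;                  // version, total size, four relative offsets, crc = 34
  int blobId = 2 + 24;                             // key length + 24 key characters = 26
  int blobPropertyRecord = 2 + 3 + 8 + 3 + 1 + 4;  // version, "ttl", long, "del", flag, crc = 21
  int userMetadataRecord = 2 + 4 + 100 + 4;        // version, size, 100 metadata bytes, crc = 110
  int blobRecord = 2 + 8 + 805 + 4;                // version, size, 805 data bytes, crc = 819
  assert header + blobId == 60;                                             // blob property relative offset
  assert header + blobId + blobPropertyRecord == 81;                        // user metadata relative offset
  assert header + blobId + blobPropertyRecord + userMetadataRecord == 191;  // data relative offset
  assert blobPropertyRecord + userMetadataRecord + blobRecord == 950;       // the "total size" field
  assert header + blobId + 950 == 1010;                                     // the allocated buffer size
}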
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class ValidatingKeyConvertingTransformer, method testDeprecatedMsg.
private void testDeprecatedMsg(short blobVersion, BlobType blobType, short headerVersionToUse) throws Exception {
MessageFormatRecord.headerVersionToUse = headerVersionToUse;
// MessageSievingInputStream contains put records for 2 valid blobs and 1 deprecated blob
// id1(put record for valid blob), id2(deprecated) and id3(put record for valid blob)
// create message stream for blob 1
StoreKey key1 = new MockId("id1");
short accountId1 = Utils.getRandomShort(RANDOM);
short containerId1 = Utils.getRandomShort(RANDOM);
BlobProperties prop1 = new BlobProperties(10, "servid1", accountId1, containerId1, false);
byte[] encryptionKey1 = new byte[100];
RANDOM.nextBytes(encryptionKey1);
byte[] usermetadata1 = new byte[1000];
RANDOM.nextBytes(usermetadata1);
int blobContentSize = 2000;
byte[] data1 = new byte[blobContentSize];
RANDOM.nextBytes(data1);
long blobSize = -1;
if (blobType == BlobType.DataBlob) {
blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
} else {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data1 = byteBufferBlob.array();
blobContentSize = data1.length;
blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
}
ByteBufferInputStream stream1 = new ByteBufferInputStream(ByteBuffer.wrap(data1));
MessageFormatInputStream messageFormatStream1 = (blobVersion == Blob_Version_V2)
    ? new PutMessageFormatInputStream(key1, ByteBuffer.wrap(encryptionKey1), prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType)
    : new PutMessageFormatBlobV1InputStream(key1, prop1, ByteBuffer.wrap(usermetadata1), stream1, blobContentSize, blobType);
MessageInfo msgInfo1 = new MessageInfo(key1, messageFormatStream1.getSize(), accountId1, containerId1, prop1.getCreationTimeInMs());
// create message stream for blob 2
StoreKey key2 = new MockId("id2");
short accountId2 = Utils.getRandomShort(RANDOM);
short containerId2 = Utils.getRandomShort(RANDOM);
BlobProperties prop2 = new BlobProperties(10, "servid2", accountId2, containerId2, false);
byte[] encryptionKey2 = new byte[100];
RANDOM.nextBytes(encryptionKey2);
byte[] usermetadata2 = new byte[1000];
RANDOM.nextBytes(usermetadata2);
blobContentSize = 2000;
byte[] data2 = new byte[blobContentSize];
RANDOM.nextBytes(data2);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data2 = byteBufferBlob.array();
blobContentSize = data2.length;
}
ByteBufferInputStream stream2 = new ByteBufferInputStream(ByteBuffer.wrap(data2));
MessageFormatInputStream messageFormatStream2 = (blobVersion == Blob_Version_V2)
    ? new PutMessageFormatInputStream(key2, ByteBuffer.wrap(encryptionKey2), prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType)
    : new PutMessageFormatBlobV1InputStream(key2, prop2, ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType);
MessageInfo msgInfo2 = new MessageInfo(key2, messageFormatStream2.getSize(), accountId2, containerId2, prop2.getCreationTimeInMs());
// Add the key for the second message to the discardable ones.
randomKeyConverter.addInvalids(Collections.singletonList(key2));
// create message stream for blob 3
StoreKey key3 = new MockId("id3");
short accountId3 = Utils.getRandomShort(RANDOM);
short containerId3 = Utils.getRandomShort(RANDOM);
BlobProperties prop3 = new BlobProperties(10, "servid3", accountId3, containerId3, false);
byte[] encryptionKey3 = new byte[100];
RANDOM.nextBytes(encryptionKey3);
byte[] usermetadata3 = new byte[1000];
RANDOM.nextBytes(usermetadata3);
blobContentSize = 2000;
byte[] data3 = new byte[blobContentSize];
RANDOM.nextBytes(data3);
if (blobVersion == Blob_Version_V2 && blobType == BlobType.MetadataBlob) {
ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
data3 = byteBufferBlob.array();
blobContentSize = data3.length;
}
ByteBufferInputStream stream3 = new ByteBufferInputStream(ByteBuffer.wrap(data3));
MessageFormatInputStream messageFormatStream3 = (blobVersion == Blob_Version_V2)
    ? new PutMessageFormatInputStream(key3, ByteBuffer.wrap(encryptionKey3), prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType)
    : new PutMessageFormatBlobV1InputStream(key3, prop3, ByteBuffer.wrap(usermetadata3), stream3, blobContentSize, blobType);
MessageInfo msgInfo3 = new MessageInfo(key3, messageFormatStream3.getSize(), accountId3, containerId3, prop3.getCreationTimeInMs());
// create input stream for all blob messages together
byte[] totalMessageStreamContent = new byte[(int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize() + (int) messageFormatStream3.getSize()];
messageFormatStream1.read(totalMessageStreamContent, 0, (int) messageFormatStream1.getSize());
messageFormatStream2.read(totalMessageStreamContent, (int) messageFormatStream1.getSize(), (int) messageFormatStream2.getSize());
messageFormatStream3.read(totalMessageStreamContent, (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(), (int) messageFormatStream3.getSize());
InputStream inputStream = new ByteBufferInputStream(ByteBuffer.wrap(totalMessageStreamContent));
List<MessageInfo> msgInfoList = new ArrayList<>();
msgInfoList.add(msgInfo1);
msgInfoList.add(msgInfo2);
msgInfoList.add(msgInfo3);
MessageSievingInputStream sievedStream = new MessageSievingInputStream(inputStream, msgInfoList, transformers, new MetricRegistry());
Map<StoreKey, StoreKey> convertedMap = randomKeyConverter.convert(Arrays.asList(key1, key2, key3));
int headerSize = MessageFormatRecord.getHeaderSizeForVersion(headerVersionToUse);
int blobPropertiesRecordSize = BlobProperties_Format_V1.getBlobPropertiesRecordSize(prop1);
int userMetadataSize = UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(usermetadata1));
int totalHeadSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * headerSize;
int totalBlobPropertiesSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * blobPropertiesRecordSize;
int totalUserMetadataSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * userMetadataSize;
int totalBlobSize = (options.contains(TransformerOptions.KeyConvert) ? 2 : 3) * (int) blobSize;
int totalKeySize = options.contains(TransformerOptions.KeyConvert)
    ? convertedMap.get(key1).sizeInBytes() + convertedMap.get(key3).sizeInBytes()
    : key1.sizeInBytes() + key2.sizeInBytes() + key3.sizeInBytes();
int totalEncryptionRecordSize = blobVersion > Blob_Version_V1
    ? BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey1))
        + (options.contains(TransformerOptions.KeyConvert) ? 0 : BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey2)))
        + BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey3))
    : 0;
if (!options.isEmpty()) {
if (options.contains(TransformerOptions.KeyConvert)) {
Assert.assertTrue(sievedStream.hasDeprecatedMessages());
}
Assert.assertEquals((int) sievedStream.getValidMessageInfoList().stream().mapToLong(MessageInfo::getSize).sum(), sievedStream.getSize());
Assert.assertEquals(options.isEmpty() ? totalMessageStreamContent.length : totalHeadSize + totalBlobPropertiesSize + totalUserMetadataSize + totalBlobSize + totalKeySize + totalEncryptionRecordSize, sievedStream.getSize());
verifySievedTransformedMessage(sievedStream, options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key1) : key1, "servid1", accountId1, containerId1, blobVersion > Blob_Version_V1 ? encryptionKey1 : null, usermetadata1, data1, blobVersion, blobType);
if (!options.contains(TransformerOptions.KeyConvert)) {
verifySievedTransformedMessage(sievedStream, key2, "servid2", accountId2, containerId2, blobVersion > Blob_Version_V1 ? encryptionKey2 : null, usermetadata2, data2, blobVersion, blobType);
}
verifySievedTransformedMessage(sievedStream, options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key3) : key3, "servid3", accountId3, containerId3, blobVersion > Blob_Version_V1 ? encryptionKey3 : null, usermetadata3, data3, blobVersion, blobType);
} else {
Assert.assertEquals(totalMessageStreamContent.length, sievedStream.getSize());
byte[] sievedBytes = Utils.readBytesFromStream(sievedStream, sievedStream.getSize());
Assert.assertArrayEquals(totalMessageStreamContent, sievedBytes);
}
Assert.assertEquals(-1, sievedStream.read());
}
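The size expectation above boils down to counting the per-record sizes twice when key conversion is enabled (the deprecated id2 is sieved out) and three times otherwise. A compact, hypothetical restatement of that bookkeeping, with the key and encryption-record totals passed in already restricted to the surviving messages:
// Hypothetical helper restating the expected-size arithmetic of testDeprecatedMsg.
static long expectedSievedStreamSize(boolean keyConvert, int headerSize, int blobPropertiesRecordSize,
    int userMetadataSize, long blobSize, long totalKeySize, long totalEncryptionRecordSize) {
  int survivingMessages = keyConvert ? 2 : 3;
  return survivingMessages * (long) (headerSize + blobPropertiesRecordSize + userMetadataSize)
      + survivingMessages * blobSize + totalKeySize + totalEncryptionRecordSize;
}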
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class MessageFormatRecordTest, method testMetadataContentRecordV2.
@Test
public void testMetadataContentRecordV2() throws IOException, MessageFormatException {
// Test Metadata Blob V2
List<StoreKey> keys = getKeys(60, 5);
int[] chunkSizes = { ThreadLocalRandom.current().nextInt(1, Integer.MAX_VALUE), 15 };
long[] totalSizes = { (long) keys.size() * chunkSizes[0], ((long) keys.size() * chunkSizes[1]) - 11 };
for (int i = 0; i < chunkSizes.length; i++) {
ByteBuffer metadataContent = getSerializedMetadataContentV2(chunkSizes[i], totalSizes[i], keys);
CompositeBlobInfo compositeBlobInfo = deserializeMetadataContentV2(metadataContent, new MockIdFactory());
Assert.assertEquals("Chunk size doesn't match", chunkSizes[i], compositeBlobInfo.getChunkSize());
Assert.assertEquals("Total size doesn't match", totalSizes[i], compositeBlobInfo.getTotalSize());
Assert.assertEquals("List of keys dont match", keys, compositeBlobInfo.getKeys());
// no testing of corruption as the metadata content record doesn't have crc
}
}
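The two cases exercise the boundary between an exact multiple of the chunk size (the first entry) and a total size whose last chunk is smaller (the second entry, keys.size() * 15 - 11). The invariant behind the test data, expressed as plain arithmetic rather than the record format itself:
// Illustrative invariant behind the metadata content test data: the number of chunk keys is
// exactly the number of chunks needed to cover totalSize at the given chunkSize.
static boolean keysCoverTotalSize(int numKeys, long chunkSize, long totalSize) {
  long chunksNeeded = (totalSize + chunkSize - 1) / chunkSize;  // ceiling division
  return chunksNeeded == numKeys;
}
For the values above, keysCoverTotalSize(keys.size(), chunkSizes[i], totalSizes[i]) holds for both entries.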