Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class CloudBlobStore, method findMissingKeys.
@Override
public Set<StoreKey> findMissingKeys(List<StoreKey> keys) throws StoreException {
  checkStarted();
  // Check the existence of the keys in cloud metadata.
  // Note that it is ok to consult the cache here, because all we are doing is eliminating blobs that were
  // seen before; we don't care about the state of the blob.
  // TODO Fix the corner case where a blob is deleted in the cache and has been compacted. Ideally it
  // should show as missing.
  List<BlobId> blobIdQueryList = keys.stream()
      .filter(key -> !checkCacheState(key.getID()))
      .map(key -> (BlobId) key)
      .collect(Collectors.toList());
  if (blobIdQueryList.isEmpty()) {
    // Cool, the cache did its job and eliminated a possibly expensive query to the cloud!
    return Collections.emptySet();
  }
  try {
    Set<String> foundSet = requestAgent.doWithRetries(() -> cloudDestination.getBlobMetadata(blobIdQueryList),
        "FindMissingKeys", partitionId.toPathString()).keySet();
    // Return: input keys - cached keys - keys returned by the query.
    return keys.stream()
        .filter(key -> !foundSet.contains(key.getID()))
        .filter(key -> !recentBlobCache.containsKey(key.getID()))
        .collect(Collectors.toSet());
  } catch (CloudStorageException ex) {
    throw new StoreException(ex, StoreErrorCodes.IOError);
  }
}
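For orientation, here is a minimal sketch of how a caller might consume this method. Only Store#findMissingKeys comes from the code above; the replicateMissing method and the scheduleReplication callback are hypothetical stand-ins for whatever fetches a missing blob.

import java.util.List;
import java.util.Set;
import java.util.function.Consumer;
import com.github.ambry.store.Store;
import com.github.ambry.store.StoreException;
import com.github.ambry.store.StoreKey;

// Sketch only: `store` is assumed to be a started store; scheduleReplication
// is a hypothetical callback, not part of the Ambry API.
static void replicateMissing(Store store, List<StoreKey> candidates,
    Consumer<StoreKey> scheduleReplication) throws StoreException {
  // Keys absent from both the recent-blob cache and the cloud metadata
  // query come back in the missing set; hand them to the caller.
  Set<StoreKey> missing = store.findMissingKeys(candidates);
  missing.forEach(scheduleReplication);
}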
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class CloudBlobStoreTest, method testCacheEvictionOrder.
/**
 * Test CloudBlobStore cache eviction.
 */
@Test
public void testCacheEvictionOrder() throws Exception {
  assumeTrue(isVcr);
  // Set up the store with a small cache size.
  int cacheSize = 10;
  setupCloudStore(false, false, cacheSize, true);
  // Put blobs to fill up the cache.
  List<StoreKey> blobIdList = new ArrayList<>();
  for (int j = 0; j < cacheSize; j++) {
    blobIdList.add(getUniqueId(refAccountId, refContainerId, false, partitionId));
    store.addToCache(blobIdList.get(j).getID(), (short) 0, CloudBlobStore.BlobState.CREATED);
  }
  // findMissingKeys should be served entirely from the cache.
  store.findMissingKeys(blobIdList);
  verify(dest, never()).getBlobMetadata(anyList());
  int expectedLookups = blobIdList.size();
  int expectedHits = expectedLookups;
  verifyCacheHits(expectedLookups, expectedHits);
  // Perform an access on the first 5 blobs.
  int delta = 5;
  MockMessageWriteSet messageWriteSet = new MockMessageWriteSet();
  for (int j = 0; j < delta; j++) {
    CloudTestUtil.addBlobToMessageSet(messageWriteSet, (BlobId) blobIdList.get(j), SMALL_BLOB_SIZE,
        Utils.Infinite_Time, operationTime, isVcr);
  }
  store.updateTtl(messageWriteSet.getMessageSetInfo());
  expectedLookups += delta;
  // Note: these should be cache misses since the blobs are still in CREATED state.
  verifyCacheHits(expectedLookups, expectedHits);
  // Put 5 more blobs.
  for (int j = cacheSize; j < cacheSize + delta; j++) {
    blobIdList.add(getUniqueId(refAccountId, refContainerId, false, partitionId));
    store.addToCache(blobIdList.get(j).getID(), (short) 0, CloudBlobStore.BlobState.CREATED);
  }
  // Look up the same first 5 blobs, which should still be cached.
  store.findMissingKeys(blobIdList.subList(0, delta));
  expectedLookups += delta;
  expectedHits += delta;
  verifyCacheHits(expectedLookups, expectedHits);
  verify(dest, never()).getBlobMetadata(anyList());
  // Call findMissingKeys on blobs 6-10, which should trigger getBlobMetadata.
  store.findMissingKeys(blobIdList.subList(delta, cacheSize));
  expectedLookups += delta;
  verifyCacheHits(expectedLookups, expectedHits);
  verify(dest).getBlobMetadata(anyList());
}
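The eviction behavior the test relies on is that of an access-ordered LRU map capped at a fixed number of entries. A minimal sketch of such a cache built on java.util.LinkedHashMap follows; it illustrates the concept only and is not Ambry's actual recentBlobCache implementation.

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

// Sketch: an access-ordered map that evicts its least-recently-used entry
// once it grows past maxEntries. This is just the idea the eviction-order
// assertions above depend on, not the Ambry source.
static <K, V> Map<K, V> lruCache(int maxEntries) {
  return Collections.synchronizedMap(new LinkedHashMap<K, V>(16, 0.75f, true) {
    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
      return size() > maxEntries;
    }
  });
}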
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class CloudBlobStoreTest, method testStoreNotStarted.
/**
 * Test verifying behavior when the store is not started.
 */
@Test
public void testStoreNotStarted() throws Exception {
  // Create the store but don't start it.
  setupCloudStore(false, true, defaultCacheLimit, false);
  List<StoreKey> keys = Collections.singletonList(getUniqueId(refAccountId, refContainerId, false, partitionId));
  MockMessageWriteSet messageWriteSet = new MockMessageWriteSet();
  CloudTestUtil.addBlobToMessageSet(messageWriteSet, 10, Utils.Infinite_Time, refAccountId, refContainerId, true,
      false, partitionId, operationTime, isVcr);
  try {
    store.put(messageWriteSet);
    fail("Store put should have failed.");
  } catch (StoreException e) {
    assertEquals(StoreErrorCodes.Store_Not_Started, e.getErrorCode());
  }
  try {
    store.delete(messageWriteSet.getMessageSetInfo());
    fail("Store delete should have failed.");
  } catch (StoreException e) {
    assertEquals(StoreErrorCodes.Store_Not_Started, e.getErrorCode());
  }
  try {
    store.findMissingKeys(keys);
    fail("Store findMissingKeys should have failed.");
  } catch (StoreException e) {
    assertEquals(StoreErrorCodes.Store_Not_Started, e.getErrorCode());
  }
}
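All three operations fail the same way because each begins with a started-state guard like the checkStarted() call seen in findMissingKeys above. A plausible sketch of that guard follows; the boolean started flag is an assumption about the store's internal state, and a StoreException(String, StoreErrorCodes) constructor is assumed to match the error-code usage in the test.

// Sketch of the guard pattern the test verifies: any operation on a store
// that was never started fails fast with Store_Not_Started. The `started`
// flag is assumed, not copied from the Ambry source.
private void checkStarted() throws StoreException {
  if (!started) {
    throw new StoreException("Store not started", StoreErrorCodes.Store_Not_Started);
  }
}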
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class ValidatingKeyConvertingTransformer, method transform.
@Override
public TransformationOutput transform(Message message) {
  ByteBuffer encryptionKey;
  BlobProperties props;
  ByteBuffer metadata;
  BlobData blobData;
  MessageInfo msgInfo = message.getMessageInfo();
  InputStream msgStream = message.getStream();
  TransformationOutput transformationOutput;
  try {
    // Read the header version first, then the rest of the header.
    ByteBuffer headerVersion = ByteBuffer.allocate(Version_Field_Size_In_Bytes);
    msgStream.read(headerVersion.array());
    short version = headerVersion.getShort();
    if (!isValidHeaderVersion(version)) {
      throw new MessageFormatException("Header version not supported " + version,
          MessageFormatErrorCodes.Data_Corrupt);
    }
    int headerSize = getHeaderSizeForVersion(version);
    ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize);
    headerBuffer.put(headerVersion.array());
    msgStream.read(headerBuffer.array(), Version_Field_Size_In_Bytes, headerSize - Version_Field_Size_In_Bytes);
    headerBuffer.rewind();
    MessageHeader_Format header = getMessageHeader(version, headerBuffer);
    header.verifyHeader();
    StoreKey originalKey = storeKeyFactory.getStoreKey(new DataInputStream(msgStream));
    if (header.isPutRecord()) {
      encryptionKey = header.hasEncryptionKeyRecord() ? deserializeBlobEncryptionKey(msgStream) : null;
      props = deserializeBlobProperties(msgStream);
      metadata = deserializeUserMetadata(msgStream);
      blobData = deserializeBlob(msgStream);
    } else {
      throw new IllegalArgumentException("Message cannot be a deleted record");
    }
    if (msgInfo.getStoreKey().equals(originalKey)) {
      StoreKey newKey = storeKeyConverter.convert(Collections.singletonList(originalKey)).get(originalKey);
      if (newKey == null) {
        System.out.println("No mapping for the given key, transformed message will be null");
        transformationOutput = new TransformationOutput((Message) null);
      } else {
        // Re-serialize the put record under the converted key.
        MessageInfo transformedMsgInfo;
        PutMessageFormatInputStream transformedStream =
            new PutMessageFormatInputStream(newKey, encryptionKey, props, metadata,
                new ByteBufInputStream(blobData.content(), true), blobData.getSize(), blobData.getBlobType(),
                msgInfo.getLifeVersion());
        transformedMsgInfo = new MessageInfo.Builder(msgInfo).storeKey(newKey)
            .size(transformedStream.getSize())
            .isUndeleted(false)
            .build();
        transformationOutput = new TransformationOutput(new Message(transformedMsgInfo, transformedStream));
      }
    } else {
      throw new IllegalStateException(
          "StoreKey in log " + originalKey + " failed to match store key from Index " + msgInfo.getStoreKey());
    }
  } catch (Exception e) {
    transformationOutput = new TransformationOutput(e);
  }
  return transformationOutput;
}
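One subtlety in the header parsing above: InputStream.read(byte[]) is allowed to return fewer bytes than requested, so a single call is not guaranteed to fill the buffer. Production readers typically loop until the buffer is full, which is what java.io.DataInputStream.readFully does; a minimal sketch of the same loop:

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

// Sketch: read exactly buf.length bytes from the stream, or fail with
// EOFException if the stream ends first. This is the guarantee the header
// parsing above implicitly relies on.
static void readFully(InputStream in, byte[] buf) throws IOException {
  int offset = 0;
  while (offset < buf.length) {
    int n = in.read(buf, offset, buf.length - offset);
    if (n < 0) {
      throw new EOFException("Stream ended after " + offset + " of " + buf.length + " bytes");
    }
    offset += n;
  }
}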
Use of com.github.ambry.store.StoreKey in project ambry by linkedin.
The class ValidatingKeyConvertingTransformer, method testValidBlobs.
private void testValidBlobs(short blobVersion, BlobType blobType, short headerVersionToUse) throws Exception {
  // The MessageSievingInputStream contains put records for three valid blobs: id1, id2 and id3.
  // For Blob_Version_V2, two more valid blobs (id4 and id5) are added with header version V2.
  MessageFormatRecord.headerVersionToUse = headerVersionToUse;
  byte[] encryptionKey = new byte[100];
  RANDOM.nextBytes(encryptionKey);
  // Create the message stream for blob 1.
  StoreKey key1 = new MockId("id1");
  short accountId1 = Utils.getRandomShort(RANDOM);
  short containerId1 = Utils.getRandomShort(RANDOM);
  BlobProperties prop1 = new BlobProperties(10, "servid1", accountId1, containerId1, false);
  byte[] usermetadata1 = new byte[1000];
  RANDOM.nextBytes(usermetadata1);
  int blobContentSize = 2000;
  byte[] data1 = new byte[blobContentSize];
  RANDOM.nextBytes(data1);
  long blobSize;
  if (blobType == BlobType.DataBlob) {
    blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
  } else {
    ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
    data1 = byteBufferBlob.array();
    blobContentSize = data1.length;
    blobSize = (int) Blob_Format_V2.getBlobRecordSize(blobContentSize);
  }
  ByteBufferInputStream stream1 = new ByteBufferInputStream(ByteBuffer.wrap(data1));
  // For Blob_Version_V2, the encryption key is null.
  MessageFormatInputStream messageFormatStream1 = (blobVersion == Blob_Version_V2)
      ? new PutMessageFormatInputStream(key1, null, prop1, ByteBuffer.wrap(usermetadata1), stream1,
          blobContentSize, blobType)
      : new PutMessageFormatBlobV1InputStream(key1, prop1, ByteBuffer.wrap(usermetadata1), stream1,
          blobContentSize, blobType);
  MessageInfo msgInfo1 =
      new MessageInfo(key1, messageFormatStream1.getSize(), accountId1, containerId1, prop1.getCreationTimeInMs());
  // Create the message stream for blob 2.
  StoreKey key2 = new MockId("id2");
  short accountId2 = Utils.getRandomShort(RANDOM);
  short containerId2 = Utils.getRandomShort(RANDOM);
  BlobProperties prop2 = new BlobProperties(10, "servid2", accountId2, containerId2, false);
  byte[] usermetadata2 = new byte[1000];
  RANDOM.nextBytes(usermetadata2);
  blobContentSize = 2000;
  byte[] data2 = new byte[blobContentSize];
  RANDOM.nextBytes(data2);
  if (blobType == BlobType.MetadataBlob) {
    ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
    data2 = byteBufferBlob.array();
    blobContentSize = data2.length;
  }
  ByteBufferInputStream stream2 = new ByteBufferInputStream(ByteBuffer.wrap(data2));
  // For Blob_Version_V2, the encryption key is non-null.
  MessageFormatInputStream messageFormatStream2 = (blobVersion == Blob_Version_V2)
      ? new PutMessageFormatInputStream(key2, ByteBuffer.wrap(encryptionKey), prop2,
          ByteBuffer.wrap(usermetadata2), stream2, blobContentSize, blobType)
      : new PutMessageFormatBlobV1InputStream(key2, prop2, ByteBuffer.wrap(usermetadata2), stream2,
          blobContentSize, blobType);
  MessageInfo msgInfo2 =
      new MessageInfo(key2, messageFormatStream2.getSize(), accountId2, containerId2, prop2.getCreationTimeInMs());
  // Create the message stream for blob 3.
  StoreKey key3 = new MockId("id3");
  short accountId3 = Utils.getRandomShort(RANDOM);
  short containerId3 = Utils.getRandomShort(RANDOM);
  BlobProperties prop3 = new BlobProperties(10, "servid3", accountId3, containerId3, false);
  byte[] usermetadata3 = new byte[1000];
  RANDOM.nextBytes(usermetadata3);
  blobContentSize = 2000;
  byte[] data3 = new byte[blobContentSize];
  RANDOM.nextBytes(data3);
  if (blobType == BlobType.MetadataBlob) {
    ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
    data3 = byteBufferBlob.array();
    blobContentSize = data3.length;
  }
  ByteBufferInputStream stream3 = new ByteBufferInputStream(ByteBuffer.wrap(data3));
  // For Blob_Version_V2, the encryption key is null.
  MessageFormatInputStream messageFormatStream3 = (blobVersion == Blob_Version_V2)
      ? new PutMessageFormatInputStream(key3, null, prop3, ByteBuffer.wrap(usermetadata3), stream3,
          blobContentSize, blobType)
      : new PutMessageFormatBlobV1InputStream(key3, prop3, ByteBuffer.wrap(usermetadata3), stream3,
          blobContentSize, blobType);
  MessageInfo msgInfo3 =
      new MessageInfo(key3, messageFormatStream3.getSize(), accountId3, containerId3, prop3.getCreationTimeInMs());
  MessageInfo msgInfo4 = null;
  MessageFormatInputStream messageFormatStream4 = null;
  MessageInfo msgInfo5 = null;
  MessageFormatInputStream messageFormatStream5 = null;
  // Create the message stream for blob 4: header version 2, with an encryption key.
  StoreKey key4 = new MockId("id4");
  short accountId4 = Utils.getRandomShort(RANDOM);
  short containerId4 = Utils.getRandomShort(RANDOM);
  BlobProperties prop4 = new BlobProperties(10, "servid4", accountId4, containerId4, false);
  byte[] usermetadata4 = new byte[1000];
  RANDOM.nextBytes(usermetadata4);
  blobContentSize = 2000;
  byte[] data4 = new byte[blobContentSize];
  RANDOM.nextBytes(data4);
  if (blobType == BlobType.MetadataBlob) {
    ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
    data4 = byteBufferBlob.array();
    blobContentSize = data4.length;
  }
  if (blobVersion == Blob_Version_V2) {
    ByteBufferInputStream stream4 = new ByteBufferInputStream(ByteBuffer.wrap(data4));
    MessageFormatRecord.headerVersionToUse = Message_Header_Version_V2;
    // The encryption key is non-null.
    messageFormatStream4 = new PutMessageFormatInputStream(key4, ByteBuffer.wrap(encryptionKey), prop4,
        ByteBuffer.wrap(usermetadata4), stream4, blobContentSize, blobType);
    msgInfo4 =
        new MessageInfo(key4, messageFormatStream4.getSize(), accountId4, containerId4, prop4.getCreationTimeInMs());
  }
  // Create the message stream for blob 5: header version 2, without an encryption key.
  StoreKey key5 = new MockId("id5");
  short accountId5 = Utils.getRandomShort(RANDOM);
  short containerId5 = Utils.getRandomShort(RANDOM);
  BlobProperties prop5 = new BlobProperties(10, "servid5", accountId5, containerId5, false);
  byte[] usermetadata5 = new byte[1000];
  RANDOM.nextBytes(usermetadata5);
  blobContentSize = 2000;
  byte[] data5 = new byte[blobContentSize];
  RANDOM.nextBytes(data5);
  if (blobType == BlobType.MetadataBlob) {
    ByteBuffer byteBufferBlob = MessageFormatTestUtils.getBlobContentForMetadataBlob(blobContentSize);
    data5 = byteBufferBlob.array();
    blobContentSize = data5.length;
  }
  if (blobVersion == Blob_Version_V2) {
    ByteBufferInputStream stream5 = new ByteBufferInputStream(ByteBuffer.wrap(data5));
    MessageFormatRecord.headerVersionToUse = Message_Header_Version_V2;
    // The encryption key is null.
    messageFormatStream5 = new PutMessageFormatInputStream(key5, null, prop5, ByteBuffer.wrap(usermetadata5),
        stream5, blobContentSize, blobType);
    msgInfo5 =
        new MessageInfo(key5, messageFormatStream5.getSize(), accountId5, containerId5, prop5.getCreationTimeInMs());
  }
  // Create an input stream for all the blob messages together.
  byte[] totalMessageStreamContent = new byte[(int) (messageFormatStream1.getSize() + messageFormatStream2.getSize()
      + messageFormatStream3.getSize()
      + (blobVersion == Blob_Version_V2 ? messageFormatStream4.getSize() + messageFormatStream5.getSize() : 0))];
  messageFormatStream1.read(totalMessageStreamContent, 0, (int) messageFormatStream1.getSize());
  messageFormatStream2.read(totalMessageStreamContent, (int) messageFormatStream1.getSize(),
      (int) messageFormatStream2.getSize());
  messageFormatStream3.read(totalMessageStreamContent,
      (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize(),
      (int) messageFormatStream3.getSize());
  if (blobVersion == Blob_Version_V2) {
    messageFormatStream4.read(totalMessageStreamContent,
        (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize()
            + (int) messageFormatStream3.getSize(),
        (int) messageFormatStream4.getSize());
    messageFormatStream5.read(totalMessageStreamContent,
        (int) messageFormatStream1.getSize() + (int) messageFormatStream2.getSize()
            + (int) messageFormatStream3.getSize() + (int) messageFormatStream4.getSize(),
        (int) messageFormatStream5.getSize());
  }
  InputStream inputStream = new ByteBufferInputStream(ByteBuffer.wrap(totalMessageStreamContent));
  List<MessageInfo> msgInfoList = new ArrayList<MessageInfo>();
  msgInfoList.add(msgInfo1);
  msgInfoList.add(msgInfo2);
  msgInfoList.add(msgInfo3);
  if (blobVersion == Blob_Version_V2) {
    msgInfoList.add(msgInfo4);
    msgInfoList.add(msgInfo5);
  }
  MessageSievingInputStream sievedStream =
      new MessageSievingInputStream(inputStream, msgInfoList, transformers, new MetricRegistry());
  Map<StoreKey, StoreKey> convertedMap = randomKeyConverter.convert(Arrays.asList(key1, key2, key3));
  Map<StoreKey, StoreKey> convertedMapExtra = randomKeyConverter.convert(Arrays.asList(key4, key5));
  int headerSizeV1 = MessageHeader_Format_V1.getHeaderSize();
  int headerSizeV2 = MessageHeader_Format_V2.getHeaderSize();
  int blobPropertiesRecordSize = BlobProperties_Format_V1.getBlobPropertiesRecordSize(prop1);
  int userMetadataSize = UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(usermetadata1));
  int totalHeadSize =
      3 * (headerVersionToUse == MessageFormatRecord.Message_Header_Version_V1 ? headerSizeV1 : headerSizeV2);
  int totalEncryptionRecordSize = 0;
  int totalBlobPropertiesSize = 3 * blobPropertiesRecordSize;
  int totalUserMetadataSize = 3 * userMetadataSize;
  int totalBlobSize = 3 * (int) blobSize;
  int totalKeySize = (options.contains(TransformerOptions.KeyConvert) ? convertedMap.values() : convertedMap.keySet())
      .stream().mapToInt(StoreKey::sizeInBytes).sum();
  int encryptionRecordSize =
      BlobEncryptionKey_Format_V1.getBlobEncryptionKeyRecordSize(ByteBuffer.wrap(encryptionKey));
  if (blobVersion == Blob_Version_V2) {
    totalHeadSize += 2 * headerSizeV2;
    // Streams 2 and 4 have encryption keys.
    totalEncryptionRecordSize += 2 * encryptionRecordSize;
    totalBlobPropertiesSize += 2 * blobPropertiesRecordSize;
    totalUserMetadataSize += 2 * userMetadataSize;
    totalBlobSize += 2 * (int) blobSize;
    totalKeySize += (options.contains(TransformerOptions.KeyConvert) ? convertedMapExtra.values()
        : convertedMapExtra.keySet()).stream().mapToInt(StoreKey::sizeInBytes).sum();
  }
  Assert.assertFalse(sievedStream.hasInvalidMessages());
  if (!options.isEmpty()) {
    Assert.assertEquals(
        totalHeadSize + totalEncryptionRecordSize + totalBlobPropertiesSize + totalUserMetadataSize + totalBlobSize
            + totalKeySize, sievedStream.getSize());
    Assert.assertEquals((int) sievedStream.getValidMessageInfoList().stream().mapToLong(MessageInfo::getSize).sum(),
        sievedStream.getSize());
    verifySievedTransformedMessage(sievedStream,
        options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key1) : key1, "servid1", accountId1,
        containerId1, null, usermetadata1, data1, blobVersion, blobType);
    verifySievedTransformedMessage(sievedStream,
        options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key2) : key2, "servid2", accountId2,
        containerId2, blobVersion == Blob_Version_V2 ? encryptionKey : null, usermetadata2, data2, blobVersion,
        blobType);
    verifySievedTransformedMessage(sievedStream,
        options.contains(TransformerOptions.KeyConvert) ? convertedMap.get(key3) : key3, "servid3", accountId3,
        containerId3, null, usermetadata3, data3, blobVersion, blobType);
    if (blobVersion == Blob_Version_V2) {
      verifySievedTransformedMessage(sievedStream,
          options.contains(TransformerOptions.KeyConvert) ? convertedMapExtra.get(key4) : key4, "servid4",
          accountId4, containerId4, encryptionKey, usermetadata4, data4, blobVersion, blobType);
      verifySievedTransformedMessage(sievedStream,
          options.contains(TransformerOptions.KeyConvert) ? convertedMapExtra.get(key5) : key5, "servid5",
          accountId5, containerId5, null, usermetadata5, data5, blobVersion, blobType);
    }
  } else {
    Assert.assertEquals(totalMessageStreamContent.length, sievedStream.getSize());
    byte[] sievedBytes = Utils.readBytesFromStream(sievedStream, sievedStream.getSize());
    Assert.assertArrayEquals(totalMessageStreamContent, sievedBytes);
  }
  Assert.assertEquals(-1, sievedStream.read());
}
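The five per-blob setups above repeat the same steps. A sketch of a helper that captures the common pattern follows; the constructor signatures mirror the calls already shown in the test, but the helper itself is hypothetical, not part of the test class.

// Sketch: build one put-format stream for a test blob. Signatures follow
// the PutMessageFormatInputStream calls in testValidBlobs; the helper is
// illustrative only.
private MessageFormatInputStream buildV2PutStream(StoreKey key, ByteBuffer encryptionKey, String serviceId,
    short accountId, short containerId, byte[] userMetadata, byte[] data, BlobType blobType) throws Exception {
  BlobProperties props = new BlobProperties(10, serviceId, accountId, containerId, false);
  ByteBufferInputStream blobStream = new ByteBufferInputStream(ByteBuffer.wrap(data));
  // A null encryptionKey produces a record without an encryption key, as
  // for blobs 1, 3 and 5 above.
  return new PutMessageFormatInputStream(key, encryptionKey, props, ByteBuffer.wrap(userMetadata), blobStream,
      data.length, blobType);
}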