Use of com.github.ambry.utils.ByteBufferOutputStream in project ambry by LinkedIn.
The class HardDeleteRecoveryMetadata, method getUserMetadataInfo.
private DeserializedUserMetadata getUserMetadataInfo(MessageReadSet readSet, int readSetIndex, int relativeOffset,
    int userMetadataSize) throws MessageFormatException, IOException {
  /* Read the serialized user metadata from the channel */
  ByteBuffer userMetaData = ByteBuffer.allocate(userMetadataSize);
  readSet.writeTo(readSetIndex, Channels.newChannel(new ByteBufferOutputStream(userMetaData)), relativeOffset,
      userMetadataSize);
  userMetaData.flip();
  return deserializeAndGetUserMetadataWithVersion(new ByteBufferInputStream(userMetaData));
}
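The method above shows the core pattern behind every usage on this page: wrap a pre-sized ByteBuffer in a ByteBufferOutputStream so that channel writes land in the buffer, then flip it for reading. A minimal self-contained sketch of that pattern (the buffer size and payload are hypothetical; only ByteBufferOutputStream comes from ambry's utils and is assumed on the classpath):

import com.github.ambry.utils.ByteBufferOutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.StandardCharsets;

public class ByteBufferOutputStreamSketch {
  public static void main(String[] args) throws Exception {
    // Allocate a buffer sized to the data we expect to receive (64 is arbitrary here).
    ByteBuffer buffer = ByteBuffer.allocate(64);
    // Adapt the buffer to the OutputStream/WritableByteChannel APIs.
    WritableByteChannel channel = Channels.newChannel(new ByteBufferOutputStream(buffer));
    // Anything written to the channel lands in the backing buffer...
    channel.write(ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)));
    // ...and flipping makes those bytes readable from position 0.
    buffer.flip();
    System.out.println(buffer.remaining() + " bytes captured");
  }
}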
Use of com.github.ambry.utils.ByteBufferOutputStream in project ambry by LinkedIn.
The class MessageFormatSendTest, method sendWriteTestWithBadId.
@Test
public void sendWriteTestWithBadId() throws IOException, MessageFormatException {
  // add header, system metadata, user metadata and data to the buffers
  ByteBuffer buf1 = ByteBuffer.allocate(1010);
  // fill header
  // version
  buf1.putShort((short) 1);
  // total size
  buf1.putLong(950);
  // put relative offsets
  // blob property relative offset
  buf1.putInt(60);
  // delete relative offset
  buf1.putInt(-1);
  // user metadata relative offset
  buf1.putInt(81);
  // data relative offset
  buf1.putInt(191);
  Crc32 crc = new Crc32();
  crc.update(buf1.array(), 0, buf1.position());
  // header crc
  buf1.putLong(crc.getValue());
  // blob id
  String id = "012345678910123456789012";
  buf1.putShort((short) id.length());
  buf1.put(id.getBytes());
  // blob property version
  buf1.putShort((short) 1);
  String attribute1 = "ttl";
  String attribute2 = "del";
  // ttl name
  buf1.put(attribute1.getBytes());
  // ttl value
  buf1.putLong(12345);
  // delete name
  buf1.put(attribute2.getBytes());
  byte b = 1;
  // delete flag
  buf1.put(b);
  // blob property crc
  buf1.putInt(456);
  // user metadata version
  buf1.putShort((short) 1);
  // user metadata size
  buf1.putInt(100);
  byte[] usermetadata = new byte[100];
  new Random().nextBytes(usermetadata);
  buf1.put(usermetadata);
  // user metadata crc
  buf1.putInt(123);
  // blob version
  buf1.putShort((short) 0);
  // blob size
  buf1.putLong(805);
  // blob
  byte[] data = new byte[805];
  new Random().nextBytes(data);
  buf1.put(data);
  // blob crc
  buf1.putInt(123);
  buf1.flip();
  ArrayList<ByteBuffer> listbuf = new ArrayList<>();
  listbuf.add(buf1);
  ArrayList<StoreKey> storeKeys = new ArrayList<>();
  storeKeys.add(new MockId("012345678910123223233456789012"));
  MessageReadSet readSet = new MockMessageReadSet(listbuf, storeKeys);
  MetricRegistry registry = new MetricRegistry();
  MessageFormatMetrics metrics = new MessageFormatMetrics(registry);
  // get all
  MessageFormatSend send = new MessageFormatSend(readSet, MessageFormatFlags.All, metrics, new MockIdFactory());
  Assert.assertEquals(1010, send.sizeInBytes());
  ByteBuffer bufresult = ByteBuffer.allocate(1010);
  WritableByteChannel channel1 = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel1);
  }
  Assert.assertArrayEquals(buf1.array(), bufresult.array());
  try {
    // get blob: the store key in the read set does not match the id in the message, so this must fail
    MessageFormatSend send1 = new MessageFormatSend(readSet, MessageFormatFlags.Blob, metrics, new MockIdFactory());
    Assert.fail("Exception is expected");
  } catch (MessageFormatException e) {
    Assert.assertEquals(MessageFormatErrorCodes.Store_Key_Id_MisMatch, e.getErrorCode());
  }
}
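The header crc in this test is computed over exactly the bytes written so far, which is why crc.update is fed buf1.array() from offset 0 up to buf1.position() before the crc field itself is appended. A standalone sketch of the same idea using the JDK's java.util.zip.CRC32 (the field values are hypothetical; ambry's Crc32 is assumed here to behave like the JDK class):

import java.nio.ByteBuffer;
import java.util.zip.CRC32;

public class HeaderCrcSketch {
  public static void main(String[] args) {
    ByteBuffer header = ByteBuffer.allocate(64);
    header.putShort((short) 1); // version
    header.putLong(950);        // total size
    // Checksum everything written so far: bytes [0, position).
    CRC32 crc = new CRC32();
    crc.update(header.array(), 0, header.position());
    // The crc field itself sits just after the checksummed region.
    header.putLong(crc.getValue());
    System.out.println("header crc = " + crc.getValue());
  }
}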
Use of com.github.ambry.utils.ByteBufferOutputStream in project ambry by LinkedIn.
The class BlobStoreCompactorTest, method checkRecord.
/**
 * Checks the record of an id by ensuring that the {@link BlobReadOptions} received from the store matches what is
 * expected. Also checks the data, unless the key has been deleted and hard delete is enabled.
 * @param id the {@link MockId} whose record needs to be checked.
 * @param options the {@link BlobReadOptions} received from the {@link PersistentIndex}.
 * @throws IOException
 */
private void checkRecord(MockId id, BlobReadOptions options) throws IOException {
  MessageReadSet readSet = new StoreMessageReadSet(Arrays.asList(options));
  IndexValue value = state.getExpectedValue(id, true);
  assertEquals("Unexpected key in BlobReadOptions", id, options.getMessageInfo().getStoreKey());
  assertEquals("Unexpected size in BlobReadOptions", value.getSize(), options.getMessageInfo().getSize());
  assertEquals("Unexpected expiresAtMs in BlobReadOptions", value.getExpiresAtMs(),
      options.getMessageInfo().getExpirationTimeInMs());
  if (!state.index.hardDeleter.enabled.get() || !state.deletedKeys.contains(id)) {
    ByteBuffer readBuf = ByteBuffer.allocate((int) value.getSize());
    ByteBufferOutputStream stream = new ByteBufferOutputStream(readBuf);
    WritableByteChannel channel = Channels.newChannel(stream);
    readSet.writeTo(0, channel, 0, value.getSize());
    byte[] expectedData = state.getExpectedData(id, true);
    assertArrayEquals("Data obtained from readSet does not match original", expectedData, readBuf.array());
  }
}
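Note the design choice here: readBuf is allocated to exactly value.getSize(), so after writeTo its backing array holds the complete record and can be compared with assertArrayEquals without flipping. A minimal sketch of why the exact sizing matters (the record bytes are hypothetical):

import com.github.ambry.utils.ByteBufferOutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
import java.util.Arrays;

public class ExactSizeSketch {
  public static void main(String[] args) throws Exception {
    byte[] record = {1, 2, 3, 4};
    // Sized exactly to the record: array() will contain only the written bytes.
    ByteBuffer exact = ByteBuffer.allocate(record.length);
    WritableByteChannel ch = Channels.newChannel(new ByteBufferOutputStream(exact));
    ch.write(ByteBuffer.wrap(record));
    // No flip needed; array() ignores position and limit.
    System.out.println(Arrays.equals(record, exact.array())); // true
    // An oversized buffer would leave trailing zeros in array(), breaking the comparison.
  }
}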
Use of com.github.ambry.utils.ByteBufferOutputStream in project ambry by LinkedIn.
The class CloudBlobStore, method downloadBlob.
/**
 * Download the blob corresponding to the {@code blobId} from the {@code CloudDestination} to the given
 * {@code outputStream}. If the blob was encrypted by vcr during upload, then this method also decrypts it.
 * @param cloudBlobMetadata blob metadata to determine if the blob was encrypted by vcr during upload.
 * @param blobId id of the blob to be downloaded.
 * @param outputStream {@code OutputStream} of the downloaded blob.
 * @throws StoreException if there is an error in downloading the blob.
 */
void downloadBlob(CloudBlobMetadata cloudBlobMetadata, BlobId blobId, OutputStream outputStream)
    throws StoreException {
  try {
    // TODO: if needed, fetch metadata here and check encryption
    if (cloudBlobMetadata.getEncryptionOrigin() == EncryptionOrigin.VCR) {
      ByteBuffer encryptedBlob = ByteBuffer.allocate((int) cloudBlobMetadata.getEncryptedSize());
      requestAgent.doWithRetries(() -> {
        cloudDestination.downloadBlob(blobId, new ByteBufferOutputStream(encryptedBlob));
        return null;
      }, "Download", cloudBlobMetadata.getPartitionId());
      ByteBuffer decryptedBlob = cryptoAgent.decrypt(encryptedBlob);
      outputStream.write(decryptedBlob.array());
    } else {
      requestAgent.doWithRetries(() -> {
        cloudDestination.downloadBlob(blobId, outputStream);
        return null;
      }, "Download", cloudBlobMetadata.getPartitionId());
    }
  } catch (CloudStorageException | GeneralSecurityException | IOException e) {
    throw new StoreException("Error occurred in downloading blob for blob id: " + blobId, StoreErrorCodes.IOError);
  }
}
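One caveat with the decrypt path above: outputStream.write(decryptedBlob.array()) emits the buffer's entire backing array, which is safe only when the decrypted content spans the whole array. A hedged sketch of a more defensive write that copies only the readable window (assuming decrypt() returns a heap buffer positioned at the start of the payload):

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;

public class DefensiveWriteSketch {
  // Writes only the buffer's remaining bytes, not the whole backing array,
  // guarding against a nonzero arrayOffset() or a limit short of capacity.
  static void writeRemaining(ByteBuffer buf, OutputStream out) throws IOException {
    byte[] payload = new byte[buf.remaining()];
    buf.get(payload);
    out.write(payload);
  }
}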
Use of com.github.ambry.utils.ByteBufferOutputStream in project ambry by LinkedIn.
The class MessageFormatSendTest, method doSendWriteSingleMessageTest.
/**
 * Helper method for testing single message sends.
 * @param encryptionKey the encryption key to include in the message while writing it.
 * @param expectedEncryptionKey the key expected when reading the sent message.
 */
private void doSendWriteSingleMessageTest(ByteBuffer encryptionKey, ByteBuffer expectedEncryptionKey)
    throws Exception {
  String serviceId = "serviceId";
  String ownerId = "owner";
  String contentType = "bin";
  short accountId = 10;
  short containerId = 2;
  byte[] blob = TestUtils.getRandomBytes(10000);
  byte[] userMetadata = TestUtils.getRandomBytes(2000);
  StoreKey storeKey = new MockId("012345678910123456789012");
  BlobProperties properties =
      new BlobProperties(blob.length, serviceId, ownerId, contentType, false, 100, accountId, containerId,
          encryptionKey != null, null, null, null);
  MessageFormatInputStream putStream;
  MessageFormatRecord.MessageHeader_Format header;
  if (putFormat.equals(PutMessageFormatInputStream.class.getSimpleName())) {
    header = getHeader(
        new PutMessageFormatInputStream(storeKey, encryptionKey == null ? null : encryptionKey.duplicate(),
            properties, ByteBuffer.wrap(userMetadata), new ByteBufferInputStream(ByteBuffer.wrap(blob)), blob.length,
            BlobType.DataBlob));
    putStream = new PutMessageFormatInputStream(storeKey, encryptionKey, properties, ByteBuffer.wrap(userMetadata),
        new ByteBufferInputStream(ByteBuffer.wrap(blob)), blob.length, BlobType.DataBlob);
  } else {
    header = getHeader(new PutMessageFormatBlobV1InputStream(storeKey, properties, ByteBuffer.wrap(userMetadata),
        new ByteBufferInputStream(ByteBuffer.wrap(blob)), blob.length, BlobType.DataBlob));
    putStream = new PutMessageFormatBlobV1InputStream(storeKey, properties, ByteBuffer.wrap(userMetadata),
        new ByteBufferInputStream(ByteBuffer.wrap(blob)), blob.length, BlobType.DataBlob);
  }
  ByteBuffer buf1 = ByteBuffer.allocate((int) putStream.getSize());
  putStream.read(buf1.array());
  ArrayList<ByteBuffer> listbuf = new ArrayList<>();
  listbuf.add(buf1);
  ArrayList<StoreKey> storeKeys = new ArrayList<>();
  storeKeys.add(storeKey);
  MockMessageReadSet readSet = new MockMessageReadSet(listbuf, storeKeys);
  MetricRegistry registry = new MetricRegistry();
  MessageFormatMetrics metrics = new MessageFormatMetrics(registry);
  // get all
  MessageFormatSend send = new MessageFormatSend(readSet, MessageFormatFlags.All, metrics, new MockIdFactory());
  Assert.assertEquals(putStream.getSize(), send.sizeInBytes());
  Assert.assertEquals(1, send.getMessageMetadataList().size());
  Assert.assertNull(send.getMessageMetadataList().get(0));
  ByteBuffer bufresult = ByteBuffer.allocate((int) putStream.getSize());
  WritableByteChannel channel = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel);
  }
  Assert.assertArrayEquals(buf1.array(), bufresult.array());
  Assert.assertTrue(readSet.isPrefetchInfoCorrect(0, readSet.sizeInBytes(0)));
  // get blob
  send = new MessageFormatSend(readSet, MessageFormatFlags.Blob, metrics, new MockIdFactory());
  long blobRecordSize = putFormat.equals(PutMessageFormatInputStream.class.getSimpleName())
      ? MessageFormatRecord.Blob_Format_V2.getBlobRecordSize(blob.length)
      : MessageFormatRecord.Blob_Format_V1.getBlobRecordSize(blob.length);
  Assert.assertEquals(blobRecordSize, send.sizeInBytes());
  bufresult.clear();
  channel = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel);
  }
  // the blob data sits between the record header and the trailing crc
  for (int i = 0; i < blob.length; i++) {
    Assert.assertEquals(blob[i], bufresult.array()[i + (int) blobRecordSize - MessageFormatRecord.Crc_Size - blob.length]);
  }
  if (expectedEncryptionKey == null) {
    Assert.assertNull(send.getMessageMetadataList().get(0));
  } else {
    Assert.assertEquals(expectedEncryptionKey, send.getMessageMetadataList().get(0).getEncryptionKey());
  }
  Assert.assertTrue(readSet.isPrefetchInfoCorrect(0, readSet.sizeInBytes(0)));
  // get user metadata
  send = new MessageFormatSend(readSet, MessageFormatFlags.BlobUserMetadata, metrics, new MockIdFactory());
  long userMetadataRecordSize =
      MessageFormatRecord.UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(userMetadata));
  Assert.assertEquals(userMetadataRecordSize, send.sizeInBytes());
  bufresult.clear();
  channel = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel);
  }
  bufresult.flip();
  // read off the record header.
  for (int i = 0; i < userMetadataRecordSize - MessageFormatRecord.Crc_Size - userMetadata.length; i++) {
    bufresult.get();
  }
  verifyBlobUserMetadata(userMetadata, bufresult);
  if (expectedEncryptionKey == null) {
    Assert.assertNull(send.getMessageMetadataList().get(0));
  } else {
    Assert.assertEquals(expectedEncryptionKey, send.getMessageMetadataList().get(0).getEncryptionKey());
  }
  Assert.assertTrue(readSet.isPrefetchInfoCorrect(header.getUserMetadataRecordRelativeOffset(),
      header.getUserMetadataRecordSize()));
  // get blob properties
  send = new MessageFormatSend(readSet, MessageFormatFlags.BlobProperties, metrics, new MockIdFactory());
  long blobPropertiesRecordSize = MessageFormatRecord.BlobProperties_Format_V1.getBlobPropertiesRecordSize(properties);
  Assert.assertEquals(blobPropertiesRecordSize, send.sizeInBytes());
  bufresult.clear();
  channel = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel);
  }
  bufresult.flip();
  // read off the record header.
  for (int i = 0;
      i < blobPropertiesRecordSize - MessageFormatRecord.Crc_Size - BlobPropertiesSerDe.getBlobPropertiesSerDeSize(properties);
      i++) {
    bufresult.get();
  }
  verifyBlobProperties(properties,
      BlobPropertiesSerDe.getBlobPropertiesFromStream(new DataInputStream(new ByteBufferInputStream(bufresult))));
  Assert.assertNull(send.getMessageMetadataList().get(0));
  Assert.assertTrue(readSet.isPrefetchInfoCorrect(header.getBlobPropertiesRecordRelativeOffset(),
      header.getBlobPropertiesRecordSize()));
  // get blob info
  send = new MessageFormatSend(readSet, MessageFormatFlags.BlobInfo, metrics, new MockIdFactory());
  Assert.assertEquals(blobPropertiesRecordSize + userMetadataRecordSize, send.sizeInBytes());
  bufresult.clear();
  channel = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel);
  }
  bufresult.flip();
  // read off the blob properties record header.
  for (int i = 0;
      i < blobPropertiesRecordSize - MessageFormatRecord.Crc_Size - BlobPropertiesSerDe.getBlobPropertiesSerDeSize(properties);
      i++) {
    bufresult.get();
  }
  verifyBlobProperties(properties,
      BlobPropertiesSerDe.getBlobPropertiesFromStream(new DataInputStream(new ByteBufferInputStream(bufresult))));
  // skip past the blob properties crc and the user metadata record header to land on the user metadata bytes.
  for (int i = 0; i < userMetadataRecordSize - userMetadata.length; i++) {
    bufresult.get();
  }
  verifyBlobUserMetadata(userMetadata, bufresult);
  if (expectedEncryptionKey == null) {
    Assert.assertNull(send.getMessageMetadataList().get(0));
  } else {
    Assert.assertEquals(expectedEncryptionKey, send.getMessageMetadataList().get(0).getEncryptionKey());
  }
  Assert.assertTrue(readSet.isPrefetchInfoCorrect(header.getBlobPropertiesRecordRelativeOffset(),
      header.getBlobPropertiesRecordSize() + header.getUserMetadataRecordSize()));
}
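The clear-the-buffer, recreate-the-channel, drain-until-complete sequence repeats for every flag in the test above. A hedged refactoring sketch that pulls it into a helper within the same test class (drainSend is a hypothetical name; it relies only on the isSendComplete()/writeTo() contract visible above, so the imports are already in scope):

// A hypothetical helper for the repeated drain pattern in the test above.
private static ByteBuffer drainSend(MessageFormatSend send, ByteBuffer scratch) throws IOException {
  scratch.clear();
  WritableByteChannel channel = Channels.newChannel(new ByteBufferOutputStream(scratch));
  while (!send.isSendComplete()) {
    send.writeTo(channel);
  }
  scratch.flip(); // ready for reading; callers comparing raw arrays can ignore position/limit
  return scratch;
}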