Use of com.github.ambry.utils.ByteBufferOutputStream in project ambry by LinkedIn.
The class MessageFormatSendTest, method doSendWriteCompositeMessagesTest.
/**
* Helper method to test multiple messages in a single Send involving different combinations of header format
* versions, put formats and encryption keys.
* @param blob the array of blob records for the messages.
* @param userMetadata the array of userMetadata for the messages.
* @param storeKeys the array of store keys for the messages.
* @param encryptionKeys the array of encryption keys for the messages.
* @param putFormats the array of Put Format class names to use to create the message streams.
* @param headerVersions the array of Message Header versions to use for the messages.
*/
private void doSendWriteCompositeMessagesTest(byte[][] blob, byte[][] userMetadata, StoreKey[] storeKeys,
    ByteBuffer[] encryptionKeys, String[] putFormats, short[] headerVersions)
    throws MessageFormatException, IOException {
  String serviceIdPrefix = "serviceId";
  String ownerIdPrefix = "owner";
  String contentTypePrefix = "bin";
  short accountIdBase = 10;
  short containerIdBase = 2;
  BlobProperties[] properties = new BlobProperties[5];
  for (int i = 0; i < 5; i++) {
    properties[i] = new BlobProperties(blob[i].length, serviceIdPrefix + i, ownerIdPrefix + i, contentTypePrefix + i,
        false, 100, (short) (accountIdBase + i), (short) (containerIdBase + i), encryptionKeys[i] != null, null, null,
        null);
  }
  MessageFormatInputStream[] putStreams = new MessageFormatInputStream[5];
  for (int i = 0; i < 5; i++) {
    MessageFormatRecord.headerVersionToUse = headerVersions[i];
    if (putFormats[i].equals(PutMessageFormatInputStream.class.getSimpleName())) {
      putStreams[i] =
          new PutMessageFormatInputStream(storeKeys[i], (ByteBuffer) encryptionKeys[i].rewind(), properties[i],
              ByteBuffer.wrap(userMetadata[i]), new ByteBufferInputStream(ByteBuffer.wrap(blob[i])), blob[i].length,
              BlobType.DataBlob);
    } else {
      putStreams[i] = new PutMessageFormatBlobV1InputStream(storeKeys[i], properties[i],
          ByteBuffer.wrap(userMetadata[i]), new ByteBufferInputStream(ByteBuffer.wrap(blob[i])), blob[i].length,
          BlobType.DataBlob);
    }
  }
  int totalStreamSize = (int) Arrays.stream(putStreams).mapToLong(MessageFormatInputStream::getSize).sum();
  ByteBuffer compositeBuf = ByteBuffer.allocate(totalStreamSize);
  ArrayList<ByteBuffer> listbuf = new ArrayList<>();
  for (int i = 0; i < 5; i++) {
    ByteBuffer buf = ByteBuffer.allocate((int) putStreams[i].getSize());
    putStreams[i].read(buf.array());
    compositeBuf.put(buf.array());
    listbuf.add(buf);
  }
  MessageReadSet readSet = new MockMessageReadSet(listbuf, new ArrayList<>(Arrays.asList(storeKeys)));
  MetricRegistry registry = new MetricRegistry();
  MessageFormatMetrics metrics = new MessageFormatMetrics(registry);

  // get all
  MessageFormatSend send = new MessageFormatSend(readSet, MessageFormatFlags.All, metrics, new MockIdFactory());
  Assert.assertEquals(send.sizeInBytes(), totalStreamSize);
  Assert.assertEquals(5, send.getMessageMetadataList().size());
  for (int i = 0; i < 5; i++) {
    Assert.assertEquals(null, send.getMessageMetadataList().get(i));
  }
  ByteBuffer bufresult = ByteBuffer.allocate(totalStreamSize);
  WritableByteChannel channel = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel);
  }
  Assert.assertArrayEquals(compositeBuf.array(), bufresult.array());
  // get blob
  send = new MessageFormatSend(readSet, MessageFormatFlags.Blob, metrics, new MockIdFactory());
  int[] blobRecordSizes = new int[5];
  for (int i = 0; i < 5; i++) {
    blobRecordSizes[i] = (int) (putFormats[i].equals(PutMessageFormatInputStream.class.getSimpleName())
        ? MessageFormatRecord.Blob_Format_V2.getBlobRecordSize(blob[i].length)
        : MessageFormatRecord.Blob_Format_V1.getBlobRecordSize(blob[i].length));
  }
  Assert.assertEquals(send.sizeInBytes(), (long) Arrays.stream(blobRecordSizes).sum());
  bufresult.clear();
  channel = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel);
  }
  int startOffset = 0;
  for (int i = 0; i < 5; i++) {
    DeserializedBlob deserializedBlob = MessageFormatRecord.deserializeAndGetBlobWithVersion(
        new ByteArrayInputStream(bufresult.array(), startOffset, blobRecordSizes[i]));
    Assert.assertEquals(putFormats[i].equals(PutMessageFormatInputStream.class.getSimpleName())
        ? MessageFormatRecord.Blob_Version_V2 : MessageFormatRecord.Blob_Version_V1, deserializedBlob.getVersion());
    Assert.assertEquals(BlobType.DataBlob, deserializedBlob.getBlobData().getBlobType());
    Assert.assertEquals(blob[i].length, deserializedBlob.getBlobData().getSize());
    byte[] readBlob = new byte[blob[i].length];
    deserializedBlob.getBlobData().content().readBytes(readBlob);
    Assert.assertArrayEquals(blob[i], readBlob);
    deserializedBlob.getBlobData().release();
    if (headerVersions[i] == MessageFormatRecord.Message_Header_Version_V1) {
      Assert.assertEquals(null, send.getMessageMetadataList().get(i));
    } else {
      Assert.assertEquals(encryptionKeys[i].rewind(), send.getMessageMetadataList().get(i).getEncryptionKey());
    }
    startOffset += blobRecordSizes[i];
  }
  // get user metadata
  send = new MessageFormatSend(readSet, MessageFormatFlags.BlobUserMetadata, metrics, new MockIdFactory());
  int[] userMetadataSizes = new int[5];
  for (int i = 0; i < 5; i++) {
    userMetadataSizes[i] = MessageFormatRecord.UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(userMetadata[i]));
  }
  Assert.assertEquals(send.sizeInBytes(), (long) Arrays.stream(userMetadataSizes).sum());
  bufresult.clear();
  channel = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel);
  }
  startOffset = 0;
  for (int i = 0; i < 5; i++) {
    DeserializedUserMetadata deserializedUserMetadata = MessageFormatRecord.deserializeAndGetUserMetadataWithVersion(
        new ByteArrayInputStream(bufresult.array(), startOffset, userMetadataSizes[i]));
    Assert.assertEquals(MessageFormatRecord.UserMetadata_Version_V1, deserializedUserMetadata.getVersion());
    verifyBlobUserMetadata(userMetadata[i], deserializedUserMetadata.getUserMetadata());
    if (headerVersions[i] == MessageFormatRecord.Message_Header_Version_V1) {
      Assert.assertEquals(null, send.getMessageMetadataList().get(i));
    } else {
      Assert.assertEquals(encryptionKeys[i].rewind(), send.getMessageMetadataList().get(i).getEncryptionKey());
    }
    startOffset += userMetadataSizes[i];
  }
  // get blob properties
  send = new MessageFormatSend(readSet, MessageFormatFlags.BlobProperties, metrics, new MockIdFactory());
  int[] blobPropertiesSizes = new int[5];
  for (int i = 0; i < 5; i++) {
    blobPropertiesSizes[i] = MessageFormatRecord.BlobProperties_Format_V1.getBlobPropertiesRecordSize(properties[i]);
  }
  Assert.assertEquals(send.sizeInBytes(), (long) Arrays.stream(blobPropertiesSizes).sum());
  bufresult.clear();
  channel = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel);
  }
  startOffset = 0;
  for (int i = 0; i < 5; i++) {
    DeserializedBlobProperties deserializedBlobProperties =
        MessageFormatRecord.deserializeAndGetBlobPropertiesWithVersion(
            new ByteArrayInputStream(bufresult.array(), startOffset, blobPropertiesSizes[i]));
    Assert.assertEquals(MessageFormatRecord.BlobProperties_Version_V1, deserializedBlobProperties.getVersion());
    verifyBlobProperties(properties[i], deserializedBlobProperties.getBlobProperties());
    Assert.assertEquals(null, send.getMessageMetadataList().get(i));
    startOffset += blobPropertiesSizes[i];
  }
  // get blob info
  send = new MessageFormatSend(readSet, MessageFormatFlags.BlobInfo, metrics, new MockIdFactory());
  int[] blobInfoSizes = new int[5];
  for (int i = 0; i < 5; i++) {
    blobInfoSizes[i] = MessageFormatRecord.BlobProperties_Format_V1.getBlobPropertiesRecordSize(properties[i])
        + MessageFormatRecord.UserMetadata_Format_V1.getUserMetadataSize(ByteBuffer.wrap(userMetadata[i]));
  }
  Assert.assertEquals(send.sizeInBytes(), (long) Arrays.stream(blobInfoSizes).sum());
  bufresult.clear();
  channel = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel);
  }
  startOffset = 0;
  for (int i = 0; i < 5; i++) {
    ByteArrayInputStream inputStream = new ByteArrayInputStream(bufresult.array(), startOffset, blobInfoSizes[i]);
    DeserializedBlobProperties deserializedBlobProperties =
        MessageFormatRecord.deserializeAndGetBlobPropertiesWithVersion(inputStream);
    DeserializedUserMetadata deserializedUserMetadata =
        MessageFormatRecord.deserializeAndGetUserMetadataWithVersion(inputStream);
    Assert.assertEquals(MessageFormatRecord.BlobProperties_Version_V1, deserializedBlobProperties.getVersion());
    verifyBlobProperties(properties[i], deserializedBlobProperties.getBlobProperties());
    Assert.assertEquals(MessageFormatRecord.UserMetadata_Version_V1, deserializedUserMetadata.getVersion());
    verifyBlobUserMetadata(userMetadata[i], deserializedUserMetadata.getUserMetadata());
    if (headerVersions[i] == MessageFormatRecord.Message_Header_Version_V1) {
      Assert.assertEquals(null, send.getMessageMetadataList().get(i));
    } else {
      Assert.assertEquals(encryptionKeys[i].rewind(), send.getMessageMetadataList().get(i).getEncryptionKey());
    }
    startOffset += blobInfoSizes[i];
  }
}
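Every flag-specific check above drains the MessageFormatSend through the same three steps: size a destination ByteBuffer, wrap it in a ByteBufferOutputStream behind a WritableByteChannel, and call writeTo until the send reports completion. Below is a minimal sketch of that pattern in isolation, assuming MessageFormatSend implements the com.github.ambry.network.Send interface; the SendDrainer class and drain method are illustrative only.

import com.github.ambry.network.Send;
import com.github.ambry.utils.ByteBufferOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;

final class SendDrainer {
  // Copies the entire payload of a Send into a freshly allocated ByteBuffer.
  // Assumes sizeInBytes() fits in an int, as the test above does.
  static ByteBuffer drain(Send send) throws IOException {
    ByteBuffer result = ByteBuffer.allocate((int) send.sizeInBytes());
    WritableByteChannel channel = Channels.newChannel(new ByteBufferOutputStream(result));
    while (!send.isSendComplete()) {
      send.writeTo(channel);
    }
    result.flip();  // prepare the buffer for reading by the caller
    return result;
  }
}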
Use of com.github.ambry.utils.ByteBufferOutputStream in project ambry by LinkedIn.
The class HardDeleteRecoveryMetadata, method getBlobRecordInfo.
private DeserializedBlob getBlobRecordInfo(MessageReadSet readSet, int readSetIndex, int relativeOffset,
    long blobRecordSize) throws MessageFormatException, IOException {
  /* Read the field from the channel */
  ByteBuffer blobRecord = ByteBuffer.allocate((int) blobRecordSize);
  readSet.writeTo(readSetIndex, Channels.newChannel(new ByteBufferOutputStream(blobRecord)), relativeOffset,
      blobRecordSize);
  blobRecord.flip();
  return deserializeAndGetBlobWithVersion(new ByteBufferInputStream(blobRecord));
}
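The same copy step recurs in extractEncryptionKey further below: allocate a buffer sized to the record, stream the byte range out of the read set through a channel backed by a ByteBufferOutputStream, then flip before deserializing. Here is a hedged sketch that isolates just that step, assuming MessageReadSet lives in com.github.ambry.store; the ReadSetSlices class and copySlice method are made up for illustration.

import com.github.ambry.store.MessageReadSet;
import com.github.ambry.utils.ByteBufferOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;

final class ReadSetSlices {
  // Copies `size` bytes of message `index`, starting at `relativeOffset`, into an in-memory buffer.
  static ByteBuffer copySlice(MessageReadSet readSet, int index, long relativeOffset, long size) throws IOException {
    ByteBuffer slice = ByteBuffer.allocate((int) size);
    readSet.writeTo(index, Channels.newChannel(new ByteBufferOutputStream(slice)), relativeOffset, size);
    slice.flip();  // switch from writing into the buffer to reading out of it
    return slice;
  }
}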
Use of com.github.ambry.utils.ByteBufferOutputStream in project ambry by LinkedIn.
The class MessageReadSetIndexInputStream, method read.
// Reads up to len bytes of the message at indexToRead into b[off..), starting from currentOffset.
@Override
public int read(byte[] b, int off, int len) throws IOException {
  if (off < 0 || len < 0 || len > b.length - off) {
    throw new IndexOutOfBoundsException();
  }
  if (len == 0) {
    return 0;
  }
  if (currentOffset >= messageReadSet.sizeInBytes(indexToRead)) {
    return -1;
  }
  ByteBuffer buf = ByteBuffer.wrap(b);
  buf.position(off);
  ByteBufferOutputStream bufferStream = new ByteBufferOutputStream(buf);
  long sizeToRead = Math.min(len, messageReadSet.sizeInBytes(indexToRead) - currentOffset);
  long bytesWritten = messageReadSet.writeTo(indexToRead, Channels.newChannel(bufferStream), currentOffset, sizeToRead);
  currentOffset += bytesWritten;
  return (int) bytesWritten;
}
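The override above writes channel output straight into the caller's array by wrapping it in a ByteBuffer positioned at off. ByteBuffer.wrap(b, off, len) achieves the same thing and additionally caps the writable region at len; a small self-contained sketch of that adapter follows, with hypothetical class and method names.

import com.github.ambry.utils.ByteBufferOutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;

final class ArraySliceChannels {
  // Returns a channel whose writes land directly in b[off..off+len), with no intermediate copy.
  static WritableByteChannel forSlice(byte[] b, int off, int len) {
    ByteBuffer slice = ByteBuffer.wrap(b, off, len);  // position = off, limit = off + len
    return Channels.newChannel(new ByteBufferOutputStream(slice));
  }
}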
Use of com.github.ambry.utils.ByteBufferOutputStream in project ambry by LinkedIn.
The class MessageReadSetIndexInputStream, method read.
// Reads a single byte of the message at indexToRead, returned as an unsigned value in [0, 255].
@Override
public int read() throws IOException {
  if (currentOffset >= messageReadSet.sizeInBytes(indexToRead)) {
    return -1;
  }
  ByteBuffer buf = ByteBuffer.allocate(1);
  ByteBufferOutputStream bufferStream = new ByteBufferOutputStream(buf);
  long bytesRead = messageReadSet.writeTo(indexToRead, Channels.newChannel(bufferStream), currentOffset, 1);
  if (bytesRead != 1) {
    throw new IllegalStateException("Number of bytes read for read from messageReadSet should be 1");
  }
  currentOffset++;
  buf.flip();
  return buf.get() & 0xFF;
}
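The final & 0xFF is easy to overlook: a Java byte is signed, while InputStream.read() must return a value in the range 0-255, reserving -1 for end of stream. A tiny self-contained illustration (class and variable names are only for the demo):

final class UnsignedByteDemo {
  public static void main(String[] args) {
    byte raw = (byte) 0xFF;   // stored as -1 in a signed Java byte
    int unmasked = raw;       // -1: indistinguishable from the end-of-stream sentinel
    int masked = raw & 0xFF;  // 255: the unsigned value InputStream.read() is required to return
    System.out.println(unmasked + " vs " + masked);
  }
}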
Use of com.github.ambry.utils.ByteBufferOutputStream in project ambry by LinkedIn.
The class MessageReadSetIndexInputStream, method extractEncryptionKey.
/**
* Extract the encryption key from the message at the given index from the readSet.
* @param readSetIndex the index in the readSet for the message from which the encryption key has to be extracted.
* @param encryptionKeyRelativeOffset the relative offset of the encryption key record in the message.
* @param encryptionKeySize the size of the encryption key record in the message.
* @return the extracted encryption key.
* @throws IOException if an IO error is encountered while deserializing the message.
* @throws MessageFormatException if a Message Format error is encountered while deserializing the message.
*/
private ByteBuffer extractEncryptionKey(int readSetIndex, int encryptionKeyRelativeOffset, int encryptionKeySize)
    throws IOException, MessageFormatException {
  ByteBuffer serializedEncryptionKeyRecord = ByteBuffer.allocate(encryptionKeySize);
  readSet.writeTo(readSetIndex, Channels.newChannel(new ByteBufferOutputStream(serializedEncryptionKeyRecord)),
      encryptionKeyRelativeOffset, encryptionKeySize);
  serializedEncryptionKeyRecord.flip();
  return deserializeBlobEncryptionKey(new ByteBufferInputStream(serializedEncryptionKeyRecord));
}