Use of com.github.ambry.protocol.PutRequest in project ambry by linkedin.
The class ServerHardDeleteTest, method putBlob.
/**
 * Uploads a single blob to an Ambry server node.
 * @param blobId the {@link BlobId} that needs to be put
 * @param properties the {@link BlobProperties} of the blob being uploaded
 * @param encryptionKey the encryption key of the blob; {@code null} for unencrypted blobs
 * @param usermetadata the user metadata of the blob being uploaded
 * @param data the blob content of the blob being uploaded
 * @param channel the {@link ConnectedChannel} to use to send and receive data
 * @throws IOException
 */
void putBlob(BlobId blobId, BlobProperties properties, byte[] encryptionKey, byte[] usermetadata, byte[] data,
    ConnectedChannel channel) throws IOException {
  PutRequest putRequest0 =
      new PutRequest(1, "client1", blobId, properties, ByteBuffer.wrap(usermetadata), Unpooled.wrappedBuffer(data),
          properties.getBlobSize(), BlobType.DataBlob, encryptionKey == null ? null : ByteBuffer.wrap(encryptionKey));
  PutResponse response0 = PutResponse.readFrom(channel.sendAndReceive(putRequest0).getInputStream());
  Assert.assertEquals(ServerErrorCode.No_Error, response0.getError());
}
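For reference, a minimal sketch of how this helper might be driven, assuming it sits in the same test class so the Ambry types, JUnit and the putBlob helper above are already in scope; the uploadTestBlob name, the 100-byte user metadata and the random payload are illustrative, not taken from the source.

// Illustrative only: generates random user metadata and content sized to the
// blob properties, then uploads an unencrypted blob via the putBlob helper above.
void uploadTestBlob(BlobId blobId, BlobProperties properties, ConnectedChannel channel) throws IOException {
  java.util.Random random = new java.util.Random();
  byte[] usermetadata = new byte[100];
  byte[] data = new byte[(int) properties.getBlobSize()];
  random.nextBytes(usermetadata);
  random.nextBytes(data);
  // a null encryptionKey means the PutRequest carries no blob encryption key
  putBlob(blobId, properties, null, usermetadata, data, channel);
}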
Use of com.github.ambry.protocol.PutRequest in project ambry by linkedin.
The class ServerHardDeleteTest, method putBlob.
/**
 * Uploads a single blob to an Ambry server node.
 * @param blobId the {@link BlobId} that needs to be put
 * @param properties the {@link BlobProperties} of the blob being uploaded
 * @param encryptionKey the encryption key of the blob; {@code null} for unencrypted blobs
 * @param usermetadata the user metadata of the blob being uploaded
 * @param data the blob content of the blob being uploaded
 * @param channel the {@link BlockingChannel} to use to send and receive data
 * @throws IOException
 */
void putBlob(BlobId blobId, BlobProperties properties, byte[] encryptionKey, byte[] usermetadata, byte[] data,
    BlockingChannel channel) throws IOException {
  PutRequest putRequest0 =
      new PutRequest(1, "client1", blobId, properties, ByteBuffer.wrap(usermetadata), ByteBuffer.wrap(data),
          properties.getBlobSize(), BlobType.DataBlob, encryptionKey == null ? null : ByteBuffer.wrap(encryptionKey));
  channel.send(putRequest0);
  InputStream putResponseStream = channel.receive().getInputStream();
  PutResponse response0 = PutResponse.readFrom(new DataInputStream(putResponseStream));
  Assert.assertEquals(ServerErrorCode.No_Error, response0.getError());
}
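Both putBlob variants, and testLatePutRequest in the next example, repeat the same send/receive/readFrom exchange. As a hedged sketch, that exchange could be factored into a small helper that uses only the calls already shown above; the sendPut name is illustrative, not from the source.

// Illustrative helper, not part of the source: sends a PutRequest over a
// BlockingChannel and deserializes the PutResponse from the returned stream.
private static PutResponse sendPut(BlockingChannel channel, PutRequest request) throws IOException {
  channel.send(request);
  InputStream putResponseStream = channel.receive().getInputStream();
  return PutResponse.readFrom(new DataInputStream(putResponseStream));
}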
Use of com.github.ambry.protocol.PutRequest in project ambry by linkedin.
The class ServerTestUtil, method testLatePutRequest.
/**
* Tests the case where a late PutRequest is sent to each server when it already has a record for the given BlobId.
* The expected error from each server should be the given error.
* @param blobId the {@link BlobId} of the blob to be put.
* @param properties the {@link BlobProperties} of the blob to be put.
* @param usermetadata the usermetadata of the blob to be put.
* @param data the blob data of the blob to be put.
* @param encryptionKey the encryption key of the blob; could be null for non-encrypted blobs.
* @param channelToDatanode1 the {@link BlockingChannel} to the Datanode1.
* @param channelToDatanode2 the {@link BlockingChannel} to the Datanode2.
* @param channelToDatanode3 the {@link BlockingChannel} to the Datanode3.
* @param expectedErrorCode the {@link ServerErrorCode} that is expected from every Datanode.
* @throws IOException
*/
private static void testLatePutRequest(BlobId blobId, BlobProperties properties, byte[] usermetadata, byte[] data,
    byte[] encryptionKey, BlockingChannel channelToDatanode1, BlockingChannel channelToDatanode2,
    BlockingChannel channelToDatanode3, ServerErrorCode expectedErrorCode) throws IOException {
  // Send put requests for an already existing blobId with the exact same blob to simulate requests arriving late.
  PutRequest latePutRequest1 =
      new PutRequest(1, "client1", blobId, properties, ByteBuffer.wrap(usermetadata), ByteBuffer.wrap(data),
          properties.getBlobSize(), BlobType.DataBlob, encryptionKey != null ? ByteBuffer.wrap(encryptionKey) : null);
  PutRequest latePutRequest2 =
      new PutRequest(1, "client2", blobId, properties, ByteBuffer.wrap(usermetadata), ByteBuffer.wrap(data),
          properties.getBlobSize(), BlobType.DataBlob, encryptionKey != null ? ByteBuffer.wrap(encryptionKey) : null);
  PutRequest latePutRequest3 =
      new PutRequest(1, "client3", blobId, properties, ByteBuffer.wrap(usermetadata), ByteBuffer.wrap(data),
          properties.getBlobSize(), BlobType.DataBlob, encryptionKey != null ? ByteBuffer.wrap(encryptionKey) : null);
  channelToDatanode1.send(latePutRequest1);
  InputStream putResponseStream = channelToDatanode1.receive().getInputStream();
  PutResponse response = PutResponse.readFrom(new DataInputStream(putResponseStream));
  assertEquals(expectedErrorCode, response.getError());
  channelToDatanode2.send(latePutRequest2);
  putResponseStream = channelToDatanode2.receive().getInputStream();
  response = PutResponse.readFrom(new DataInputStream(putResponseStream));
  assertEquals(expectedErrorCode, response.getError());
  channelToDatanode3.send(latePutRequest3);
  putResponseStream = channelToDatanode3.receive().getInputStream();
  response = PutResponse.readFrom(new DataInputStream(putResponseStream));
  assertEquals(expectedErrorCode, response.getError());
}
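The three per-datanode blocks above follow one pattern; as a sketch only, and assuming java.util.Arrays is imported, the same behavior could be expressed as a loop over the channels.

// Sketch only: equivalent to the three per-datanode blocks above, written as a loop.
int clientIndex = 1;
for (BlockingChannel channel : Arrays.asList(channelToDatanode1, channelToDatanode2, channelToDatanode3)) {
  PutRequest latePutRequest = new PutRequest(1, "client" + clientIndex++, blobId, properties,
      ByteBuffer.wrap(usermetadata), ByteBuffer.wrap(data), properties.getBlobSize(), BlobType.DataBlob,
      encryptionKey != null ? ByteBuffer.wrap(encryptionKey) : null);
  channel.send(latePutRequest);
  PutResponse response = PutResponse.readFrom(new DataInputStream(channel.receive().getInputStream()));
  assertEquals(expectedErrorCode, response.getError());
}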
Use of com.github.ambry.protocol.PutRequest in project ambry by linkedin.
The class MockReadableStreamChannel, method verifyCompositeBlob.
/**
 * Verifies a composite blob's content and user metadata, chunk by chunk.
 * @param properties {@link BlobProperties} of the blob
 * @param originalPutContent original put content
 * @param originalUserMetadata original user metadata
 * @param dataBlobIds {@link List} of {@link StoreKey}s of the composite blob in context
 * @param request {@link com.github.ambry.protocol.PutRequest.ReceivedPutRequest} to fetch info from
 * @param serializedRequests the mapping from blob ids to their corresponding serialized {@link PutRequest}.
 * @throws Exception
 */
private void verifyCompositeBlob(BlobProperties properties, byte[] originalPutContent, byte[] originalUserMetadata,
    List<StoreKey> dataBlobIds, PutRequest.ReceivedPutRequest request, HashMap<String, ByteBuffer> serializedRequests)
    throws Exception {
  StoreKey lastKey = dataBlobIds.get(dataBlobIds.size() - 1);
  byte[] content = new byte[(int) request.getBlobProperties().getBlobSize()];
  AtomicInteger offset = new AtomicInteger(0);
  for (StoreKey key : dataBlobIds) {
    PutRequest.ReceivedPutRequest dataBlobPutRequest = deserializePutRequest(serializedRequests.get(key.getID()));
    AtomicInteger dataBlobLength = new AtomicInteger((int) dataBlobPutRequest.getBlobSize());
    InputStream dataBlobStream = dataBlobPutRequest.getBlobStream();
    if (!properties.isEncrypted()) {
      Utils.readBytesFromStream(dataBlobStream, content, offset.get(), dataBlobLength.get());
      Assert.assertArrayEquals("UserMetadata mismatch", originalUserMetadata,
          dataBlobPutRequest.getUsermetadata().array());
    } else {
      byte[] dataBlobContent = Utils.readBytesFromStream(dataBlobStream, dataBlobLength.get());
      // run() is called directly instead of spinning up a thread and calling start() because any exceptions or
      // assertion failures on a non-main thread would not fail the test.
      new DecryptJob(dataBlobPutRequest.getBlobId(), dataBlobPutRequest.getBlobEncryptionKey().duplicate(),
          ByteBuffer.wrap(dataBlobContent), dataBlobPutRequest.getUsermetadata().duplicate(), cryptoService, kms,
          new CryptoJobMetricsTracker(metrics.decryptJobMetrics), (result, exception) -> {
        Assert.assertNull("Exception should not be thrown", exception);
        Assert.assertEquals("BlobId mismatch", dataBlobPutRequest.getBlobId(), result.getBlobId());
        Assert.assertArrayEquals("UserMetadata mismatch", originalUserMetadata,
            result.getDecryptedUserMetadata().array());
        dataBlobLength.set(result.getDecryptedBlobContent().remaining());
        result.getDecryptedBlobContent().get(content, offset.get(), dataBlobLength.get());
      }).run();
    }
    if (key != lastKey) {
      Assert.assertEquals("all chunks except last should be fully filled", chunkSize, dataBlobLength.get());
    } else {
      Assert.assertEquals("Last chunk should be of non-zero length and equal to the length of the remaining bytes",
          (originalPutContent.length - 1) % chunkSize + 1, dataBlobLength.get());
    }
    offset.addAndGet(dataBlobLength.get());
    Assert.assertEquals("dataBlobStream should have no more data", -1, dataBlobStream.read());
    notificationSystem.verifyNotification(key.getID(), NotificationBlobType.DataChunk,
        dataBlobPutRequest.getBlobProperties());
  }
  Assert.assertArrayEquals("Input blob and written blob should be the same", originalPutContent, content);
}
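The last-chunk assertion relies on the identity (totalLength - 1) % chunkSize + 1, which gives the remainder of the division except that an exact multiple of chunkSize maps to a full chunk rather than zero. A quick standalone check with arbitrary example values:

// Standalone illustration of the last-chunk length formula used above.
public class LastChunkLengthCheck {
  public static void main(String[] args) {
    int chunkSize = 4;
    for (int totalLength : new int[]{5, 7, 8}) {
      int lastChunkLength = (totalLength - 1) % chunkSize + 1;
      System.out.println(totalLength + " bytes -> last chunk of " + lastChunkLength); // prints 1, 3, 4
    }
  }
}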
Use of com.github.ambry.protocol.PutRequest in project ambry by linkedin.
The class InMemoryCloudDestinationErrorSimulationTest, method doPut.
/**
* Do a put directly to the cloud mock servers.
* @param partitionId partition id
* @param expectedCode expected response error code
* @return the blob id
* @throws Exception
*/
private BlobId doPut(PartitionId partitionId, ServerErrorCode expectedCode) throws Exception {
  int blobSize = 4096;
  // directly put a DataBlob
  BlobType blobType = BlobType.DataBlob;
  BlobProperties blobProperties =
      new BlobProperties(blobSize, "serviceId", "memberId", "contentType", false, Utils.Infinite_Time,
          Utils.getRandomShort(random), Utils.getRandomShort(random), false, null, null, null);
  byte[] userMetadata = new byte[10];
  random.nextBytes(userMetadata);
  byte[] putContent = new byte[blobSize];
  random.nextBytes(putContent);
  ByteBuf blobContent = PooledByteBufAllocator.DEFAULT.heapBuffer(blobSize);
  blobContent.writeBytes(putContent);
  BlobId blobId = new BlobId(routerConfig.routerBlobidCurrentVersion, BlobId.BlobIdType.NATIVE,
      mockClusterMap.getLocalDatacenterId(), blobProperties.getAccountId(), blobProperties.getContainerId(),
      partitionId, blobProperties.isEncrypted(),
      blobType == BlobType.MetadataBlob ? BlobId.BlobDataType.METADATA : BlobId.BlobDataType.DATACHUNK);
  ByteBuffer userMetadataBuf = ByteBuffer.wrap(userMetadata);
  // send to the cloud destinations
  PutRequest request = new PutRequest(random.nextInt(), "clientId", blobId, blobProperties,
      userMetadataBuf.duplicate(), blobContent.retainedDuplicate(), blobContent.readableBytes(), blobType, null);
  RequestInfo requestInfo = new RequestInfo(hostname, port, request, replica, null);
  ResponseInfo responseInfo = sendAndWaitForResponses(requestInfo);
  Assert.assertEquals("doPut should succeed.", responseInfo.getError(), null);
  // PutResponse response = PutResponse.readFrom(new NettyByteBufDataInputStream(responseInfo.content()));
  PutResponse response = (PutResponse) RouterUtils.mapToReceivedResponse((PutResponse) responseInfo.getResponse());
  Assert.assertEquals("The PutResponse is not expected.", expectedCode, response.getError());
  request.release();
  blobContent.release();
  return blobId;
}
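doPut pairs blobContent.retainedDuplicate() with request.release() and blobContent.release(). A minimal, self-contained Netty sketch of that reference-counting pattern follows; the class name and the 16-byte buffer size are arbitrary choices for illustration.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;

// Sketch only: retainedDuplicate() bumps the shared reference count, so both the
// duplicate handed to the PutRequest and the original buffer must be released
// before the pooled memory is returned.
public class RefCountSketch {
  public static void main(String[] args) {
    ByteBuf blobContent = PooledByteBufAllocator.DEFAULT.heapBuffer(16);
    ByteBuf retained = blobContent.retainedDuplicate();
    System.out.println(blobContent.refCnt()); // 2: the duplicate shares the count
    retained.release();   // drops the reference held for the duplicate
    blobContent.release(); // count reaches 0, the buffer goes back to the pool
  }
}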