Use of com.github.ambry.commons.ByteBufferReadableStreamChannel in project ambry by LinkedIn.
From the class PostBlobHandlerTest, method uploadChunksViaRouter.
// stitchedUploadTest() helpers
/**
 * Uploads one blob per requested chunk size directly through the router and collects the
 * resulting metadata.
 * @param creationTimeMs the creation time to stamp on every chunk's {@link BlobProperties}.
 * @param container the {@link Container} that will own the uploaded chunks.
 * @param chunkSizes the size, in bytes, of each chunk to upload.
 * @return the {@link ChunkInfo} entries describing the uploaded chunks, in upload order.
 */
private List<ChunkInfo> uploadChunksViaRouter(long creationTimeMs, Container container, int... chunkSizes) throws Exception {
  final long ttlSecs = TimeUnit.DAYS.toSeconds(1);
  List<ChunkInfo> uploadedChunks = new ArrayList<>(chunkSizes.length);
  for (int size : chunkSizes) {
    byte[] payload = TestUtils.getRandomBytes(size);
    BlobProperties properties =
        new BlobProperties(-1, SERVICE_ID, OWNER_ID, CONTENT_TYPE, !container.isCacheable(), ttlSecs, creationTimeMs,
            container.getParentAccountId(), container.getId(), container.isEncrypted(), null, null, null);
    ByteBufferReadableStreamChannel channel = new ByteBufferReadableStreamChannel(ByteBuffer.wrap(payload));
    String blobId = router.putBlob(properties, null, channel, new PutBlobOptionsBuilder().chunkUpload(true).build())
        .get(TIMEOUT_SECS, TimeUnit.SECONDS);
    uploadedChunks.add(new ChunkInfo(blobId, size, Utils.addSecondsToEpochTime(creationTimeMs, ttlSecs)));
  }
  return uploadedChunks;
}
Use of com.github.ambry.commons.ByteBufferReadableStreamChannel in project ambry by LinkedIn.
From the class NamedBlobPutHandlerTest, method uploadChunksViaRouter.
/**
 * Puts each requested chunk through the router directly (as a chunk upload) and returns the
 * metadata needed to later stitch them together.
 * @param creationTimeMs the creation time to set on each chunk's {@link BlobProperties}.
 * @param container the {@link Container} to create the chunks in.
 * @param chunkSizes the sizes, in bytes, of the chunks to upload.
 * @return a list with one {@link ChunkInfo} per uploaded chunk, in upload order.
 */
private List<ChunkInfo> uploadChunksViaRouter(long creationTimeMs, Container container, int... chunkSizes) throws Exception {
  final long chunkTtlSecs = TimeUnit.DAYS.toSeconds(1);
  // The expiry is the same for every chunk, so compute it once up front.
  final long expiresAtMs = Utils.addSecondsToEpochTime(creationTimeMs, chunkTtlSecs);
  List<ChunkInfo> uploaded = new ArrayList<>(chunkSizes.length);
  for (int i = 0; i < chunkSizes.length; i++) {
    int currentSize = chunkSizes[i];
    byte[] randomContent = TestUtils.getRandomBytes(currentSize);
    BlobProperties props =
        new BlobProperties(-1, SERVICE_ID, OWNER_ID, CONTENT_TYPE, !container.isCacheable(), chunkTtlSecs,
            creationTimeMs, container.getParentAccountId(), container.getId(), container.isEncrypted(), null, null,
            null);
    String chunkBlobId =
        router.putBlob(props, null, new ByteBufferReadableStreamChannel(ByteBuffer.wrap(randomContent)),
            new PutBlobOptionsBuilder().chunkUpload(true).build()).get(TIMEOUT_SECS, TimeUnit.SECONDS);
    uploaded.add(new ChunkInfo(chunkBlobId, currentSize, expiresAtMs));
  }
  return uploaded;
}
Use of com.github.ambry.commons.ByteBufferReadableStreamChannel in project ambry by LinkedIn.
From the class GetBlobOperationTest, method doPut.
/**
 * Performs a single put of freshly generated random content and records the returned blob id.
 * Tests use the recorded id to perform gets; tests asserting success compare the fetched blob
 * against the content generated here.
 * @throws Exception if the put fails or the blob id string cannot be parsed.
 */
private void doPut() throws Exception {
  // NOTE: order matters below — blobProperties consumes two shorts from `random` before the
  // metadata/content fills, so the RNG sequence must match the original exactly.
  blobProperties =
      new BlobProperties(-1, "serviceId", "memberId", "contentType", false, Utils.Infinite_Time,
          Utils.getRandomShort(random), Utils.getRandomShort(random), testEncryption, null, null, null);
  userMetadata = new byte[10];
  random.nextBytes(userMetadata);
  putContent = new byte[blobSize];
  random.nextBytes(putContent);
  ReadableStreamChannel sourceChannel = new ByteBufferReadableStreamChannel(ByteBuffer.wrap(putContent));
  // TODO fix null quota charge event listener
  blobIdStr = router.putBlob(blobProperties, userMetadata, sourceChannel, new PutBlobOptionsBuilder().build()).get();
  blobId = RouterUtils.getBlobIdFromString(blobIdStr, mockClusterMap);
}
Use of com.github.ambry.commons.ByteBufferReadableStreamChannel in project ambry by LinkedIn.
From the class NonBlockingRouterTestBase, method setOperationParams.
/**
 * Prepares the blob properties, user metadata, content bytes and readable channel needed for a
 * subsequent {@link Router#putBlob} call.
 * @param putContentSize the number of random content bytes to generate.
 * @param ttlSecs the TTL in seconds to set on the blob.
 */
protected void setOperationParams(int putContentSize, long ttlSecs) {
  // Java evaluates these left-to-right, matching the original two draws from TestUtils.RANDOM.
  short accountId = Utils.getRandomShort(TestUtils.RANDOM);
  short containerId = Utils.getRandomShort(TestUtils.RANDOM);
  putBlobProperties =
      new BlobProperties(-1, "serviceId", "memberId", "contentType", false, ttlSecs, accountId, containerId,
          testEncryption, null, null, null);
  putUserMetadata = new byte[USER_METADATA_SIZE];
  random.nextBytes(putUserMetadata);
  putContent = new byte[putContentSize];
  random.nextBytes(putContent);
  ByteBuffer wrappedContent = ByteBuffer.wrap(putContent);
  putChannel = new ByteBufferReadableStreamChannel(wrappedContent);
}
Use of com.github.ambry.commons.ByteBufferReadableStreamChannel in project ambry by LinkedIn.
From the class MockReadableStreamChannel, method submitPut.
/**
 * Kicks off one put (or stitch) operation per entry in {@code requestAndResultsList}, each on
 * its own daemon thread. Called by {@link #submitPutsAndAssertSuccess(boolean)} and
 * {@link #submitPutsAndAssertFailure(Exception, boolean, boolean, boolean)}.
 * @return a {@link CountDownLatch} that reaches zero once every submitted operation has
 *         completed (or its await timed out).
 */
private CountDownLatch submitPut() throws Exception {
  notificationSystem.blobCreatedEvents.clear();
  final CountDownLatch completionLatch = new CountDownLatch(requestAndResultsList.size());
  // Some tests (e.g. testConcurrentPuts) must reuse the same router, so only re-create on request.
  if (instantiateNewRouterForPuts) {
    router = getNonBlockingRouter();
  }
  for (final RequestAndResult request : requestAndResultsList) {
    Runnable task = () -> {
      try {
        // The channel is built unconditionally, even on the stitch path, matching prior behavior.
        ReadableStreamChannel channel = new ByteBufferReadableStreamChannel(ByteBuffer.wrap(request.putContent));
        if (request.chunksToStitch != null) {
          request.result = (FutureResult<String>) router.stitchBlob(request.putBlobProperties,
              request.putUserMetadata, request.chunksToStitch);
        } else {
          request.result = (FutureResult<String>) router.putBlob(request.putBlobProperties, request.putUserMetadata,
              channel, request.options);
        }
        request.result.await(MAX_WAIT_MS, TimeUnit.MILLISECONDS);
      } catch (Exception e) {
        // Record the failure on the result so callers can assert on it.
        request.result = new FutureResult<>();
        request.result.done(null, e);
      } finally {
        completionLatch.countDown();
      }
    };
    Utils.newThread(task, true).start();
  }
  return completionLatch;
}
Aggregations