Example usage of io.pravega.common.util.ByteArraySegment in the pravega project:
class ContainerReadIndexTests, method testLargeAppends.
/**
 * Tests the basic append-read functionality of the ContainerReadIndex using appends larger than the maximum allowed
 * by the Cache Storage.
 */
@Test
public void testLargeAppends() throws Exception {
    final int maxEntryLength = 64 * 1024;
    final int appendCount = 10;
    final Random random = new Random(0);
    @Cleanup
    TestContext context = new TestContext();
    context.cacheStorage.maxEntryLength = maxEntryLength;
    long segmentId = createSegments(context).get(0);
    HashMap<Long, ByteArrayOutputStream> segmentContents = new HashMap<>();

    // Issue a number of writes; each one exceeds the cache's max entry length by a growing margin.
    for (int writeIndex = 0; writeIndex < appendCount; writeIndex++) {
        byte[] buffer = new byte[maxEntryLength + writeIndex * 10240];
        random.nextBytes(buffer);
        val data = new ByteArraySegment(buffer);
        appendSingleWrite(segmentId, data, context);
        recordAppend(segmentId, data, segmentContents);
    }

    // Verify that all appended data can be read back correctly.
    checkReadIndex("PostAppend", segmentContents, context);
}
Example usage of io.pravega.common.util.ByteArraySegment in the pravega project:
class ContainerReadIndexTests, method testConcurrentReadTransactionStorageMerge.
/**
 * Tests the following scenario, where the Read Index has a read from a portion in a parent segment where a transaction
 * was just merged (fully in storage), but the read request might result in either an ObjectClosedException or
 * StreamSegmentNotExistsException:
 * * A Parent Segment has a Transaction with some data in it, and at least 1 byte of data not in cache.
 * * The Transaction is begin-merged in the parent (Tier 1 only).
 * * A Read Request is issued to the Parent for the range of data from the Transaction, which includes the 1 byte not in cache.
 * * The Transaction is fully merged (Tier 2).
 * * The Read Request is invoked and its content requested. This should correctly retrieve the data from the Parent
 * Segment in Storage, and not attempt to access the now-defunct Transaction segment.
 */
@Test
public void testConcurrentReadTransactionStorageMerge() throws Exception {
    CachePolicy cachePolicy = new CachePolicy(1, Duration.ZERO, Duration.ofMillis(1));
    @Cleanup
    TestContext context = new TestContext(DEFAULT_CONFIG, cachePolicy);

    // Set up a parent segment and one transaction, both backed by storage.
    long parentId = createSegment(0, context);
    long transactionId = createTransaction(1, context);
    createSegmentsInStorage(context);
    String transactionName = context.metadata.getStreamSegmentMetadata(transactionId).getName();
    ByteArraySegment writeData = getAppendData(transactionName, transactionId, 0, 0);

    // Begin-merge and issue the read, then complete the merge before the read's content is fetched.
    ReadResultEntry entry = setupMergeRead(parentId, transactionId, writeData.getCopy(), context);
    context.readIndex.completeMerge(parentId, transactionId);

    BufferView contents = entry.getContent().get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    Assert.assertArrayEquals("Unexpected data read from parent segment.", writeData.getCopy(), contents.getCopy());
}
Example usage of io.pravega.common.util.ByteArraySegment in the pravega project:
class InMemoryStorageTests, method testReplace.
/**
 * Verifies that a freshly-initialized {@link InMemoryStorage} rejects replace operations and that
 * withReplaceSupport() returns the same storage instance.
 */
@Test
public void testReplace() throws Exception {
    @Cleanup
    val storage = new InMemoryStorage();
    storage.initialize(1);
    Assert.assertFalse(storage.supportsReplace());
    val handle = storage.create("segment");

    // replace() must be rejected while replace support is disabled.
    AssertExtensions.assertThrows(
            "",
            () -> storage.replace(handle, new ByteArraySegment(new byte[1])),
            ex -> ex instanceof UnsupportedOperationException);

    // withReplaceSupport() should hand back the very same instance.
    Assert.assertSame(storage, storage.withReplaceSupport());
}
Example usage of io.pravega.common.util.ByteArraySegment in the pravega project:
class DurableDataLogRepairCommand, method createOperationContents.
/**
 * Provides two ways of creating the payload of {@link Operation}s with binary content (MetadataCheckpointOperation,
 * StorageMetadataCheckpointOperation, StreamSegmentAppendOperation): i) zero, which means to provide a content of
 * a defined length consisting of just 0s, ii) file, which will read the contents of a specified file and use it as
 * payload for the {@link Operation}. Loops until valid content has been produced or the user quits.
 *
 * @return Binary contents for the {@link Operation}.
 * @throws AbortedUserOperation if the user selects the "quit" option.
 */
@VisibleForTesting
ByteArraySegment createOperationContents() {
    ByteArraySegment content = null;
    do {
        try {
            switch(getStringUserInput("You are about to create the content for the new Operation. " + "The available options are i) generating 0s as payload (zero), " + "ii) load the contents from a provided file (file), iii) quit: [zero|file|quit]")) {
                case "zero":
                    int contentLength = getIntUserInput("Input length of the Operation content: ");
                    // Guard against NegativeArraySizeException from new byte[contentLength], which would
                    // otherwise surface as an unhelpful generic error below.
                    if (contentLength < 0) {
                        output("Content length must be non-negative.");
                        break;
                    }
                    content = new ByteArraySegment(new byte[contentLength]);
                    break;
                case "file":
                    String path = getStringUserInput("Input the path for the file to use as Operation content:");
                    content = new ByteArraySegment(Files.readAllBytes(Path.of(path)));
                    break;
                case "quit":
                    throw new AbortedUserOperation();
                default:
                    // Fixed: option list previously omitted "quit", contradicting the prompt above.
                    output("Wrong option. Please, select one of the following options: [zero|file|quit]");
            }
        } catch (AbortedUserOperation ex) {
            // Propagate the user's abort so the caller can cancel the whole repair operation.
            output("Content generation operation aborted by user.");
            throw ex;
        } catch (Exception ex) {
            // Best-effort: report the problem (e.g., unreadable file) and re-prompt the user.
            outputError("Some problem has happened.");
            outputException(ex);
        }
    } while (content == null);
    return content;
}
Example usage of io.pravega.common.util.ByteArraySegment in the pravega project:
class ContainerMetadataSerializer, method deserialize.
/**
 * Deserializes the given bytes into a {@link SegmentInfo} and renders its segment id, properties and
 * attributes as a human-readable string.
 *
 * @param serializedValue The serialized {@link SegmentInfo}.
 * @return A string listing the segment's fields.
 */
@Override
public String deserialize(ByteBuffer serializedValue) {
    StringBuilder builder = new StringBuilder();
    try {
        // Decode the raw bytes back into a SegmentInfo.
        SegmentInfo data = SERIALIZER.deserialize(new ByteArraySegment(serializedValue).getReader());
        appendField(builder, SEGMENT_ID, String.valueOf(data.getSegmentId()));

        // Render every well-known segment property...
        SegmentProperties properties = data.getProperties();
        SEGMENT_PROPERTIES_FIELD_MAP.forEach(
                (name, extractor) -> appendField(builder, name, String.valueOf(extractor.apply(properties))));

        // ...followed by each attribute as an id/value pair.
        properties.getAttributes().forEach(
                (attributeId, attributeValue) -> appendField(builder, attributeId.toString(), attributeValue.toString()));
    } catch (IOException e) {
        // Deserialization failure is not recoverable here; rethrow with the cause preserved.
        throw new RuntimeException(e);
    }
    return builder.toString();
}
Aggregations