use of org.junit.rules.Timeout in project pravega by pravega.
the class ReadOnlySegmentContainerTests method populate.
private byte[] populate(int length, int truncationOffset, TestContext context) {
val rnd = new Random(0);
byte[] data = new byte[length];
rnd.nextBytes(data);
context.storage.create(SEGMENT_NAME, TIMEOUT)
        .thenCompose(handle -> context.storage.write(handle, 0, new ByteArrayInputStream(data), data.length, TIMEOUT)
                .thenCompose(v -> context.storage.truncate(handle, truncationOffset, TIMEOUT)))
        .join();
return data;
}
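A hedged usage sketch: populate returns the full contents written to Storage, while only the bytes at or beyond truncationOffset remain readable after the truncate call. The readFromStorage helper below is hypothetical, standing in for whatever read path a given test exercises against the truncated segment.
// Hypothetical usage; readFromStorage is a stand-in, not a real Pravega API.
byte[] expected = populate(1024, 256, context);
byte[] readBack = readFromStorage(context, 256, expected.length - 256);
// Only data at or after the truncation offset should still be readable.
Assert.assertArrayEquals(Arrays.copyOfRange(expected, 256, expected.length), readBack);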
use of org.junit.rules.Timeout in project pravega by pravega.
the class ReadOnlySegmentContainerTests method testGetStreamSegmentInfo.
/**
* Tests the getStreamSegmentInfo() method.
*/
@Test
public void testGetStreamSegmentInfo() {
@Cleanup val context = new TestContext();
context.container.startAsync().awaitRunning();
// Non-existent segment.
AssertExtensions.assertSuppliedFutureThrows("Unexpected exception when the segment does not exist.",
        () -> context.container.getStreamSegmentInfo(SEGMENT_NAME, TIMEOUT),
        ex -> ex instanceof StreamSegmentNotExistsException);
// Create a segment, add some data, set some attributes, "truncate" it and then seal it.
val storageInfo = context.storage.create(SEGMENT_NAME, TIMEOUT)
        .thenCompose(handle -> context.storage.write(handle, 0, new ByteArrayInputStream(new byte[10]), 10, TIMEOUT))
        .thenCompose(v -> context.storage.getStreamSegmentInfo(SEGMENT_NAME, TIMEOUT))
        .join();
val expectedInfo = StreamSegmentInformation.from(storageInfo)
        .startOffset(storageInfo.getLength() / 2)
        .attributes(ImmutableMap.of(AttributeId.randomUUID(), 100L, Attributes.EVENT_COUNT, 1L))
        .build();
// Fetch the SegmentInfo from the ReadOnlyContainer and verify it is as expected.
val actual = context.container.getStreamSegmentInfo(SEGMENT_NAME, TIMEOUT).join();
Assert.assertEquals("Unexpected Name.", expectedInfo.getName(), actual.getName());
Assert.assertEquals("Unexpected Length.", expectedInfo.getLength(), actual.getLength());
Assert.assertEquals("Unexpected Sealed status.", expectedInfo.isSealed(), actual.isSealed());
}
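The assertSuppliedFutureThrows helper above hides the future-unwrapping boilerplate. As a rough equivalent (not part of the original test), the non-existent-segment check could also be written with plain JUnit and java.util.concurrent types:
// Sketch of the same check without AssertExtensions.
try {
    context.container.getStreamSegmentInfo(SEGMENT_NAME, TIMEOUT).join();
    Assert.fail("Expected StreamSegmentNotExistsException for a missing segment.");
} catch (CompletionException ex) {
    Assert.assertTrue("Unexpected exception type.", ex.getCause() instanceof StreamSegmentNotExistsException);
}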
use of org.junit.rules.Timeout in project pravega by pravega.
the class ContainerEventProcessorTests method testReadWithFailingSegment.
/**
* Test the behavior of the EventProcessor when internal Segment reads fail.
*
* @throws Exception if an error occurs while running the test.
*/
@Test(timeout = 10000)
public void testReadWithFailingSegment() throws Exception {
DirectSegmentAccess faultySegment = spy(new SegmentMock(this.executorService()));
Function<String, CompletableFuture<DirectSegmentAccess>> faultySegmentSupplier = s -> CompletableFuture.completedFuture(faultySegment);
@Cleanup ContainerEventProcessor eventProcessorService = new ContainerEventProcessorImpl(0, faultySegmentSupplier,
        ITERATION_DELAY, CONTAINER_OPERATION_TIMEOUT, this.executorService());
int maxItemsProcessed = 10;
int maxOutstandingBytes = 4 * 1024 * 1024;
int truncationDataSize = 500;
ContainerEventProcessor.EventProcessorConfig config = new ContainerEventProcessor.EventProcessorConfig(maxItemsProcessed, maxOutstandingBytes, truncationDataSize);
ReusableLatch latch = new ReusableLatch();
Function<List<BufferView>, CompletableFuture<Void>> doNothing = l -> {
latch.release();
return CompletableFuture.completedFuture(null);
};
// Make the internal Segment of the processor fail upon a read.
when(faultySegment.read(anyLong(), anyInt(), any(Duration.class)))
        .thenThrow(IntentionalException.class)
        .thenCallRealMethod();
@Cleanup ContainerEventProcessor.EventProcessor processor = eventProcessorService
        .forConsumer("testSegmentMax", doNothing, config)
        .get(TIMEOUT_FUTURE.toSeconds(), TimeUnit.SECONDS);
// Write an event to make sure that the processor is running, then wait for it to be processed.
BufferView event = new ByteArraySegment("Test".getBytes());
processor.add(event, TIMEOUT_FUTURE).join();
latch.await();
}
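The stubbing above makes the first read throw an IntentionalException and subsequent reads call through to the real SegmentMock, so the event can only be processed if the EventProcessor retries after the failure. A sketch of an optional follow-up assertion, assuming Mockito's verify/atLeast on the spied segment:
// Not in the original test: confirm the read was attempted again after the injected failure.
verify(faultySegment, atLeast(2)).read(anyLong(), anyInt(), any(Duration.class));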
use of org.junit.rules.Timeout in project pravega by pravega.
the class StreamSegmentContainerTests method testExtensions.
/**
* Tests the ability to register extensions.
*/
@Test
public void testExtensions() throws Exception {
String segmentName = getSegmentName(123);
ByteArraySegment data = getAppendData(segmentName, 0);
// Configure extension.
val operationProcessed = new CompletableFuture<SegmentOperation>();
AtomicInteger count = new AtomicInteger();
val writerProcessor = new TestWriterProcessor(op -> {
if (op.getStreamSegmentId() != EXPECTED_METADATA_SEGMENT_ID) {
// We need to exclude any appends that come from the MetadataStore as those do not concern us.
count.incrementAndGet();
if (!operationProcessed.isDone()) {
operationProcessed.complete(op);
}
}
});
val extension = new AtomicReference<TestSegmentContainerExtension>();
SegmentContainerFactory.CreateExtensions additionalExtensions = (container, executor) -> {
Assert.assertTrue("Already created", extension.compareAndSet(null, new TestSegmentContainerExtension(Collections.singleton(writerProcessor))));
return Collections.singletonMap(TestSegmentContainerExtension.class, extension.get());
};
@Cleanup val context = new TestContext(DEFAULT_CONFIG, additionalExtensions);
context.container.startAsync().awaitRunning();
// Verify getExtension().
val p = context.container.getExtension(TestSegmentContainerExtension.class);
Assert.assertEquals("Unexpected result from getExtension().", extension.get(), p);
// Verify Writer Segment Processors are properly wired in.
context.container.createStreamSegment(segmentName, getSegmentType(segmentName), null, TIMEOUT).join();
context.container.append(segmentName, data, null, TIMEOUT).join();
val rawOp = operationProcessed.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
Assert.assertTrue("Unexpected operation type.", rawOp instanceof CachedStreamSegmentAppendOperation);
// Our operation has been transformed into a CachedStreamSegmentAppendOperation, which means it just points to
// a location in the cache. We do not have access to that cache, so we can only verify its metadata.
val appendOp = (CachedStreamSegmentAppendOperation) rawOp;
Assert.assertEquals("Unexpected offset.", 0, appendOp.getStreamSegmentOffset());
Assert.assertEquals("Unexpected data length.", data.getLength(), appendOp.getLength());
Assert.assertNull("Unexpected attribute updates.", appendOp.getAttributeUpdates());
// Verify extension is closed when the SegmentContainer is closed.
context.container.close();
Assert.assertTrue("Extension not closed.", extension.get().closed.get());
}
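TestSegmentContainerExtension itself is not shown in this excerpt. Below is a minimal sketch of its likely shape, assuming the SegmentContainerExtension interface exposes createWriterSegmentProcessors and close (everything other than the closed flag and close() is an assumption here): it hands the supplied writer processors to the Writer and records close(), which is what the final assertion checks.
// Hypothetical shape of the test extension used above.
static class TestSegmentContainerExtension implements SegmentContainerExtension {
    final AtomicBoolean closed = new AtomicBoolean();
    private final Collection<WriterSegmentProcessor> writerProcessors;

    TestSegmentContainerExtension(Collection<WriterSegmentProcessor> writerProcessors) {
        this.writerProcessors = writerProcessors;
    }

    @Override
    public Collection<WriterSegmentProcessor> createWriterSegmentProcessors(UpdateableSegmentMetadata metadata) {
        // Wires the test's TestWriterProcessor into the container's Writer pipeline.
        return this.writerProcessors;
    }

    @Override
    public void close() {
        // Recorded so the test can assert that closing the container closes its extensions.
        this.closed.set(true);
    }
}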
use of org.junit.rules.Timeout in project pravega by pravega.
the class StreamSegmentContainerTests method testSegmentRegularOperations.
/**
* Tests the createSegment, append, updateAttributes, read, getSegmentInfo, getActiveSegments.
*/
@Test
public void testSegmentRegularOperations() throws Exception {
final AttributeId attributeAccumulate = AttributeId.randomUUID();
final AttributeId attributeReplace = AttributeId.randomUUID();
final AttributeId attributeReplaceIfGreater = AttributeId.randomUUID();
final AttributeId attributeReplaceIfEquals = AttributeId.randomUUID();
final AttributeId attributeNoUpdate = AttributeId.randomUUID();
final long expectedAttributeValue = APPENDS_PER_SEGMENT + ATTRIBUTE_UPDATES_PER_SEGMENT;
@Cleanup TestContext context = createContext();
context.container.startAsync().awaitRunning();
// 1. Create the StreamSegments.
ArrayList<String> segmentNames = createSegments(context);
checkActiveSegments(context.container, 0);
activateAllSegments(segmentNames, context);
checkActiveSegments(context.container, segmentNames.size());
// 2. Add some appends.
ArrayList<CompletableFuture<Void>> opFutures = new ArrayList<>();
ArrayList<RefCountByteArraySegment> appends = new ArrayList<>();
HashMap<String, Long> lengths = new HashMap<>();
HashMap<String, ByteArrayOutputStream> segmentContents = new HashMap<>();
for (int i = 0; i < APPENDS_PER_SEGMENT; i++) {
for (String segmentName : segmentNames) {
val attributeUpdates = new AttributeUpdateCollection();
attributeUpdates.add(new AttributeUpdate(attributeAccumulate, AttributeUpdateType.Accumulate, 1));
attributeUpdates.add(new AttributeUpdate(attributeReplace, AttributeUpdateType.Replace, i + 1));
attributeUpdates.add(new AttributeUpdate(attributeReplaceIfGreater, AttributeUpdateType.ReplaceIfGreater, i + 1));
attributeUpdates.add(new AttributeUpdate(attributeReplaceIfEquals, i == 0 ? AttributeUpdateType.Replace : AttributeUpdateType.ReplaceIfEquals, i + 1, i));
RefCountByteArraySegment appendData = getAppendData(segmentName, i);
long expectedLength = lengths.getOrDefault(segmentName, 0L) + appendData.getLength();
val append = (i % 2 == 0)
        ? context.container.append(segmentName, appendData, attributeUpdates, TIMEOUT)
        : context.container.append(segmentName, lengths.get(segmentName), appendData, attributeUpdates, TIMEOUT);
opFutures.add(append.thenApply(length -> {
assertEquals(expectedLength, length.longValue());
return null;
}));
lengths.put(segmentName, expectedLength);
recordAppend(segmentName, appendData, segmentContents, appends);
}
}
// 2.1 Update some of the attributes.
for (String segmentName : segmentNames) {
// Record a one-off update.
opFutures.add(context.container.updateAttributes(segmentName,
        AttributeUpdateCollection.from(new AttributeUpdate(attributeNoUpdate, AttributeUpdateType.None, expectedAttributeValue)), TIMEOUT));
for (int i = 0; i < ATTRIBUTE_UPDATES_PER_SEGMENT; i++) {
val attributeUpdates = new AttributeUpdateCollection();
attributeUpdates.add(new AttributeUpdate(attributeAccumulate, AttributeUpdateType.Accumulate, 1));
attributeUpdates.add(new AttributeUpdate(attributeReplace, AttributeUpdateType.Replace, APPENDS_PER_SEGMENT + i + 1));
attributeUpdates.add(new AttributeUpdate(attributeReplaceIfGreater, AttributeUpdateType.ReplaceIfGreater, APPENDS_PER_SEGMENT + i + 1));
attributeUpdates.add(new AttributeUpdate(attributeReplaceIfEquals, AttributeUpdateType.ReplaceIfEquals, APPENDS_PER_SEGMENT + i + 1, APPENDS_PER_SEGMENT + i));
opFutures.add(context.container.updateAttributes(segmentName, attributeUpdates, TIMEOUT));
}
}
Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
// 3. getSegmentInfo
for (String segmentName : segmentNames) {
SegmentProperties sp = context.container.getStreamSegmentInfo(segmentName, TIMEOUT).join();
long expectedLength = lengths.get(segmentName);
Assert.assertEquals("Unexpected StartOffset for non-truncated segment " + segmentName, 0, sp.getStartOffset());
Assert.assertEquals("Unexpected length for segment " + segmentName, expectedLength, sp.getLength());
Assert.assertFalse("Unexpected value for isDeleted for segment " + segmentName, sp.isDeleted());
Assert.assertFalse("Unexpected value for isSealed for segment " + segmentName, sp.isDeleted());
// Verify all attribute values.
Assert.assertEquals("Unexpected value for attribute " + attributeAccumulate + " for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeNoUpdate, Attributes.NULL_ATTRIBUTE_VALUE));
Assert.assertEquals("Unexpected value for attribute " + attributeAccumulate + " for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeAccumulate, Attributes.NULL_ATTRIBUTE_VALUE));
Assert.assertEquals("Unexpected value for attribute " + attributeReplace + " for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeReplace, Attributes.NULL_ATTRIBUTE_VALUE));
Assert.assertEquals("Unexpected value for attribute " + attributeReplaceIfGreater + " for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeReplaceIfGreater, Attributes.NULL_ATTRIBUTE_VALUE));
Assert.assertEquals("Unexpected value for attribute " + attributeReplaceIfEquals + " for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeReplaceIfEquals, Attributes.NULL_ATTRIBUTE_VALUE));
val expectedType = getSegmentType(segmentName);
val actualType = SegmentType.fromAttributes(sp.getAttributes());
Assert.assertEquals("Unexpected Segment Type.", expectedType, actualType);
}
checkActiveSegments(context.container, segmentNames.size());
// 4. Reads (regular reads, not tail reads).
checkReadIndex(segmentContents, lengths, context);
// 4.1. After we ensured that all data has been ingested and processed, verify that all data buffers have been released.
checkAppendLeaks(appends);
// 5. Writer moving data to Storage.
waitForSegmentsInStorage(segmentNames, context).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
checkStorage(segmentContents, lengths, context);
context.container.stopAsync().awaitTerminated();
}
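To make the final attribute assertions easier to follow, here is a small self-contained sketch (plain Java, illustrative constants only) of why every update type ends up at APPENDS_PER_SEGMENT + ATTRIBUTE_UPDATES_PER_SEGMENT:
// Illustrative only: mirrors the append/update sequence above with plain longs.
int appendsPerSegment = 3;
int attributeUpdatesPerSegment = 2;
long accumulate = 0;
long replace = 0;
for (int i = 0; i < appendsPerSegment + attributeUpdatesPerSegment; i++) {
    accumulate += 1;  // Accumulate adds 1 on every append and every explicit update.
    replace = i + 1;  // Replace/ReplaceIfGreater/ReplaceIfEquals keep the last value written.
}
// Both converge to appendsPerSegment + attributeUpdatesPerSegment, the test's expectedAttributeValue.
assert accumulate == appendsPerSegment + attributeUpdatesPerSegment;
assert replace == appendsPerSegment + attributeUpdatesPerSegment;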