Use of io.pravega.common.util.BufferView in project pravega by pravega.
Class TableBucketReader, method findAllExisting.
// endregion

// region Searching

/**
 * Locates all {@link ResultT} instances in a TableBucket.
 *
 * @param bucketOffset The current segment offset of the Table Bucket we are looking into.
 * @param timer        A {@link TimeoutTimer} for the operation.
 * @return A CompletableFuture that, when completed, will contain a List with the desired result items. This list
 * will exclude all {@link ResultT} items that are marked as deleted.
 */
CompletableFuture<List<ResultT>> findAllExisting(long bucketOffset, TimeoutTimer timer) {
    val result = new HashMap<BufferView, ResultT>();

    // This handler ensures that items are only added once (per key) and only if they are not deleted. Since the
    // items are processed in descending version order, the first occurrence of a key carries its latest value.
    BiConsumer<ResultT, Long> handler = (item, offset) -> {
        TableKey key = getKey(item);
        if (!result.containsKey(key.getKey())) {
            result.put(key.getKey(), key.getVersion() == TableKey.NOT_EXISTS ? null : item);
        }
    };
    return findAll(bucketOffset, handler, timer)
            .thenApply(v -> result.values().stream().filter(Objects::nonNull).collect(Collectors.toList()));
}
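The deduplication above uses the raw key bytes (key.getKey(), a BufferView) as a HashMap key, which only works because BufferView implementations compare by content rather than by reference (in Pravega, AbstractBufferView supplies content-based equals and hashCode). A minimal standalone sketch of that assumption, with an invented demo class; ByteArraySegment is the simplest BufferView implementation:

import io.pravega.common.util.BufferView;
import io.pravega.common.util.ByteArraySegment;

import java.util.HashMap;
import java.util.Map;

public class BufferViewKeyDemo {
    public static void main(String[] args) {
        Map<BufferView, String> byKey = new HashMap<>();

        // Two independently allocated buffers with the same contents.
        BufferView k1 = new ByteArraySegment("key-1".getBytes());
        BufferView k2 = new ByteArraySegment("key-1".getBytes());

        byKey.put(k1, "latest value");

        // Deduplication as in findAllExisting: a later occurrence of the same
        // key bytes is skipped because the map already contains that key.
        if (!byKey.containsKey(k2)) {
            byKey.put(k2, "older value");
        }
        System.out.println(byKey.get(k2)); // prints "latest value"
    }
}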
Use of io.pravega.common.util.BufferView in project pravega by pravega.
Class TableEntryDeltaIterator, method parseEntries.
@SneakyThrows(IOException.class)
private List<Map.Entry<DeltaIteratorState, TableEntry>> parseEntries(BufferView data, long startOffset, int readLength) {
    long currentOffset = startOffset;
    final long maxOffset = startOffset + readLength;
    BufferView.Reader input = data.getBufferViewReader();
    List<Map.Entry<DeltaIteratorState, TableEntry>> entries = new ArrayList<>();
    try {
        while (currentOffset < maxOffset) {
            val entry = AsyncTableEntryReader.readEntryComponents(input, currentOffset, this.entrySerializer);
            boolean reachedEnd = currentOffset + entry.getHeader().getTotalLength() >= this.maxBytesToRead + startOffset;

            // We must preserve deletions to accurately construct a delta.
            BufferView value = entry.getValue() == null ? BufferView.empty() : entry.getValue();
            currentOffset += entry.getHeader().getTotalLength();
            entries.add(new AbstractMap.SimpleEntry<>(
                    new DeltaIteratorState(currentOffset, reachedEnd, this.shouldClear, entry.getHeader().isDeletion()),
                    TableEntry.versioned(entry.getKey(), value, entry.getVersion())));
        }
    } catch (BufferView.Reader.OutOfBoundsException ex) {
        // Handles the case where our computed maxOffset lies within (but not on the boundary of) a TableEntry, or
        // reaches the end of the TableSegment. Silently handling this exception is sufficient because it acknowledges
        // that we have processed the maximal set of TableEntries, and thus it is safe to return.
    }
    this.currentBatchOffset = currentOffset;
    return entries;
}
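parseEntries drains a BufferView.Reader and treats BufferView.Reader.OutOfBoundsException as the normal signal that the remaining bytes cannot form a complete record. A minimal sketch of the same pattern in isolation; the record layout (bare 4-byte ints) and the demo class are invented for illustration:

import io.pravega.common.util.BufferView;
import io.pravega.common.util.ByteArraySegment;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

public class ReaderDrainDemo {
    public static void main(String[] args) {
        // Encode three ints back-to-back, plus one trailing byte to force a partial record.
        ByteBuffer raw = ByteBuffer.allocate(3 * Integer.BYTES + 1);
        raw.putInt(1).putInt(2).putInt(3).put((byte) 9);
        BufferView data = new ByteArraySegment(raw.array());

        BufferView.Reader input = data.getBufferViewReader();
        List<Integer> values = new ArrayList<>();
        try {
            while (true) {
                values.add(input.readInt());
            }
        } catch (BufferView.Reader.OutOfBoundsException ex) {
            // Expected: the trailing byte cannot form a whole int, so we stop,
            // just as parseEntries stops on a partial TableEntry.
        }
        System.out.println(values); // [1, 2, 3]
    }
}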
Use of io.pravega.common.util.BufferView in project pravega by pravega.
Class TableCompactor, method copyCandidates.
/**
 * Copies the {@link Candidate}s in the given {@link CompactionArgs} set to a contiguous block at the end of the Segment.
 *
 * @param args  A {@link CompactionArgs} containing the {@link Candidate}s to copy.
 * @param timer Timer for the operation.
 * @return A CompletableFuture that, when completed, indicates that the candidates have been copied.
 */
private CompletableFuture<Void> copyCandidates(CompactionArgs args, TimeoutTimer timer) {
    val attributes = generateAttributeUpdates(args);

    // Collect all the candidates for copying and calculate the total serialization length.
    val toWrite = new ArrayList<TableEntry>();
    val totalLength = new AtomicInteger(0);
    args.getAll().stream()
            .sorted(Comparator.comparingLong(c -> c.entry.getKey().getVersion()))
            .forEach(c -> {
                toWrite.add(c.entry);
                generateIndexUpdates(c, totalLength.get(), attributes);
                totalLength.addAndGet(SERIALIZER.getUpdateLength(c.entry));
            });

    // Generate the necessary AttributeUpdates that will need to be applied regardless of whether we copy anything or not.
    CompletableFuture<?> result;
    if (totalLength.get() == 0) {
        // Nothing to do; update the necessary segment attributes.
        assert toWrite.size() == 0;
        result = this.segment.updateAttributes(attributes, timer.getRemaining());
    } else {
        // Perform a Segment Append with re-serialized entries (explicit versions), and atomically update the necessary
        // segment attributes.
        BufferView appendData = SERIALIZER.serializeUpdateWithExplicitVersion(toWrite);
        result = this.segment.append(appendData, attributes, timer.getRemaining());
        log.debug("{}: Compacting {}, CopyCount={}, CopyLength={}.", this.traceLogId, args, toWrite.size(), totalLength);
    }
    return Futures.toVoid(result);
}
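copyCandidates serializes all surviving entries into a single BufferView so the copy lands in one atomic append. When a payload must be assembled from several pre-serialized pieces, BufferView.wrap(List) can concatenate them into a composite view without copying; a hedged sketch, assuming wrap(), getLength(), and getCopy() behave as their javadoc describes (the fragment contents and demo class are invented):

import io.pravega.common.util.BufferView;
import io.pravega.common.util.ByteArraySegment;

import java.util.Arrays;

public class ComposeAppendDemo {
    public static void main(String[] args) {
        // Pre-serialized fragments, standing in for per-entry SERIALIZER output.
        BufferView e1 = new ByteArraySegment("entry-1".getBytes());
        BufferView e2 = new ByteArraySegment("entry-2".getBytes());
        BufferView e3 = new ByteArraySegment("entry-3".getBytes());

        // Wrap the fragments into a single composite view; the bytes are not
        // copied until the composite is actually consumed.
        BufferView appendData = BufferView.wrap(Arrays.asList(e1, e2, e3));

        System.out.println(appendData.getLength());           // 21
        System.out.println(new String(appendData.getCopy())); // entry-1entry-2entry-3
    }
}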
Use of io.pravega.common.util.BufferView in project pravega by pravega.
Class StreamSegmentContainerTests, method testEventProcessorDurableQueueAndSwitchToConsumer.
/**
 * Tests the EventProcessor in durable queue mode (no handler). Then closes it and recreates another one on the same
 * internal Segment (same name) that actually consumes the events stored previously.
 *
 * @throws Exception If an error occurred.
 */
@Test(timeout = 10000)
public void testEventProcessorDurableQueueAndSwitchToConsumer() throws Exception {
    @Cleanup
    TestContext context = createContext();
    val container = (StreamSegmentContainer) context.container;
    container.startAsync().awaitRunning();
    int allEventsToProcess = 100;
    @Cleanup
    ContainerEventProcessorImpl containerEventProcessor = new ContainerEventProcessorImpl(container,
            container.metadataStore, TIMEOUT_EVENT_PROCESSOR_ITERATION, TIMEOUT_EVENT_PROCESSOR_ITERATION, this.executorService());
    ContainerEventProcessor.EventProcessor processor = containerEventProcessor
            .forDurableQueue("testDurableQueue")
            .get(TIMEOUT_FUTURE.toSeconds(), TimeUnit.SECONDS);

    // At this point, we can only add events, but not consume them, as the EventProcessor works in durable queue mode.
    for (int i = 0; i < allEventsToProcess; i++) {
        BufferView event = new ByteArraySegment(ByteBuffer.allocate(Integer.BYTES).putInt(i).array());
        processor.add(event, TIMEOUT_FUTURE).join();
    }
    Assert.assertEquals("Event processor object not matching", processor,
            containerEventProcessor.forDurableQueue("testDurableQueue").get(TIMEOUT_FUTURE.toSeconds(), TimeUnit.SECONDS));

    // Close the processor and unregister it.
    processor.close();

    // Make sure that the EventProcessor eventually terminates.
    ((ContainerEventProcessorImpl.EventProcessorImpl) processor).awaitTerminated();

    // Now, re-create the EventProcessor with a handler to consume the events.
    ContainerEventProcessor.EventProcessorConfig eventProcessorConfig = new ContainerEventProcessor.EventProcessorConfig(
            EVENT_PROCESSOR_EVENTS_AT_ONCE, EVENT_PROCESSOR_MAX_OUTSTANDING_BYTES, EVENT_PROCESSOR_TRUNCATE_SIZE_BYTES);
    List<Integer> processorResults = new ArrayList<>();
    Function<List<BufferView>, CompletableFuture<Void>> handler = l -> {
        l.forEach(b -> {
            try {
                processorResults.add(ByteBuffer.wrap(b.getReader().readNBytes(Integer.BYTES)).getInt());
            } catch (IOException e) {
                throw new CompletionException(e);
            }
        });
        return CompletableFuture.completedFuture(null);
    };
    processor = containerEventProcessor.forConsumer("testDurableQueue", handler, eventProcessorConfig)
            .get(TIMEOUT_FUTURE.toSeconds(), TimeUnit.SECONDS);

    // Wait for all items to be processed.
    AssertExtensions.assertEventuallyEquals(true, () -> processorResults.size() == allEventsToProcess, 10000);
    Assert.assertArrayEquals(processorResults.toArray(), IntStream.iterate(0, v -> v + 1).limit(allEventsToProcess).boxed().toArray());

    // Just check the failure callback.
    ((ContainerEventProcessorImpl.EventProcessorImpl) processor).failureCallback(new IntentionalException());

    // Close the processor and unregister it.
    processor.close();

    // Make sure that the EventProcessor eventually terminates.
    ((ContainerEventProcessorImpl.EventProcessorImpl) processor).awaitTerminated();
}
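The test round-trips each event through a BufferView: an int is packed into a byte[] with ByteBuffer, wrapped in a ByteArraySegment, and later decoded from the view's InputStream (BufferView.getReader()). The encode/decode pair in isolation, as a standalone sketch mirroring the add and consume steps of the test above (demo class invented):

import io.pravega.common.util.BufferView;
import io.pravega.common.util.ByteArraySegment;

import java.io.IOException;
import java.nio.ByteBuffer;

public class EventRoundTripDemo {
    public static void main(String[] args) throws IOException {
        int payload = 42;

        // Encode: int -> byte[] -> BufferView, as done when adding events.
        BufferView event = new ByteArraySegment(ByteBuffer.allocate(Integer.BYTES).putInt(payload).array());

        // Decode: BufferView -> InputStream -> int, as done in the consumer handler.
        int decoded = ByteBuffer.wrap(event.getReader().readNBytes(Integer.BYTES)).getInt();

        System.out.println(decoded); // 42
    }
}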
Use of io.pravega.common.util.BufferView in project pravega by pravega.
Class ContainerEventProcessorTests, method testAppendWithFailingSegment.
/**
 * Checks the behavior of the EventProcessor when there are failures while adding events to the internal Segment.
 *
 * @throws Exception If an error occurred.
 */
@Test(timeout = 10000)
public void testAppendWithFailingSegment() throws Exception {
    DirectSegmentAccess faultySegment = mock(SegmentMock.class);
    when(faultySegment.append(any(), any(), any())).thenThrow(NullPointerException.class);
    SegmentMetadata mockMetadata = mock(SegmentMetadata.class);
    when(mockMetadata.getLength()).thenReturn(0L);
    when(faultySegment.getInfo()).thenReturn(mockMetadata);
    Function<String, CompletableFuture<DirectSegmentAccess>> faultySegmentSupplier = s -> CompletableFuture.completedFuture(faultySegment);
    @Cleanup
    ContainerEventProcessor eventProcessorService = new ContainerEventProcessorImpl(0, faultySegmentSupplier,
            ITERATION_DELAY, CONTAINER_OPERATION_TIMEOUT, this.executorService());
    int maxItemsProcessed = 10;
    int maxOutstandingBytes = 4 * 1024 * 1024;
    int truncationDataSize = 500;
    ContainerEventProcessor.EventProcessorConfig config = new ContainerEventProcessor.EventProcessorConfig(
            maxItemsProcessed, maxOutstandingBytes, truncationDataSize);
    Function<List<BufferView>, CompletableFuture<Void>> doNothing = l -> null;
    @Cleanup
    ContainerEventProcessor.EventProcessor processor = eventProcessorService
            .forConsumer("testSegmentMax", doNothing, config)
            .get(TIMEOUT_FUTURE.toSeconds(), TimeUnit.SECONDS);

    // Verify that the client gets the exception if there is some issue on add().
    BufferView event = new ByteArraySegment("Test".getBytes());
    AssertExtensions.assertThrows(NullPointerException.class, () -> processor.add(event, TIMEOUT_FUTURE).join());
}
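The failing-append test builds its payload by wrapping a String's bytes in a ByteArraySegment. For orientation, a sketch of the basic accessors such a payload exposes, assuming getLength(), getCopy(), and slice(offset, length) behave as in BufferView's javadoc (demo class invented):

import io.pravega.common.util.BufferView;
import io.pravega.common.util.ByteArraySegment;

public class PayloadBasicsDemo {
    public static void main(String[] args) {
        BufferView event = new ByteArraySegment("Test".getBytes());

        System.out.println(event.getLength());            // 4
        System.out.println(new String(event.getCopy()));  // Test

        // A slice is a view over the same bytes, not a copy.
        BufferView prefix = event.slice(0, 2);
        System.out.println(new String(prefix.getCopy())); // Te
    }
}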