Use of java.util.concurrent.CompletionException in project pravega by pravega.
From the class StreamSegmentMapperTests, method testGetStreamSegmentInfo.
/**
* Tests GetStreamSegmentInfo with various scenarios.
*/
@Test
public void testGetStreamSegmentInfo() {
    final String segmentName = "segment";
    final long segmentId = 1;
    @Cleanup TestContext context = new TestContext();
    HashSet<String> storageSegments = new HashSet<>();

    // Segment does not exist in Metadata or Storage.
    setupStorageGetHandler(context, storageSegments, sn -> {
        throw new CompletionException(new StreamSegmentNotExistsException(sn));
    });
    setSavedState(segmentName, segmentId, 0, ATTRIBUTE_COUNT, context);
    val segmentState = context.stateStore.get(segmentName, TIMEOUT).join();
    Map<UUID, Long> expectedAttributes = segmentState == null ? null : segmentState.getAttributes();
    AssertExtensions.assertThrows(
            "getStreamSegmentInfo did not throw correct exception when segment does not exist in Metadata or Storage.",
            () -> context.mapper.getStreamSegmentInfo(segmentName, TIMEOUT),
            ex -> ex instanceof StreamSegmentNotExistsException);

    // Segment does not exist in Metadata, but does so in Storage.
    // Since we do not set up an OperationLog, we guarantee that there is no attempt to map this in the metadata.
    val segmentInfo = StreamSegmentInformation.builder().name(segmentName).length(123).sealed(true).build();
    storageSegments.add(segmentName);
    setupStorageGetHandler(context, storageSegments, sn -> segmentInfo);
    val inStorageInfo = context.mapper.getStreamSegmentInfo(segmentName, TIMEOUT).join();
    assertEquals("Unexpected SegmentInfo when Segment exists in Storage.", segmentInfo, inStorageInfo);
    SegmentMetadataComparer.assertSameAttributes("Unexpected attributes when Segment exists in Storage", expectedAttributes, inStorageInfo);
    Assert.assertEquals("Not expecting any segments to be mapped.", 0, context.metadata.getAllStreamSegmentIds().size());

    // Segment exists in Metadata (and in Storage too) - here, we set different values in the Metadata to verify that
    // the info is fetched from there.
    val sm = context.metadata.mapStreamSegmentId(segmentName, segmentId);
    sm.setLength(segmentInfo.getLength() + 1);
    sm.updateAttributes(Collections.singletonMap(UUID.randomUUID(), 12345L));
    val inMetadataInfo = context.mapper.getStreamSegmentInfo(segmentName, TIMEOUT).join();
    assertEquals("Unexpected SegmentInfo when Segment exists in Metadata.", sm, inMetadataInfo);
    SegmentMetadataComparer.assertSameAttributes("Unexpected attributes when Segment exists in Metadata.", sm.getAttributes(), inMetadataInfo);

    // Segment exists in Metadata, but is marked as deleted.
    sm.markDeleted();
    AssertExtensions.assertThrows(
            "getStreamSegmentInfo did not throw correct exception when segment is marked as Deleted in metadata.",
            () -> context.mapper.getStreamSegmentInfo(segmentName, TIMEOUT),
            ex -> ex instanceof StreamSegmentNotExistsException);
}
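The storage handler above throws CompletionException around the checked StreamSegmentNotExistsException because a lambda handed to the CompletableFuture machinery cannot declare checked exceptions. A minimal JDK-only sketch of the same pattern (the storage map, segment names, and FileNotFoundException below are illustrative stand-ins, not Pravega code):

import java.io.FileNotFoundException;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;

public class WrapCheckedExceptionSketch {
    public static void main(String[] args) {
        Map<String, Long> storage = Map.of("segment", 123L);

        // A lambda passed to supplyAsync cannot declare checked exceptions, so the
        // checked cause is wrapped in the unchecked CompletionException instead.
        CompletableFuture<Long> length = CompletableFuture.supplyAsync(() -> {
            Long len = storage.get("missing-segment");
            if (len == null) {
                throw new CompletionException(new FileNotFoundException("missing-segment"));
            }
            return len;
        });

        // join() rethrows a CompletionException whose cause is the original checked exception.
        try {
            length.join();
        } catch (CompletionException ex) {
            System.out.println("cause: " + ex.getCause());
        }
    }
}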
Use of java.util.concurrent.CompletionException in project pravega by pravega.
From the class DurableLogTests, method testAddWithDataLogWriterNotPrimaryException.
/**
* Tests the ability of the DurableLog to handle a DataLogWriterNotPrimaryException.
*/
@Test
public void testAddWithDataLogWriterNotPrimaryException() throws Exception {
    int streamSegmentCount = 1;
    int appendsPerStreamSegment = 1;

    // Set up a DurableLog and start it.
    @Cleanup ContainerSetup setup = new ContainerSetup(executorService());
    @Cleanup DurableLog durableLog = setup.createDurableLog();
    durableLog.startAsync().awaitRunning();
    HashSet<Long> streamSegmentIds = createStreamSegmentsInMetadata(streamSegmentCount, setup.metadata);
    List<Operation> operations = generateOperations(streamSegmentIds, new HashMap<>(), appendsPerStreamSegment, METADATA_CHECKPOINT_EVERY, false, false);
    ErrorInjector<Exception> aSyncErrorInjector = new ErrorInjector<>(
            count -> true,
            () -> new CompletionException(new DataLogWriterNotPrimaryException("intentional")));
    setup.dataLog.get().setAppendErrorInjectors(null, aSyncErrorInjector);

    // Process all generated operations.
    List<OperationWithCompletion> completionFutures = processOperations(operations, durableLog);

    // Wait for all such operations to complete. We are expecting exceptions, so verify that we do.
    AssertExtensions.assertThrows(
            "No operations failed.",
            OperationWithCompletion.allOf(completionFutures)::join,
            ex -> ex instanceof IOException || ex instanceof DataLogWriterNotPrimaryException);

    // Verify that the OperationProcessor automatically shuts down and that it has the right failure cause.
    ServiceListeners.awaitShutdown(durableLog, TIMEOUT, false);
    Assert.assertEquals("DurableLog is not in a failed state after fence-out detected.", Service.State.FAILED, durableLog.state());
    Assert.assertTrue("DurableLog did not fail with the correct exception.", Exceptions.unwrap(durableLog.failureCause()) instanceof DataLogWriterNotPrimaryException);
}
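The injected CompletionException wraps the DataLogWriterNotPrimaryException, and the test recovers the root cause with Exceptions.unwrap before asserting on it. The sketch below shows the idea with JDK types only; the unwrap helper is a simplified stand-in for Pravega's Exceptions.unwrap, and IllegalStateException stands in for the injected exception:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;

public class UnwrapCauseSketch {
    // Simplified stand-in for an unwrap helper: strip CompletionException and
    // ExecutionException wrappers until the root cause is reached.
    static Throwable unwrap(Throwable ex) {
        while ((ex instanceof CompletionException || ex instanceof ExecutionException)
                && ex.getCause() != null) {
            ex = ex.getCause();
        }
        return ex;
    }

    public static void main(String[] args) {
        // Simulate an injected asynchronous failure, as the error injector above does.
        CompletableFuture<Void> appendResult = CompletableFuture.runAsync(() -> {
            throw new CompletionException(new IllegalStateException("intentional"));
        });

        try {
            appendResult.join();
        } catch (CompletionException ex) {
            // Prints "intentional": the injected cause survives the CompletionException wrapping.
            System.out.println(unwrap(ex).getMessage());
        }
    }
}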
Use of java.util.concurrent.CompletionException in project pravega by pravega.
From the class StreamSegmentContainer, method mergeTransaction.
@Override
public CompletableFuture<Void> mergeTransaction(String transactionName, Duration timeout) {
    ensureRunning();
    logRequest("mergeTransaction", transactionName);
    this.metrics.mergeTxn();
    TimeoutTimer timer = new TimeoutTimer(timeout);
    return this.segmentMapper
            .getOrAssignStreamSegmentId(transactionName, timer.getRemaining(), transactionId -> {
                SegmentMetadata transactionMetadata = this.metadata.getStreamSegmentMetadata(transactionId);
                if (transactionMetadata == null) {
                    throw new CompletionException(new StreamSegmentNotExistsException(transactionName));
                }
                Operation op = new MergeTransactionOperation(transactionMetadata.getParentId(), transactionMetadata.getId());
                return this.durableLog.add(op, timer.getRemaining());
            })
            .thenComposeAsync(v -> this.stateStore.remove(transactionName, timer.getRemaining()), this.executor);
}
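Callers of the future returned here typically observe the wrapped StreamSegmentNotExistsException as the cause of a CompletionException. A hypothetical, JDK-only sketch of that caller-side handling (the mergeTransaction stub and NoSuchElementException below are illustrative stand-ins, not the Pravega API):

import java.time.Duration;
import java.util.NoSuchElementException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;

public class CallerSideSketch {
    // Hypothetical stand-in for a mergeTransaction-style call that fails because the
    // transaction segment is unknown; NoSuchElementException plays the role of
    // StreamSegmentNotExistsException here.
    static CompletableFuture<Void> mergeTransaction(String transactionName, Duration timeout) {
        return CompletableFuture.runAsync(() -> {
            throw new CompletionException(new NoSuchElementException(transactionName));
        });
    }

    public static void main(String[] args) {
        mergeTransaction("txn-1", Duration.ofSeconds(30))
                .exceptionally(ex -> {
                    // Dependent stages may see the cause wrapped in CompletionException,
                    // so unwrap defensively before inspecting it.
                    Throwable cause = ex instanceof CompletionException && ex.getCause() != null
                            ? ex.getCause() : ex;
                    System.out.println("merge failed: " + cause);
                    return null;
                })
                .join();
    }
}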
Use of java.util.concurrent.CompletionException in project pravega by pravega.
From the class ClientAdapterBase, method mergeTransaction.
@Override
public CompletableFuture<Void> mergeTransaction(String transactionName, Duration timeout) {
    ensureRunning();
    String parentStream = StreamSegmentNameUtils.getParentStreamSegmentName(transactionName);
    return CompletableFuture.runAsync(() -> {
        try {
            EventStreamWriter<byte[]> writer = getDefaultWriter(parentStream);
            UUID txnId = getTransactionId(transactionName);
            Transaction<byte[]> txn = writer.getTxn(txnId);
            txn.commit();
        } catch (TxnFailedException ex) {
            throw new CompletionException(ex);
        } finally {
            this.transactionIds.remove(transactionName);
        }
    }, this.testExecutor);
}
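Here the checked TxnFailedException raised by the blocking txn.commit() is rethrown as a CompletionException so it becomes the failure cause of the returned future, while the finally block always clears the transaction-id bookkeeping. A rough JDK-only sketch of the same shape (the file read, the inFlight set, and the commit helper are illustrative stand-ins):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ConcurrentHashMap;

public class CommitSketch {
    // Tracks in-flight work, playing the role of the transactionIds map above.
    static final Set<String> inFlight = ConcurrentHashMap.newKeySet();

    static CompletableFuture<Void> commit(String name) {
        inFlight.add(name);
        return CompletableFuture.runAsync(() -> {
            try {
                // A blocking call that declares a checked exception, like txn.commit().
                Files.readAllBytes(Paths.get("/nonexistent/" + name));
            } catch (IOException ex) {
                // Rethrow as CompletionException: the Runnable lambda cannot declare IOException.
                throw new CompletionException(ex);
            } finally {
                // Runs on success and failure alike, so the bookkeeping is always cleaned up.
                inFlight.remove(name);
            }
        });
    }

    public static void main(String[] args) {
        commit("txn-1").handle((v, ex) -> {
            System.out.println("failed with: " + ex);            // CompletionException caused by NoSuchFileException
            System.out.println("still in flight: " + inFlight);  // []
            return null;
        }).join();
    }
}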
Use of java.util.concurrent.CompletionException in project pravega by pravega.
From the class BookKeeperAdapter, method createStream.
@Override
public CompletableFuture<Void> createStream(String logName, Duration timeout) {
    ensureRunning();
    int id;
    synchronized (this.internalIds) {
        if (this.internalIds.containsKey(logName)) {
            return Futures.failedFuture(new StreamSegmentExistsException(logName));
        }
        id = this.internalIds.size();
        this.internalIds.put(logName, id);
    }

    return CompletableFuture.runAsync(() -> {
        DurableDataLog log = null;
        boolean success = false;
        try {
            log = this.logFactory.createDurableDataLog(id);
            this.logs.put(logName, log);
            log.initialize(timeout);
            success = true;
        } catch (DurableDataLogException ex) {
            throw new CompletionException(ex);
        } finally {
            if (!success) {
                this.logs.remove(logName);
                synchronized (this.internalIds) {
                    this.internalIds.remove(logName);
                }
                if (log != null) {
                    log.close();
                }
            }
        }
    }, this.executor);
}
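This adapter fails fast with Futures.failedFuture when the log is already registered, and otherwise wraps the checked DurableDataLogException raised inside the async body, rolling back partial registration in the finally block. A hypothetical JDK-only sketch of that structure (create, initialize, and the logs map below are illustrative, and CompletableFuture.failedFuture plays the role of Futures.failedFuture):

import java.io.IOException;
import java.nio.file.FileAlreadyExistsException;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ConcurrentHashMap;

public class CreateSketch {
    static final Map<String, byte[]> logs = new ConcurrentHashMap<>();

    static CompletableFuture<Void> create(String logName) {
        if (logs.containsKey(logName)) {
            // Fail fast with an already-completed future; no async work is scheduled.
            return CompletableFuture.failedFuture(new FileAlreadyExistsException(logName));
        }
        return CompletableFuture.runAsync(() -> {
            boolean success = false;
            try {
                logs.put(logName, initialize(logName));
                success = true;
            } catch (IOException ex) {
                // Wrap the checked failure so it becomes the future's failure cause.
                throw new CompletionException(ex);
            } finally {
                if (!success) {
                    // Roll back partial registration, mirroring the adapter's finally block.
                    logs.remove(logName);
                }
            }
        });
    }

    static byte[] initialize(String logName) throws IOException {
        throw new IOException("initialization failed for " + logName);
    }

    public static void main(String[] args) {
        try {
            create("log-1").join();
        } catch (CompletionException ex) {
            System.out.println("create failed: " + ex.getCause());
            System.out.println("log registered: " + logs.containsKey("log-1"));  // false
        }
    }
}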