
Example 6 with Timeout

Use of org.junit.rules.Timeout in project evosuite by EvoSuite.

The class Scaffolding, method generateTimeoutRule.

/**
 * Hanging tests have a very high negative impact. They can mess up
 * everything (e.g. when running "mvn test"). As such, we should always have
 * timeouts. Adding timeouts only under certain conditions is too risky.
 *
 * @param bd the StringBuilder that the generated rule declaration is appended to
 */
private void generateTimeoutRule(StringBuilder bd) {
    bd.append(METHOD_SPACE);
    bd.append("@org.junit.Rule \n");
    bd.append(METHOD_SPACE);
    int timeout = Properties.TIMEOUT + 1000;
    bd.append("public " + Timeout.class.getName() + " globalTimeout = new " + Timeout.class.getName() + "(" + timeout);
    boolean useNew = false;
    try {
        // FIXME: this check does not seem to work properly :(
        Class<?> timeoutOfSUTJunit = TestGenerationContext.getInstance().getClassLoaderForSUT().loadClass(Timeout.class.getName());
        Constructor<?> c = timeoutOfSUTJunit.getDeclaredConstructor(Long.TYPE, TimeUnit.class);
        useNew = true;
    } catch (ClassNotFoundException e) {
        logger.error("Failed to load Timeout rule from SUT classloader: {}", e.getMessage(), e);
    } catch (NoSuchMethodException e) {
        logger.warn("SUT is using an old version of JUnit");
        useNew = false;
    }
    if (useNew) {
        // TODO: put back once above check works
        // bd.append(", " + TimeUnit.class.getName() + ".MILLISECONDS");
    }
    bd.append("); \n");
    bd.append("\n");
}
Also used : Timeout(org.junit.rules.Timeout) Constructor(java.lang.reflect.Constructor)
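
For orientation, the scaffolding emitted by this method boils down to a plain JUnit 4 rule field. Below is a minimal sketch of what the resulting test class might look like, assuming Properties.TIMEOUT is 3000 ms (so the emitted value is 4000) and the legacy single-argument Timeout constructor, since the TimeUnit variant is still commented out; the class and method names here are illustrative, not actual EvoSuite output.

import org.junit.Test;

public class Example_ESTest_scaffolding_sketch {

    // Generated rule: fails any test in this class that runs longer than 4000 ms.
    @org.junit.Rule
    public org.junit.rules.Timeout globalTimeout = new org.junit.rules.Timeout(4000);

    @Test
    public void generatedTestCase() throws Exception {
        // generated assertions would go here; the rule guards against hangs
    }
}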

Example 7 with Timeout

Use of org.junit.rules.Timeout in project flink by apache.

The class AsyncWaitOperatorTest, method testRestartWithFullQueue.

/**
 * Tests that the AsyncWaitOperator can restart if the checkpointed queue was full.
 *
 * <p>See FLINK-7949
 */
@Test(timeout = 10000)
public void testRestartWithFullQueue() throws Exception {
    final int capacity = 10;
    // 1. create the snapshot which contains capacity + 1 elements
    final CompletableFuture<Void> trigger = new CompletableFuture<>();
    final OneInputStreamOperatorTestHarness<Integer, Integer> snapshotHarness = createTestHarness(
            // the ControllableAsyncFunction blocks until the trigger future is completed
            new ControllableAsyncFunction<>(trigger), 1000L, capacity, AsyncDataStream.OutputMode.ORDERED);
    snapshotHarness.open();
    final OperatorSubtaskState snapshot;
    final ArrayList<Integer> expectedOutput = new ArrayList<>(capacity);
    try {
        synchronized (snapshotHarness.getCheckpointLock()) {
            for (int i = 0; i < capacity; i++) {
                snapshotHarness.processElement(i, 0L);
                expectedOutput.add(i);
            }
        }
        synchronized (snapshotHarness.getCheckpointLock()) {
            // execute the snapshot within the checkpoint lock, because then it is guaranteed
            // that the lastElementWriter has written the exceeding element
            snapshot = snapshotHarness.snapshot(0L, 0L);
        }
        // trigger the computation to make the close call finish
        trigger.complete(null);
    } finally {
        synchronized (snapshotHarness.getCheckpointLock()) {
            snapshotHarness.close();
        }
    }
    // 2. restore the snapshot and check that we complete
    final OneInputStreamOperatorTestHarness<Integer, Integer> recoverHarness = createTestHarness(new ControllableAsyncFunction<>(CompletableFuture.completedFuture(null)), 1000L, capacity, AsyncDataStream.OutputMode.ORDERED);
    recoverHarness.initializeState(snapshot);
    synchronized (recoverHarness.getCheckpointLock()) {
        recoverHarness.open();
    }
    synchronized (recoverHarness.getCheckpointLock()) {
        recoverHarness.endInput();
        recoverHarness.close();
    }
    final ConcurrentLinkedQueue<Object> output = recoverHarness.getOutput();
    final List<Integer> outputElements = output.stream().map(r -> ((StreamRecord<Integer>) r).getValue()).collect(Collectors.toList());
    assertThat(outputElements, Matchers.equalTo(expectedOutput));
}
Also used : Tuple1(org.apache.flink.api.java.tuple.Tuple1) Arrays(java.util.Arrays) ScheduledFuture(java.util.concurrent.ScheduledFuture) SharedObjects(org.apache.flink.testutils.junit.SharedObjects) JobGraph(org.apache.flink.runtime.jobgraph.JobGraph) TimeoutException(java.util.concurrent.TimeoutException) ExceptionUtils(org.apache.flink.util.ExceptionUtils) RichAsyncFunction(org.apache.flink.streaming.api.functions.async.RichAsyncFunction) OneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness) MapFunction(org.apache.flink.api.common.functions.MapFunction) Lists(org.apache.flink.shaded.guava30.com.google.common.collect.Lists) BasicTypeInfo(org.apache.flink.api.common.typeinfo.BasicTypeInfo) Assert.assertThat(org.junit.Assert.assertThat) IntSerializer(org.apache.flink.api.common.typeutils.base.IntSerializer) TestTaskStateManager(org.apache.flink.runtime.state.TestTaskStateManager) AsyncFunction(org.apache.flink.streaming.api.functions.async.AsyncFunction) ChainingStrategy(org.apache.flink.streaming.api.operators.ChainingStrategy) TestLogger(org.apache.flink.util.TestLogger) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) StreamElementQueue(org.apache.flink.streaming.api.operators.async.queue.StreamElementQueue) StreamTaskMailboxTestHarness(org.apache.flink.streaming.runtime.tasks.StreamTaskMailboxTestHarness) TaskStateSnapshot(org.apache.flink.runtime.checkpoint.TaskStateSnapshot) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) ResultFuture(org.apache.flink.streaming.api.functions.async.ResultFuture) StreamElement(org.apache.flink.streaming.runtime.streamrecord.StreamElement) CheckpointOptions(org.apache.flink.runtime.checkpoint.CheckpointOptions) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) AsyncDataStream(org.apache.flink.streaming.api.datastream.AsyncDataStream) Preconditions(org.apache.flink.util.Preconditions) Collectors(java.util.stream.Collectors) OneInputStreamTaskTestHarness(org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness) Executors(java.util.concurrent.Executors) CountDownLatch(java.util.concurrent.CountDownLatch) List(java.util.List) CheckpointBarrier(org.apache.flink.runtime.io.network.api.CheckpointBarrier) TypeExtractor(org.apache.flink.api.java.typeutils.TypeExtractor) Assert.assertFalse(org.junit.Assert.assertFalse) OperatorID(org.apache.flink.runtime.jobgraph.OperatorID) Optional(java.util.Optional) Utils(org.apache.flink.api.java.Utils) Queue(java.util.Queue) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) StreamConfig(org.apache.flink.streaming.api.graph.StreamConfig) JobVertex(org.apache.flink.runtime.jobgraph.JobVertex) CheckpointMetaData(org.apache.flink.runtime.checkpoint.CheckpointMetaData) Watermark(org.apache.flink.streaming.api.watermark.Watermark) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) CompletableFuture(java.util.concurrent.CompletableFuture) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) StreamTaskMailboxTestHarnessBuilder(org.apache.flink.streaming.runtime.tasks.StreamTaskMailboxTestHarnessBuilder) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) RichMapFunction(org.apache.flink.api.common.functions.RichMapFunction) TupleSerializer(org.apache.flink.api.java.typeutils.runtime.TupleSerializer) Timeout(org.junit.rules.Timeout) 
LinkedList(java.util.LinkedList) MockEnvironment(org.apache.flink.runtime.operators.testutils.MockEnvironment) TestHarnessUtil(org.apache.flink.streaming.util.TestHarnessUtil) SharedReference(org.apache.flink.testutils.junit.SharedReference) ExecutorService(java.util.concurrent.ExecutorService) DiscardingSink(org.apache.flink.streaming.api.functions.sink.DiscardingSink) Configuration(org.apache.flink.configuration.Configuration) SingleOutputStreamOperator(org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator) Matchers(org.hamcrest.Matchers) Assert.assertTrue(org.junit.Assert.assertTrue) OneInputStreamTask(org.apache.flink.streaming.runtime.tasks.OneInputStreamTask) Test(org.junit.Test) DataStream(org.apache.flink.streaming.api.datastream.DataStream) TimeUnit(java.util.concurrent.TimeUnit) Rule(org.junit.Rule) ForkJoinPool(java.util.concurrent.ForkJoinPool) Assert(org.junit.Assert) ArrayDeque(java.util.ArrayDeque) Comparator(java.util.Comparator) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) ArrayList(java.util.ArrayList) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) CompletableFuture(java.util.concurrent.CompletableFuture) Test(org.junit.Test)
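
The test above caps a single method with @Test(timeout = 10000); the org.junit.rules.Timeout entry in the import list points at the class-level alternative, where one rule bounds every test. A minimal, self-contained sketch follows; the class name and the 10-second budget are assumptions, not values from the Flink suite.

import static org.junit.Assert.assertEquals;

import java.util.concurrent.CompletableFuture;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;

public class GlobalTimeoutSketchTest {

    // One rule instead of repeating @Test(timeout = ...) on every method.
    @Rule
    public Timeout globalTimeout = Timeout.seconds(10);

    @Test
    public void completesWellBeforeTheRuleFires() throws Exception {
        CompletableFuture<Integer> future = CompletableFuture.supplyAsync(() -> 42);
        assertEquals(42, (int) future.get());
    }
}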

Example 8 with Timeout

Use of org.junit.rules.Timeout in project flink by apache.

The class ThreadInfoRequestCoordinatorTest, method createMockTaskManagerGateway.

private static CompletableFuture<TaskExecutorThreadInfoGateway> createMockTaskManagerGateway(CompletionType completionType) {
    final CompletableFuture<TaskThreadInfoResponse> responseFuture = new CompletableFuture<>();
    switch(completionType) {
        case SUCCESSFULLY:
            ThreadInfoSample sample = JvmUtils.createThreadInfoSample(Thread.currentThread().getId(), 100).get();
            responseFuture.complete(new TaskThreadInfoResponse(Collections.singletonList(sample)));
            break;
        case EXCEPTIONALLY:
            responseFuture.completeExceptionally(new RuntimeException("Request failed."));
            break;
        case TIMEOUT:
            executorService.schedule(() -> responseFuture.completeExceptionally(new TimeoutException(REQUEST_TIMEOUT_MESSAGE)), REQUEST_TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
            break;
        case NEVER_COMPLETE:
            // do nothing
            break;
        default:
            throw new RuntimeException("Unknown completion type.");
    }
    final TaskExecutorThreadInfoGateway executorGateway = (taskExecutionAttemptId, requestParams, timeout) -> responseFuture;
    return CompletableFuture.completedFuture(executorGateway);
}
Also used : Matchers.emptyArray(org.hamcrest.Matchers.emptyArray) TaskThreadInfoResponse(org.apache.flink.runtime.messages.TaskThreadInfoResponse) BeforeClass(org.junit.BeforeClass) Matchers.not(org.hamcrest.Matchers.not) TimeoutException(java.util.concurrent.TimeoutException) ExceptionUtils(org.apache.flink.util.ExceptionUtils) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) ArrayList(java.util.ArrayList) After(org.junit.After) Duration(java.time.Duration) Map(java.util.Map) TestLogger(org.apache.flink.util.TestLogger) Timeout(org.junit.rules.Timeout) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) Assert.fail(org.junit.Assert.fail) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Before(org.junit.Before) ThreadInfoSample(org.apache.flink.runtime.messages.ThreadInfoSample) AfterClass(org.junit.AfterClass) JvmUtils(org.apache.flink.runtime.util.JvmUtils) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) ScheduledThreadPoolExecutor(java.util.concurrent.ScheduledThreadPoolExecutor) TaskExecutorThreadInfoGateway(org.apache.flink.runtime.taskexecutor.TaskExecutorThreadInfoGateway) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) ExecutionAttemptID(org.apache.flink.runtime.executiongraph.ExecutionAttemptID) List(java.util.List) Rule(org.junit.Rule) Assert.assertFalse(org.junit.Assert.assertFalse) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) TaskExecutorThreadInfoGateway(org.apache.flink.runtime.taskexecutor.TaskExecutorThreadInfoGateway) TaskThreadInfoResponse(org.apache.flink.runtime.messages.TaskThreadInfoResponse) CompletableFuture(java.util.concurrent.CompletableFuture) ThreadInfoSample(org.apache.flink.runtime.messages.ThreadInfoSample) TimeoutException(java.util.concurrent.TimeoutException)
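
When a test coordinates futures and scheduled executors the way this gateway mock does, a hanging thread is a real risk; JUnit 4.12's Timeout builder can additionally try to report the thread it suspects is stuck. A hedged sketch, in which the class name and the 30-second budget are assumptions rather than values from the Flink test:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;

public class StuckThreadTimeoutSketchTest {

    // Fails the test after 30 seconds and attempts to name the thread that appears stuck.
    @Rule
    public Timeout globalTimeout = Timeout.builder()
            .withTimeout(30, TimeUnit.SECONDS)
            .withLookingForStuckThread(true)
            .build();

    @Test(expected = TimeoutException.class)
    public void neverCompletingFutureTimesOut() throws Exception {
        // A future that is never completed; the bounded get() fails fast,
        // well before the global rule would have to step in.
        new CompletableFuture<Void>().get(100, TimeUnit.MILLISECONDS);
    }
}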

Example 9 with Timeout

Use of org.junit.rules.Timeout in project pravega by pravega.

The class BookKeeperLogTests, method testRemoveEmptyLedgers.

/**
 * Tests the ability of BookKeeperLog to automatically remove empty ledgers during initialization.
 */
@Test
public void testRemoveEmptyLedgers() throws Exception {
    final int count = 100;
    final int writeEvery = count / 10;
    final Predicate<Integer> shouldAppendAnything = i -> i % writeEvery == 0;
    val allLedgers = new ArrayList<Map.Entry<Long, LedgerMetadata.Status>>();
    final Predicate<Integer> shouldExist = index -> (index >= allLedgers.size() - Ledgers.MIN_FENCE_LEDGER_COUNT) || (allLedgers.get(index).getValue() != LedgerMetadata.Status.Empty);
    for (int i = 0; i < count; i++) {
        try (BookKeeperLog log = (BookKeeperLog) createDurableDataLog()) {
            log.initialize(TIMEOUT);
            boolean shouldAppend = shouldAppendAnything.test(i);
            val currentMetadata = log.loadMetadata();
            val lastLedger = currentMetadata.getLedgers().get(currentMetadata.getLedgers().size() - 1);
            allLedgers.add(new AbstractMap.SimpleImmutableEntry<>(lastLedger.getLedgerId(), shouldAppend ? LedgerMetadata.Status.NotEmpty : LedgerMetadata.Status.Empty));
            val metadataLedgers = currentMetadata.getLedgers().stream().map(LedgerMetadata::getLedgerId).collect(Collectors.toSet());
            // Verify Log Metadata does not contain old empty ledgers.
            for (int j = 0; j < allLedgers.size(); j++) {
                val e = allLedgers.get(j);
                val expectedExist = shouldExist.test(j);
                Assert.assertEquals("Unexpected state for metadata. AllLedgerCount=" + allLedgers.size() + ", LedgerIndex=" + j + ", LedgerStatus=" + e.getValue(), expectedExist, metadataLedgers.contains(e.getKey()));
            }
            // Append some data to this Ledger, if needed.
            if (shouldAppend) {
                log.append(new CompositeByteArraySegment(getWriteData()), TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
            }
        }
    }
    // Verify that these ledgers have also been deleted from BookKeeper.
    for (int i = 0; i < allLedgers.size(); i++) {
        val e = allLedgers.get(i);
        if (shouldExist.test(i)) {
            // This should not throw any exceptions.
            Ledgers.openFence(e.getKey(), this.factory.get().getBookKeeperClient(), this.config.get());
        } else {
            AssertExtensions.assertThrows("Ledger not deleted from BookKeeper.", () -> Ledgers.openFence(e.getKey(), this.factory.get().getBookKeeperClient(), this.config.get()), ex -> true);
        }
    }
}
Also used : Arrays(java.util.Arrays) ThrottleSourceListener(io.pravega.segmentstore.storage.ThrottleSourceListener) SneakyThrows(lombok.SneakyThrows) AssertExtensions(io.pravega.test.common.AssertExtensions) Cleanup(lombok.Cleanup) Stat(org.apache.zookeeper.data.Stat) LogAddress(io.pravega.segmentstore.storage.LogAddress) DataLogNotAvailableException(io.pravega.segmentstore.storage.DataLogNotAvailableException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Map(java.util.Map) After(org.junit.After) BookKeeper(org.apache.bookkeeper.client.api.BookKeeper) CancellationException(java.util.concurrent.CancellationException) Predicate(java.util.function.Predicate) WriteHandle(org.apache.bookkeeper.client.api.WriteHandle) DataLogWriterNotPrimaryException(io.pravega.segmentstore.storage.DataLogWriterNotPrimaryException) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) RetriesExhaustedException(io.pravega.common.util.RetriesExhaustedException) BKException(org.apache.bookkeeper.client.BKException) List(java.util.List) CuratorFramework(org.apache.curator.framework.CuratorFramework) DurableDataLogTestBase(io.pravega.segmentstore.storage.DurableDataLogTestBase) DataLogCorruptedException(io.pravega.segmentstore.storage.DataLogCorruptedException) BookKeeperAdmin(org.apache.bookkeeper.client.BookKeeperAdmin) WriteFailureException(io.pravega.segmentstore.storage.WriteFailureException) TestUtils(io.pravega.test.common.TestUtils) Futures(io.pravega.common.concurrent.Futures) ObjectClosedException(io.pravega.common.ObjectClosedException) BKLedgerClosedException(org.apache.bookkeeper.client.BKException.BKLedgerClosedException) CuratorFrameworkFactory(org.apache.curator.framework.CuratorFrameworkFactory) Setter(lombok.Setter) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) AtomicReference(java.util.concurrent.atomic.AtomicReference) CompositeByteArraySegment(io.pravega.common.util.CompositeByteArraySegment) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) Lists(com.google.common.collect.Lists) ExponentialBackoffRetry(org.apache.curator.retry.ExponentialBackoffRetry) Timeout(org.junit.rules.Timeout) DurableDataLogException(io.pravega.segmentstore.storage.DurableDataLogException) DigestType(org.apache.bookkeeper.client.api.DigestType) Before(org.junit.Before) DurableDataLog(io.pravega.segmentstore.storage.DurableDataLog) KeeperException(org.apache.zookeeper.KeeperException) lombok.val(lombok.val) Test(org.junit.Test) Timer(io.pravega.common.Timer) TimeUnit(java.util.concurrent.TimeUnit) ReadHandle(org.apache.bookkeeper.client.api.ReadHandle) AbstractMap(java.util.AbstractMap) Rule(org.junit.Rule) TreeMap(java.util.TreeMap) VisibleForTesting(com.google.common.annotations.VisibleForTesting) Comparator(java.util.Comparator) Assert(org.junit.Assert) Collections(java.util.Collections) lombok.val(lombok.val) ArrayList(java.util.ArrayList) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) AbstractMap(java.util.AbstractMap) CompositeByteArraySegment(io.pravega.common.util.CompositeByteArraySegment) Map(java.util.Map) HashMap(java.util.HashMap) AbstractMap(java.util.AbstractMap) TreeMap(java.util.TreeMap) Test(org.junit.Test)
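
Tests like this one layer two timeouts: a per-operation TIMEOUT passed to each blocking call and the class-level Timeout rule visible in the import list. Keeping the rule noticeably larger than the per-operation budget preserves the more precise failure message from the bounded call. A sketch under assumed values; the names and numbers are illustrative, not taken from the Pravega test base.

import static org.junit.Assert.assertEquals;

import java.time.Duration;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;

public class LayeredTimeoutSketchTest {

    // Per-operation budget used for individual blocking calls.
    private static final Duration TIMEOUT = Duration.ofSeconds(30);

    // Safety net for the whole test method, deliberately larger than any single operation budget.
    @Rule
    public Timeout globalTimeout = Timeout.millis(TIMEOUT.toMillis() * 4);

    @Test
    public void boundedGetFailsBeforeTheGlobalRule() throws Exception {
        CompletableFuture<String> write = CompletableFuture.completedFuture("ack");
        // Bounding each get() keeps a failure local to the slow operation.
        assertEquals("ack", write.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS));
    }
}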

Example 10 with Timeout

Use of org.junit.rules.Timeout in project pravega by pravega.

The class StreamSegmentContainerTests, method testAttributes.

/**
 * Tests the ability to set attributes (via append() or updateAttributes()) and then fetch them back using getAttributes(),
 * with emphasis on Extended Attributes that are dumped into Storage and cleared from memory.
 */
@Test
public void testAttributes() throws Exception {
    final AttributeId coreAttribute = Attributes.EVENT_COUNT;
    final int variableAttributeIdLength = 4;
    final List<AttributeId> extendedAttributesUUID = Arrays.asList(AttributeId.randomUUID(), AttributeId.randomUUID());
    final List<AttributeId> extendedAttributesVariable = Arrays.asList(AttributeId.random(variableAttributeIdLength), AttributeId.random(variableAttributeIdLength));
    final List<AttributeId> allAttributesWithUUID = Stream.concat(extendedAttributesUUID.stream(), Stream.of(coreAttribute)).collect(Collectors.toList());
    final List<AttributeId> allAttributesWithVariable = Stream.concat(extendedAttributesVariable.stream(), Stream.of(coreAttribute)).collect(Collectors.toList());
    final AttributeId segmentLengthAttributeUUID = AttributeId.randomUUID();
    final AttributeId segmentLengthAttributeVariable = AttributeId.random(variableAttributeIdLength);
    final long expectedAttributeValue = APPENDS_PER_SEGMENT + ATTRIBUTE_UPDATES_PER_SEGMENT;
    final TestContainerConfig containerConfig = new TestContainerConfig();
    containerConfig.setSegmentMetadataExpiration(Duration.ofMillis(EVICTION_SEGMENT_EXPIRATION_MILLIS_SHORT));
    containerConfig.setMaxCachedExtendedAttributeCount(SEGMENT_COUNT * allAttributesWithUUID.size());
    @Cleanup TestContext context = createContext();
    OperationLogFactory localDurableLogFactory = new DurableLogFactory(FREQUENT_TRUNCATIONS_DURABLE_LOG_CONFIG, context.dataLogFactory, executorService());
    @Cleanup MetadataCleanupContainer localContainer = new MetadataCleanupContainer(CONTAINER_ID, containerConfig, localDurableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
    localContainer.startAsync().awaitRunning();
    // 1. Create the StreamSegments.
    val segmentNames = IntStream.range(0, SEGMENT_COUNT).boxed().collect(Collectors.toMap(StreamSegmentContainerTests::getSegmentName, i -> i % 2 == 0 ? variableAttributeIdLength : 0));
    ArrayList<CompletableFuture<Void>> opFutures = new ArrayList<>();
    for (val sn : segmentNames.entrySet()) {
        opFutures.add(localContainer.createStreamSegment(sn.getKey(), SegmentType.STREAM_SEGMENT, AttributeUpdateCollection.from(new AttributeUpdate(Attributes.ATTRIBUTE_ID_LENGTH, AttributeUpdateType.None, sn.getValue())), TIMEOUT));
    }
    Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    Predicate<Map.Entry<String, Integer>> isUUIDOnly = e -> e.getValue() == 0;
    // 2. Add some appends.
    for (val sn : segmentNames.entrySet()) {
        boolean isUUID = isUUIDOnly.test(sn);
        for (int i = 0; i < APPENDS_PER_SEGMENT; i++) {
            AttributeUpdateCollection attributeUpdates = (isUUID ? allAttributesWithUUID : allAttributesWithVariable).stream().map(attributeId -> new AttributeUpdate(attributeId, AttributeUpdateType.Accumulate, 1)).collect(Collectors.toCollection(AttributeUpdateCollection::new));
            opFutures.add(Futures.toVoid(localContainer.append(sn.getKey(), getAppendData(sn.getKey(), i), attributeUpdates, TIMEOUT)));
        }
    }
    // 2.1 Update some of the attributes.
    for (val sn : segmentNames.entrySet()) {
        boolean isUUID = isUUIDOnly.test(sn);
        for (int i = 0; i < ATTRIBUTE_UPDATES_PER_SEGMENT; i++) {
            AttributeUpdateCollection attributeUpdates = (isUUID ? allAttributesWithUUID : allAttributesWithVariable).stream().map(attributeId -> new AttributeUpdate(attributeId, AttributeUpdateType.Accumulate, 1)).collect(Collectors.toCollection(AttributeUpdateCollection::new));
            opFutures.add(localContainer.updateAttributes(sn.getKey(), attributeUpdates, TIMEOUT));
        }
        // Verify that we are not allowed to update attributes of the wrong type.
        val badUpdate = new AttributeUpdate(isUUID ? AttributeId.random(variableAttributeIdLength) : AttributeId.randomUUID(), AttributeUpdateType.Accumulate, 1);
        AssertExtensions.assertSuppliedFutureThrows("updateAttributes allowed updating attributes with wrong type and/or length.", () -> localContainer.updateAttributes(sn.getKey(), AttributeUpdateCollection.from(badUpdate), TIMEOUT), ex -> ex instanceof AttributeIdLengthMismatchException);
    }
    Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // 2.2 Dynamic attributes.
    for (val sn : segmentNames.entrySet()) {
        boolean isUUID = isUUIDOnly.test(sn);
        val dynamicId = isUUID ? segmentLengthAttributeUUID : segmentLengthAttributeVariable;
        val dynamicAttributes = AttributeUpdateCollection.from(new DynamicAttributeUpdate(dynamicId, AttributeUpdateType.Replace, DynamicAttributeValue.segmentLength(10)));
        val appendData = getAppendData(sn.getKey(), 1000);
        val lastOffset = localContainer.append(sn.getKey(), appendData, dynamicAttributes, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        val expectedValue = lastOffset - appendData.getLength() + 10;
        Assert.assertEquals(expectedValue, (long) localContainer.getAttributes(sn.getKey(), Collections.singleton(dynamicId), false, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).get(dynamicId));
    }
    // 3. getSegmentInfo
    for (val sn : segmentNames.entrySet()) {
        val segmentName = sn.getKey();
        val allAttributes = isUUIDOnly.test(sn) ? allAttributesWithUUID : allAttributesWithVariable;
        val allAttributeValues = localContainer.getAttributes(segmentName, allAttributes, false, TIMEOUT).join();
        Assert.assertEquals("Unexpected number of attributes retrieved via getAttributes().", allAttributes.size(), allAttributeValues.size());
        // Verify all attribute values.
        SegmentProperties sp = localContainer.getStreamSegmentInfo(segmentName, TIMEOUT).join();
        for (val attributeId : allAttributes) {
            Assert.assertEquals("Unexpected value for attribute " + attributeId + " via getInfo() for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
            Assert.assertEquals("Unexpected value for attribute " + attributeId + " via getAttributes() for segment " + segmentName, expectedAttributeValue, (long) allAttributeValues.getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
        }
        // Verify we can't request wrong lengths/types.
        val badId = isUUIDOnly.test(sn) ? AttributeId.random(variableAttributeIdLength) : AttributeId.randomUUID();
        AssertExtensions.assertSuppliedFutureThrows("getAttributes allowed getting attributes with wrong type and/or length.", () -> localContainer.getAttributes(segmentName, Collections.singleton(badId), true, TIMEOUT), ex -> ex instanceof IllegalArgumentException);
    }
    // Force these segments out of memory, so that we may verify that extended attributes are still recoverable.
    localContainer.triggerMetadataCleanup(segmentNames.keySet()).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    for (val sn : segmentNames.entrySet()) {
        val segmentName = sn.getKey();
        val allAttributes = isUUIDOnly.test(sn) ? allAttributesWithUUID : allAttributesWithVariable;
        val allAttributeValues = localContainer.getAttributes(segmentName, allAttributes, false, TIMEOUT).join();
        Assert.assertEquals("Unexpected number of attributes retrieved via getAttributes() after recovery for segment " + segmentName, allAttributes.size(), allAttributeValues.size());
        // Verify all attribute values. Core attributes should still be loaded in memory, while extended attributes can
        // only be fetched via their special API.
        SegmentProperties sp = localContainer.getStreamSegmentInfo(segmentName, TIMEOUT).join();
        for (val attributeId : allAttributes) {
            Assert.assertEquals("Unexpected value for attribute " + attributeId + " via getAttributes() after recovery for segment " + segmentName, expectedAttributeValue, (long) allAttributeValues.getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
            if (Attributes.isCoreAttribute(attributeId)) {
                Assert.assertEquals("Expecting core attribute to be loaded in memory.", expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
            } else {
                Assert.assertEquals("Not expecting extended attribute to be loaded in memory.", Attributes.NULL_ATTRIBUTE_VALUE, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
            }
        }
        // Now instruct the Container to cache missing values (do it a few times so we make sure it's idempotent).
        // Also introduce some random new attribute to fetch. We want to make sure we can properly handle caching
        // missing attribute values.
        val missingAttributeId = isUUIDOnly.test(sn) ? AttributeId.randomUUID() : AttributeId.random(variableAttributeIdLength);
        val attributesToCache = new ArrayList<>(allAttributes);
        attributesToCache.add(missingAttributeId);
        val attributesToCacheValues = new HashMap<>(allAttributeValues);
        attributesToCacheValues.put(missingAttributeId, Attributes.NULL_ATTRIBUTE_VALUE);
        Map<AttributeId, Long> allAttributeValuesWithCache;
        for (int i = 0; i < 2; i++) {
            allAttributeValuesWithCache = localContainer.getAttributes(segmentName, attributesToCache, true, TIMEOUT).join();
            AssertExtensions.assertMapEquals("Inconsistent results from getAttributes(cache=true, attempt=" + i + ").", attributesToCacheValues, allAttributeValuesWithCache);
            sp = localContainer.getStreamSegmentInfo(segmentName, TIMEOUT).join();
            for (val attributeId : allAttributes) {
                Assert.assertEquals("Expecting all attributes to be loaded in memory.", expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
            }
            Assert.assertEquals("Unexpected value for missing Attribute Id", Attributes.NULL_ATTRIBUTE_VALUE, (long) sp.getAttributes().get(missingAttributeId));
        }
    }
    // 4. Make an update, then immediately seal the segment, then verify the update updated the root pointer.
    AttributeId attr = Attributes.ATTRIBUTE_SEGMENT_ROOT_POINTER;
    val oldRootPointers = new HashMap<String, Long>();
    for (val sn : segmentNames.entrySet()) {
        val segmentName = sn.getKey();
        val newAttributeId = isUUIDOnly.test(sn) ? AttributeId.randomUUID() : AttributeId.random(variableAttributeIdLength);
        // Get the old root pointer, then make a random attribute update, then immediately seal the segment.
        localContainer.getAttributes(segmentName, Collections.singleton(attr), false, TIMEOUT).thenCompose(values -> {
            oldRootPointers.put(segmentName, values.get(attr));
            return CompletableFuture.allOf(localContainer.updateAttributes(segmentName, AttributeUpdateCollection.from(new AttributeUpdate(newAttributeId, AttributeUpdateType.Replace, 1L)), TIMEOUT), localContainer.sealStreamSegment(segmentName, TIMEOUT));
        }).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    }
    // Verify that the Root Pointer attribute eventually advances past the value recorded before the seal,
    // which indicates the StorageWriter was able to successfully record it after its final Attribute Index update.
    for (String segmentName : segmentNames.keySet()) {
        Long oldValue = oldRootPointers.get(segmentName);
        TestUtils.await(() -> {
            val newVal = localContainer.getAttributes(segmentName, Collections.singleton(attr), false, TIMEOUT).join().get(attr);
            return oldValue < newVal;
        }, 10, TIMEOUT.toMillis());
    }
    waitForSegmentsInStorage(segmentNames.keySet(), localContainer, context).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    localContainer.stopAsync().awaitTerminated();
}
Also used : Arrays(java.util.Arrays) Storage(io.pravega.segmentstore.storage.Storage) StreamSegmentInformation(io.pravega.segmentstore.contracts.StreamSegmentInformation) StreamSegmentNotExistsException(io.pravega.segmentstore.contracts.StreamSegmentNotExistsException) ContainerEventProcessor(io.pravega.segmentstore.server.ContainerEventProcessor) Cleanup(lombok.Cleanup) StorageWriterFactory(io.pravega.segmentstore.server.writer.StorageWriterFactory) UpdateableSegmentMetadata(io.pravega.segmentstore.server.UpdateableSegmentMetadata) Future(java.util.concurrent.Future) ContainerTableExtensionImpl(io.pravega.segmentstore.server.tables.ContainerTableExtensionImpl) InMemoryStorageFactory(io.pravega.segmentstore.storage.mocks.InMemoryStorageFactory) Duration(java.time.Duration) Map(java.util.Map) CachePolicy(io.pravega.segmentstore.server.CachePolicy) Operation(io.pravega.segmentstore.server.logs.operations.Operation) WriterFlushResult(io.pravega.segmentstore.server.WriterFlushResult) AsyncReadResultProcessor(io.pravega.segmentstore.server.reading.AsyncReadResultProcessor) ContainerReadIndexFactory(io.pravega.segmentstore.server.reading.ContainerReadIndexFactory) InMemoryDurableDataLogFactory(io.pravega.segmentstore.storage.mocks.InMemoryDurableDataLogFactory) DurableLogFactory(io.pravega.segmentstore.server.logs.DurableLogFactory) Attributes(io.pravega.segmentstore.contracts.Attributes) DurableLogConfig(io.pravega.segmentstore.server.logs.DurableLogConfig) Writer(io.pravega.segmentstore.server.Writer) StandardCharsets(java.nio.charset.StandardCharsets) Stream(java.util.stream.Stream) SegmentContainerFactory(io.pravega.segmentstore.server.SegmentContainerFactory) ContainerTableExtension(io.pravega.segmentstore.server.tables.ContainerTableExtension) ThreadPooledTestSuite(io.pravega.test.common.ThreadPooledTestSuite) SyncStorage(io.pravega.segmentstore.storage.SyncStorage) DirectMemoryCache(io.pravega.segmentstore.storage.cache.DirectMemoryCache) TestUtils(io.pravega.test.common.TestUtils) Futures(io.pravega.common.concurrent.Futures) CacheManager(io.pravega.segmentstore.server.CacheManager) ByteArrayOutputStream(java.io.ByteArrayOutputStream) IllegalContainerStateException(io.pravega.segmentstore.server.IllegalContainerStateException) TooManyActiveSegmentsException(io.pravega.segmentstore.contracts.TooManyActiveSegmentsException) EntrySerializerTests(io.pravega.segmentstore.server.tables.EntrySerializerTests) Exceptions(io.pravega.common.Exceptions) StorageFactory(io.pravega.segmentstore.storage.StorageFactory) BadAttributeUpdateException(io.pravega.segmentstore.contracts.BadAttributeUpdateException) Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) UpdateableContainerMetadata(io.pravega.segmentstore.server.UpdateableContainerMetadata) SegmentType(io.pravega.segmentstore.contracts.SegmentType) Runnables(com.google.common.util.concurrent.Runnables) AttributeIndexConfig(io.pravega.segmentstore.server.attributes.AttributeIndexConfig) ReadIndexConfig(io.pravega.segmentstore.server.reading.ReadIndexConfig) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) BiConsumer(java.util.function.BiConsumer) Timeout(org.junit.rules.Timeout) WriterTableProcessor(io.pravega.segmentstore.server.tables.WriterTableProcessor) ConfigurationException(io.pravega.common.util.ConfigurationException) SegmentContainerExtension(io.pravega.segmentstore.server.SegmentContainerExtension) WriterFactory(io.pravega.segmentstore.server.WriterFactory) Properties(java.util.Properties) 
DurableDataLog(io.pravega.segmentstore.storage.DurableDataLog) Executor(java.util.concurrent.Executor) AttributeId(io.pravega.segmentstore.contracts.AttributeId) lombok.val(lombok.val) Assert.assertTrue(org.junit.Assert.assertTrue) OperationLog(io.pravega.segmentstore.server.OperationLog) TableExtensionConfig(io.pravega.segmentstore.server.tables.TableExtensionConfig) IOException(java.io.IOException) Test(org.junit.Test) SystemJournal(io.pravega.segmentstore.storage.chunklayer.SystemJournal) Service(com.google.common.util.concurrent.Service) AtomicLong(java.util.concurrent.atomic.AtomicLong) DirectSegmentAccess(io.pravega.segmentstore.server.DirectSegmentAccess) ContainerAttributeIndex(io.pravega.segmentstore.server.attributes.ContainerAttributeIndex) AttributeUpdateCollection(io.pravega.segmentstore.contracts.AttributeUpdateCollection) OperationLogFactory(io.pravega.segmentstore.server.OperationLogFactory) SegmentContainer(io.pravega.segmentstore.server.SegmentContainer) Assert(org.junit.Assert) TableEntry(io.pravega.segmentstore.contracts.tables.TableEntry) Assert.assertEquals(org.junit.Assert.assertEquals) DynamicAttributeValue(io.pravega.segmentstore.contracts.DynamicAttributeValue) OperationPriority(io.pravega.segmentstore.server.logs.operations.OperationPriority) WriterConfig(io.pravega.segmentstore.server.writer.WriterConfig) SneakyThrows(lombok.SneakyThrows) AssertExtensions(io.pravega.test.common.AssertExtensions) BiFunction(java.util.function.BiFunction) RequiredArgsConstructor(lombok.RequiredArgsConstructor) TimeoutException(java.util.concurrent.TimeoutException) ByteBuffer(java.nio.ByteBuffer) SegmentProperties(io.pravega.segmentstore.contracts.SegmentProperties) ReadIndexFactory(io.pravega.segmentstore.server.ReadIndexFactory) AttributeUpdate(io.pravega.segmentstore.contracts.AttributeUpdate) StreamSegmentSealedException(io.pravega.segmentstore.contracts.StreamSegmentSealedException) ContainerAttributeIndexFactoryImpl(io.pravega.segmentstore.server.attributes.ContainerAttributeIndexFactoryImpl) AttributeIndexFactory(io.pravega.segmentstore.server.attributes.AttributeIndexFactory) SegmentHandle(io.pravega.segmentstore.storage.SegmentHandle) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) BufferView(io.pravega.common.util.BufferView) AbstractService(com.google.common.util.concurrent.AbstractService) AttributeIdLengthMismatchException(io.pravega.segmentstore.server.logs.AttributeIdLengthMismatchException) ServiceListeners(io.pravega.segmentstore.server.ServiceListeners) ContainerOfflineException(io.pravega.segmentstore.server.ContainerOfflineException) Predicate(java.util.function.Predicate) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) CompletionException(java.util.concurrent.CompletionException) ReadResultEntryType(io.pravega.segmentstore.contracts.ReadResultEntryType) UUID(java.util.UUID) DataLogWriterNotPrimaryException(io.pravega.segmentstore.storage.DataLogWriterNotPrimaryException) DynamicAttributeUpdate(io.pravega.segmentstore.contracts.DynamicAttributeUpdate) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) SegmentMetadataComparer(io.pravega.segmentstore.server.SegmentMetadataComparer) List(java.util.List) ByteArraySegment(io.pravega.common.util.ByteArraySegment) BadOffsetException(io.pravega.segmentstore.contracts.BadOffsetException) WriterSegmentProcessor(io.pravega.segmentstore.server.WriterSegmentProcessor) DurableDataLogFactory(io.pravega.segmentstore.storage.DurableDataLogFactory) 
ReadResult(io.pravega.segmentstore.contracts.ReadResult) IntStream(java.util.stream.IntStream) ObjectClosedException(io.pravega.common.ObjectClosedException) Setter(lombok.Setter) Getter(lombok.Getter) AsyncStorageWrapper(io.pravega.segmentstore.storage.AsyncStorageWrapper) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) AtomicReference(java.util.concurrent.atomic.AtomicReference) Function(java.util.function.Function) CacheStorage(io.pravega.segmentstore.storage.cache.CacheStorage) HashSet(java.util.HashSet) SegmentMetadata(io.pravega.segmentstore.server.SegmentMetadata) ReadResultEntry(io.pravega.segmentstore.contracts.ReadResultEntry) ExecutorService(java.util.concurrent.ExecutorService) NameUtils(io.pravega.shared.NameUtils) ExecutorServiceHelpers.newScheduledThreadPool(io.pravega.common.concurrent.ExecutorServiceHelpers.newScheduledThreadPool) TimeoutTimer(io.pravega.common.TimeoutTimer) RollingStorage(io.pravega.segmentstore.storage.rolling.RollingStorage) IntentionalException(io.pravega.test.common.IntentionalException) StreamSegmentMergedException(io.pravega.segmentstore.contracts.StreamSegmentMergedException) TestReadResultHandler(io.pravega.segmentstore.server.reading.TestReadResultHandler) SnapshotInfo(io.pravega.segmentstore.storage.chunklayer.SnapshotInfo) TestDurableDataLogFactory(io.pravega.segmentstore.server.TestDurableDataLogFactory) TimeUnit(java.util.concurrent.TimeUnit) Consumer(java.util.function.Consumer) Rule(org.junit.Rule) SegmentOperation(io.pravega.segmentstore.server.SegmentOperation) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) TypedProperties(io.pravega.common.util.TypedProperties) AttributeUpdateType(io.pravega.segmentstore.contracts.AttributeUpdateType) ReadIndex(io.pravega.segmentstore.server.ReadIndex) Comparator(java.util.Comparator) Collections(java.util.Collections) StreamSegmentSealOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentSealOperation) InputStream(java.io.InputStream) AttributeUpdateCollection(io.pravega.segmentstore.contracts.AttributeUpdateCollection) AttributeUpdate(io.pravega.segmentstore.contracts.AttributeUpdate) DynamicAttributeUpdate(io.pravega.segmentstore.contracts.DynamicAttributeUpdate) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) AttributeId(io.pravega.segmentstore.contracts.AttributeId) ArrayList(java.util.ArrayList) Cleanup(lombok.Cleanup) OperationLogFactory(io.pravega.segmentstore.server.OperationLogFactory) DurableLogFactory(io.pravega.segmentstore.server.logs.DurableLogFactory) CompletableFuture(java.util.concurrent.CompletableFuture) TableEntry(io.pravega.segmentstore.contracts.tables.TableEntry) ReadResultEntry(io.pravega.segmentstore.contracts.ReadResultEntry) lombok.val(lombok.val) AttributeIdLengthMismatchException(io.pravega.segmentstore.server.logs.AttributeIdLengthMismatchException) DynamicAttributeUpdate(io.pravega.segmentstore.contracts.DynamicAttributeUpdate) AtomicLong(java.util.concurrent.atomic.AtomicLong) SegmentProperties(io.pravega.segmentstore.contracts.SegmentProperties) Test(org.junit.Test)
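
Step 2 of the test collects every append and attribute update into opFutures and then blocks once with Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS). Using only the standard library, the same collect-then-wait-with-a-budget pattern looks roughly like the sketch below; the helper name and the 30-second budget are assumptions, not Pravega code.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

public final class BoundedBatchWait {

    private BoundedBatchWait() {
    }

    // Waits for every future in the batch, throwing TimeoutException if the batch
    // as a whole does not finish within the given budget.
    public static void awaitAll(List<CompletableFuture<Void>> futures, long timeoutMillis) throws Exception {
        CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
                .get(timeoutMillis, TimeUnit.MILLISECONDS);
    }

    public static void main(String[] args) throws Exception {
        List<CompletableFuture<Void>> opFutures = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            opFutures.add(CompletableFuture.runAsync(() -> { /* simulated append/update */ }));
        }
        awaitAll(opFutures, TimeUnit.SECONDS.toMillis(30));
        System.out.println("all operations completed within the budget");
    }
}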

Aggregations

Timeout (org.junit.rules.Timeout): 92
Rule (org.junit.Rule): 91
Test (org.junit.Test): 91
Assert (org.junit.Assert): 84
AssertExtensions (io.pravega.test.common.AssertExtensions): 81
Duration (java.time.Duration): 81
ArrayList (java.util.ArrayList): 80
lombok.val (lombok.val): 79
TimeUnit (java.util.concurrent.TimeUnit): 78
ThreadPooledTestSuite (io.pravega.test.common.ThreadPooledTestSuite): 77
Cleanup (lombok.Cleanup): 73
Collections (java.util.Collections): 72
CompletableFuture (java.util.concurrent.CompletableFuture): 72
HashMap (java.util.HashMap): 69
ByteArraySegment (io.pravega.common.util.ByteArraySegment): 68
Collectors (java.util.stream.Collectors): 68
List (java.util.List): 66
AtomicReference (java.util.concurrent.atomic.AtomicReference): 66
IntentionalException (io.pravega.test.common.IntentionalException): 62
Map (java.util.Map): 62