Example 1 with DebugStreamSegmentContainer

Use of io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer in project pravega by pravega.

From class DurableLogRecoveryCommand, method startDebugSegmentContainers:

// Creates debug segment container instances, puts them in a map and returns it.
private Map<Integer, DebugStreamSegmentContainer> startDebugSegmentContainers(Context context, BookKeeperLogFactory dataLogFactory) throws Exception {
    // Start a debug segment container corresponding to the given container Id and put it in the Hashmap with the Id.
    Map<Integer, DebugStreamSegmentContainer> debugStreamSegmentContainerMap = new HashMap<>();
    OperationLogFactory localDurableLogFactory = new DurableLogFactory(NO_TRUNCATIONS_DURABLE_LOG_CONFIG, dataLogFactory, executorService);
    // Create a debug segment container instance for each container Id using the local durable log factory.
    for (int containerId = 0; containerId < this.containerCount; containerId++) {
        DebugStreamSegmentContainer debugStreamSegmentContainer = new DebugStreamSegmentContainer(containerId, CONTAINER_CONFIG, localDurableLogFactory, context.getReadIndexFactory(), context.getAttributeIndexFactory(), context.getWriterFactory(), this.storageFactory, context.getDefaultExtensions(), executorService);
        output("Starting debug segment container %d.", containerId);
        Services.startAsync(debugStreamSegmentContainer, executorService).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        debugStreamSegmentContainerMap.put(containerId, debugStreamSegmentContainer);
    }
    return debugStreamSegmentContainerMap;
}
Also used: DurableLogFactory (io.pravega.segmentstore.server.logs.DurableLogFactory), DebugStreamSegmentContainer (io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer), HashMap (java.util.HashMap), OperationLogFactory (io.pravega.segmentstore.server.OperationLogFactory)
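
The map returned here is what the recovery utilities consume: the execute() method in Example 3 passes it straight to ContainerRecoveryUtils.recoverAllSegments. Below is a minimal sketch of that hand-off and of releasing the containers afterwards; the helper name is illustrative, and the import path for ContainerRecoveryUtils is assumed rather than taken from this excerpt.

import io.pravega.segmentstore.server.containers.ContainerRecoveryUtils;
import io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer;
import io.pravega.segmentstore.storage.Storage;

import java.time.Duration;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;

// Illustrative helper (not part of the command above): hand the started debug containers
// to the recovery utilities, then release them once recovery is done.
private void recoverAndRelease(Storage storage,
                               Map<Integer, DebugStreamSegmentContainer> containers,
                               ScheduledExecutorService executor,
                               Duration timeout) throws Exception {
    // Re-register every segment found in long-term storage with the container that owns it.
    ContainerRecoveryUtils.recoverAllSegments(storage, containers, executor, timeout);
    // Close the containers, as the test in Example 2 does after its verification loop.
    for (DebugStreamSegmentContainer container : containers.values()) {
        container.close();
    }
}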

Example 2 with DebugStreamSegmentContainer

Use of io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer in project pravega by pravega.

From class StreamSegmentStoreTestBase, method testSegmentRestoration:

/**
 * The SegmentStore is used to create some segments, write data to them and let them flush to storage.
 * The test then uses only that storage to restore the container metadata segments in a new durable data log,
 * and verifies that the restored segment properties match the originals.
 * @throws Exception If an exception occurred.
 */
public void testSegmentRestoration() throws Exception {
    ArrayList<String> segmentNames;
    HashMap<String, ArrayList<String>> transactionsBySegment;
    HashMap<String, Long> lengths = new HashMap<>();
    ArrayList<ByteBuf> appendBuffers = new ArrayList<>();
    HashMap<String, ByteArrayOutputStream> segmentContents = new HashMap<>();
    try (val builder = createBuilder(0, false)) {
        val segmentStore = builder.createStreamSegmentService();
        segmentNames = createSegments(segmentStore);
        log.info("Created Segments: {}.", String.join(", ", segmentNames));
        transactionsBySegment = createTransactions(segmentNames, segmentStore);
        log.info("Created Transactions: {}.", transactionsBySegment.values().stream().flatMap(Collection::stream).collect(Collectors.joining(", ")));
        // Add some appends and seal segments
        ArrayList<String> segmentsAndTransactions = new ArrayList<>(segmentNames);
        transactionsBySegment.values().forEach(segmentsAndTransactions::addAll);
        appendData(segmentsAndTransactions, segmentContents, lengths, appendBuffers, segmentStore).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        log.info("Finished appending data.");
        // Wait for flushing the segments to tier2
        waitForSegmentsInStorage(segmentNames, segmentStore).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        log.info("Finished waiting for segments in Storage.");
        // Get the persistent storage from readOnlySegmentStore.
        @Cleanup Storage storage = builder.createStorageFactory().createStorageAdapter();
        storage.initialize(DEFAULT_EPOCH);
        // Create the environment for DebugSegmentContainer using the given storageFactory.
        @Cleanup DebugStreamSegmentContainerTests.TestContext context = DebugStreamSegmentContainerTests.createContext(executorService());
        OperationLogFactory localDurableLogFactory = new DurableLogFactory(DURABLE_LOG_CONFIG, context.dataLogFactory, executorService());
        // Start a debug segment container corresponding to each container Id and put it in the Hashmap with the Id.
        Map<Integer, DebugStreamSegmentContainer> debugStreamSegmentContainerMap = new HashMap<>();
        for (int containerId = 0; containerId < CONTAINER_COUNT; containerId++) {
            // Delete container metadata segment and attributes index segment corresponding to the container Id from the long term storage
            ContainerRecoveryUtils.deleteMetadataAndAttributeSegments(storage, containerId, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
            DebugStreamSegmentContainerTests.MetadataCleanupContainer localContainer = new DebugStreamSegmentContainerTests.MetadataCleanupContainer(containerId, CONTAINER_CONFIG, localDurableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
            Services.startAsync(localContainer, executorService()).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
            debugStreamSegmentContainerMap.put(containerId, localContainer);
        }
        // Restore all segments from the long term storage using debug segment container.
        ContainerRecoveryUtils.recoverAllSegments(storage, debugStreamSegmentContainerMap, executorService(), TIMEOUT);
        // Verify that segment details match post restoration.
        SegmentToContainerMapper segToConMapper = new SegmentToContainerMapper(CONTAINER_COUNT, true);
        for (String segment : segmentNames) {
            int containerId = segToConMapper.getContainerId(segment);
            SegmentProperties props = debugStreamSegmentContainerMap.get(containerId).getStreamSegmentInfo(segment, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
            Assert.assertEquals("Segment length mismatch.", (long) lengths.get(segment), props.getLength());
        }
        for (int containerId = 0; containerId < CONTAINER_COUNT; containerId++) {
            debugStreamSegmentContainerMap.get(containerId).close();
        }
    }
}
Also used: HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), ByteBuf (io.netty.buffer.ByteBuf), Cleanup (lombok.Cleanup), OperationLogFactory (io.pravega.segmentstore.server.OperationLogFactory), DurableLogFactory (io.pravega.segmentstore.server.logs.DurableLogFactory), SegmentToContainerMapper (io.pravega.shared.segment.SegmentToContainerMapper), lombok.val (lombok.val), ByteArrayOutputStream (java.io.ByteArrayOutputStream), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), Storage (io.pravega.segmentstore.storage.Storage), DebugStreamSegmentContainer (io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer), AtomicLong (java.util.concurrent.atomic.AtomicLong), Collection (java.util.Collection), AttributeUpdateCollection (io.pravega.segmentstore.contracts.AttributeUpdateCollection), DebugStreamSegmentContainerTests (io.pravega.segmentstore.server.containers.DebugStreamSegmentContainerTests), SegmentProperties (io.pravega.segmentstore.contracts.SegmentProperties)
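
The verification loop above depends on SegmentToContainerMapper to locate the debug container that owns each segment. A minimal sketch of that lookup in isolation; the helper name and parameters are illustrative stand-ins for the test's fields.

import io.pravega.segmentstore.contracts.SegmentProperties;
import io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer;
import io.pravega.shared.segment.SegmentToContainerMapper;

import java.time.Duration;
import java.util.Map;
import java.util.concurrent.TimeUnit;

// Illustrative helper: map a segment name to its owning container and read back the
// restored segment's metadata, mirroring the assertion loop in testSegmentRestoration().
private long getRestoredLength(Map<Integer, DebugStreamSegmentContainer> containers,
                               String segmentName, int containerCount, Duration timeout) throws Exception {
    SegmentToContainerMapper segToConMapper = new SegmentToContainerMapper(containerCount, true);
    int containerId = segToConMapper.getContainerId(segmentName);
    SegmentProperties props = containers.get(containerId)
            .getStreamSegmentInfo(segmentName, timeout)
            .get(timeout.toMillis(), TimeUnit.MILLISECONDS);
    return props.getLength();
}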

Example 3 with DebugStreamSegmentContainer

Use of io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer in project pravega by pravega.

From class DurableLogRecoveryCommand, method execute:

@Override
public void execute() throws Exception {
    @Cleanup Storage storage = this.storageFactory.createStorageAdapter();
    @Cleanup val zkClient = createZKClient();
    val bkConfig = getCommandArgs().getState().getConfigBuilder().include(BookKeeperConfig.builder().with(BookKeeperConfig.ZK_ADDRESS, getServiceConfig().getZkURL())).build().getConfig(BookKeeperConfig::builder);
    @Cleanup val dataLogFactory = new BookKeeperLogFactory(bkConfig, zkClient, executorService);
    output("Container Count = %d", this.containerCount);
    dataLogFactory.initialize();
    output("Started ZK Client at %s.", getServiceConfig().getZkURL());
    storage.initialize(CONTAINER_EPOCH);
    output("Loaded %s Storage.", getServiceConfig().getStorageImplementation());
    output("Starting recovery...");
    // Create a backup of the container metadata segments.
    Map<Integer, String> backUpMetadataSegments = ContainerRecoveryUtils.createBackUpMetadataSegments(storage, this.containerCount, executorService, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    @Cleanup Context context = createContext(executorService);
    // Create debug segment container instances using the new dataLog and the old storage.
    Map<Integer, DebugStreamSegmentContainer> debugStreamSegmentContainerMap = startDebugSegmentContainers(context, dataLogFactory);
    output("Containers started. Recovering all segments...");
    ContainerRecoveryUtils.recoverAllSegments(storage, debugStreamSegmentContainerMap, executorService, TIMEOUT);
    output("All segments recovered.");
    // Update core attributes from the backed-up metadata segments.
    output("Updating core attributes for segments registered.");
    ContainerRecoveryUtils.updateCoreAttributes(backUpMetadataSegments, debugStreamSegmentContainerMap, executorService, TIMEOUT);
    // Flush the new metadata segments to the storage.
    flushToStorage(debugStreamSegmentContainerMap);
    // Waits for metadata segments to be flushed to LTS and then stops the debug segment containers
    stopDebugSegmentContainers(debugStreamSegmentContainerMap);
    output("Segments have been recovered.");
    output("Recovery Done!");
}
Also used: lombok.val (lombok.val), Storage (io.pravega.segmentstore.storage.Storage), CacheStorage (io.pravega.segmentstore.storage.cache.CacheStorage), BookKeeperConfig (io.pravega.segmentstore.storage.impl.bookkeeper.BookKeeperConfig), DebugStreamSegmentContainer (io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer), Cleanup (lombok.Cleanup), BookKeeperLogFactory (io.pravega.segmentstore.storage.impl.bookkeeper.BookKeeperLogFactory)
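
flushToStorage and stopDebugSegmentContainers are called above but not included in this excerpt. A possible shape for the stop step, assuming Services.stopAsync exists as the counterpart of the Services.startAsync call used when the containers were started, and reusing the executorService and TIMEOUT fields of the command class; the real helper may differ.

// Sketch only: stop each debug segment container service, then release it
// (Example 2 closes its containers the same way after use).
private void stopDebugSegmentContainers(Map<Integer, DebugStreamSegmentContainer> containerMap) throws Exception {
    for (DebugStreamSegmentContainer container : containerMap.values()) {
        // Assumed API: Services.stopAsync mirrors the Services.startAsync call used above.
        Services.stopAsync(container, executorService).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        container.close();
    }
}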

Example 4 with DebugStreamSegmentContainer

Use of io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer in project pravega by pravega.

From class RestoreBackUpDataRecoveryTest, method startDebugSegmentContainers:

// Creates debug segment container instances, puts them in a map and returns it.
private Map<Integer, DebugStreamSegmentContainer> startDebugSegmentContainers(DebugStreamSegmentContainerTests.TestContext context, int containerCount, InMemoryDurableDataLogFactory dataLogFactory, StorageFactory storageFactory) throws Exception {
    // Start a debug segment container corresponding to the given container Id and put it in the Hashmap with the Id.
    Map<Integer, DebugStreamSegmentContainer> debugStreamSegmentContainerMap = new HashMap<>();
    OperationLogFactory localDurableLogFactory = new DurableLogFactory(NO_TRUNCATIONS_DURABLE_LOG_CONFIG, dataLogFactory, executorService());
    // Create a debug segment container instance for each container Id using the local durable log factory.
    for (int containerId = 0; containerId < containerCount; containerId++) {
        DebugStreamSegmentContainerTests.MetadataCleanupContainer debugStreamSegmentContainer = new DebugStreamSegmentContainerTests.MetadataCleanupContainer(containerId, CONTAINER_CONFIG, localDurableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, storageFactory, context.getDefaultExtensions(), executorService());
        Services.startAsync(debugStreamSegmentContainer, executorService()).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        debugStreamSegmentContainerMap.put(containerId, debugStreamSegmentContainer);
    }
    return debugStreamSegmentContainerMap;
}
Also used: AtomicInteger (java.util.concurrent.atomic.AtomicInteger), DurableLogFactory (io.pravega.segmentstore.server.logs.DurableLogFactory), DebugStreamSegmentContainer (io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer), HashMap (java.util.HashMap), DebugStreamSegmentContainerTests (io.pravega.segmentstore.server.containers.DebugStreamSegmentContainerTests), OperationLogFactory (io.pravega.segmentstore.server.OperationLogFactory)
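
In a back-up-and-restore run, this helper is only one step; Example 3 shows the calls that bracket it. A minimal sketch of that bracket with an illustrative helper name, again assuming the import path for ContainerRecoveryUtils.

import io.pravega.segmentstore.server.containers.ContainerRecoveryUtils;
import io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer;
import io.pravega.segmentstore.storage.Storage;

import java.time.Duration;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Illustrative helper: back up the container metadata segments, recover all segments from
// long-term storage into the debug containers, then restore core attributes from the backup.
private void backUpRecoverAndRestore(Storage storage, int containerCount,
                                     Map<Integer, DebugStreamSegmentContainer> containers,
                                     ScheduledExecutorService executor,
                                     Duration timeout) throws Exception {
    // Back up the existing container metadata segments before recovery rewrites them.
    Map<Integer, String> backUpMetadataSegments = ContainerRecoveryUtils
            .createBackUpMetadataSegments(storage, containerCount, executor, timeout)
            .get(timeout.toMillis(), TimeUnit.MILLISECONDS);
    // Re-register every segment found in storage with the debug container that owns it.
    ContainerRecoveryUtils.recoverAllSegments(storage, containers, executor, timeout);
    // Copy core attributes from the backed-up metadata segments onto the recovered segments.
    ContainerRecoveryUtils.updateCoreAttributes(backUpMetadataSegments, containers, executor, timeout);
}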

Aggregations

DebugStreamSegmentContainer (io.pravega.segmentstore.server.containers.DebugStreamSegmentContainer): 4
OperationLogFactory (io.pravega.segmentstore.server.OperationLogFactory): 3
DurableLogFactory (io.pravega.segmentstore.server.logs.DurableLogFactory): 3
HashMap (java.util.HashMap): 3
DebugStreamSegmentContainerTests (io.pravega.segmentstore.server.containers.DebugStreamSegmentContainerTests): 2
Storage (io.pravega.segmentstore.storage.Storage): 2
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 2
Cleanup (lombok.Cleanup): 2
lombok.val (lombok.val): 2
ByteBuf (io.netty.buffer.ByteBuf): 1
AttributeUpdateCollection (io.pravega.segmentstore.contracts.AttributeUpdateCollection): 1
SegmentProperties (io.pravega.segmentstore.contracts.SegmentProperties): 1
CacheStorage (io.pravega.segmentstore.storage.cache.CacheStorage): 1
BookKeeperConfig (io.pravega.segmentstore.storage.impl.bookkeeper.BookKeeperConfig): 1
BookKeeperLogFactory (io.pravega.segmentstore.storage.impl.bookkeeper.BookKeeperLogFactory): 1
SegmentToContainerMapper (io.pravega.shared.segment.SegmentToContainerMapper): 1
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 1
ArrayList (java.util.ArrayList): 1
Collection (java.util.Collection): 1
AtomicLong (java.util.concurrent.atomic.AtomicLong): 1