Search in sources :

Example 51 with Pair

Use of com.github.ambry.utils.Pair in the ambry project by LinkedIn.

From the class IndexTest, method findEntriesSinceOneByOneTest.

/**
 * Uses {@link PersistentIndex#findEntriesSince(FindToken, long)} to get entries one by one.
 * <p>
 * Phase 1 walks the reference index one key at a time (expecting index-based tokens) until an
 * index segment at or beyond the journal start offset is reached. Phase 2 then walks the
 * remaining records through the journal one log entry at a time (expecting journal-based
 * tokens), verifying the returned token and its bytes-read value at every step.
 * @throws StoreException
 */
private void findEntriesSinceOneByOneTest() throws StoreException {
    Offset journalStartOffset = state.index.journal.getFirstOffset();
    // Start from the very beginning: an uninitialized token.
    StoreFindToken startToken = new StoreFindToken();
    // First index segment start offset that is served from the journal rather than the index.
    Offset stoppedAt = null;
    for (Map.Entry<Offset, TreeMap<MockId, TreeSet<IndexValue>>> indexEntry : state.referenceIndex.entrySet()) {
        Offset indexSegmentStartOffset = indexEntry.getKey();
        // Once the segment start is at or past the journal start, entries are returned from the
        // journal instead of the index segments; switch to the journal-based walk below.
        if (indexSegmentStartOffset.compareTo(journalStartOffset) >= 0) {
            stoppedAt = indexSegmentStartOffset;
            break;
        }
        IndexSegment segmentOfToken = state.index.getIndexSegments().get(indexSegmentStartOffset);
        for (Map.Entry<MockId, TreeSet<IndexValue>> indexSegmentEntry : indexEntry.getValue().entrySet()) {
            MockId id = indexSegmentEntry.getKey();
            // Fetching exactly the total size of this key's values should advance the token to this key.
            StoreFindToken expectedEndToken = new StoreFindToken(id, indexSegmentStartOffset, state.sessionId, state.incarnationId, segmentOfToken.getResetKey(), segmentOfToken.getResetKeyType(), segmentOfToken.getResetKeyLifeVersion());
            expectedEndToken.setBytesRead(state.index.getAbsolutePositionInLogForOffset(indexSegmentStartOffset));
            doFindEntriesSinceTest(startToken, getSizeOfAllValues(indexSegmentEntry.getValue()), Collections.singleton(id), expectedEndToken);
            startToken = expectedEndToken;
        }
    }
    // Phase 2: walk the journal one log entry at a time from the offset where phase 1 stopped.
    // NOTE(review): assumes the loop above always breaks (stoppedAt != null) — TreeMap.floorEntry(null)
    // throws NPE; verify the journal always overlaps the reference index in this test setup.
    Map.Entry<Offset, Pair<MockId, CuratedLogIndexState.LogEntry>> logEntry = state.logOrder.floorEntry(stoppedAt);
    while (logEntry != null) {
        Offset startOffset = logEntry.getKey();
        MockId id = logEntry.getValue().getFirst();
        // size returned is the size of the most recent record
        long size = state.getExpectedValue(id, EnumSet.allOf(PersistentIndex.IndexEntryType.class), null).getSize();
        IndexSegment segmentOfToken = state.index.getIndexSegments().floorEntry(startOffset).getValue();
        // Journal-based token pointing at this record's offset (non-inclusive).
        StoreFindToken expectedEndToken = new StoreFindToken(startOffset, state.sessionId, state.incarnationId, false, segmentOfToken.getResetKey(), segmentOfToken.getResetKeyType(), segmentOfToken.getResetKeyLifeVersion());
        // Bytes read is measured up to the END of this message in the log.
        Offset endOffset = state.log.getFileSpanForMessage(startOffset, size).getEndOffset();
        expectedEndToken.setBytesRead(state.index.getAbsolutePositionInLogForOffset(endOffset));
        doFindEntriesSinceTest(startToken, size, Collections.singleton(id), expectedEndToken);
        startToken = expectedEndToken;
        logEntry = state.logOrder.higherEntry(logEntry.getKey());
    }
}
Also used : StoreFindToken(com.github.ambry.store.StoreFindToken) CuratedLogIndexState(com.github.ambry.store.CuratedLogIndexState) TreeMap(java.util.TreeMap) TreeSet(java.util.TreeSet) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ConcurrentSkipListMap(java.util.concurrent.ConcurrentSkipListMap) TreeMap(java.util.TreeMap) Pair(com.github.ambry.utils.Pair)

Example 52 with Pair

Use of com.github.ambry.utils.Pair in the ambry project by LinkedIn.

From the class CuratedLogIndexState, method forceAddPutEntry.

/**
 * Forcibly adds an existing Put IndexEntry to the index to create duplicate PUTs in the index.
 * <p>
 * Appends the raw bytes to the log, adds an index entry pointing at the new log location, and
 * updates the test's reference bookkeeping structures to match.
 * @param id The {@link MockId} of this duplicate put.
 * @param value The {@link IndexValue} of this duplicate put.
 * @param bytes The content of this duplicate put.
 * @throws StoreException
 */
void forceAddPutEntry(MockId id, IndexValue value, byte[] bytes) throws StoreException {
    if (!value.isPut()) {
        throw new IllegalArgumentException("Value has to be a put: " + value);
    }
    // Append the raw record bytes at the current end of the log.
    Offset endOffsetOfPrevMsg = index.getCurrentEndOffset();
    ByteBuffer buffer = ByteBuffer.wrap(bytes);
    ReadableByteChannel channel = Channels.newChannel(new ByteBufferInputStream(buffer));
    log.appendFrom(channel, buffer.capacity());
    FileSpan fileSpan = log.getFileSpanForMessage(endOffsetOfPrevMsg, bytes.length);
    Offset indexSegmentStartOffset = generateReferenceIndexSegmentStartOffset(fileSpan.getStartOffset());
    if (!referenceIndex.containsKey(indexSegmentStartOffset)) {
        // rollover will occur: advance time so the new index segment gets a distinct last-modified time
        advanceTime(DELAY_BETWEEN_LAST_MODIFIED_TIMES_MS);
        referenceIndex.put(indexSegmentStartOffset, new TreeMap<>());
    }
    // Copy the value and re-point it at the new location in the log; the real index gets this copy.
    IndexValue newValue = new IndexValue(value);
    newValue.setNewOffset(fileSpan.getStartOffset());
    IndexEntry entry = new IndexEntry(id, newValue);
    // NOTE(review): the reference structures below record the ORIGINAL value (with its old offset)
    // while the real index receives newValue — presumably intentional for duplicate-PUT
    // bookkeeping; confirm against how the reference index is compared to the real one.
    logOrder.put(fileSpan.getStartOffset(), new Pair<>(id, new LogEntry(bytes, value)));
    allKeys.computeIfAbsent(id, k -> new TreeSet<>()).add(value);
    referenceIndex.get(indexSegmentStartOffset).computeIfAbsent(id, k -> new TreeSet<>()).add(value);
    // Classify the key as expired or live based on the put's expiry relative to the current time.
    long expiresAtMs = value.getExpiresAtMs();
    if (expiresAtMs != Utils.Infinite_Time && expiresAtMs < time.milliseconds()) {
        expiredKeys.add(id);
    } else {
        liveKeys.add(id);
    }
    index.addToIndex(Collections.singletonList(entry), fileSpan);
    lastModifiedTimesInSecs.put(indexSegmentStartOffset, value.getOperationTimeInMs() / Time.MsPerSec);
}
Also used : FilenameFilter(java.io.FilenameFilter) Arrays(java.util.Arrays) ListIterator(java.util.ListIterator) StoreStats(com.github.ambry.store.StoreStats) HashMap(java.util.HashMap) ByteBuffer(java.nio.ByteBuffer) TreeSet(java.util.TreeSet) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) TestUtils(com.github.ambry.utils.TestUtils) Map(java.util.Map) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) NoSuchElementException(java.util.NoSuchElementException) Time(com.github.ambry.utils.Time) EnumSet(java.util.EnumSet) StoreConfig(com.github.ambry.config.StoreConfig) ReadableByteChannel(java.nio.channels.ReadableByteChannel) MetricRegistry(com.codahale.metrics.MetricRegistry) Properties(java.util.Properties) Pair(com.github.ambry.utils.Pair) Iterator(java.util.Iterator) VerifiableProperties(com.github.ambry.config.VerifiableProperties) Channels(java.nio.channels.Channels) StoreTestUtils(com.github.ambry.store.StoreTestUtils) Set(java.util.Set) Utils(com.github.ambry.utils.Utils) IOException(java.io.IOException) NavigableSet(java.util.NavigableSet) UUID(java.util.UUID) NavigableMap(java.util.NavigableMap) File(java.io.File) TimeUnit(java.util.concurrent.TimeUnit) AtomicLong(java.util.concurrent.atomic.AtomicLong) List(java.util.List) MockTime(com.github.ambry.utils.MockTime) TreeMap(java.util.TreeMap) ByteBufferInputStream(com.github.ambry.utils.ByteBufferInputStream) Assert(org.junit.Assert) Collections(java.util.Collections) ReadableByteChannel(java.nio.channels.ReadableByteChannel) ByteBufferInputStream(com.github.ambry.utils.ByteBufferInputStream) ByteBuffer(java.nio.ByteBuffer) TreeSet(java.util.TreeSet)

Example 53 with Pair

Use of com.github.ambry.utils.Pair in the ambry project by LinkedIn.

From the class IndexTest, method findDeletedEntriesSinceOneByOneTest.

/**
 * Uses {@link PersistentIndex#findDeletedEntriesSince(FindToken, long, long)} to get entries one by one.
 * <p>
 * Mirrors findEntriesSinceOneByOneTest(): phase 1 walks the reference index one key at a time
 * (index-based tokens) until the journal start offset is reached; phase 2 walks the remaining
 * records through the journal one log entry at a time (journal-based tokens). At each step only
 * keys whose latest state is a delete with no subsequent undelete are expected in the results.
 * @throws StoreException
 */
private void findDeletedEntriesSinceOneByOneTest() throws StoreException {
    Offset journalStartOffset = state.index.journal.getFirstOffset();
    // Start from the very beginning: an uninitialized token.
    StoreFindToken startToken = new StoreFindToken();
    // First index segment start offset that is served from the journal rather than the index.
    Offset stoppedAt = null;
    for (Map.Entry<Offset, TreeMap<MockId, TreeSet<IndexValue>>> indexEntry : state.referenceIndex.entrySet()) {
        Offset indexSegmentStartOffset = indexEntry.getKey();
        // Once the segment start is at or past the journal start, entries come from the journal;
        // switch to the journal-based walk below.
        if (indexSegmentStartOffset.compareTo(journalStartOffset) >= 0) {
            stoppedAt = indexSegmentStartOffset;
            break;
        }
        IndexSegment segmentOfToken = state.index.getIndexSegments().get(indexSegmentStartOffset);
        for (Map.Entry<MockId, TreeSet<IndexValue>> indexSegmentEntry : indexEntry.getValue().entrySet()) {
            MockId id = indexSegmentEntry.getKey();
            // The key counts as deleted only if its latest value is a delete AND there is no undelete.
            boolean isDeleted = indexSegmentEntry.getValue().last().isDelete() && state.getExpectedValue(indexSegmentEntry.getKey(), EnumSet.of(PersistentIndex.IndexEntryType.UNDELETE), null) == null;
            StoreFindToken expectedEndToken = new StoreFindToken(id, indexSegmentStartOffset, state.sessionId, state.incarnationId, segmentOfToken.getResetKey(), segmentOfToken.getResetKeyType(), segmentOfToken.getResetKeyLifeVersion());
            long size = getSizeOfAllValues(indexSegmentEntry.getValue());
            doFindDeletedEntriesSinceTest(startToken, size, isDeleted ? Collections.singleton(id) : Collections.emptySet(), expectedEndToken);
            startToken = expectedEndToken;
        }
    }
    // Phase 2: walk the journal one log entry at a time from the offset where phase 1 stopped.
    // NOTE(review): assumes the loop above always breaks (stoppedAt != null) — TreeMap.floorEntry(null)
    // throws NPE; verify the journal always overlaps the reference index in this test setup.
    Map.Entry<Offset, Pair<MockId, CuratedLogIndexState.LogEntry>> logEntry = state.logOrder.floorEntry(stoppedAt);
    while (logEntry != null) {
        Offset startOffset = logEntry.getKey();
        MockId id = logEntry.getValue().getFirst();
        IndexValue value = state.getExpectedValue(id, false);
        boolean isDeleted = value.isDelete() && state.getExpectedValue(id, EnumSet.of(PersistentIndex.IndexEntryType.UNDELETE), null) == null;
        // size returned is the size of the delete if the key has been deleted.
        long size = value.getSize();
        IndexSegment segmentOfToken = state.index.getIndexSegments().floorEntry(startOffset).getValue();
        // Journal-based token pointing at this record's offset (non-inclusive).
        StoreFindToken expectedEndToken = new StoreFindToken(startOffset, state.sessionId, state.incarnationId, false, segmentOfToken.getResetKey(), segmentOfToken.getResetKeyType(), segmentOfToken.getResetKeyLifeVersion());
        doFindDeletedEntriesSinceTest(startToken, size, isDeleted ? Collections.singleton(id) : Collections.emptySet(), expectedEndToken);
        startToken = expectedEndToken;
        logEntry = state.logOrder.higherEntry(logEntry.getKey());
    }
}
Also used : StoreFindToken(com.github.ambry.store.StoreFindToken) CuratedLogIndexState(com.github.ambry.store.CuratedLogIndexState) TreeMap(java.util.TreeMap) TreeSet(java.util.TreeSet) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ConcurrentSkipListMap(java.util.concurrent.ConcurrentSkipListMap) TreeMap(java.util.TreeMap) Pair(com.github.ambry.utils.Pair)

Example 54 with Pair

Use of com.github.ambry.utils.Pair in the ambry project by LinkedIn.

From the class StorageManagerTest, method createFilesAndDirsAtPath.

// unrecognizedDirsOnDiskTest() helpers
/**
 * Creates {@code fileCount} files and {@code dirCount} directories at {@code dir}.
 * @param dir the directory to create the files and dirs at
 * @param fileCount the number of files to be created
 * @param dirCount the number of directories to be created
 * @return the list of files,dirs created as a pair.
 * @throws IOException
 */
private Pair<List<File>, List<File>> createFilesAndDirsAtPath(File dir, int fileCount, int dirCount) throws IOException {
    List<File> createdFiles = new ArrayList<>();
    for (int i = 0; i < fileCount; i++) {
        File createdFile = new File(dir, "created-file-" + i);
        if (!createdFile.exists()) {
            assertTrue("Could not create " + createdFile, createdFile.createNewFile());
        }
        createdFile.deleteOnExit();
        createdFiles.add(createdFile);
    }
    List<File> createdDirs = new ArrayList<>();
    for (int i = 0; i < dirCount; i++) {
        File createdDir = new File(dir, "created-dir-" + i);
        assertTrue("Could not create " + createdDir + " now", createdDir.mkdir());
        createdDir.deleteOnExit();
        createdDirs.add(createdDir);
    }
    return new Pair<>(createdFiles, createdDirs);
}
Also used : ArrayList(java.util.ArrayList) File(java.io.File) Pair(com.github.ambry.utils.Pair)

Example 55 with Pair

Use of com.github.ambry.utils.Pair in the ambry project by LinkedIn.

From the class StoreFindTokenTest, method equalityTest.

/**
 * Tests the correctness of {@link StoreFindToken#equals(Object)}.
 * <p>
 * Verifies reflexive equality, equality between identically-constructed tokens, equality when only
 * the session ID or incarnation ID differs, and inequality when the token type, key, offset,
 * inclusiveness, or reset-key version differs.
 */
@Test
public void equalityTest() {
    UUID sessionId = UUID.randomUUID();
    UUID incarnationId = UUID.randomUUID();
    LogSegmentName logSegmentName = LogSegmentName.generateFirstSegmentName(isLogSegmented);
    Offset offset = new Offset(logSegmentName, 0);
    Offset otherOffset = new Offset(logSegmentName, 1);
    MockId key = new MockId(TestUtils.getRandomString(10));
    MockId otherKey = new MockId(TestUtils.getRandomString(10));
    MockId resetKey = new MockId(TestUtils.getRandomString(10));
    PersistentIndex.IndexEntryType resetKeyType = PersistentIndex.IndexEntryType.values()[random.nextInt(PersistentIndex.IndexEntryType.values().length)];
    short resetKeyVersion = (short) random.nextInt(5);
    // One token of each type (uninitialized, index-based, journal-based, inclusive journal-based)
    // plus an identically-constructed twin for each.
    StoreFindToken initToken = new StoreFindToken();
    StoreFindToken otherInitToken = new StoreFindToken();
    StoreFindToken indexToken = new StoreFindToken(key, offset, sessionId, incarnationId, resetKey, resetKeyType, resetKeyVersion);
    StoreFindToken otherIndexToken = new StoreFindToken(key, offset, sessionId, incarnationId, resetKey, resetKeyType, resetKeyVersion);
    StoreFindToken journalToken = new StoreFindToken(offset, sessionId, incarnationId, false, resetKey, resetKeyType, resetKeyVersion);
    StoreFindToken otherJournalToken = new StoreFindToken(offset, sessionId, incarnationId, false, resetKey, resetKeyType, resetKeyVersion);
    StoreFindToken inclusiveJournalToken = new StoreFindToken(offset, sessionId, incarnationId, true, resetKey, resetKeyType, resetKeyVersion);
    StoreFindToken otherInclusiveJournalToken = new StoreFindToken(offset, sessionId, incarnationId, true, resetKey, resetKeyType, resetKeyVersion);
    // equality: reflexive, and between identically-constructed tokens
    compareTokens(initToken, initToken);
    compareTokens(initToken, otherInitToken);
    compareTokens(indexToken, indexToken);
    compareTokens(indexToken, otherIndexToken);
    compareTokens(journalToken, journalToken);
    compareTokens(journalToken, otherJournalToken);
    compareTokens(inclusiveJournalToken, otherInclusiveJournalToken);
    UUID newSessionId = getRandomUUID(sessionId);
    UUID newIncarnationId = getRandomUUID(incarnationId);
    // equality even if session IDs are different
    compareTokens(indexToken, new StoreFindToken(key, offset, newSessionId, incarnationId, resetKey, resetKeyType, resetKeyVersion));
    compareTokens(journalToken, new StoreFindToken(offset, newSessionId, incarnationId, false, resetKey, resetKeyType, resetKeyVersion));
    // equality even if incarnation IDs are different
    compareTokens(indexToken, new StoreFindToken(key, offset, sessionId, newIncarnationId, resetKey, resetKeyType, resetKeyVersion));
    compareTokens(journalToken, new StoreFindToken(offset, sessionId, newIncarnationId, false, resetKey, resetKeyType, resetKeyVersion));
    // inequality if some fields differ: different token types, different key/offset, different
    // inclusiveness, or different reset-key version
    List<Pair<StoreFindToken, StoreFindToken>> unequalPairs = new ArrayList<>();
    unequalPairs.add(new Pair<>(initToken, indexToken));
    unequalPairs.add(new Pair<>(initToken, journalToken));
    unequalPairs.add(new Pair<>(initToken, inclusiveJournalToken));
    unequalPairs.add(new Pair<>(indexToken, journalToken));
    unequalPairs.add(new Pair<>(indexToken, inclusiveJournalToken));
    unequalPairs.add(new Pair<>(indexToken, new StoreFindToken(key, otherOffset, sessionId, incarnationId, null, null, UNINITIALIZED_RESET_KEY_VERSION)));
    unequalPairs.add(new Pair<>(indexToken, new StoreFindToken(otherKey, offset, sessionId, incarnationId, null, null, UNINITIALIZED_RESET_KEY_VERSION)));
    unequalPairs.add(new Pair<>(journalToken, new StoreFindToken(otherOffset, sessionId, incarnationId, false, null, null, UNINITIALIZED_RESET_KEY_VERSION)));
    unequalPairs.add(new Pair<>(inclusiveJournalToken, journalToken));
    unequalPairs.add(new Pair<>(indexToken, new StoreFindToken(key, offset, sessionId, incarnationId, resetKey, resetKeyType, UNINITIALIZED_RESET_KEY_VERSION)));
    for (Pair<StoreFindToken, StoreFindToken> unequalPair : unequalPairs) {
        StoreFindToken first = unequalPair.getFirst();
        StoreFindToken second = unequalPair.getSecond();
        assertFalse("StoreFindTokens [" + first + "] and [" + second + "] should not be equal", unequalPair.getFirst().equals(unequalPair.getSecond()));
    }
}
Also used : ArrayList(java.util.ArrayList) StoreFindToken(com.github.ambry.store.StoreFindToken) UUID(java.util.UUID) Pair(com.github.ambry.utils.Pair) Test(org.junit.Test)

Aggregations

Pair (com.github.ambry.utils.Pair)64 ArrayList (java.util.ArrayList)29 HashMap (java.util.HashMap)28 Map (java.util.Map)28 Test (org.junit.Test)20 IOException (java.io.IOException)15 MetricRegistry (com.codahale.metrics.MetricRegistry)14 List (java.util.List)14 ByteBuffer (java.nio.ByteBuffer)13 Collections (java.util.Collections)13 File (java.io.File)12 Assert (org.junit.Assert)12 VerifiableProperties (com.github.ambry.config.VerifiableProperties)11 Utils (com.github.ambry.utils.Utils)10 HashSet (java.util.HashSet)10 Properties (java.util.Properties)10 Container (com.github.ambry.account.Container)9 TestUtils (com.github.ambry.utils.TestUtils)9 Arrays (java.util.Arrays)9 Set (java.util.Set)9