Example usage of com.github.ambry.store.StoreFindToken in the LinkedIn Ambry project,
taken from the method iterationWithoutReloadTest of the class CompactionLogTest.
/**
 * Tests the use of {@link CompactionLog} when it is used without closing b/w operations and compaction cycles.
 * Walks one {@link CompactionLog} through all five cycles in a single session, verifying the phase transitions
 * (PREPARE -> COPY -> COMMIT -> CLEANUP) and the bookkeeping (current index, safe token, delete-check offset)
 * at each step, and that the log reports DONE once every cycle is complete.
 * @throws IOException
 */
@Test
public void iterationWithoutReloadTest() throws IOException {
String storeName = "store";
// Five cycles' worth of details; "combined" always reflects the cycles not yet processed.
List<CompactionDetails> detailsList = getCompactionDetailsList(5);
CompactionDetails combined = combineListOfDetails(detailsList);
assertFalse("Compaction should not be in progress", CompactionLog.isCompactionInProgress(tempDirStr, storeName));
// Creating the log marks a compaction as in progress on disk.
CompactionLog cLog = new CompactionLog(tempDirStr, storeName, time, combined, config);
assertTrue("Compaction should be in progress", CompactionLog.isCompactionInProgress(tempDirStr, storeName));
int currentIdx = 0;
Iterator<CompactionDetails> detailsIterator = detailsList.iterator();
CompactionDetails currDetails = detailsIterator.next();
while (currDetails != null) {
// Remove the cycle being processed so "combined" can be recomputed from the remaining cycles below.
detailsIterator.remove();
assertEquals("CurrentIdx not as expected", currentIdx, cLog.getCurrentIdx());
verifyEquality(combined, cLog.getCompactionDetails());
assertEquals("Should be in the PREPARE phase", CompactionLog.Phase.PREPARE, cLog.getCompactionPhase());
cLog.markCopyStart();
assertEquals("Should be in the COPY phase", CompactionLog.Phase.COPY, cLog.getCompactionPhase());
// Delete-check offset and safe token must be returned exactly as they were set during COPY.
Offset offset = new Offset(LogSegmentName.generateFirstSegmentName(true), Utils.getRandomLong(TestUtils.RANDOM, Long.MAX_VALUE));
cLog.setStartOffsetOfLastIndexSegmentForDeleteCheck(offset);
assertEquals("Offset that was set was not the one returned", offset, cLog.getStartOffsetOfLastIndexSegmentForDeleteCheck());
StoreFindToken safeToken = new StoreFindToken(new MockId("dummy"), new Offset(LogSegmentName.generateFirstSegmentName(true), 0), new UUID(1, 1), new UUID(1, 1), null, null, UNINITIALIZED_RESET_KEY_VERSION);
cLog.setSafeToken(safeToken);
assertEquals("Returned token not the same as the one that was set", safeToken, cLog.getSafeToken());
CompactionDetails nextDetails = detailsIterator.hasNext() ? detailsIterator.next() : null;
if (nextDetails != null) {
// Splitting at the next cycle's first segment should leave exactly the current cycle's details in the log.
cLog.splitCurrentCycle(nextDetails.getLogSegmentsUnderCompaction().get(0));
verifyEquality(currDetails, cLog.getCompactionDetails());
}
cLog.markCommitStart();
assertEquals("Should be in the SWITCH phase", CompactionLog.Phase.COMMIT, cLog.getCompactionPhase());
cLog.markCleanupStart();
assertEquals("Should be in the CLEANUP phase", CompactionLog.Phase.CLEANUP, cLog.getCompactionPhase());
cLog.markCycleComplete();
currentIdx++;
currDetails = nextDetails;
if (nextDetails != null) {
// Recompute the expected combined view from the cycles still remaining in detailsList.
combined = combineListOfDetails(detailsList);
}
}
// getCurrentIdx() reports -1 once all cycles are complete (asserted here).
assertEquals("CurrentIdx not as expected", -1, cLog.getCurrentIdx());
assertEquals("Should be in the DONE phase", CompactionLog.Phase.DONE, cLog.getCompactionPhase());
cLog.close();
assertFalse("Compaction should not be in progress", CompactionLog.isCompactionInProgress(tempDirStr, storeName));
}
Example usage of com.github.ambry.store.StoreFindToken in the LinkedIn Ambry project,
taken from the method equalityTest of the class StoreFindTokenTest.
/**
 * Tests the correctness of {@link StoreFindToken#equals(Object)}.
 * Verifies that tokens of the same kind built from identical state compare equal (including when only the
 * session ID or incarnation ID differs), and that tokens differing in kind, offset, key, inclusiveness or
 * reset-key version compare unequal.
 */
@Test
public void equalityTest() {
  UUID sessionId = UUID.randomUUID();
  UUID incarnationId = UUID.randomUUID();
  LogSegmentName segmentName = LogSegmentName.generateFirstSegmentName(isLogSegmented);
  Offset offset = new Offset(segmentName, 0);
  Offset otherOffset = new Offset(segmentName, 1);
  MockId key = new MockId(TestUtils.getRandomString(10));
  MockId otherKey = new MockId(TestUtils.getRandomString(10));
  MockId resetKey = new MockId(TestUtils.getRandomString(10));
  PersistentIndex.IndexEntryType[] entryTypes = PersistentIndex.IndexEntryType.values();
  PersistentIndex.IndexEntryType resetKeyType = entryTypes[random.nextInt(entryTypes.length)];
  short resetKeyVersion = (short) random.nextInt(5);
  // One pair of each kind of token, built from identical state.
  StoreFindToken initToken = new StoreFindToken();
  StoreFindToken otherInitToken = new StoreFindToken();
  StoreFindToken indexToken =
      new StoreFindToken(key, offset, sessionId, incarnationId, resetKey, resetKeyType, resetKeyVersion);
  StoreFindToken otherIndexToken =
      new StoreFindToken(key, offset, sessionId, incarnationId, resetKey, resetKeyType, resetKeyVersion);
  StoreFindToken journalToken =
      new StoreFindToken(offset, sessionId, incarnationId, false, resetKey, resetKeyType, resetKeyVersion);
  StoreFindToken otherJournalToken =
      new StoreFindToken(offset, sessionId, incarnationId, false, resetKey, resetKeyType, resetKeyVersion);
  StoreFindToken inclusiveJournalToken =
      new StoreFindToken(offset, sessionId, incarnationId, true, resetKey, resetKeyType, resetKeyVersion);
  StoreFindToken otherInclusiveJournalToken =
      new StoreFindToken(offset, sessionId, incarnationId, true, resetKey, resetKeyType, resetKeyVersion);
  // equality
  compareTokens(initToken, initToken);
  compareTokens(initToken, otherInitToken);
  compareTokens(indexToken, indexToken);
  compareTokens(indexToken, otherIndexToken);
  compareTokens(journalToken, journalToken);
  compareTokens(journalToken, otherJournalToken);
  compareTokens(inclusiveJournalToken, otherInclusiveJournalToken);
  UUID newSessionId = getRandomUUID(sessionId);
  UUID newIncarnationId = getRandomUUID(incarnationId);
  // equality even if session IDs are different
  compareTokens(indexToken,
      new StoreFindToken(key, offset, newSessionId, incarnationId, resetKey, resetKeyType, resetKeyVersion));
  compareTokens(journalToken,
      new StoreFindToken(offset, newSessionId, incarnationId, false, resetKey, resetKeyType, resetKeyVersion));
  // equality even if incarnation IDs are different
  compareTokens(indexToken,
      new StoreFindToken(key, offset, sessionId, newIncarnationId, resetKey, resetKeyType, resetKeyVersion));
  compareTokens(journalToken,
      new StoreFindToken(offset, sessionId, newIncarnationId, false, resetKey, resetKeyType, resetKeyVersion));
  // inequality if some fields differ
  List<Pair<StoreFindToken, StoreFindToken>> unequalPairs = new ArrayList<>();
  unequalPairs.add(new Pair<>(initToken, indexToken));
  unequalPairs.add(new Pair<>(initToken, journalToken));
  unequalPairs.add(new Pair<>(initToken, inclusiveJournalToken));
  unequalPairs.add(new Pair<>(indexToken, journalToken));
  unequalPairs.add(new Pair<>(indexToken, inclusiveJournalToken));
  unequalPairs.add(new Pair<>(indexToken,
      new StoreFindToken(key, otherOffset, sessionId, incarnationId, null, null, UNINITIALIZED_RESET_KEY_VERSION)));
  unequalPairs.add(new Pair<>(indexToken,
      new StoreFindToken(otherKey, offset, sessionId, incarnationId, null, null, UNINITIALIZED_RESET_KEY_VERSION)));
  unequalPairs.add(new Pair<>(journalToken,
      new StoreFindToken(otherOffset, sessionId, incarnationId, false, null, null, UNINITIALIZED_RESET_KEY_VERSION)));
  unequalPairs.add(new Pair<>(inclusiveJournalToken, journalToken));
  unequalPairs.add(new Pair<>(indexToken,
      new StoreFindToken(key, offset, sessionId, incarnationId, resetKey, resetKeyType, UNINITIALIZED_RESET_KEY_VERSION)));
  for (Pair<StoreFindToken, StoreFindToken> pair : unequalPairs) {
    StoreFindToken first = pair.getFirst();
    StoreFindToken second = pair.getSecond();
    assertFalse("StoreFindTokens [" + first + "] and [" + second + "] should not be equal", first.equals(second));
  }
}
Example usage of com.github.ambry.store.StoreFindToken in the LinkedIn Ambry project,
taken from the method findEntriesSinceTest of the class IndexTest.
/**
 * Tests {@link PersistentIndex#findEntriesSince(FindToken, long)} for various cases
 * 1. All cases that result in getting an index based token
 * 2. All cases that result in getting a journal based token
 * 3. Getting entries one by one
 * 4. Getting entries using an index based token for an offset in the journal
 * 5. Error case - trying to findEntriesSince() using an index based token that contains the last index segment
 * 6. Using findEntriesSince() in an empty index
 * 7. Token that has the log end offset
 * NOTE(review): the sub-tests below mutate and depend on shared {@code state}; their invocation order matters.
 * @throws StoreException
 */
@Test
public void findEntriesSinceTest() throws StoreException {
// add some more entries so that the journal gets entries across segments and doesn't start at the beginning
// of an index segment.
state.addPutEntries(7, CuratedLogIndexState.PUT_RECORD_SIZE, Utils.Infinite_Time);
state.addDeleteEntry(state.getIdToDeleteFromIndexSegment(state.referenceIndex.lastKey(), false));
// token with log end offset should not return anything
StoreFindToken token = new StoreFindToken(state.log.getEndOffset(), state.sessionId, state.incarnationId, false, null, null, UNINITIALIZED_RESET_KEY_VERSION);
token.setBytesRead(state.index.getLogUsedCapacity());
// expects an empty result set and the same token back
doFindEntriesSinceTest(token, Long.MAX_VALUE, Collections.emptySet(), token);
findEntriesSinceToIndexBasedTest();
findEntriesSinceToJournalBasedTest();
findEntriesSinceOneByOneTest();
findEntriesSinceIndexBasedTokenForOffsetInJournalTest();
// error case - can never have provided an index based token that is contains the offset of the last segment
token = new StoreFindToken(state.referenceIndex.lastEntry().getValue().firstKey(), state.referenceIndex.lastKey(), state.sessionId, state.incarnationId, null, null, UNINITIALIZED_RESET_KEY_VERSION);
doFindEntriesSinceFailureTest(token, StoreErrorCodes.Unknown_Error);
findEntriesSinceInEmptyIndexTest(false);
findEntriesSinceTtlUpdateCornerCaseTest();
findEntriesSinceTtlUpdateAndPutInJournalTest();
}
Example usage of com.github.ambry.store.StoreFindToken in the LinkedIn Ambry project,
taken from the helper method doSerDeTest of the class StoreFindTokenTest.
// serDeTest() helpers
/**
 * Serializes {@code token} in all given formats and ensures that the {@link StoreFindToken} obtained from the
 * deserialization matches the original. Each version is verified twice: once from an externally serialized
 * stream and once after a round trip through {@link StoreFindToken#toBytes()}, so that the token's own
 * serializer is checked against the expected version. Both passes apply identical assertions (the original
 * code omitted the VERSION_3 reset-key checks on the round-trip pass).
 * @param token the {@link StoreFindToken} that has to be serialized/deserialized.
 * @param versions {@link List} of valid versions that the token to be tested for
 * @throws IOException
 */
private void doSerDeTest(StoreFindToken token, Short... versions) throws IOException {
  for (Short version : versions) {
    // First pass: deserialize a stream produced by the test's version-specific serializer.
    DataInputStream stream = getSerializedStream(token, version);
    StoreFindToken deSerToken = StoreFindToken.fromBytes(stream, STORE_KEY_FACTORY);
    assertEquals("Stream should have ended ", 0, stream.available());
    verifyDeserializedToken(token, deSerToken, version.shortValue());
    // Second pass: use StoreFindToken's actual serialize method to verify that the token is serialized in the
    // expected version, then re-check every field.
    stream = new DataInputStream(new ByteBufferInputStream(ByteBuffer.wrap(deSerToken.toBytes())));
    deSerToken = StoreFindToken.fromBytes(stream, STORE_KEY_FACTORY);
    assertEquals("Stream should have ended ", 0, stream.available());
    verifyDeserializedToken(token, deSerToken, version.shortValue());
  }
}

/**
 * Verifies that {@code deSerToken} carries the expected {@code version} and matches {@code original} on all
 * fields applicable to that version.
 * @param original the token that was serialized.
 * @param deSerToken the token obtained from deserialization.
 * @param version the version {@code deSerToken} is expected to report.
 */
private void verifyDeserializedToken(StoreFindToken original, StoreFindToken deSerToken, short version) {
  assertEquals("Version mismatch for token ", version, deSerToken.getVersion());
  compareTokens(original, deSerToken);
  assertEquals("SessionId does not match", original.getSessionId(), deSerToken.getSessionId());
  if (version >= VERSION_2) {
    assertEquals("IncarnationId mismatch ", original.getIncarnationId(), deSerToken.getIncarnationId());
  }
  if (version == VERSION_3) {
    assertEquals("Reset key mismatch ", original.getResetKey(), deSerToken.getResetKey());
    assertEquals("Reset key type mismatch", original.getResetKeyType(), deSerToken.getResetKeyType());
    assertEquals("Reset key life version mismatch", original.getResetKeyVersion(), deSerToken.getResetKeyVersion());
  }
}
Aggregations