use of com.github.ambry.store.StoreFindToken in project ambry by linkedin.
the class StoreFindTokenTest method constructionErrorCasesTest.
/**
* Tests {@link StoreFindToken} for construction error cases.
*/
@Test
public void constructionErrorCasesTest() {
  UUID sessionId = UUID.randomUUID();
  UUID incarnationId = UUID.randomUUID();
  LogSegmentName logSegmentName = LogSegmentName.generateFirstSegmentName(isLogSegmented);
  Offset offset = new Offset(logSegmentName, 0);
  MockId key = new MockId(TestUtils.getRandomString(10));
  MockId resetKey = new MockId(TestUtils.getRandomString(10));
  PersistentIndex.IndexEntryType resetKeyType =
      PersistentIndex.IndexEntryType.values()[(new Random()).nextInt(PersistentIndex.IndexEntryType.values().length)];
  short resetKeyVersion = (short) random.nextInt(5);
  // no offset
  testConstructionFailure(key, sessionId, incarnationId, null);
  // no session id
  testConstructionFailure(key, null, incarnationId, offset);
  // no incarnation Id
  testConstructionFailure(key, sessionId, null, offset);
  // no key in IndexBased
  try {
    new StoreFindToken(null, offset, sessionId, null, null, null, UNINITIALIZED_RESET_KEY_VERSION);
    fail("Construction of StoreFindToken should have failed");
  } catch (IllegalArgumentException e) {
    // expected. Nothing to do.
  }
  // version 3 token without reset key or reset key type
  for (FindTokenType type : EnumSet.of(FindTokenType.JournalBased, FindTokenType.IndexBased)) {
    for (Pair<MockId, PersistentIndex.IndexEntryType> pair : Arrays.asList(
        new Pair<MockId, PersistentIndex.IndexEntryType>(resetKey, null),
        new Pair<MockId, PersistentIndex.IndexEntryType>(null, resetKeyType))) {
      try {
        new StoreFindToken(type, offset, key, sessionId, incarnationId, type == FindTokenType.JournalBased, VERSION_3,
            pair.getFirst(), pair.getSecond(), resetKeyVersion);
fail("Construction of StoreFindToken should have failed because rest key or its type is null.");
      } catch (IllegalArgumentException e) {
        // expected
      }
    }
  }
}
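The testConstructionFailure helper is not part of this snippet. A minimal sketch of what it presumably does, inferred only from how it is called above (the real helper lives in StoreFindTokenTest): it attempts both the index-based and the journal-based constructor with the given arguments and expects an IllegalArgumentException from each.
// Hypothetical sketch; inferred from the call sites above, not the actual Ambry implementation.
private void testConstructionFailure(StoreKey key, UUID sessionId, UUID incarnationId, Offset offset) {
  // index based
  try {
    new StoreFindToken(key, offset, sessionId, incarnationId, null, null, UNINITIALIZED_RESET_KEY_VERSION);
    fail("Construction of StoreFindToken should have failed");
  } catch (IllegalArgumentException e) {
    // expected. Nothing to do.
  }
  // journal based
  try {
    new StoreFindToken(offset, sessionId, incarnationId, false, null, null, UNINITIALIZED_RESET_KEY_VERSION);
    fail("Construction of StoreFindToken should have failed");
  } catch (IllegalArgumentException e) {
    // expected. Nothing to do.
  }
}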
use of com.github.ambry.store.StoreFindToken in project ambry by linkedin.
the class StoreFindTokenTest method serDeTest.
/**
* Tests {@link StoreFindToken} serialization/deserialization.
* @throws IOException
*/
@Test
public void serDeTest() throws IOException {
  UUID sessionId = UUID.randomUUID();
  UUID incarnationId = UUID.randomUUID();
  LogSegmentName logSegmentName = LogSegmentName.generateFirstSegmentName(isLogSegmented);
  Offset offset = new Offset(logSegmentName, 0);
  MockId key = new MockId(TestUtils.getRandomString(10));
  MockId resetKey = new MockId(TestUtils.getRandomString(10));
  PersistentIndex.IndexEntryType resetKeyType =
      PersistentIndex.IndexEntryType.values()[(new Random()).nextInt(PersistentIndex.IndexEntryType.values().length)];
  short resetKeyVersion = (short) random.nextInt(5);
  if (!isLogSegmented) {
    // UnInitialized
    doSerDeTest(new StoreFindToken(), VERSION_0, VERSION_1, VERSION_2, VERSION_3);
    // Journal based token
    doSerDeTest(new StoreFindToken(offset, sessionId, incarnationId, false, null, null, UNINITIALIZED_RESET_KEY_VERSION),
        VERSION_0, VERSION_1, VERSION_2, VERSION_3);
    // Journal based token with resetKey and resetKeyType specified (VERSION_3)
    doSerDeTest(new StoreFindToken(offset, sessionId, incarnationId, false, resetKey, resetKeyType, resetKeyVersion),
        VERSION_3);
    // inclusiveness is present only in {VERSION_2, VERSION_3}
    doSerDeTest(new StoreFindToken(offset, sessionId, incarnationId, true, null, null, UNINITIALIZED_RESET_KEY_VERSION),
        VERSION_2, VERSION_3);
    doSerDeTest(new StoreFindToken(offset, sessionId, incarnationId, true, resetKey, resetKeyType, resetKeyVersion),
        VERSION_3);
    // Index based
    doSerDeTest(new StoreFindToken(key, offset, sessionId, incarnationId, null, null, UNINITIALIZED_RESET_KEY_VERSION),
        VERSION_0, VERSION_1, VERSION_2, VERSION_3);
    doSerDeTest(new StoreFindToken(key, offset, sessionId, incarnationId, resetKey, resetKeyType, resetKeyVersion),
        VERSION_3);
  } else {
    // UnInitialized
    doSerDeTest(new StoreFindToken(), VERSION_1, VERSION_2, VERSION_3);
    // Journal based token
    doSerDeTest(new StoreFindToken(offset, sessionId, incarnationId, false, null, null, UNINITIALIZED_RESET_KEY_VERSION),
        VERSION_1, VERSION_2, VERSION_3);
    doSerDeTest(new StoreFindToken(offset, sessionId, incarnationId, false, resetKey, resetKeyType, resetKeyVersion),
        VERSION_3);
    // inclusiveness is present only in {VERSION_2, VERSION_3}
    doSerDeTest(new StoreFindToken(offset, sessionId, incarnationId, true, null, null, UNINITIALIZED_RESET_KEY_VERSION),
        VERSION_2, VERSION_3);
    doSerDeTest(new StoreFindToken(offset, sessionId, incarnationId, true, resetKey, resetKeyType, resetKeyVersion),
        VERSION_3);
    // Index based
    doSerDeTest(new StoreFindToken(key, offset, sessionId, incarnationId, null, null, UNINITIALIZED_RESET_KEY_VERSION),
        VERSION_1, VERSION_2, VERSION_3);
    doSerDeTest(new StoreFindToken(key, offset, sessionId, incarnationId, resetKey, resetKeyType, resetKeyVersion),
        VERSION_3);
  }
}
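The doSerDeTest helper is defined elsewhere in StoreFindTokenTest; broadly, for each of the listed versions it serializes the token, reads it back, and checks that the deserialized token matches the original. A hedged sketch of that round-trip, assuming StoreFindToken.toBytes()/fromBytes(DataInputStream, StoreKeyFactory) and a MockIdFactory for the keys; the real helper additionally exercises every version passed to it.
// Hypothetical round-trip check; names and details are illustrative, not the actual doSerDeTest() implementation.
private void checkSerDeRoundTrip(StoreFindToken token) throws IOException {
  DataInputStream stream = new DataInputStream(new ByteArrayInputStream(token.toBytes()));
  StoreFindToken deserializedToken = StoreFindToken.fromBytes(stream, new MockIdFactory());
  assertEquals("Deserialized token should match the original", token, deserializedToken);
}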
use of com.github.ambry.store.StoreFindToken in project ambry by linkedin.
the class IndexTest method rebuildTokenBasedOnResetKeyTest.
/**
* Tests cases where find token is rebuilt based on reset key.
* @throws StoreException
*/
@Test
public void rebuildTokenBasedOnResetKeyTest() throws StoreException {
  assumeTrue(isLogSegmented);
  IndexSegment firstIndexSegment = stateForTokenTest.index.getIndexSegments().firstEntry().getValue();
  IndexSegment secondIndexSegment =
      stateForTokenTest.index.getIndexSegments().higherEntry(firstIndexSegment.getStartOffset()).getValue();
  StoreKey firstKey = firstIndexSegment.iterator().next().getKey();
  StoreKey keyFromFirstSegment = firstIndexSegment.listIterator(DEFAULT_MAX_IN_MEM_ELEMENTS - 2).next().getKey();
  // 1. generate an invalid index-based token whose reset key can be found in the current index
  Offset invalidOffset =
      new Offset(firstIndexSegment.getLogSegmentName(), firstIndexSegment.getStartOffset().getOffset() - 1);
  StoreFindToken startToken =
      new StoreFindToken(keyFromFirstSegment, invalidOffset, stateForTokenTest.sessionId, stateForTokenTest.incarnationId,
          firstIndexSegment.getResetKey(), firstIndexSegment.getResetKeyType(), firstIndexSegment.getResetKeyLifeVersion());
  // the invalid token should be revalidated by looking up the reset key in the index
  FindInfo findInfo = stateForTokenTest.index.findEntriesSince(startToken, 1);
  StoreFindToken token = (StoreFindToken) findInfo.getFindToken();
  // the key in the token should be the first key in the first index segment, as we only get one entry from the new
  // start point, which is the first key in the first index segment (inclusive)
  StoreFindToken expectedToken =
      new StoreFindToken(firstKey, firstIndexSegment.getStartOffset(), stateForTokenTest.sessionId,
          stateForTokenTest.incarnationId, firstIndexSegment.getResetKey(), firstIndexSegment.getResetKeyType(),
          firstIndexSegment.getResetKeyLifeVersion());
  compareTokens(expectedToken, token);
  // 2. generate an invalid journal-based token with a reset key pointing to the 2nd index segment (this mocks the case
  // where the reset key fell out of the journal and was copied to the 2nd index segment by compaction)
  IndexSegment lastIndexSegment = stateForTokenTest.index.getIndexSegments().lastEntry().getValue();
  invalidOffset = new Offset(lastIndexSegment.getLogSegmentName().getNextGenerationName(),
      lastIndexSegment.getStartOffset().getOffset() + 1);
  startToken = new StoreFindToken(invalidOffset, stateForTokenTest.sessionId, stateForTokenTest.incarnationId, false,
      secondIndexSegment.getResetKey(), secondIndexSegment.getResetKeyType(), secondIndexSegment.getResetKeyLifeVersion());
  findInfo = stateForTokenTest.index.findEntriesSince(startToken, 1);
  token = (StoreFindToken) findInfo.getFindToken();
  // expected token should point to the first key in the 2nd index segment (index-based)
  StoreKey firstKeyFromSecondSegment = secondIndexSegment.iterator().next().getKey();
  expectedToken =
      new StoreFindToken(firstKeyFromSecondSegment, secondIndexSegment.getStartOffset(), stateForTokenTest.sessionId,
          stateForTokenTest.incarnationId, secondIndexSegment.getResetKey(), secondIndexSegment.getResetKeyType(),
          secondIndexSegment.getResetKeyLifeVersion());
  compareTokens(expectedToken, token);
  // 3. generate an invalid token whose reset key is not found in the current index
  StoreKey keyNotFound = stateForTokenTest.getUniqueId();
  startToken = new StoreFindToken(invalidOffset, stateForTokenTest.sessionId, stateForTokenTest.incarnationId, false,
      keyNotFound, PersistentIndex.IndexEntryType.PUT, (short) 0);
  findInfo = stateForTokenTest.index.findEntriesSince(startToken, 1);
  token = (StoreFindToken) findInfo.getFindToken();
  // token should be reset to the very beginning, which is the first index segment
  expectedToken =
      new StoreFindToken(firstKey, firstIndexSegment.getStartOffset(), stateForTokenTest.sessionId,
          stateForTokenTest.incarnationId, firstIndexSegment.getResetKey(), firstIndexSegment.getResetKeyType(),
          firstIndexSegment.getResetKeyLifeVersion());
  compareTokens(expectedToken, token);
}
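compareTokens is a helper defined elsewhere in IndexTest; it presumably asserts that the expected and actual tokens agree field by field. A minimal sketch under that assumption, using the obvious StoreFindToken getters (illustrative only, not the actual implementation):
// Hypothetical sketch; the real compareTokens() in IndexTest may check additional or differently named fields.
private void compareTokens(StoreFindToken expectedToken, StoreFindToken actualToken) {
  assertEquals("Token type mismatch", expectedToken.getType(), actualToken.getType());
  assertEquals("Store key mismatch", expectedToken.getStoreKey(), actualToken.getStoreKey());
  assertEquals("Offset mismatch", expectedToken.getOffset(), actualToken.getOffset());
  assertEquals("Reset key mismatch", expectedToken.getResetKey(), actualToken.getResetKey());
  assertEquals("Reset key type mismatch", expectedToken.getResetKeyType(), actualToken.getResetKeyType());
  assertEquals("Reset key version mismatch", expectedToken.getResetKeyVersion(), actualToken.getResetKeyVersion());
}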
use of com.github.ambry.store.StoreFindToken in project ambry by linkedin.
the class IndexTest method findDeletedEntriesSinceTest.
/**
* Tests {@link PersistentIndex#findDeletedEntriesSince(FindToken, long, long)} for various cases
* 1. All cases that result in getting an index based token
* 2. All cases that result in getting a journal based token
* 3. Getting entries one by one
* 4. Getting entries using an index based token for an offset in the journal
* 5. Using findDeletedEntriesSince() in an empty index
* 6. Token that has the log end offset
* @throws StoreException
*/
@Test
public void findDeletedEntriesSinceTest() throws StoreException {
  // add some more entries so that the journal gets entries across segments and doesn't start at the beginning
  // of an index segment.
  state.addPutEntries(7, CuratedLogIndexState.PUT_RECORD_SIZE, Utils.Infinite_Time);
  MockId idToDelete = state.getIdToDeleteFromIndexSegment(state.referenceIndex.lastKey(), false);
  state.addDeleteEntry(idToDelete);
  // token with log end offset should not return anything
  StoreFindToken token = new StoreFindToken(state.log.getEndOffset(), state.sessionId, state.incarnationId, false,
      null, null, UNINITIALIZED_RESET_KEY_VERSION);
  doFindDeletedEntriesSinceTest(token, Long.MAX_VALUE, Collections.emptySet(), token);
  findDeletedEntriesSinceToIndexBasedTest();
  findDeletedEntriesSinceToJournalBasedTest();
  findDeletedEntriesSinceOneByOneTest();
  findDeletedEntriesSinceIndexBasedTokenForOffsetInJournalTest();
  findEntriesSinceInEmptyIndexTest(true);
}
use of com.github.ambry.store.StoreFindToken in project ambry by linkedin.
the class IndexTest method findEntriesSinceIncarnationIdTest.
/**
* Tests behaviour of {@link PersistentIndex#findEntriesSince(FindToken, long)} relating to incarnationId
* @throws StoreException
*/
@Test
public void findEntriesSinceIncarnationIdTest() throws StoreException {
Offset lastRecordOffset = state.index.journal.getLastOffset();
state.appendToLog(2 * CuratedLogIndexState.PUT_RECORD_SIZE);
// will be recovered
FileSpan firstRecordFileSpan = state.log.getFileSpanForMessage(state.index.getCurrentEndOffset(), CuratedLogIndexState.PUT_RECORD_SIZE);
// will not be recovered
FileSpan secondRecordFileSpan = state.log.getFileSpanForMessage(firstRecordFileSpan.getEndOffset(), CuratedLogIndexState.PUT_RECORD_SIZE);
UUID oldSessionId = state.sessionId;
UUID oldIncarnationId = state.incarnationId;
final MockId newId = state.getUniqueId();
short accountId = Utils.getRandomShort(TestUtils.RANDOM);
short containerId = Utils.getRandomShort(TestUtils.RANDOM);
long operationTimeMs = state.time.milliseconds();
// add to allKeys() so that doFindEntriesSinceTest() works correctly.
IndexValue putValue = new IndexValue(CuratedLogIndexState.PUT_RECORD_SIZE, firstRecordFileSpan.getStartOffset(), Utils.Infinite_Time, operationTimeMs, accountId, containerId);
state.allKeys.computeIfAbsent(newId, k -> new TreeSet<>()).add(putValue);
state.recovery = (read, startOffset, endOffset, factory) -> Collections.singletonList(new MessageInfo(newId, CuratedLogIndexState.PUT_RECORD_SIZE, accountId, containerId, operationTimeMs));
// change in incarnationId
state.incarnationId = UUID.randomUUID();
state.reloadIndex(true, true);
long bytesRead = state.index.getAbsolutePositionInLogForOffset(firstRecordFileSpan.getEndOffset());
// create a token that will be past the index end offset on startup after recovery with old incarnationId
StoreFindToken startToken = new StoreFindToken(secondRecordFileSpan.getEndOffset(), oldSessionId, oldIncarnationId, false, null, null, UNINITIALIZED_RESET_KEY_VERSION);
// token should get reset internally, all keys should be returned and the returned token should be pointing to
// start offset of firstRecordFileSpan.
IndexSegment segmentOfToken = state.index.getIndexSegments().floorEntry(firstRecordFileSpan.getStartOffset()).getValue();
StoreFindToken expectedEndToken = new StoreFindToken(firstRecordFileSpan.getStartOffset(), state.sessionId, state.incarnationId, false, segmentOfToken.getResetKey(), segmentOfToken.getResetKeyType(), segmentOfToken.getResetKeyLifeVersion());
expectedEndToken.setBytesRead(bytesRead);
doFindEntriesSinceTest(startToken, Long.MAX_VALUE, state.allKeys.keySet(), expectedEndToken);
// create a token that is not past the index end offset on startup after recovery with old incarnationId.
// token should get reset internally, all keys should be returned and the returned token should be be pointing to
// start offset of firstRecordFileSpan.
startToken = new StoreFindToken(lastRecordOffset, oldSessionId, oldIncarnationId, false, null, null, UNINITIALIZED_RESET_KEY_VERSION);
expectedEndToken = new StoreFindToken(firstRecordFileSpan.getStartOffset(), state.sessionId, state.incarnationId, false, segmentOfToken.getResetKey(), segmentOfToken.getResetKeyType(), segmentOfToken.getResetKeyLifeVersion());
expectedEndToken.setBytesRead(bytesRead);
doFindEntriesSinceTest(startToken, Long.MAX_VALUE, state.allKeys.keySet(), expectedEndToken);
}
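doFindEntriesSinceTest is another IndexTest helper not shown in this snippet. Based on the call sites above, it runs findEntriesSince with the given start token and size limit, then verifies the returned entries and the end token. A hedged sketch of that core check (names are illustrative; the real helper likely performs additional validation, for example on entry sizes and bytes read):
// Hypothetical sketch of the verification a doFindEntriesSinceTest-style helper performs; illustrative only.
private void checkFindEntriesSince(StoreFindToken startToken, long maxTotalSizeOfEntries, Set<MockId> expectedKeys,
    StoreFindToken expectedEndToken) throws StoreException {
  FindInfo findInfo = state.index.findEntriesSince(startToken, maxTotalSizeOfEntries);
  Set<StoreKey> returnedKeys = new HashSet<>();
  for (MessageInfo messageInfo : findInfo.getMessageEntries()) {
    returnedKeys.add(messageInfo.getStoreKey());
  }
  assertEquals("Returned keys should match the expected keys", expectedKeys, returnedKeys);
  compareTokens(expectedEndToken, (StoreFindToken) findInfo.getFindToken());
}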