use of com.github.ambry.utils.CrcOutputStream in project ambry by linkedin.
the class IndexSegment method map.
/**
 * Memory maps the segment of index. Optionally, it also persists the bloom filter to disk.
 * @param persistBloom True, if the bloom filter needs to be persisted. False otherwise.
 * @throws IOException if the index file cannot be read or memory mapped.
 * @throws StoreException if the version recorded in the index file is unknown.
 */
void map(boolean persistBloom) throws IOException, StoreException {
  RandomAccessFile raf = new RandomAccessFile(indexFile, "r");
  rwLock.writeLock().lock();
  try {
    mmap = raf.getChannel().map(FileChannel.MapMode.READ_ONLY, 0, indexFile.length());
    mmap.position(0);
    version = mmap.getShort();
    StoreKey storeKey;
    int keySize;
    short resetKeyType;
    switch (version) {
      case PersistentIndex.VERSION_0:
        indexSizeExcludingEntries =
            VERSION_FIELD_LENGTH + KEY_OR_ENTRY_SIZE_FIELD_LENGTH + VALUE_SIZE_FIELD_LENGTH
                + LOG_END_OFFSET_FIELD_LENGTH + CRC_FIELD_LENGTH;
        keySize = mmap.getInt();
        valueSize = mmap.getInt();
        persistedEntrySize = keySize + valueSize;
        endOffset.set(new Offset(startOffset.getName(), mmap.getLong()));
        lastModifiedTimeSec.set(indexFile.lastModified() / 1000);
        firstKeyRelativeOffset = indexSizeExcludingEntries - CRC_FIELD_LENGTH;
        break;
      case PersistentIndex.VERSION_1:
        keySize = mmap.getInt();
        valueSize = mmap.getInt();
        persistedEntrySize = keySize + valueSize;
        endOffset.set(new Offset(startOffset.getName(), mmap.getLong()));
        lastModifiedTimeSec.set(mmap.getLong());
        storeKey = factory.getStoreKey(new DataInputStream(new ByteBufferInputStream(mmap)));
        resetKeyType = mmap.getShort();
        resetKey = new Pair<>(storeKey, PersistentIndex.IndexEntryType.values()[resetKeyType]);
        indexSizeExcludingEntries =
            VERSION_FIELD_LENGTH + KEY_OR_ENTRY_SIZE_FIELD_LENGTH + VALUE_SIZE_FIELD_LENGTH
                + LOG_END_OFFSET_FIELD_LENGTH + CRC_FIELD_LENGTH + LAST_MODIFIED_TIME_FIELD_LENGTH
                + resetKey.getFirst().sizeInBytes() + RESET_KEY_TYPE_FIELD_LENGTH;
        firstKeyRelativeOffset = indexSizeExcludingEntries - CRC_FIELD_LENGTH;
        break;
      case PersistentIndex.VERSION_2:
        persistedEntrySize = mmap.getInt();
        valueSize = mmap.getInt();
        endOffset.set(new Offset(startOffset.getName(), mmap.getLong()));
        lastModifiedTimeSec.set(mmap.getLong());
        storeKey = factory.getStoreKey(new DataInputStream(new ByteBufferInputStream(mmap)));
        resetKeyType = mmap.getShort();
        resetKey = new Pair<>(storeKey, PersistentIndex.IndexEntryType.values()[resetKeyType]);
        indexSizeExcludingEntries =
            VERSION_FIELD_LENGTH + KEY_OR_ENTRY_SIZE_FIELD_LENGTH + VALUE_SIZE_FIELD_LENGTH
                + LOG_END_OFFSET_FIELD_LENGTH + CRC_FIELD_LENGTH + LAST_MODIFIED_TIME_FIELD_LENGTH
                + resetKey.getFirst().sizeInBytes() + RESET_KEY_TYPE_FIELD_LENGTH;
        firstKeyRelativeOffset = indexSizeExcludingEntries - CRC_FIELD_LENGTH;
        break;
      default:
        throw new StoreException("IndexSegment : " + indexFile.getAbsolutePath() + " unknown version in index file",
            StoreErrorCodes.Index_Version_Error);
    }
    mapped.set(true);
    index = null;
  } finally {
    raf.close();
    rwLock.writeLock().unlock();
  }
  // we only persist the bloom filter once during its entire lifetime
  if (persistBloom) {
    // close the streams via try-with-resources to avoid leaking the file descriptor
    try (CrcOutputStream crcStream = new CrcOutputStream(new FileOutputStream(bloomFile));
        DataOutputStream stream = new DataOutputStream(crcStream)) {
      FilterFactory.serialize(bloomFilter, stream);
      long crcValue = crcStream.getValue();
      stream.writeLong(crcValue);
    }
  }
}
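Every snippet on this page follows the same framing convention: the payload is written through a CrcOutputStream, then the running CRC is appended as a trailing long that the CRC itself does not cover. The round trip below is a minimal sketch, not ambry code; the class name CrcFramingSketch and both method names are made up for illustration.

import com.github.ambry.utils.CrcInputStream;
import com.github.ambry.utils.CrcOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

public class CrcFramingSketch {
  // writes the payload, then appends the CRC of the payload as the final 8 bytes
  static void write(File file, byte[] payload) throws IOException {
    try (CrcOutputStream crcOut = new CrcOutputStream(new FileOutputStream(file));
        DataOutputStream out = new DataOutputStream(crcOut)) {
      out.write(payload);
      long crc = crcOut.getValue(); // CRC of everything written so far
      out.writeLong(crc); // the trailing CRC field is not included in the checksum
    }
  }

  // reads the payload back and fails if the recomputed CRC disagrees with the stored one
  static byte[] readAndVerify(File file, int payloadLength) throws IOException {
    try (CrcInputStream crcIn = new CrcInputStream(new FileInputStream(file));
        DataInputStream in = new DataInputStream(crcIn)) {
      byte[] payload = new byte[payloadLength];
      in.readFully(payload);
      long computed = crcIn.getValue(); // CRC of the bytes consumed so far
      long stored = in.readLong(); // trailing CRC appended by write()
      if (computed != stored) {
        throw new IOException("CRC mismatch: computed=" + computed + " stored=" + stored);
      }
      return payload;
    }
  }
}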
use of com.github.ambry.utils.CrcOutputStream in project ambry by linkedin.
the class MockIndexSegment method loadBloomFileFailureTest.
/**
 * Tests failure to load the bloom filter file and verifies that the bloom filter is rebuilt.
 * @throws Exception
 */
@Test
public void loadBloomFileFailureTest() throws Exception {
  assumeTrue(formatVersion > PersistentIndex.VERSION_1);
  LogSegmentName logSegmentName1 = LogSegmentName.fromPositionAndGeneration(0, 0);
  int indexValueSize = PersistentIndex.CURRENT_VERSION >= PersistentIndex.VERSION_3
      ? IndexValue.INDEX_VALUE_SIZE_IN_BYTES_V3_V4 : IndexValue.INDEX_VALUE_SIZE_IN_BYTES_V1_V2;
  IndexSegment indexSegment1 =
      new IndexSegment(tempDir.getAbsolutePath(), new Offset(logSegmentName1, 0), STORE_KEY_FACTORY,
          KEY_SIZE + indexValueSize, indexValueSize, config, metrics, time);
  Random random = new Random();
  short accountId1 = getRandomShort(random);
  short containerId1 = getRandomShort(random);
  MockId id1 = new MockId(TestUtils.getRandomString(CUSTOM_ID_SIZE), accountId1, containerId1);
  IndexValue value1 = IndexValueTest.getIndexValue(1000, new Offset(logSegmentName1, 0), Utils.Infinite_Time,
      time.milliseconds(), accountId1, containerId1, (short) 0, formatVersion);
  indexSegment1.addEntry(new IndexEntry(id1, value1), new Offset(logSegmentName1, 1000));
  indexSegment1.writeIndexSegmentToFile(new Offset(logSegmentName1, 1000));
  indexSegment1.seal();
  File bloomFile =
      new File(tempDir, generateIndexSegmentFilenamePrefix(new Offset(logSegmentName1, 0)) + BLOOM_FILE_NAME_SUFFIX);
  assertTrue("The bloom file should exist", bloomFile.exists());
  // read the bloom filter back and confirm the persisted CRC is currently valid
  CrcInputStream crcBloom = new CrcInputStream(new FileInputStream(bloomFile));
  IFilter bloomFilter;
  try (DataInputStream stream = new DataInputStream(crcBloom)) {
    bloomFilter = FilterFactory.deserialize(stream, config.storeBloomFilterMaximumPageCount);
    long crcValue = crcBloom.getValue();
    assertEquals("Crc mismatch", crcValue, stream.readLong());
  }
  // induce a crc mismatch by re-serializing the filter with a deliberately wrong trailing crc
  CrcOutputStream crcStream = new CrcOutputStream(new FileOutputStream(bloomFile));
  DataOutputStream stream = new DataOutputStream(crcStream);
  FilterFactory.serialize(bloomFilter, stream);
  long crcValue = crcStream.getValue() + 1;
  stream.writeLong(crcValue);
  stream.close();
  // load from file, which should trigger a bloom filter rebuild as the crc value doesn't match
  new IndexSegment(indexSegment1.getFile(), true, STORE_KEY_FACTORY, config, metrics, null, time);
  assertEquals("Bloom filter rebuild count mismatch", 1, metrics.bloomRebuildOnLoadFailureCount.getCount());
}
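The rebuild is triggered by the kind of check sketched below. bloomFileIsValid is an illustrative name, not ambry's actual internal API; it only assumes the deserialize call and CRC framing visible in the test above, and needs the same imports as the earlier sketch.

// Illustrative helper (not ambry's real internals): deserialize the filter and
// compare the recomputed CRC with the stored trailing CRC; any mismatch or read
// error signals that the bloom filter must be rebuilt from the index entries.
static boolean bloomFileIsValid(File bloomFile, int maxPageCount) {
  try (CrcInputStream crcIn = new CrcInputStream(new FileInputStream(bloomFile));
      DataInputStream in = new DataInputStream(crcIn)) {
    FilterFactory.deserialize(in, maxPageCount); // consume the serialized filter
    long computed = crcIn.getValue(); // CRC over the filter bytes just read
    long stored = in.readLong(); // trailing CRC appended at write time
    return computed == stored; // false here is exactly what the test provokes
  } catch (IOException e) {
    return false; // an unreadable file also forces a rebuild
  }
}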
use of com.github.ambry.utils.CrcOutputStream in project ambry by linkedin.
the class CompactionLogTest method oldVersionsReadTest.
/**
 * Tests the reading of versions older than the current version.
 * @throws IOException
 */
@Test
public void oldVersionsReadTest() throws IOException {
  String storeName = "store";
  long startTimeMs = Utils.getRandomLong(TestUtils.RANDOM, Long.MAX_VALUE);
  long referenceTimeMs = Utils.getRandomLong(TestUtils.RANDOM, Long.MAX_VALUE);
  CompactionDetails details = getCompactionDetails(referenceTimeMs);
  for (int i = 0; i < CompactionLog.CURRENT_VERSION; i++) {
    File file = new File(tempDir, storeName + CompactionLog.COMPACTION_LOG_SUFFIX);
    switch (i) {
      case CompactionLog.VERSION_0:
        try (FileOutputStream fileOutputStream = new FileOutputStream(file)) {
          CrcOutputStream crcOutputStream = new CrcOutputStream(fileOutputStream);
          DataOutputStream stream = new DataOutputStream(crcOutputStream);
          stream.writeShort(i);
          stream.writeLong(startTimeMs);
          stream.writeInt(0);
          stream.writeInt(1);
          stream.write(new CompactionLog.CycleLog(details).toBytes());
          stream.writeLong(crcOutputStream.getValue());
          fileOutputStream.getChannel().force(true);
        }
        CompactionLog cLog = new CompactionLog(tempDirStr, storeName, STORE_KEY_FACTORY, time, config);
        verifyEquality(details, cLog.getCompactionDetails());
        assertEquals("Current Idx not as expected", 0, cLog.getCurrentIdx());
        break;
      default:
        throw new IllegalStateException("No serialization implementation for version: " + i);
    }
  }
}
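For reference, the bytes written in the VERSION_0 case above lay out as follows. The field names are inferred from the writer calls and the getCurrentIdx assertion, not taken from CompactionLog's source.

// VERSION_0 compaction log layout, as produced by the DataOutputStream calls above:
//   short  version        (2 bytes)  - CompactionLog.VERSION_0
//   long   startTimeMs    (8 bytes)
//   int    currentIdx     (4 bytes)  - 0 in this test
//   int    cycleLogCount  (4 bytes)  - 1 in this test
//   byte[] cycleLog       (variable) - new CompactionLog.CycleLog(details).toBytes()
//   long   crc            (8 bytes)  - CRC of every preceding byte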
use of com.github.ambry.utils.CrcOutputStream in project ambry by linkedin.
the class StoreDescriptorTest method testStoreDescriptor.
/**
 * Tests {@link StoreDescriptor}: instantiation, deserialization from bytes, and failure handling
 * for a wrong version and a wrong CRC.
 * @throws IOException
 */
@Test
public void testStoreDescriptor() throws IOException {
  StoreConfig config = new StoreConfig(new VerifiableProperties(new Properties()));
  File tempDir = StoreTestUtils.createTempDirectory("storeDir");
  File storeDescriptorFile = new File(tempDir.getAbsolutePath(), StoreDescriptor.STORE_DESCRIPTOR_FILENAME);
  StoreDescriptor storeDescriptor = new StoreDescriptor(tempDir.getAbsolutePath(), config);
  // the store descriptor file should have been created; a second instantiation must read it back
  StoreDescriptor newStoreDescriptor = new StoreDescriptor(tempDir.getAbsolutePath(), config);
  assertEquals("IncarnationId mismatch ", storeDescriptor.getIncarnationId(), newStoreDescriptor.getIncarnationId());
  assertTrue("Store descriptor file could not be deleted", storeDescriptorFile.delete());
  // create a StoreDescriptor file with a new incarnationId
  UUID incarnationIdUUID = UUID.randomUUID();
  byte[] toBytes = getBytesForStoreDescriptor(StoreDescriptor.VERSION_0, incarnationIdUUID);
  storeDescriptorFile = new File(tempDir.getAbsolutePath(), StoreDescriptor.STORE_DESCRIPTOR_FILENAME);
  assertTrue("Store descriptor file could not be created", storeDescriptorFile.createNewFile());
  createStoreFile(storeDescriptorFile, toBytes);
  storeDescriptor = new StoreDescriptor(tempDir.getAbsolutePath(), config);
  assertEquals("IncarnationId mismatch ", incarnationIdUUID, storeDescriptor.getIncarnationId());
  // check for wrong version
  assertTrue("Store descriptor file could not be deleted", storeDescriptorFile.delete());
  toBytes = getBytesForStoreDescriptor((short) 1, incarnationIdUUID);
  assertTrue("Store descriptor file could not be created", storeDescriptorFile.createNewFile());
  createStoreFile(storeDescriptorFile, toBytes);
  try {
    new StoreDescriptor(tempDir.getAbsolutePath(), config);
    fail("Wrong version should have thrown IllegalArgumentException ");
  } catch (IllegalArgumentException e) {
    // expected
  }
  // check for wrong CRC: write valid descriptor bytes but append an off-by-one CRC
  assertTrue("Store descriptor file could not be deleted", storeDescriptorFile.delete());
  assertTrue("Store descriptor file could not be created", storeDescriptorFile.createNewFile());
  toBytes = getBytesForStoreDescriptor(StoreDescriptor.VERSION_0, incarnationIdUUID);
  CrcOutputStream crcOutputStream = new CrcOutputStream(new FileOutputStream(storeDescriptorFile));
  DataOutputStream dataOutputStream = new DataOutputStream(crcOutputStream);
  dataOutputStream.write(toBytes);
  dataOutputStream.writeLong(crcOutputStream.getValue() + 1);
  dataOutputStream.close();
  try {
    new StoreDescriptor(tempDir.getAbsolutePath(), config);
    fail("Wrong CRC should have thrown IllegalStateException ");
  } catch (IllegalStateException e) {
    // expected
  }
}
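The wrong-CRC setup above generalizes into a reusable test helper. The sketch below is illustrative (writeWithBadCrc is not an ambry utility) but uses only the calls already shown in the test.

// Illustrative test helper capturing the corruption technique used above: write the
// payload normally, then append a CRC that is off by one, so any reader validating
// the trailing CRC must reject the file.
static void writeWithBadCrc(File file, byte[] payload) throws IOException {
  try (CrcOutputStream crcOut = new CrcOutputStream(new FileOutputStream(file));
      DataOutputStream out = new DataOutputStream(crcOut)) {
    out.write(payload);
    out.writeLong(crcOut.getValue() + 1); // deliberately wrong trailing CRC
  }
}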
use of com.github.ambry.utils.CrcOutputStream in project ambry by linkedin.
the class DiskTokenPersistorTest method serializeVersion0Tokens.
/**
 * Serializes tokens in the VERSION_0 format.
 * @param tokenInfoList {@link RemoteReplicaInfo.ReplicaTokenInfo} list to serialize.
 * @param outputStream {@link FileOutputStream} to persist the tokens to.
 * @throws IOException if an exception happens during serialization.
 */
private void serializeVersion0Tokens(List<RemoteReplicaInfo.ReplicaTokenInfo> tokenInfoList,
    FileOutputStream outputStream) throws IOException {
  CrcOutputStream crcOutputStream = new CrcOutputStream(outputStream);
  DataOutputStream writer = new DataOutputStream(crcOutputStream);
  try {
    // write the version (VERSION_0)
    writer.writeShort(0);
    for (RemoteReplicaInfo.ReplicaTokenInfo replicaTokenInfo : tokenInfoList) {
      writer.write(replicaTokenInfo.getPartitionId().getBytes());
      // write hostname
      writer.writeInt(replicaTokenInfo.getHostname().getBytes().length);
      writer.write(replicaTokenInfo.getHostname().getBytes());
      // write replica path
      writer.writeInt(replicaTokenInfo.getReplicaPath().getBytes().length);
      writer.write(replicaTokenInfo.getReplicaPath().getBytes());
      // write port
      writer.writeInt(replicaTokenInfo.getPort());
      // write total bytes read from local store
      writer.writeLong(replicaTokenInfo.getTotalBytesReadFromLocalStore());
      // write replica token
      writer.write(replicaTokenInfo.getReplicaToken().toBytes());
    }
    long crcValue = crcOutputStream.getValue();
    writer.writeLong(crcValue);
  } finally {
    if (outputStream != null) {
      // force the written bytes to disk
      outputStream.getChannel().force(true);
    }
  }
}
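The resulting VERSION_0 file layout, read straight off the writer calls above (decoding it back requires a ClusterMap for the partition id and a FindTokenFactory for the token, which is why no reader is sketched here):

// VERSION_0 replica token file layout:
//   short  version (value 0)
//   repeated once per ReplicaTokenInfo:
//     byte[] partitionId                            - PartitionId.getBytes()
//     int    hostname length, byte[] hostname
//     int    replicaPath length, byte[] replicaPath
//     int    port
//     long   totalBytesReadFromLocalStore
//     byte[] replicaToken                           - FindToken.toBytes()
//   long   crc                                      - CRC of all preceding bytes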