Example usage of org.apache.cassandra.io.util.RandomAccessReader in the Apache Cassandra project: class MetadataSerializerTest, method testOldReadsNew.
/**
 * Serializes the same metadata under two minor format versions and verifies that
 * deserializing both files as the older version yields identical components.
 */
public void testOldReadsNew(String oldV, String newV) throws IOException {
    Map<MetadataType, MetadataComponent> originalMetadata = constructMetadata();
    MetadataSerializer serializer = new MetadataSerializer();
    // Write the identical metadata in the newer and the older minor format.
    File newerStatsFile = serialize(originalMetadata, serializer, BigFormat.instance.getVersion(newV));
    File olderStatsFile = serialize(originalMetadata, serializer, BigFormat.instance.getVersion(oldV));
    // Read both files back as the earlier version; results must match each other.
    SSTableFormat.Type formatType = SSTableFormat.Type.current();
    Descriptor descriptor = new Descriptor(formatType.info.getVersion(oldV), newerStatsFile.getParentFile(), "", "", 0, formatType);
    try (RandomAccessReader newerIn = RandomAccessReader.open(newerStatsFile);
         RandomAccessReader olderIn = RandomAccessReader.open(olderStatsFile)) {
        Map<MetadataType, MetadataComponent> fromNewer = serializer.deserialize(descriptor, newerIn, EnumSet.allOf(MetadataType.class));
        Map<MetadataType, MetadataComponent> fromOlder = serializer.deserialize(descriptor, olderIn, EnumSet.allOf(MetadataType.class));
        for (MetadataType type : MetadataType.values()) {
            assertEquals(fromOlder.get(type), fromNewer.get(type));
            if (!originalMetadata.get(type).equals(fromNewer.get(type))) {
                // Currently only STATS can be different. Change if no longer the case
                assertEquals(MetadataType.STATS, type);
            }
        }
    }
}
Example usage of org.apache.cassandra.io.util.RandomAccessReader in the Apache Cassandra project: class MetadataSerializerTest, method testSerialization.
/**
 * Round-trips the constructed metadata through the latest serialization format
 * and asserts every component deserializes back equal to the original.
 */
@Test
public void testSerialization() throws IOException {
    Map<MetadataType, MetadataComponent> expected = constructMetadata();
    MetadataSerializer serializer = new MetadataSerializer();
    File statsFile = serialize(expected, serializer, BigFormat.latestVersion);
    Descriptor descriptor = new Descriptor(statsFile.getParentFile(), "", "", 0, SSTableFormat.Type.BIG);
    try (RandomAccessReader reader = RandomAccessReader.open(statsFile)) {
        Map<MetadataType, MetadataComponent> actual = serializer.deserialize(descriptor, reader, EnumSet.allOf(MetadataType.class));
        for (MetadataType type : MetadataType.values())
            assertEquals(expected.get(type), actual.get(type));
    }
}
Example usage of org.apache.cassandra.io.util.RandomAccessReader in the Apache Cassandra project: class TokenTreeTest, method buildSerializeIterateAndSkip.
/**
 * Serializes the built token tree to a temp file, then verifies that iterating it
 * (including a skipTo past an initial prefix) matches iteration over the expected
 * token map.
 *
 * Fix: the RandomAccessReader was previously opened outside try-with-resources and
 * closed only on the success path, so any failed assertion leaked the reader. It is
 * now managed by try-with-resources.
 */
public void buildSerializeIterateAndSkip(TokenTreeBuilder builder, SortedMap<Long, LongSet> tokens) throws Exception {
    builder.finish();
    final File treeFile = File.createTempFile("token-tree-iterate-test2", "tt");
    treeFile.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(treeFile, DEFAULT_OPT)) {
        builder.write(writer);
        writer.sync();
    }
    try (RandomAccessReader reader = RandomAccessReader.open(treeFile)) {
        final TokenTree tokenTree = new TokenTree(new MappedBuffer(reader));
        final RangeIterator<Long, Token> treeIterator = tokenTree.iterator(KEY_CONVERTER);
        final RangeIterator<Long, TokenWithOffsets> listIterator = new EntrySetSkippableIterator(tokens);
        // Compare the two iterators element-by-element over an initial prefix.
        // NOTE(review): the 12 bound presumably keeps the prefix short of the skip
        // target below — confirm against the fixture's token layout.
        long lastToken = 0L;
        while (treeIterator.hasNext() && lastToken < 12) {
            Token treeNext = treeIterator.next();
            TokenWithOffsets listNext = listIterator.next();
            Assert.assertEquals(listNext.token, (lastToken = treeNext.get()));
            Assert.assertEquals(convert(listNext.offsets), convert(treeNext));
        }
        // Skip both iterators to the same point and compare the remainder.
        treeIterator.skipTo(100548L);
        listIterator.skipTo(100548L);
        while (treeIterator.hasNext() && listIterator.hasNext()) {
            Token treeNext = treeIterator.next();
            TokenWithOffsets listNext = listIterator.next();
            Assert.assertEquals(listNext.token, (long) treeNext.get());
            Assert.assertEquals(convert(listNext.offsets), convert(treeNext));
        }
        Assert.assertFalse("Tree iterator not completed", treeIterator.hasNext());
        Assert.assertFalse("List iterator not completed", listIterator.hasNext());
    }
}
Example usage of org.apache.cassandra.io.util.RandomAccessReader in the Apache Cassandra project: class TokenTreeTest, method skipPastEnd.
/**
 * Serializes the built token tree and verifies that skipTo beyond the largest
 * stored token completes without error.
 *
 * Fix: the RandomAccessReader was previously never closed, leaking the reader on
 * every invocation. It is now managed by try-with-resources.
 */
public void skipPastEnd(TokenTreeBuilder builder, SortedMap<Long, LongSet> tokens) throws Exception {
    builder.finish();
    final File treeFile = File.createTempFile("token-tree-skip-past-test", "tt");
    treeFile.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(treeFile, DEFAULT_OPT)) {
        builder.write(writer);
        writer.sync();
    }
    try (RandomAccessReader reader = RandomAccessReader.open(treeFile)) {
        final RangeIterator<Long, Token> tokenTree = new TokenTree(new MappedBuffer(reader)).iterator(KEY_CONVERTER);
        // Skipping past the last key must be a no-op rather than a failure.
        tokenTree.skipTo(tokens.lastKey() + 10);
    }
}
Example usage of org.apache.cassandra.io.util.RandomAccessReader in the Apache Cassandra project: class TokenTreeTest, method buildSerializeAndIterate.
/**
 * Serializes the built token tree to a temp file, reads it back, and verifies a
 * full iteration matches the expected token map entry-for-entry.
 *
 * Fix: the RandomAccessReader was previously opened outside try-with-resources and
 * closed only on the success path, so any failed assertion leaked the reader. It is
 * now managed by try-with-resources.
 */
public void buildSerializeAndIterate(TokenTreeBuilder builder, SortedMap<Long, LongSet> tokenMap) throws Exception {
    builder.finish();
    final File treeFile = File.createTempFile("token-tree-iterate-test1", "tt");
    treeFile.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(treeFile, DEFAULT_OPT)) {
        builder.write(writer);
        writer.sync();
    }
    try (RandomAccessReader reader = RandomAccessReader.open(treeFile)) {
        final TokenTree tokenTree = new TokenTree(new MappedBuffer(reader));
        final Iterator<Token> tokenIterator = tokenTree.iterator(KEY_CONVERTER);
        final Iterator<Map.Entry<Long, LongSet>> listIterator = tokenMap.entrySet().iterator();
        // Both iterations must advance in lockstep with equal keys and offsets.
        while (tokenIterator.hasNext() && listIterator.hasNext()) {
            Token treeNext = tokenIterator.next();
            Map.Entry<Long, LongSet> listNext = listIterator.next();
            Assert.assertEquals(listNext.getKey(), treeNext.get());
            Assert.assertEquals(convert(listNext.getValue()), convert(treeNext));
        }
        Assert.assertFalse("token iterator not finished", tokenIterator.hasNext());
        Assert.assertFalse("list iterator not finished", listIterator.hasNext());
    }
}
Aggregations