Example 6 with RandomAccessReader

Use of org.apache.cassandra.io.util.RandomAccessReader in project cassandra by apache, from class MetadataSerializerTest, method testOldReadsNew:

public void testOldReadsNew(String oldV, String newV) throws IOException {
    Map<MetadataType, MetadataComponent> originalMetadata = constructMetadata();
    MetadataSerializer serializer = new MetadataSerializer();
    // Write metadata in two minor formats.
    File statsFileLb = serialize(originalMetadata, serializer, BigFormat.instance.getVersion(newV));
    File statsFileLa = serialize(originalMetadata, serializer, BigFormat.instance.getVersion(oldV));
    // Reading both as the earlier version should yield identical results.
    SSTableFormat.Type stype = SSTableFormat.Type.current();
    Descriptor desc = new Descriptor(stype.info.getVersion(oldV), statsFileLb.getParentFile(), "", "", 0, stype);
    try (RandomAccessReader inLb = RandomAccessReader.open(statsFileLb);
        RandomAccessReader inLa = RandomAccessReader.open(statsFileLa)) {
        Map<MetadataType, MetadataComponent> deserializedLb = serializer.deserialize(desc, inLb, EnumSet.allOf(MetadataType.class));
        Map<MetadataType, MetadataComponent> deserializedLa = serializer.deserialize(desc, inLa, EnumSet.allOf(MetadataType.class));
        for (MetadataType type : MetadataType.values()) {
            assertEquals(deserializedLa.get(type), deserializedLb.get(type));
            if (!originalMetadata.get(type).equals(deserializedLb.get(type))) {
                // Currently only STATS can differ; update this check if that changes.
                assertEquals(MetadataType.STATS, type);
            }
        }
    }
}
Also used: RandomAccessReader (org.apache.cassandra.io.util.RandomAccessReader), SSTableFormat (org.apache.cassandra.io.sstable.format.SSTableFormat), Descriptor (org.apache.cassandra.io.sstable.Descriptor), DatabaseDescriptor (org.apache.cassandra.config.DatabaseDescriptor), File (java.io.File)
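
The serialize(...) helper called above (and in the next example) is not shown on this page. A minimal sketch of what it might look like, assuming it writes the components for the requested format version to a temporary stats file through a buffered DataOutputStreamPlus; the temp-file naming and stream choice are assumptions, not necessarily the project's verbatim code:

public File serialize(Map<MetadataType, MetadataComponent> metadata, MetadataSerializer serializer, Version version) throws IOException {
    // Write every metadata component for the given version to a temporary file and return it.
    File statsFile = File.createTempFile(Component.STATS.name, null);
    try (DataOutputStreamPlus out = new BufferedDataOutputStreamPlus(new FileOutputStream(statsFile))) {
        serializer.serialize(metadata, out, version);
    }
    return statsFile;
}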

Example 7 with RandomAccessReader

Use of org.apache.cassandra.io.util.RandomAccessReader in project cassandra by apache, from class MetadataSerializerTest, method testSerialization:

@Test
public void testSerialization() throws IOException {
    Map<MetadataType, MetadataComponent> originalMetadata = constructMetadata();
    MetadataSerializer serializer = new MetadataSerializer();
    File statsFile = serialize(originalMetadata, serializer, BigFormat.latestVersion);
    Descriptor desc = new Descriptor(statsFile.getParentFile(), "", "", 0, SSTableFormat.Type.BIG);
    try (RandomAccessReader in = RandomAccessReader.open(statsFile)) {
        Map<MetadataType, MetadataComponent> deserialized = serializer.deserialize(desc, in, EnumSet.allOf(MetadataType.class));
        for (MetadataType type : MetadataType.values()) {
            assertEquals(originalMetadata.get(type), deserialized.get(type));
        }
    }
}
Also used: RandomAccessReader (org.apache.cassandra.io.util.RandomAccessReader), Descriptor (org.apache.cassandra.io.sstable.Descriptor), DatabaseDescriptor (org.apache.cassandra.config.DatabaseDescriptor), File (java.io.File), Test (org.junit.Test)
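
Both metadata tests follow the same basic RandomAccessReader pattern: open the file, read with the DataInput-style primitives, and let try-with-resources close the reader. A minimal standalone sketch of that pattern, assuming a hypothetical dataFile written elsewhere as an int count followed by that many longs (the layout is purely illustrative):

try (RandomAccessReader reader = RandomAccessReader.open(dataFile)) {
    // Absolute positioning, then sequential DataInput-style reads.
    reader.seek(0);
    int count = reader.readInt();
    long[] values = new long[count];
    for (int i = 0; i < count; i++)
        values[i] = reader.readLong();
}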

Example 8 with RandomAccessReader

Use of org.apache.cassandra.io.util.RandomAccessReader in project cassandra by apache, from class TokenTreeTest, method buildSerializeIterateAndSkip:

public void buildSerializeIterateAndSkip(TokenTreeBuilder builder, SortedMap<Long, LongSet> tokens) throws Exception {
    builder.finish();
    final File treeFile = File.createTempFile("token-tree-iterate-test2", "tt");
    treeFile.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(treeFile, DEFAULT_OPT)) {
        builder.write(writer);
        writer.sync();
    }
    final RandomAccessReader reader = RandomAccessReader.open(treeFile);
    final TokenTree tokenTree = new TokenTree(new MappedBuffer(reader));
    final RangeIterator<Long, Token> treeIterator = tokenTree.iterator(KEY_CONVERTER);
    final RangeIterator<Long, TokenWithOffsets> listIterator = new EntrySetSkippableIterator(tokens);
    long lastToken = 0L;
    // Walk the first few tokens in lockstep with the expected entries.
    while (treeIterator.hasNext() && lastToken < 12) {
        Token treeNext = treeIterator.next();
        TokenWithOffsets listNext = listIterator.next();
        Assert.assertEquals(listNext.token, (lastToken = treeNext.get()));
        Assert.assertEquals(convert(listNext.offsets), convert(treeNext));
    }
    // Skip both iterators to the same point, then verify they stay in sync to the end.
    treeIterator.skipTo(100548L);
    listIterator.skipTo(100548L);
    while (treeIterator.hasNext() && listIterator.hasNext()) {
        Token treeNext = treeIterator.next();
        TokenWithOffsets listNext = listIterator.next();
        Assert.assertEquals(listNext.token, (long) treeNext.get());
        Assert.assertEquals(convert(listNext.offsets), convert(treeNext));
    }
    Assert.assertFalse("Tree iterator not completed", treeIterator.hasNext());
    Assert.assertFalse("List iterator not completed", listIterator.hasNext());
    reader.close();
}
Also used: RandomAccessReader (org.apache.cassandra.io.util.RandomAccessReader), SequentialWriter (org.apache.cassandra.io.util.SequentialWriter), File (java.io.File), MappedBuffer (org.apache.cassandra.index.sasi.utils.MappedBuffer)
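
DEFAULT_OPT and KEY_CONVERTER are fields of TokenTreeTest that the snippet does not show: DEFAULT_OPT is the SequentialWriterOption handed to the writer, and KEY_CONVERTER is the function the tree uses to resolve raw token values back into keys while iterating. One plausible definition of the writer option (an assumption for illustration; the real test may use different settings):

// Assumed writer settings; the buffer size is illustrative.
static final SequentialWriterOption DEFAULT_OPT = SequentialWriterOption.newBuilder()
                                                                        .bufferSize(4096)
                                                                        .build();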

Example 9 with RandomAccessReader

Use of org.apache.cassandra.io.util.RandomAccessReader in project cassandra by apache, from class TokenTreeTest, method skipPastEnd:

public void skipPastEnd(TokenTreeBuilder builder, SortedMap<Long, LongSet> tokens) throws Exception {
    builder.finish();
    final File treeFile = File.createTempFile("token-tree-skip-past-test", "tt");
    treeFile.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(treeFile, DEFAULT_OPT)) {
        builder.write(writer);
        writer.sync();
    }
    final RandomAccessReader reader = RandomAccessReader.open(treeFile);
    final RangeIterator<Long, Token> tokenTree = new TokenTree(new MappedBuffer(reader)).iterator(KEY_CONVERTER);
    // Skipping beyond the last key should not throw.
    tokenTree.skipTo(tokens.lastKey() + 10);
}
Also used: RandomAccessReader (org.apache.cassandra.io.util.RandomAccessReader), SequentialWriter (org.apache.cassandra.io.util.SequentialWriter), File (java.io.File), MappedBuffer (org.apache.cassandra.index.sasi.utils.MappedBuffer)
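
As written, the example only verifies that skipping beyond the last key does not throw, and it never closes the reader. A slightly tightened variant (an illustrative rewrite, not the project's code) that scopes the reader with try-with-resources and makes the expectation explicit:

public void skipPastEnd(TokenTreeBuilder builder, SortedMap<Long, LongSet> tokens) throws Exception {
    builder.finish();
    final File treeFile = File.createTempFile("token-tree-skip-past-test", "tt");
    treeFile.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(treeFile, DEFAULT_OPT)) {
        builder.write(writer);
        writer.sync();
    }
    try (RandomAccessReader reader = RandomAccessReader.open(treeFile)) {
        RangeIterator<Long, Token> iterator = new TokenTree(new MappedBuffer(reader)).iterator(KEY_CONVERTER);
        // Skipping beyond the last key should not throw; the iterator is then expected to be exhausted.
        iterator.skipTo(tokens.lastKey() + 10);
        Assert.assertFalse(iterator.hasNext());
    }
}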

Example 10 with RandomAccessReader

Use of org.apache.cassandra.io.util.RandomAccessReader in project cassandra by apache, from class TokenTreeTest, method buildSerializeAndIterate:

public void buildSerializeAndIterate(TokenTreeBuilder builder, SortedMap<Long, LongSet> tokenMap) throws Exception {
    builder.finish();
    final File treeFile = File.createTempFile("token-tree-iterate-test1", "tt");
    treeFile.deleteOnExit();
    try (SequentialWriter writer = new SequentialWriter(treeFile, DEFAULT_OPT)) {
        builder.write(writer);
        writer.sync();
    }
    final RandomAccessReader reader = RandomAccessReader.open(treeFile);
    final TokenTree tokenTree = new TokenTree(new MappedBuffer(reader));
    final Iterator<Token> tokenIterator = tokenTree.iterator(KEY_CONVERTER);
    final Iterator<Map.Entry<Long, LongSet>> listIterator = tokenMap.entrySet().iterator();
    // Compare the tree's iteration order against the sorted expected map.
    while (tokenIterator.hasNext() && listIterator.hasNext()) {
        Token treeNext = tokenIterator.next();
        Map.Entry<Long, LongSet> listNext = listIterator.next();
        Assert.assertEquals(listNext.getKey(), treeNext.get());
        Assert.assertEquals(convert(listNext.getValue()), convert(treeNext));
    }
    Assert.assertFalse("token iterator not finished", tokenIterator.hasNext());
    Assert.assertFalse("list iterator not finished", listIterator.hasNext());
    reader.close();
}
Also used: LongSet (com.carrotsearch.hppc.LongSet), SequentialWriter (org.apache.cassandra.io.util.SequentialWriter), RandomAccessReader (org.apache.cassandra.io.util.RandomAccessReader), File (java.io.File), MappedBuffer (org.apache.cassandra.index.sasi.utils.MappedBuffer)
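
All three TokenTreeTest helpers above take the builder and the expected token map as parameters. A hedged sketch of how a caller might drive them, assuming SASI's DynamicTokenTreeBuilder constructor that accepts a SortedMap (the sample data and the builder choice are illustrative):

// Build a small token -> offsets map and push it through the round-trip helpers above.
SortedMap<Long, LongSet> tokens = new TreeMap<>();
for (long i = 0; i < 128; i++) {
    LongSet offsets = new LongOpenHashSet();
    offsets.add(i * 10);
    tokens.put(i, offsets);
}
buildSerializeAndIterate(new DynamicTokenTreeBuilder(tokens), tokens);
buildSerializeIterateAndSkip(new DynamicTokenTreeBuilder(tokens), tokens);
skipPastEnd(new DynamicTokenTreeBuilder(tokens), tokens);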

Aggregations

RandomAccessReader (org.apache.cassandra.io.util.RandomAccessReader): 21 usages
File (java.io.File): 12 usages
SequentialWriter (org.apache.cassandra.io.util.SequentialWriter): 6 usages
ByteBuffer (java.nio.ByteBuffer): 5 usages
MappedBuffer (org.apache.cassandra.index.sasi.utils.MappedBuffer): 5 usages
LongSet (com.carrotsearch.hppc.LongSet): 2 usages
LZFOutputStream (com.ning.compress.lzf.LZFOutputStream): 2 usages
IOException (java.io.IOException): 2 usages
RandomAccessFile (java.io.RandomAccessFile): 2 usages
DatabaseDescriptor (org.apache.cassandra.config.DatabaseDescriptor): 2 usages
SyncSegment (org.apache.cassandra.db.commitlog.CommitLogSegmentReader.SyncSegment): 2 usages
Descriptor (org.apache.cassandra.io.sstable.Descriptor): 2 usages
SSTableReader (org.apache.cassandra.io.sstable.format.SSTableReader): 2 usages
Test (org.junit.Test): 2 usages
LongOpenHashSet (com.carrotsearch.hppc.LongOpenHashSet): 1 usage
FileOutputStream (java.io.FileOutputStream): 1 usage
IOError (java.io.IOError): 1 usage
FileChannel (java.nio.channels.FileChannel): 1 usage
Cipher (javax.crypto.Cipher): 1 usage
ColumnFamilyStore (org.apache.cassandra.db.ColumnFamilyStore): 1 usage