Search in sources :

Example 1 with SASIIndex

Use of org.apache.cassandra.index.sasi.SASIIndex in the Apache Cassandra project.

From the class PerSSTableIndexWriterTest, method testSparse.

@Test
public void testSparse() throws Exception {
    final String columnName = "timestamp";
    ColumnFamilyStore cfs = Keyspace.open(KS_NAME).getColumnFamilyStore(CF_NAME);
    ColumnMetadata column = cfs.metadata().getColumn(UTF8Type.instance.decompose(columnName));
    SASIIndex sasi = (SASIIndex) cfs.indexManager.getIndexByName(cfs.name + "_" + columnName);
    File directory = cfs.getDirectories().getDirectoryForNewSSTables();
    Descriptor descriptor = cfs.newSSTableDescriptor(directory);
    PerSSTableIndexWriter indexWriter = (PerSSTableIndexWriter) sasi.getFlushObserver(descriptor, OperationType.FLUSH);
    final long now = System.currentTimeMillis();
    indexWriter.begin();
    indexWriter.indexes.put(column, indexWriter.newIndex(sasi.getIndex()));
    // Build the segment data with a plain map instead of double-brace
    // initialization (which creates an anonymous HashMap subclass that
    // captures the enclosing instance).
    Map<Long, Set<Integer>> initialData = new HashMap<>();
    initialData.put(now, new HashSet<>(Arrays.asList(0, 1)));
    initialData.put(now + 1, new HashSet<>(Arrays.asList(2, 3)));
    initialData.put(now + 2, new HashSet<>(Arrays.asList(4, 5, 6, 7, 8, 9)));
    populateSegment(cfs.metadata(), indexWriter.getIndex(column), initialData);
    Callable<OnDiskIndex> segmentBuilder = indexWriter.getIndex(column).scheduleSegmentFlush(false);
    // A segment this small must not produce an on-disk index yet.
    Assert.assertNull(segmentBuilder.call());
    PerSSTableIndexWriter.Index index = indexWriter.getIndex(column);
    Random random = ThreadLocalRandom.current();
    Set<String> segments = new HashSet<>();
    // Flush several correct segments; the final combined flush is expected to
    // yield no output (the combined index must not exist after complete()).
    for (int i = 0; i < 3; i++) {
        Map<Long, Set<Integer>> segmentData = new HashMap<>();
        segmentData.put(now, new HashSet<>(Arrays.asList(random.nextInt(), random.nextInt(), random.nextInt())));
        segmentData.put(now + 1, new HashSet<>(Arrays.asList(random.nextInt(), random.nextInt(), random.nextInt())));
        segmentData.put(now + 2, new HashSet<>(Arrays.asList(random.nextInt(), random.nextInt(), random.nextInt())));
        populateSegment(cfs.metadata(), index, segmentData);
        try {
            // flush each of the new segments, they should all succeed
            OnDiskIndex segment = index.scheduleSegmentFlush(false).call();
            index.segments.add(Futures.immediateFuture(segment));
            segments.add(segment.getIndexPath());
        } catch (Exception | FSError e) {
            // Rethrow with the original cause attached so the failure report
            // shows why the flush failed (printStackTrace + fail() loses it).
            throw new AssertionError("unexpected failure flushing index segment", e);
        }
    }
    // make sure that all of the segments are present on the filesystem
    for (String segment : segments) Assert.assertTrue(new File(segment).exists());
    indexWriter.complete();
    // make sure that individual segments have been cleaned up
    for (String segment : segments) Assert.assertFalse(new File(segment).exists());
    // and combined index doesn't exist either
    Assert.assertFalse(new File(index.outputFile).exists());
}
Also used : ColumnMetadata(org.apache.cassandra.schema.ColumnMetadata) FSError(org.apache.cassandra.io.FSError) SASIIndex(org.apache.cassandra.index.sasi.SASIIndex) ConfigurationException(org.apache.cassandra.exceptions.ConfigurationException) ThreadLocalRandom(java.util.concurrent.ThreadLocalRandom) ColumnFamilyStore(org.apache.cassandra.db.ColumnFamilyStore) Descriptor(org.apache.cassandra.io.sstable.Descriptor) File(java.io.File) Test(org.junit.Test)

Example 2 with SASIIndex

Use of org.apache.cassandra.index.sasi.SASIIndex in the Apache Cassandra project.

From the class PerSSTableIndexWriterTest, method testPartialIndexWrites.

@Test
public void testPartialIndexWrites() throws Exception {
    final int maxKeys = 100000, numParts = 4, partSize = maxKeys / numParts;
    final String keyFormat = "key%06d";
    final long timestamp = System.currentTimeMillis();
    ColumnFamilyStore cfs = Keyspace.open(KS_NAME).getColumnFamilyStore(CF_NAME);
    ColumnMetadata column = cfs.metadata().getColumn(UTF8Type.instance.decompose("age"));
    SASIIndex sasi = (SASIIndex) cfs.indexManager.getIndexByName(cfs.name + "_age");
    File directory = cfs.getDirectories().getDirectoryForNewSSTables();
    Descriptor descriptor = cfs.newSSTableDescriptor(directory);
    PerSSTableIndexWriter indexWriter = (PerSSTableIndexWriter) sasi.getFlushObserver(descriptor, OperationType.FLUSH);
    // Generate maxKeys single-cell rows, keyed in sorted (token) order.
    SortedMap<DecoratedKey, Row> expectedKeys = new TreeMap<>(DecoratedKey.comparator);
    for (int i = 0; i < maxKeys; i++) {
        ByteBuffer key = ByteBufferUtil.bytes(String.format(keyFormat, i));
        expectedKeys.put(cfs.metadata().partitioner.decorateKey(key), BTreeRow.singleCellRow(Clustering.EMPTY, BufferCell.live(column, timestamp, Int32Type.instance.decompose(i))));
    }
    indexWriter.begin();
    Iterator<Map.Entry<DecoratedKey, Row>> keyIterator = expectedKeys.entrySet().iterator();
    long position = 0;
    Set<String> segments = new HashSet<>();
    // Feed the writer in numParts chunks, forcing a segment flush after each,
    // so complete() has to stitch multiple partial segments together.
    outer: for (; ; ) {
        for (int i = 0; i < partSize; i++) {
            if (!keyIterator.hasNext())
                break outer;
            Map.Entry<DecoratedKey, Row> key = keyIterator.next();
            indexWriter.startPartition(key.getKey(), position++);
            indexWriter.nextUnfilteredCluster(key.getValue());
        }
        PerSSTableIndexWriter.Index index = indexWriter.getIndex(column);
        OnDiskIndex segment = index.scheduleSegmentFlush(false).call();
        index.segments.add(Futures.immediateFuture(segment));
        segments.add(segment.getIndexPath());
    }
    for (String segment : segments) Assert.assertTrue(new File(segment).exists());
    String indexFile = indexWriter.indexes.get(column).filename(true);
    // final flush
    indexWriter.complete();
    // the partial segments must have been merged and removed
    for (String segment : segments) Assert.assertFalse(new File(segment).exists());
    OnDiskIndex index = new OnDiskIndex(new File(indexFile), Int32Type.instance, keyPosition -> {
        ByteBuffer key = ByteBufferUtil.bytes(String.format(keyFormat, keyPosition));
        return cfs.metadata().partitioner.decorateKey(key);
    });
    try {
        Assert.assertEquals(0, UTF8Type.instance.compare(index.minKey(), ByteBufferUtil.bytes(String.format(keyFormat, 0))));
        Assert.assertEquals(0, UTF8Type.instance.compare(index.maxKey(), ByteBufferUtil.bytes(String.format(keyFormat, maxKeys - 1))));
        Set<DecoratedKey> actualKeys = new HashSet<>();
        int count = 0;
        for (OnDiskIndex.DataTerm term : index) {
            RangeIterator<Long, Token> tokens = term.getTokens();
            while (tokens.hasNext()) {
                for (DecoratedKey key : tokens.next()) actualKeys.add(key);
            }
            // terms must come back in ascending int order: 0, 1, 2, ...
            Assert.assertEquals(count++, (int) Int32Type.instance.compose(term.getTerm()));
        }
        Assert.assertEquals(expectedKeys.size(), actualKeys.size());
        for (DecoratedKey key : expectedKeys.keySet()) Assert.assertTrue(actualKeys.contains(key));
    } finally {
        // Close the index even when an assertion above fails; the original
        // code leaked the handle on any assertion failure.
        FileUtils.closeQuietly(index);
    }
}
Also used : ColumnMetadata(org.apache.cassandra.schema.ColumnMetadata) SASIIndex(org.apache.cassandra.index.sasi.SASIIndex) DecoratedKey(org.apache.cassandra.db.DecoratedKey) ByteBuffer(java.nio.ByteBuffer) ColumnFamilyStore(org.apache.cassandra.db.ColumnFamilyStore) Descriptor(org.apache.cassandra.io.sstable.Descriptor) BTreeRow(org.apache.cassandra.db.rows.BTreeRow) Row(org.apache.cassandra.db.rows.Row) File(java.io.File) Test(org.junit.Test)

Aggregations

File (java.io.File)2 ColumnFamilyStore (org.apache.cassandra.db.ColumnFamilyStore)2 SASIIndex (org.apache.cassandra.index.sasi.SASIIndex)2 Descriptor (org.apache.cassandra.io.sstable.Descriptor)2 ColumnMetadata (org.apache.cassandra.schema.ColumnMetadata)2 Test (org.junit.Test)2 ByteBuffer (java.nio.ByteBuffer)1 ThreadLocalRandom (java.util.concurrent.ThreadLocalRandom)1 DecoratedKey (org.apache.cassandra.db.DecoratedKey)1 BTreeRow (org.apache.cassandra.db.rows.BTreeRow)1 Row (org.apache.cassandra.db.rows.Row)1 ConfigurationException (org.apache.cassandra.exceptions.ConfigurationException)1 FSError (org.apache.cassandra.io.FSError)1