
Example 1 with Filter

Use of org.apache.cassandra.utils.Filter in project eiger by wlloyd.

In the class SSTableNamesIterator, method read:

private void read(SSTableReader sstable, FileDataInput file) throws IOException {
    Filter bf = IndexHelper.defreezeBloomFilter(file, sstable.descriptor.usesOldBloomFilter);
    List<IndexHelper.IndexInfo> indexList = IndexHelper.deserializeIndex(file);
    ColumnFamilySerializer serializer = ColumnFamily.serializer();
    try {
        cf = serializer.deserializeFromSSTableNoColumns(ColumnFamily.create(sstable.metadata), file);
    } catch (Exception e) {
        throw new IOException(serializer + " failed to deserialize " + sstable.getColumnFamilyName() + " with " + sstable.metadata + " from " + file, e);
    }
    // we can stop early if bloom filter says none of the columns actually exist -- but,
    // we can't stop before initializing the cf above, in case there's a relevant tombstone
    List<ByteBuffer> filteredColumnNames = new ArrayList<ByteBuffer>(columns.size());
    for (ByteBuffer name : columns) {
        if (bf.isPresent(name)) {
            filteredColumnNames.add(name);
        }
    }
    if (filteredColumnNames.isEmpty())
        return;
    if (indexList == null)
        readSimpleColumns(file, columns, filteredColumnNames);
    else
        readIndexedColumns(sstable.metadata, file, columns, filteredColumnNames, indexList);
    // create an iterator view of the columns we read
    iter = cf.iterator();
}
Also used : ColumnFamilySerializer(org.apache.cassandra.db.ColumnFamilySerializer) Filter(org.apache.cassandra.utils.Filter) IOException(java.io.IOException) ByteBuffer(java.nio.ByteBuffer)
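
The method above deserializes the row's bloom filter with IndexHelper.defreezeBloomFilter and then uses Filter.isPresent to drop requested column names that cannot exist on disk, returning early when nothing survives. The following is a minimal, self-contained sketch of that gating pattern, written against Guava's BloomFilter rather than Cassandra's internal Filter (an assumption made purely for illustration; the class name and values below are hypothetical):

import com.google.common.hash.BloomFilter;
import com.google.common.hash.Funnels;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class BloomGateSketch {
    public static void main(String[] args) {
        // Filter sized for roughly 10k column names with a 1% false-positive rate.
        BloomFilter<CharSequence> bf =
                BloomFilter.create(Funnels.stringFunnel(StandardCharsets.UTF_8), 10_000, 0.01);
        bf.put("col-1");
        bf.put("col-7");

        List<String> requested = Arrays.asList("col-1", "col-2", "col-7");
        List<String> filtered = new ArrayList<String>();
        for (String name : requested) {
            // mightContain never returns false for an element that was added,
            // so skipping on false is always safe; a true may still be a false positive.
            if (bf.mightContain(name)) {
                filtered.add(name);
            }
        }
        if (filtered.isEmpty()) {
            return; // nothing on disk can match, so the expensive read is skipped entirely
        }
        System.out.println("columns worth reading from disk: " + filtered);
    }
}

The pattern works because a bloom filter produces no false negatives: the early return is always safe, and a false positive only costs one unnecessary read.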

Example 2 with Filter

Use of org.apache.cassandra.utils.Filter in project brisk by riptano.

In the class BriskServer, method getLocalSubBlock:

/**
 * Retrieves a local sub block.
 *
 * @param subBlockCFName name of the sub block column family
 * @param blockId row key
 * @param sblockId SubBlock column name
 * @param offset offset inside the sub block
 * @return a local sub block, or null if none is found in any local SSTable
 * @throws TException if an I/O error occurs while reading the SSTable
 */
private LocalBlock getLocalSubBlock(String subBlockCFName, ByteBuffer blockId, ByteBuffer sblockId, int offset) throws TException {
    DecoratedKey<Token<?>> decoratedKey = new DecoratedKey<Token<?>>(StorageService.getPartitioner().getToken(blockId), blockId);
    Table table = Table.open(cfsKeyspace);
    ColumnFamilyStore sblockStore = table.getColumnFamilyStore(subBlockCFName);
    Collection<SSTableReader> sstables = sblockStore.getSSTables();
    for (SSTableReader sstable : sstables) {
        long position = sstable.getPosition(decoratedKey, Operator.EQ);
        if (position == -1)
            continue;
        String filename = sstable.descriptor.filenameFor(Component.DATA);
        RandomAccessFile raf = null;
        int mappedLength = -1;
        MappedByteBuffer mappedData = null;
        MappedFileDataInput file = null;
        try {
            raf = new RandomAccessFile(filename, "r");
            assert position < raf.length();
            mappedLength = (raf.length() - position) < Integer.MAX_VALUE ? (int) (raf.length() - position) : Integer.MAX_VALUE;
            mappedData = raf.getChannel().map(FileChannel.MapMode.READ_ONLY, position, mappedLength);
            file = new MappedFileDataInput(mappedData, filename, 0);
            // Verify key was found in data file
            DecoratedKey keyInDisk = SSTableReader.decodeKey(sstable.partitioner, sstable.descriptor, ByteBufferUtil.readWithShortLength(file));
            assert keyInDisk.equals(decoratedKey) : String.format("%s != %s in %s", keyInDisk, decoratedKey, file.getPath());
            long rowSize = SSTableReader.readRowSize(file, sstable.descriptor);
            assert rowSize > 0;
            assert rowSize < mappedLength;
            Filter bf = IndexHelper.defreezeBloomFilter(file, sstable.descriptor.usesOldBloomFilter);
            // verify this sub block column is present in this version of the row
            if (!bf.isPresent(sblockId))
                continue;
            List<IndexHelper.IndexInfo> indexList = IndexHelper.deserializeIndex(file);
            // even when the bloom filter reports the column as present, we still
            // deserialize the cf metadata below so a row-level tombstone is not missed
            ColumnFamilySerializer serializer = ColumnFamily.serializer();
            try {
                ColumnFamily cf = serializer.deserializeFromSSTableNoColumns(ColumnFamily.create(sstable.metadata), file);
                if (cf.isMarkedForDelete())
                    continue;
            } catch (Exception e) {
                e.printStackTrace();
                throw new IOException(serializer + " failed to deserialize " + sstable.getColumnFamilyName() + " with " + sstable.metadata + " from " + file, e);
            }
            Integer sblockLength = null;
            if (indexList == null)
                sblockLength = seekToSubColumn(sstable.metadata, file, sblockId);
            else
                sblockLength = seekToSubColumn(sstable.metadata, file, sblockId, indexList);
            if (sblockLength == null || sblockLength < 0)
                continue;
            int bytesReadFromStart = mappedLength - (int) file.bytesRemaining();
            if (logger.isDebugEnabled())
                logger.debug("BlockLength = " + sblockLength + " Availible " + file.bytesRemaining());
            assert offset <= sblockLength : String.format("%d > %d", offset, sblockLength);
            long dataOffset = position + bytesReadFromStart;
            if (file.bytesRemaining() == 0 || sblockLength == 0)
                continue;
            return new LocalBlock(file.getPath(), dataOffset + offset, sblockLength - offset);
        } catch (IOException e) {
            throw new TException(e);
        } finally {
            FileUtils.closeQuietly(raf);
        }
    }
    return null;
}
Also used : TException(org.apache.thrift.TException) Token(org.apache.cassandra.dht.Token) IOException(java.io.IOException) TimeoutException(java.util.concurrent.TimeoutException) UnknownHostException(java.net.UnknownHostException) TrackerManagerException(org.apache.cassandra.hadoop.trackers.TrackerManagerException) SSTableReader(org.apache.cassandra.io.sstable.SSTableReader) RandomAccessFile(java.io.RandomAccessFile) MappedByteBuffer(java.nio.MappedByteBuffer) Filter(org.apache.cassandra.utils.Filter)
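
Beyond the Filter check, the example above maps the region of the data file starting at the row's position read-only and then consumes it through MappedFileDataInput, so the sub block is served from the page cache instead of being copied through read() calls. The sketch below reproduces just that mapping step with plain JDK NIO; the file name and offset are hypothetical placeholders:

import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;

public class MappedRegionSketch {
    public static void main(String[] args) throws IOException {
        String filename = "/tmp/example-data.db"; // hypothetical data file
        long position = 0L;                       // hypothetical row start offset

        try (RandomAccessFile raf = new RandomAccessFile(filename, "r")) {
            // A single MappedByteBuffer is limited to an int length, so cap the mapping
            // at Integer.MAX_VALUE bytes, exactly as getLocalSubBlock does.
            long remaining = raf.length() - position;
            int mappedLength = remaining < Integer.MAX_VALUE ? (int) remaining : Integer.MAX_VALUE;

            MappedByteBuffer mapped =
                    raf.getChannel().map(FileChannel.MapMode.READ_ONLY, position, mappedLength);

            // Reads now go directly against the mapped region; no heap copies are made.
            int firstByte = mapped.hasRemaining() ? (mapped.get() & 0xFF) : -1;
            System.out.println("mapped " + mappedLength + " bytes, first byte = " + firstByte);
        }
    }
}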

Aggregations

IOException (java.io.IOException): 2 uses
Filter (org.apache.cassandra.utils.Filter): 2 uses
RandomAccessFile (java.io.RandomAccessFile): 1 use
UnknownHostException (java.net.UnknownHostException): 1 use
ByteBuffer (java.nio.ByteBuffer): 1 use
MappedByteBuffer (java.nio.MappedByteBuffer): 1 use
TimeoutException (java.util.concurrent.TimeoutException): 1 use
ColumnFamilySerializer (org.apache.cassandra.db.ColumnFamilySerializer): 1 use
Token (org.apache.cassandra.dht.Token): 1 use
TrackerManagerException (org.apache.cassandra.hadoop.trackers.TrackerManagerException): 1 use
SSTableReader (org.apache.cassandra.io.sstable.SSTableReader): 1 use
TException (org.apache.thrift.TException): 1 use