
Example 1 with RollOverException

use of com.srotya.sidewinder.core.storage.compression.RollOverException in project sidewinder by srotya.

From class TestByzantineReadWrite, method testBufferFull:

@Test
public void testBufferFull() throws IOException {
    ByteBuffer buf = ByteBuffer.allocateDirect(1024 * 1024);
    ByzantineWriter writer = new ByzantineWriter();
    writer.setTsBucket("asdasdasd");
    assertEquals("asdasdasd", writer.getTsBucket());
    writer.configure(new HashMap<>(), buf, true, startOffset, true);
    long ots = System.currentTimeMillis();
    writer.setHeaderTimestamp(ots);
    assertEquals(ots, writer.getHeaderTimestamp());
    int limit = 1_000_000;
    try {
        for (int i = 0; i < limit; i++) {
            writer.addValue(ots + i * 1000, i);
        }
        fail("Must fill up buffer");
    } catch (RollOverException e) {
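        // expected: the 1 MB direct buffer fills up long before one million points are written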
    }
    assertTrue(writer.isFull());
}
Also used : RollOverException(com.srotya.sidewinder.core.storage.compression.RollOverException) ByteBuffer(java.nio.ByteBuffer) DataPoint(com.srotya.sidewinder.core.storage.DataPoint) Test(org.junit.Test)
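
The test above relies on RollOverException as the writer's "buffer full" signal: addValue(...) throws it once the backing ByteBuffer is exhausted and isFull() starts returning true. Below is a minimal sketch, not taken from the sidewinder sources, of how a caller might roll over to a fresh writer when that happens. The ByzantineWriter package path, the configure(...) argument order, the startOffset value, and the broad throws clause are assumptions based only on the snippet above.

import java.nio.ByteBuffer;
import java.util.HashMap;

import com.srotya.sidewinder.core.storage.compression.RollOverException;
// package path assumed from the test class shown above
import com.srotya.sidewinder.core.storage.compression.byzantine.ByzantineWriter;

public class RollOverSketch {

    public static void main(String[] args) throws Exception {
        // mirrors the (unshown) startOffset field used by testBufferFull; 1 is an assumption
        int startOffset = 1;
        ByzantineWriter writer = newWriter(startOffset);
        long ts = System.currentTimeMillis();
        writer.setHeaderTimestamp(ts);
        for (int i = 0; i < 1_000_000; i++) {
            long timestamp = ts + i * 1000L;
            try {
                writer.addValue(timestamp, i);
            } catch (RollOverException e) {
                // the buffer is full; retire the old writer and continue in a new one
                writer.makeReadOnly();
                writer = newWriter(startOffset);
                writer.setHeaderTimestamp(timestamp);
                writer.addValue(timestamp, i); // a fresh 1 MB buffer easily holds the retried point
            }
        }
    }

    private static ByzantineWriter newWriter(int startOffset) throws Exception {
        ByzantineWriter w = new ByzantineWriter();
        // same configure(...) argument order as in testBufferFull above
        w.configure(new HashMap<>(), ByteBuffer.allocateDirect(1024 * 1024), true, startOffset, true);
        return w;
    }
}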

Example 2 with RollOverException

use of com.srotya.sidewinder.core.storage.compression.RollOverException in project sidewinder by srotya.

From class TimeSeries, method compact:

/**
 * Compacts the old Writers of each time bucket into a single Writer. This
 * ensures the buffers are compacted and provides an opportunity to use a
 * higher-compression algorithm for the bucket. All Writers but the last are
 * read-only, therefore performing operations on them does not impact live
 * writes.
 *
 * @param functions callbacks invoked with the bucket's writer list before it
 *                  is swapped for the compacted writer
 * @return null if there is nothing to compact, or an empty list if all
 *         compaction attempts fail
 * @throws IOException
 */
@SafeVarargs
public final List<Writer> compact(Consumer<List<Writer>>... functions) throws IOException {
    // size check is to avoid unnecessary calls and exit fast
    if (compactionCandidateSet.isEmpty()) {
        return null;
    }
    List<Writer> compactedWriter = new ArrayList<>();
    Iterator<Entry<String, List<Writer>>> iterator = compactionCandidateSet.entrySet().iterator();
    int id = CompressionFactory.getIdByClass(compactionClass);
    while (iterator.hasNext()) {
        // entry.getKey() gives tsBucket string
        Entry<String, List<Writer>> entry = iterator.next();
        // remove this entry from compaction set
        iterator.remove();
        List<Writer> list = entry.getValue();
        int listSize = list.size() - 1;
        int pointCount = list.subList(0, listSize).stream().mapToInt(s -> s.getCount()).sum();
        int total = list.subList(0, listSize).stream().mapToInt(s -> s.getPosition()).sum();
        if (total == 0) {
            logger.warning("Ignoring bucket for compaction, not enough bytes. THIS BUG SHOULD BE INVESTIGATED");
            continue;
        }
        Writer writer = getWriterInstance(compactionClass);
        int compactedPoints = 0;
        double bufSize = total * compactionRatio;
        logger.finer("Allocating buffer:" + total + " Vs. " + pointCount * 16 + " max compacted buffer:" + bufSize);
        logger.finer("Getting sublist from:" + 0 + " to:" + (list.size() - 1));
        ByteBuffer buf = ByteBuffer.allocate((int) bufSize);
        buf.put((byte) id);
        // since this buffer will be the first one
        buf.put(1, (byte) 0);
        writer.configure(conf, buf, true, START_OFFSET, false);
        Writer input = list.get(0);
        // read the header timestamp
        long timestamp = input.getHeaderTimestamp();
        writer.setHeaderTimestamp(timestamp);
        // read all but the last writer and insert into new temp writer
        try {
            for (int i = 0; i < list.size() - 1; i++) {
                input = list.get(i);
                Reader reader = input.getReader();
                for (int k = 0; k < reader.getPairCount(); k++) {
                    long[] pair = reader.read();
                    writer.addValue(pair[0], pair[1]);
                    compactedPoints++;
                }
            }
            writer.makeReadOnly();
        } catch (RollOverException e) {
            logger.warning("Buffer filled up; bad compression ratio; not compacting");
            continue;
        } catch (Exception e) {
            logger.log(Level.SEVERE, "Compaction failed due to unknown exception", e);
        }
        // get the raw compressed bytes
        ByteBuffer rawBytes = writer.getRawBytes();
        // limit how much data needs to be read from the buffer
        rawBytes.limit(rawBytes.position());
        // round the buffer length request up to a multiple of 2
        int size = rawBytes.limit() + 1;
        if (size % 2 != 0) {
            size++;
        }
        rawBytes.rewind();
        // create buffer in measurement
        BufferObject newBuf = measurement.getMalloc().createNewBuffer(seriesId, entry.getKey(), size);
        logger.fine("Compacted buffer size:" + size + " vs " + total);
        String bufferId = newBuf.getBufferId();
        buf = newBuf.getBuf();
        writer = getWriterInstance(compactionClass);
        buf.put(rawBytes);
        writer.setBufferId(bufferId);
        writer.configure(conf, buf, false, START_OFFSET, false);
        writer.makeReadOnly();
        synchronized (list) {
            if (functions != null) {
                for (Consumer<List<Writer>> function : functions) {
                    function.accept(list);
                }
            }
            size = listSize - 1;
            logger.finest("Compaction debug size differences size:" + size + " listSize:" + listSize + " curr:" + list.size());
            for (int i = size; i >= 0; i--) {
                compactedWriter.add(list.remove(i));
            }
            list.add(0, writer);
            for (int i = 0; i < list.size(); i++) {
                list.get(i).getRawBytes().put(1, (byte) i);
            }
            // fix bucket count
            bucketCount -= size;
            logger.fine("Total points:" + compactedPoints + ", original pair count:" + writer.getReader().getPairCount() + " compression ratio:" + rawBytes.position() + " original:" + total);
        }
    }
    return compactedWriter;
}
Also used : HashMap(java.util.HashMap) ByteBuffer(java.nio.ByteBuffer) ArrayList(java.util.ArrayList) Level(java.util.logging.Level) HashSet(java.util.HashSet) Writer(com.srotya.sidewinder.core.storage.compression.Writer) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Map(java.util.Map) Predicate(com.srotya.sidewinder.core.predicates.Predicate) CompressionFactory(com.srotya.sidewinder.core.storage.compression.CompressionFactory) RollOverException(com.srotya.sidewinder.core.storage.compression.RollOverException) BetweenPredicate(com.srotya.sidewinder.core.predicates.BetweenPredicate) Reader(com.srotya.sidewinder.core.storage.compression.Reader) TimeUtils(com.srotya.sidewinder.core.utils.TimeUtils) Iterator(java.util.Iterator) Collection(java.util.Collection) IOException(java.io.IOException) Logger(java.util.logging.Logger) TimeUnit(java.util.concurrent.TimeUnit) Consumer(java.util.function.Consumer) List(java.util.List) Tag(com.srotya.sidewinder.core.filters.Tag) TreeMap(java.util.TreeMap) Entry(java.util.Map.Entry) Comparator(java.util.Comparator) Collections(java.util.Collections) SortedMap(java.util.SortedMap)
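
Stripped of buffer allocation, sizing, and bucket bookkeeping, the core of compact() is a copy loop: replay every pair from the bucket's read-only Writers into a single destination Writer, and treat RollOverException as the signal that the compactionRatio estimate was too optimistic, in which case the bucket is skipped. The sketch below is not part of the sidewinder sources; it uses only the Writer and Reader methods visible in the snippet above, and the broad throws clause is an assumption since their exact signatures are not shown.

import java.util.List;

import com.srotya.sidewinder.core.storage.compression.Reader;
import com.srotya.sidewinder.core.storage.compression.RollOverException;
import com.srotya.sidewinder.core.storage.compression.Writer;

public class CompactionCopySketch {

    /**
     * Replays all pairs from the given read-only writers into the destination writer.
     *
     * @return true if every pair fit, false if the destination buffer rolled over
     */
    public static boolean copyInto(List<Writer> readOnlyWriters, Writer destination) throws Exception {
        // as in compact(), the header timestamp comes from the first (oldest) writer
        destination.setHeaderTimestamp(readOnlyWriters.get(0).getHeaderTimestamp());
        try {
            for (Writer source : readOnlyWriters) {
                Reader reader = source.getReader();
                for (int k = 0; k < reader.getPairCount(); k++) {
                    long[] pair = reader.read();
                    destination.addValue(pair[0], pair[1]);
                }
            }
            // seal the compacted writer, mirroring writer.makeReadOnly() in compact()
            destination.makeReadOnly();
            return true;
        } catch (RollOverException e) {
            // destination buffer was too small for the compacted data; skip this bucket
            return false;
        }
    }
}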

Aggregations

RollOverException (com.srotya.sidewinder.core.storage.compression.RollOverException) 2
ByteBuffer (java.nio.ByteBuffer) 2
Tag (com.srotya.sidewinder.core.filters.Tag) 1
BetweenPredicate (com.srotya.sidewinder.core.predicates.BetweenPredicate) 1
Predicate (com.srotya.sidewinder.core.predicates.Predicate) 1
DataPoint (com.srotya.sidewinder.core.storage.DataPoint) 1
CompressionFactory (com.srotya.sidewinder.core.storage.compression.CompressionFactory) 1
Reader (com.srotya.sidewinder.core.storage.compression.Reader) 1
Writer (com.srotya.sidewinder.core.storage.compression.Writer) 1
TimeUtils (com.srotya.sidewinder.core.utils.TimeUtils) 1
IOException (java.io.IOException) 1
ArrayList (java.util.ArrayList) 1
Collection (java.util.Collection) 1
Collections (java.util.Collections) 1
Comparator (java.util.Comparator) 1
HashMap (java.util.HashMap) 1
HashSet (java.util.HashSet) 1
Iterator (java.util.Iterator) 1
List (java.util.List) 1
Map (java.util.Map) 1