
Example 31 with Writer

Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

The class TestByzantineValueReadWrite, method testWriteDataPoint.

@Test
public void testWriteDataPoint() throws IOException {
    ByteBuffer buf = ByteBuffer.allocateDirect(1024);
    ByzantineValueWriter bwriter = new ByzantineValueWriter();
    // exercise the implementation through the Writer interface;
    // startOffset is defined elsewhere in the test class
    Writer writer = bwriter;
    writer.configure(buf, true, startOffset);
    for (long i = 0; i < 10; i++) {
        writer.add(i);
    }
    assertEquals(10, bwriter.getCount());
    buf = bwriter.getBuf();
    buf.flip();
    // skip the leading header byte, then verify the persisted value count
    buf.get();
    assertEquals(10, buf.getInt());
}
Also used : ByteBuffer(java.nio.ByteBuffer) ValueWriter(com.srotya.sidewinder.core.storage.compression.ValueWriter) Writer(com.srotya.sidewinder.core.storage.compression.Writer) Test(org.junit.Test)
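
For reference, the values written above can be read back with the companion Reader API used in Example 35 (getReader(), getCount(), read()). A minimal round-trip sketch, assuming ByzantineValueWriter exposes the same getReader() and reusing the test's startOffset constant:

// Round-trip sketch: write ten values, then read them back in insertion order.
// Assumes getReader()/getCount()/read() as shown in Example 35 on this page.
ByteBuffer buf = ByteBuffer.allocateDirect(1024);
ByzantineValueWriter writer = new ByzantineValueWriter();
writer.configure(buf, true, startOffset);
for (long i = 0; i < 10; i++) {
    writer.add(i);
}
Reader reader = writer.getReader();
for (int k = 0; k < reader.getCount(); k++) {
    assertEquals(k, reader.read()); // values come back in write order
}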

Example 32 with Writer

Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

The class TestSeries, method testIterator.

@SuppressWarnings("unchecked")
@Test
public void testIterator() throws IOException {
    measurement.setTimebucket(1024);
    Series series = new Series(new ByteString("idasdasda"), 0);
    long ts = 1497720652566L;
    for (int i = 0; i < 10000; i++) {
        Point dp = Point.newBuilder()
                .setTimestamp(ts + i * 200)
                .addValueFieldName("f1").addFp(false).addValue(i)
                .addValueFieldName("f2").addFp(true).addValue(Double.doubleToLongBits(i * 1.1))
                .build();
        series.addPoint(dp, measurement);
    }
    // check time buckets
    assertEquals(3, series.getBucketMap().size());
    // query iterators
    FieldReaderIterator[] queryIterators = series.queryIterators(measurement, new ArrayList<>(Arrays.asList("f1", "f2")), Long.MAX_VALUE, Long.MIN_VALUE);
    assertEquals(3, queryIterators.length);
    // must respond even when there is nothing selectable in time range
    queryIterators = series.queryIterators(measurement, Arrays.asList("f1", "f2"), Long.MAX_VALUE, Long.MAX_VALUE);
    assertEquals(3, queryIterators.length);
    // must still respond when the timestamp (TS) field is requested and nothing is in range
    queryIterators = series.queryIterators(measurement, Arrays.asList("f1", "TS"), Long.MAX_VALUE, Long.MAX_VALUE);
    assertEquals(2, queryIterators.length);
    // requesting no fields should yield no iterators
    queryIterators = series.queryIterators(measurement, Arrays.asList(), Long.MAX_VALUE, Long.MAX_VALUE);
    assertEquals(0, queryIterators.length);
    final List<Writer> compactedWriters = series.compact(measurement);
    assertTrue(compactedWriters.size() > 0);
}
Also used : Point(com.srotya.sidewinder.core.rpc.Point) Writer(com.srotya.sidewinder.core.storage.compression.Writer) Test(org.junit.Test)
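
Why the first assertion expects three buckets: 10,000 points spaced 200 ms apart span about 33 minutes, and with a 1,024-second bucket width that range crosses two bucket boundaries for this particular start timestamp. A standalone sketch of the arithmetic (the epoch-second bucketing formula is an assumption of this sketch, not necessarily sidewinder's internal code):

public class BucketMath {
    public static void main(String[] args) {
        long start = 1497720652566L;     // first timestamp in ms, as in the test
        long end = start + 9999L * 200L; // last of the 10000 points
        int bucketWidthSeconds = 1024;   // matches setTimebucket(1024)
        long firstBucket = (start / 1000) / bucketWidthSeconds;
        long lastBucket = (end / 1000) / bucketWidthSeconds;
        System.out.println(lastBucket - firstBucket + 1); // prints 3
    }
}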

Example 33 with Writer

Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

The class Measurement, method collectGarbage.

public default Set<String> collectGarbage(Archiver archiver) throws IOException {
    return runCleanupOperation("garbage collection", series -> {
        try {
            Map<Integer, List<Writer>> collectedGarbage = series.collectGarbage(this);
            List<Writer> output = new ArrayList<>();
            // null-check before dereferencing, otherwise the size() call can NPE
            if (collectedGarbage != null) {
                if (collectedGarbage.size() > 0) {
                    getLogger().fine("Collected garbage:" + collectedGarbage.size() + " series:" + series.getSeriesId());
                }
                for (Entry<Integer, List<Writer>> entry : collectedGarbage.entrySet()) {
                    for (Writer writer : entry.getValue()) {
                        if (archiver != null) {
                            byte[] buf = Archiver.writerToByteArray(writer);
                            TimeSeriesArchivalObject archivalObject = new TimeSeriesArchivalObject(getDbName(), getMeasurementName(), series.getSeriesId(), entry.getKey(), buf);
                            try {
                                archiver.archive(archivalObject);
                            } catch (ArchiveException e) {
                                getLogger().log(Level.SEVERE, "Series failed to archive, series:" + series.getSeriesId() + " db:" + getDbName() + " m:" + getMeasurementName(), e);
                            }
                        }
                        output.add(writer);
                    }
                }
            }
            return output;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    });
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ArrayList(java.util.ArrayList) List(java.util.List) IOException(java.io.IOException) TimeSeriesArchivalObject(com.srotya.sidewinder.core.storage.archival.TimeSeriesArchivalObject) Writer(com.srotya.sidewinder.core.storage.compression.Writer)
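
A possible caller sketch: the null check above makes the archiver optional, so garbage collection can run without archival. Here m stands for any Measurement implementation (a hypothetical variable), and the returned set presumably holds the ids of the cleaned series:

// Run GC without archival; collectGarbage tolerates a null archiver
// because of the null check inside the loop above.
Set<String> cleaned = m.collectGarbage(null);
for (String id : cleaned) {
    System.out.println("gc reclaimed: " + id);
}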

Example 34 with Writer

Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

The class Series, method collectGarbage.

public Map<Integer, List<Writer>> collectGarbage(Measurement measurement) throws IOException {
    Map<Integer, List<Writer>> collectedGarbageMap = new HashMap<>();
    logger.finer("Retention buckets:" + measurement.getRetentionBuckets().get());
    while (getBucketMap().size() > measurement.getRetentionBuckets().get()) {
        writeLock.lock();
        try {
            int oldSize = getBucketMap().size();
            // evict the oldest bucket (smallest key) until the map fits the retention count
            Integer key = getBucketMap().firstKey();
            Map<String, Field> fieldMap = getBucketMap().remove(key);
            List<Writer> gcedBuckets = new ArrayList<>();
            collectedGarbageMap.put(key, gcedBuckets);
            for (Field field : fieldMap.values()) {
                gcedBuckets.addAll(field.getWriters());
                logger.log(Level.FINEST, "GC," + measurement.getMeasurementName() + ":" + seriesId + " removing bucket:" + key + " past retention of:" + measurement.getRetentionBuckets().get() + " old size:" + oldSize + " new size:" + getBucketMap().size());
            }
        } finally {
            // release the lock even if a field lookup throws
            writeLock.unlock();
        }
    }
    if (collectedGarbageMap.size() > 0) {
        logger.fine(() -> "GC," + measurement.getMeasurementName() + " buckets:" + collectedGarbageMap.size() + " retention size:" + measurement.getRetentionBuckets().get());
    }
    return collectedGarbageMap;
}
Also used : ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) List(java.util.List) Point(com.srotya.sidewinder.core.rpc.Point) Writer(com.srotya.sidewinder.core.storage.compression.Writer)
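
The eviction pattern above, removing the oldest bucket (smallest key) until the map fits the retention count, distilled into a standalone sketch over a plain TreeMap (illustrative only, not sidewinder API):

import java.util.TreeMap;

public class RetentionSketch {
    public static void main(String[] args) {
        TreeMap<Integer, String> buckets = new TreeMap<>();
        buckets.put(100, "old");
        buckets.put(200, "mid");
        buckets.put(300, "new");
        int retention = 2;
        while (buckets.size() > retention) {
            // firstKey() is the oldest bucket, same idiom as collectGarbage
            buckets.remove(buckets.firstKey());
        }
        System.out.println(buckets.keySet()); // prints [200, 300]
    }
}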

Example 35 with Writer

Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

The class ValueField, method compact.

/**
 * Compacts the old Writers of a time bucket into a single Writer. This both
 * shrinks the buffers and provides an opportunity to re-encode the bucket
 * with a higher-compression algorithm. All Writers but the last are
 * read-only, so operating on them does not impact live writes.
 *
 * @param functions callbacks invoked on the old writer list, under the write
 *                  lock, before the old writers are removed
 * @return null if there is nothing to compact, or an empty list if all
 *         compaction attempts fail
 * @throws IOException
 */
@SafeVarargs
public final List<Writer> compact(Measurement measurement, Lock writeLock, Consumer<List<? extends Writer>>... functions) throws IOException {
    if (StorageEngine.ENABLE_METHOD_METRICS) {
    // ctx = timerCompaction.time();
    }
    // size check is to avoid unnecessary calls and exit fast
    if (writerList.size() <= 1) {
        return null;
    }
    List<Writer> compactedWriter = new ArrayList<>();
    int id = CompressionFactory.getIdByValueClass(compactionClass);
    int listSize = writerList.size() - 1;
    int pointCount = writerList.subList(0, listSize).stream().mapToInt(s -> s.getCount()).sum();
    int total = writerList.subList(0, listSize).stream().mapToInt(s -> s.getPosition()).sum();
    if (total == 0) {
        logger.warning("Ignoring bucket for compaction, not enough bytes. THIS BUG SHOULD BE INVESTIGATED");
        return null;
    }
    ValueWriter writer = getWriterInstance(compactionClass);
    int compactedPoints = 0;
    double bufSize = total * compactionRatio;
    logger.finer("Allocating buffer:" + total + " Vs. " + pointCount * 16 + " max compacted buffer:" + bufSize);
    logger.finer("Getting sublist from:" + 0 + " to:" + (writerList.size() - 1));
    ByteBuffer buf = ByteBuffer.allocateDirect((int) bufSize);
    // byte 0: compression codec id
    buf.put((byte) id);
    // byte 1: buffer sequence number, zero since this buffer will be the first one
    buf.put(1, (byte) 0);
    writer.configure(buf, true, START_OFFSET);
    ValueWriter input = writerList.get(0);
    // read all but the last writer and insert into new temp writer
    try {
        for (int i = 0; i < writerList.size() - 1; i++) {
            input = writerList.get(i);
            Reader reader = input.getReader();
            for (int k = 0; k < reader.getCount(); k++) {
                long pair = reader.read();
                writer.add(pair);
                compactedPoints++;
            }
        }
        writer.makeReadOnly(false);
    } catch (RollOverException e) {
        logger.warning("Buffer filled up; bad compression ratio; not compacting");
        return null;
    } catch (Exception e) {
        logger.log(Level.SEVERE, "Compaction failed due to unknown exception", e);
        return null;
    }
    // get the raw compressed bytes
    ByteBuffer rawBytes = writer.getRawBytes();
    // limit how much data needs to be read from the buffer
    rawBytes.limit(rawBytes.position());
    // round the requested buffer length up to an even number of bytes
    int size = rawBytes.limit() + 1;
    if (size % 2 != 0) {
        size++;
    }
    rawBytes.rewind();
    // create buffer in measurement
    BufferObject newBuf = measurement.getMalloc().createNewBuffer(fieldId, tsBucket, size);
    logger.fine("Compacted buffer size:" + size + " vs " + total + " countp:" + listSize + " field:" + fieldId);
    LinkedByteString bufferId = newBuf.getBufferId();
    buf = newBuf.getBuf();
    writer = getWriterInstance(compactionClass);
    buf.put(rawBytes);
    writer.setBufferId(bufferId);
    writer.configure(buf, false, START_OFFSET);
    writer.makeReadOnly(false);
    writeLock.lock();
    try {
        if (functions != null) {
            for (Consumer<List<? extends Writer>> function : functions) {
                function.accept(writerList);
            }
        }
        size = listSize - 1;
        logger.finest("Compaction debug size differences size:" + size + " listSize:" + listSize + " curr:" + writerList.size());
        // remove the compacted writers from the live list, highest index first
        for (int i = size; i >= 0; i--) {
            compactedWriter.add(writerList.remove(i));
        }
        writerList.add(0, writer);
        // renumber the sequence byte (offset 1) of every remaining buffer
        for (int i = 0; i < writerList.size(); i++) {
            writerList.get(i).getRawBytes().put(1, (byte) i);
        }
        logger.fine("Total points:" + compactedPoints + ", original pair count:" + writer.getReader().getCount() + " compression ratio:" + rawBytes.position() + " original:" + total + " newlistlength:" + writerList.size());
    } finally {
        // always release the lock, even if a callback throws
        writeLock.unlock();
    }
    if (StorageEngine.ENABLE_METHOD_METRICS) {
    // ctx.stop();
    }
    return compactedWriter;
}
Also used : ValueWriter(com.srotya.sidewinder.core.storage.compression.ValueWriter) IOException(java.io.IOException) Logger(java.util.logging.Logger) ByteBuffer(java.nio.ByteBuffer) ArrayList(java.util.ArrayList) Level(java.util.logging.Level) HashSet(java.util.HashSet) Consumer(java.util.function.Consumer) MetricsRegistryService(com.srotya.sidewinder.core.monitoring.MetricsRegistryService) Writer(com.srotya.sidewinder.core.storage.compression.Writer) List(java.util.List) Lock(java.util.concurrent.locks.Lock) Map(java.util.Map) Predicate(com.srotya.sidewinder.core.predicates.Predicate) CompressionFactory(com.srotya.sidewinder.core.storage.compression.CompressionFactory) RollOverException(com.srotya.sidewinder.core.storage.compression.RollOverException) Comparator(java.util.Comparator) Collections(java.util.Collections) Reader(com.srotya.sidewinder.core.storage.compression.Reader)
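
A caller sketch for the documented contract of compact(): null means there was nothing to compact (or the compression ratio was too poor), an empty list means every attempt failed, and otherwise the returned writers are the ones retired from the bucket. Here field, m, and lock are hypothetical variables:

// field is a ValueField, m a Measurement, lock its write lock (all hypothetical).
List<Writer> retired = field.compact(m, lock, old -> {
    // the callback runs under the write lock, before the old writers are removed
    System.out.println("retiring " + old.size() + " writers");
});
if (retired == null) {
    System.out.println("nothing to compact");
} else {
    System.out.println("retired " + retired.size() + " writers");
}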

Aggregations

Writer (com.srotya.sidewinder.core.storage.compression.Writer) 35
ArrayList (java.util.ArrayList) 23
List (java.util.List) 16
Test (org.junit.Test) 16
ByteBuffer (java.nio.ByteBuffer) 15
Reader (com.srotya.sidewinder.core.storage.compression.Reader) 14
IOException (java.io.IOException) 12
HashMap (java.util.HashMap) 12
DataPoint (com.srotya.sidewinder.core.storage.DataPoint) 7
BetweenPredicate (com.srotya.sidewinder.core.predicates.BetweenPredicate) 6
Predicate (com.srotya.sidewinder.core.predicates.Predicate) 5
HashSet (java.util.HashSet) 5
Point (com.srotya.sidewinder.core.rpc.Point) 3
CompressionFactory (com.srotya.sidewinder.core.storage.compression.CompressionFactory) 3
RollOverException (com.srotya.sidewinder.core.storage.compression.RollOverException) 3
ValueWriter (com.srotya.sidewinder.core.storage.compression.ValueWriter) 3
Collections (java.util.Collections) 3
Comparator (java.util.Comparator) 3
Map (java.util.Map) 3
AtomicInteger (java.util.concurrent.atomic.AtomicInteger) 3