
Example 16 with Writer

use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

the class TimeSeries method createNewWriter.

private Writer createNewWriter(long timestamp, String tsBucket, List<Writer> list) throws IOException {
    BufferObject bufPair = measurement.getMalloc().createNewBuffer(seriesId, tsBucket);
    bufPair.getBuf().put((byte) CompressionFactory.getIdByClass(compressionClass));
    bufPair.getBuf().put((byte) list.size());
    Writer writer;
    writer = getWriterInstance(compressionClass);
    writer.setBufferId(bufPair.getBufferId());
    // the two header bytes (codec id and writer index) precede the data; configure() starts at START_OFFSET
    writer.configure(conf, bufPair.getBuf(), true, START_OFFSET, true);
    writer.setHeaderTimestamp(timestamp);
    list.add(writer);
    bucketCount++;
    logger.fine(() -> "Created new writer for:" + tsBucket + " timestamp:" + timestamp + " bucketInfo:" + bufPair.getBufferId());
    return writer;
}
Also used : Writer(com.srotya.sidewinder.core.storage.compression.Writer)
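
Note that createNewWriter writes a two-byte header into every new buffer: byte 0 holds the CompressionFactory codec id and byte 1 holds the writer's position in the bucket's list. The sketch below shows how those header bytes could be read back from the raw buffer; the helper name is hypothetical and only java.nio.ByteBuffer is required.

// Illustrative helper (not part of the project): decode the header bytes
// written by createNewWriter before the Writer takes over at START_OFFSET.
static void inspectBufferHeader(java.nio.ByteBuffer buf) {
    byte codecId = buf.get(0);     // byte 0: compression codec id (CompressionFactory)
    byte writerIndex = buf.get(1); // byte 1: index of this writer in the bucket's list
    System.out.println("codecId=" + codecId + " writerIndex=" + writerIndex);
}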

Example 17 with Writer

use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

the class TimeField method compact.

/**
 * Compacts old Writers into a single Writer for each time bucket; this ensures
 * the buffers are compacted and provides an opportunity to use a higher
 * compression rate algorithm for the bucket. All Writers but the last are
 * read-only, therefore performing operations on them does not impact ongoing
 * writes.
 *
 * @param functions
 *            optional functions to execute BEFORE cleaning up list
 * @return returns null if nothing to compact or empty list if all compaction
 *         attempts fail
 * @throws IOException
 */
@SafeVarargs
public final List<Writer> compact(Measurement measurement, Lock writeLock, Consumer<List<? extends Writer>>... functions) throws IOException {
    if (StorageEngine.ENABLE_METHOD_METRICS) {
    // ctx = timerCompaction.time();
    }
    // size check is to avoid unnecessary calls and exit fast
    if (writerList.size() <= 1) {
        return null;
    }
    List<Writer> compactedWriter = new ArrayList<>();
    int id = CompressionFactory.getIdByTimeClass(compactionClass);
    List<TimeWriter> list = writerList;
    int listSize = list.size() - 1;
    int pointCount = list.subList(0, listSize).stream().mapToInt(s -> s.getCount()).sum();
    int total = list.subList(0, listSize).stream().mapToInt(s -> s.getPosition()).sum();
    if (total == 0) {
        logger.warning("Ignoring bucket for compaction, not enough bytes. THIS BUG SHOULD BE INVESTIGATED");
        return null;
    }
    TimeWriter writer = getWriterInstance(compactionClass);
    int compactedPoints = 0;
    double bufSize = total * compactionRatio;
    logger.finer("Allocating buffer:" + total + " Vs. " + pointCount * 16 + " max compacted buffer:" + bufSize);
    logger.finer("Getting sublist from:" + 0 + " to:" + (list.size() - 1));
    ByteBuffer buf = ByteBuffer.allocateDirect((int) bufSize);
    buf.put((byte) id);
    // index byte at position 1 is 0, since this compacted buffer will be the first one in the list
    buf.put(1, (byte) 0);
    writer.configure(buf, true, START_OFFSET);
    TimeWriter input = list.get(0);
    // copy the header timestamp from the first writer
    writer.setHeaderTimestamp(input.getHeaderTimestamp());
    // read all but the last writer and insert into new temp writer
    try {
        for (int i = 0; i < list.size() - 1; i++) {
            input = list.get(i);
            Reader reader = input.getReader();
            for (int k = 0; k < reader.getCount(); k++) {
                long pair = reader.read();
                writer.add(pair);
                compactedPoints++;
            }
        }
        writer.makeReadOnly(false);
    } catch (RollOverException e) {
        logger.warning("Buffer filled up; bad compression ratio; not compacting");
        return null;
    } catch (Exception e) {
        logger.log(Level.SEVERE, "Compaction failed due to unknown exception", e);
        return null;
    }
    // get the raw compressed bytes
    ByteBuffer rawBytes = writer.getRawBytes();
    // limit how much data needs to be read from the buffer
    rawBytes.limit(rawBytes.position());
    // round the buffer length request up to an even size (multiple of 2)
    int size = rawBytes.limit() + 1;
    if (size % 2 != 0) {
        size++;
    }
    rawBytes.rewind();
    // create buffer in measurement
    BufferObject newBuf = measurement.getMalloc().createNewBuffer(fieldId, tsBucket, size);
    logger.fine("Compacted buffer size:" + size + " vs " + total);
    LinkedByteString bufferId = newBuf.getBufferId();
    buf = newBuf.getBuf();
    writer = getWriterInstance(compactionClass);
    buf.put(rawBytes);
    writer.setBufferId(bufferId);
    writer.configure(buf, false, START_OFFSET);
    writer.makeReadOnly(false);
    writeLock.lock();
    if (functions != null) {
        for (Consumer<List<? extends Writer>> function : functions) {
            function.accept(list);
        }
    }
    size = listSize - 1;
    logger.finest("Compaction debug size differences size:" + size + " listSize:" + listSize + " curr:" + list.size());
    for (int i = size; i >= 0; i--) {
        compactedWriter.add(list.remove(i));
    }
    list.add(0, writer);
    for (int i = 0; i < list.size(); i++) {
        list.get(i).getRawBytes().put(1, (byte) i);
    }
    logger.fine("Total points:" + compactedPoints + ", original pair count:" + writer.getReader().getCount() + " compression ratio:" + rawBytes.position() + " original:" + total);
    writeLock.unlock();
    if (StorageEngine.ENABLE_METHOD_METRICS) {
    // ctx.stop();
    }
    return compactedWriter;
}
Also used : IOException(java.io.IOException) Logger(java.util.logging.Logger) FilteredValueException(com.srotya.sidewinder.core.storage.compression.FilteredValueException) ByteBuffer(java.nio.ByteBuffer) ArrayList(java.util.ArrayList) Level(java.util.logging.Level) HashSet(java.util.HashSet) Consumer(java.util.function.Consumer) MetricsRegistryService(com.srotya.sidewinder.core.monitoring.MetricsRegistryService) Writer(com.srotya.sidewinder.core.storage.compression.Writer) List(java.util.List) Lock(java.util.concurrent.locks.Lock) TimeWriter(com.srotya.sidewinder.core.storage.compression.TimeWriter) Map(java.util.Map) Predicate(com.srotya.sidewinder.core.predicates.Predicate) CompressionFactory(com.srotya.sidewinder.core.storage.compression.CompressionFactory) RollOverException(com.srotya.sidewinder.core.storage.compression.RollOverException) UnsupportedMessageTypeException(io.netty.handler.codec.UnsupportedMessageTypeException) Comparator(java.util.Comparator) Collections(java.util.Collections) Reader(com.srotya.sidewinder.core.storage.compression.Reader)
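
As a usage sketch, the call below assumes an initialized TimeField (timeField), its Measurement, and the series write Lock are available from the surrounding storage code; the consumer and its log output are illustrative only. The consumer runs against the live writer list before the compacted writers are removed.

// Hypothetical invocation: trigger compaction and log the buffer ids of the
// writers about to be replaced; a null result means nothing was compacted.
List<Writer> replaced = timeField.compact(measurement, writeLock,
        writers -> writers.forEach(w -> System.out.println("Compacting buffer:" + w.getBufferId())));
if (replaced == null) {
    // fewer than two writers, or compaction could not proceed
}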

Example 18 with Writer

use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

the class Measurement method runCleanupOperation.

public default Set<String> runCleanupOperation(String operation, java.util.function.Function<Series, List<Writer>> op) throws IOException {
    Set<String> cleanupList = new HashSet<>();
    getLock().lock();
    try {
        List<Series> seriesList = getSeriesList();
        Set<String> temp = new HashSet<>();
        for (int i = 0; i < seriesList.size(); i++) {
            Series entry = seriesList.get(i);
            try {
                List<Writer> list = op.apply(entry);
                if (list == null) {
                    continue;
                }
                for (Writer timeSeriesBucket : list) {
                    if (getMetricsCleanupBufferCounter() != null) {
                        getMetricsCleanupBufferCounter().inc();
                    }
                    String buf = timeSeriesBucket.getBufferId().toString();
                    temp.add(buf);
                    cleanupList.add(buf);
                    getLogger().fine("Adding buffer to cleanup " + operation + " for bucket:" + entry.getSeriesId() + " Offset:" + timeSeriesBucket.currentOffset());
                }
                getLogger().fine("Buffers " + operation + " for time series:" + entry.getSeriesId());
                if (i % 100 == 0) {
                    if (temp.size() > 0) {
                        getMalloc().cleanupBufferIds(temp);
                        temp = new HashSet<>();
                    }
                }
            } catch (Exception e) {
                getLogger().log(Level.SEVERE, "Error collecting " + operation, e);
            }
        }
        // cleanup these buffer ids
        if (cleanupList.size() > 0) {
            getLogger().info("For measurement:" + getMeasurementName() + " cleaned=" + cleanupList.size() + " buffers");
        }
        getMalloc().cleanupBufferIds(cleanupList);
    } finally {
        getLock().unlock();
    }
    return cleanupList;
}
Also used : Point(com.srotya.sidewinder.core.rpc.Point) Writer(com.srotya.sidewinder.core.storage.compression.Writer) IOException(java.io.IOException) FilteredValueException(com.srotya.sidewinder.core.storage.compression.FilteredValueException) HashSet(java.util.HashSet)
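
A sketch of driving runCleanupOperation from the engine side, assuming each Series exposes an operation (shown here as a hypothetical compact() call, standing in for logic like Example 17) that returns the Writers it replaced:

// Hypothetical invocation: run a compaction pass over every series of the
// measurement and collect the buffer ids that were released.
Set<String> cleanedBufferIds = measurement.runCleanupOperation("compaction", series -> {
    try {
        return series.compact(); // assumption: returns the replaced Writers, or null if none
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
});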

Example 19 with Writer

use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

the class TestByzantineValueReadWrite method testReadWriteDataPoints.

@Test
public void testReadWriteDataPoints() throws IOException {
    ByteBuffer buf = ByteBuffer.allocateDirect(1024);
    Writer writer = new ByzantineValueWriter();
    writer.configure(buf, true, startOffset);
    for (long i = 0; i < 100; i++) {
        writer.add(i);
    }
    Reader reader = writer.getReader();
    for (int i = 0; i < 100; i++) {
        assertEquals(i, reader.read());
    }
    for (long i = 0; i < 100; i++) {
        writer.write(i);
    }
    reader = writer.getReader();
    for (int i = 0; i < 200; i++) {
        assertEquals(i % 100, reader.read());
    }
    System.out.println("Compression Ratio:" + writer.getCompressionRatio());
}
Also used : Reader(com.srotya.sidewinder.core.storage.compression.Reader) ByteBuffer(java.nio.ByteBuffer) ValueWriter(com.srotya.sidewinder.core.storage.compression.ValueWriter) Writer(com.srotya.sidewinder.core.storage.compression.Writer) Test(org.junit.Test)
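
Building on the getCount()/read() loop used in the test, a small helper can drain a Reader into a list for reuse in assertions; the helper name is illustrative and requires java.util.ArrayList and java.util.List in addition to the Reader import above.

// Illustrative helper (not part of the project): collect every value a Reader
// currently exposes, using the same getCount()/read() pattern as the test.
static List<Long> drainReader(Reader reader) throws IOException {
    List<Long> values = new ArrayList<>(reader.getCount());
    for (int i = 0; i < reader.getCount(); i++) {
        values.add(reader.read());
    }
    return values;
}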

Example 20 with Writer

use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

the class WriterServiceImpl method writeSeriesPoint.

@Override
public void writeSeriesPoint(RawTimeSeriesBucket request, StreamObserver<Ack> responseObserver) {
    Ack ack;
    try {
        TimeSeries series = engine.getOrCreateTimeSeries(request.getDbName(), request.getMeasurementName(), request.getValueFieldName(), new ArrayList<>(request.getTagsList()), request.getBucketSize(), request.getFp());
        for (Bucket bucket : request.getBucketsList()) {
            Writer writer = series.getOrCreateSeriesBucket(TimeUnit.MILLISECONDS, bucket.getHeaderTimestamp());
            writer.configure(conf, null, false, 1, true);
            writer.setCounter(bucket.getCount());
            writer.bootstrap(bucket.getData().asReadOnlyByteBuffer());
        }
        ack = Ack.newBuilder().setMessageId(request.getMessageId()).setResponseCode(200).build();
    } catch (Exception e) {
        ack = Ack.newBuilder().setMessageId(request.getMessageId()).setResponseCode(500).build();
    }
    responseObserver.onNext(ack);
    responseObserver.onCompleted();
}
Also used : TimeSeries(com.srotya.sidewinder.core.storage.TimeSeries) Writer(com.srotya.sidewinder.core.storage.compression.Writer) IOException(java.io.IOException)
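
For completeness, a client-side sketch of building the RawTimeSeriesBucket request consumed above; the builder method names are inferred from standard protobuf-generated code and the getters used in the handler, so they should be treated as assumptions, and the field values are placeholders.

// Hypothetical client sketch: builder names mirror the generated getters
// (setDbName/getDbName, addBuckets/getBucketsList, etc.) and are not verified.
Bucket bucket = Bucket.newBuilder()
        .setHeaderTimestamp(System.currentTimeMillis())
        .setCount(count)                              // placeholder point count
        .setData(ByteString.copyFrom(rawBufferBytes)) // placeholder compressed payload
        .build();
RawTimeSeriesBucket request = RawTimeSeriesBucket.newBuilder()
        .setDbName("db1").setMeasurementName("cpu").setValueFieldName("value")
        .setBucketSize(4096).setFp(false)
        .setMessageId(messageId)                      // placeholder id
        .addBuckets(bucket)
        .build();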

Aggregations

Writer (com.srotya.sidewinder.core.storage.compression.Writer) 35
ArrayList (java.util.ArrayList) 23
List (java.util.List) 16
Test (org.junit.Test) 16
ByteBuffer (java.nio.ByteBuffer) 15
Reader (com.srotya.sidewinder.core.storage.compression.Reader) 14
IOException (java.io.IOException) 12
HashMap (java.util.HashMap) 12
DataPoint (com.srotya.sidewinder.core.storage.DataPoint) 7
BetweenPredicate (com.srotya.sidewinder.core.predicates.BetweenPredicate) 6
Predicate (com.srotya.sidewinder.core.predicates.Predicate) 5
HashSet (java.util.HashSet) 5
Point (com.srotya.sidewinder.core.rpc.Point) 3
CompressionFactory (com.srotya.sidewinder.core.storage.compression.CompressionFactory) 3
RollOverException (com.srotya.sidewinder.core.storage.compression.RollOverException) 3
ValueWriter (com.srotya.sidewinder.core.storage.compression.ValueWriter) 3
Collections (java.util.Collections) 3
Comparator (java.util.Comparator) 3
Map (java.util.Map) 3
AtomicInteger (java.util.concurrent.atomic.AtomicInteger) 3