Example 26 with Writer

Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

From the class TestTimeSeries, method testReplaceSeries.

// @Test
// public void testCompactionGzip() throws IOException {
// DBMetadata metadata = new DBMetadata(28);
// MockMeasurement measurement = new MockMeasurement(1024);
// HashMap<String, String> conf = new HashMap<>();
// conf.put("default.bucket.size", "409600");
// conf.put("compaction.enabled", "true");
// conf.put("use.query.pool", "false");
// conf.put("compaction.ratio", "1.1");
// conf.put("zip.block.size", "8");
// 
// final TimeSeries series = new TimeSeries(measurement, "byzantine", "bzip",
// "asdasasd", 409600, metadata, true,
// conf);
// final long curr = 1497720652566L;
// 
// String valueFieldName = "value";
// 
// for (int i = 1; i <= 10000; i++) {
// series.addDataPoint(TimeUnit.MILLISECONDS, curr + i * 1000, i * 1.1);
// }
// 
// SortedMap<String, List<Writer>> bucketRawMap = series.getBucketRawMap();
// assertEquals(1, bucketRawMap.size());
// int size = bucketRawMap.values().iterator().next().size();
// assertTrue(series.getCompactionSet().size() < size);
// assertTrue(size > 2);
// series.compact();
// List<DataPoint> dataPoints = series.queryDataPoints(valueFieldName, curr -
// 1000, curr + 10000 * 1000 + 1, null);
// bucketRawMap = series.getBucketRawMap();
// assertEquals(2, bucketRawMap.values().iterator().next().size());
// int count = 0;
// for (List<Writer> list : bucketRawMap.values()) {
// for (Writer writer : list) {
// Reader reader = writer.getReader();
// count += reader.getPairCount();
// }
// }
// assertEquals(10000, count);
// assertEquals(10000, dataPoints.size());
// for (int i = 1; i <= 10000; i++) {
// DataPoint dp = dataPoints.get(i - 1);
// assertEquals("Bad ts:" + i, curr + i * 1000, dp.getTimestamp());
// assertEquals(i * 1.1, dp.getValue(), 0.001);
// }
// }
@Test
public void testReplaceSeries() throws IOException {
    DBMetadata metadata = new DBMetadata(28);
    MockMeasurement measurement = new MockMeasurement(1024);
    HashMap<String, String> conf = new HashMap<>();
    conf.put("default.bucket.size", "409600");
    conf.put("compaction.enabled", "true");
    conf.put("use.query.pool", "false");
    final TimeSeries series = new TimeSeries(measurement, "byzantine", "gorilla", "asdasasd", 409600, metadata, true, conf);
    final long curr = 1497720652566L;
    String valueFieldName = "value";
    for (int i = 1; i <= 10000; i++) {
        series.addDataPoint(TimeUnit.MILLISECONDS, curr + i * 1000, i * 1.1);
    }
    SortedMap<String, List<Writer>> bucketRawMap = series.getBucketRawMap();
    int size = bucketRawMap.values().iterator().next().size();
    assertTrue(series.getCompactionSet().size() < size);
    assertTrue(size > 2);
    List<Writer> compact = series.compact();
    assertTrue(compact.size() > 0);
    assertEquals(2, bucketRawMap.values().iterator().next().size());
    List<Writer> next = bucketRawMap.values().iterator().next();
    Writer writer = next.get(0);
    ByteBuffer buf = writer.getRawBytes();
    buf.rewind();
    int limit = buf.limit();
    ByteBuffer allocate = ByteBuffer.allocate(limit);
    allocate.put(buf);
    allocate.rewind();
    byte[] array = allocate.array();
    assertEquals(buf.limit(), array.length);
    MemMalloc allocator = measurement.getAllocator();
    List<Entry<Long, byte[]>> list = new ArrayList<>();
    list.add(new AbstractMap.SimpleEntry<Long, byte[]>(writer.getHeaderTimestamp(), array));
    try {
        series.replaceFirstBuckets(bucketRawMap.firstKey(), list);
    } catch (InstantiationException | IllegalAccessException e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
    assertEquals(1, allocator.getCleanupCallback().size());
    List<DataPoint> dataPoints = series.queryDataPoints(valueFieldName, curr - 1000, curr + 10000 * 1000 + 1, null);
    for (int i = 1; i <= 10000; i++) {
        DataPoint dp = dataPoints.get(i - 1);
        assertEquals("Bad ts:" + i, curr + i * 1000, dp.getTimestamp());
        assertEquals(i * 1.1, dp.getValue(), 0.001);
    }
}
Also used : HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) AbstractMap(java.util.AbstractMap) Entry(java.util.Map.Entry) List(java.util.List) MemMalloc(com.srotya.sidewinder.core.storage.mem.MemMalloc) ByteBuffer(java.nio.ByteBuffer) Writer(com.srotya.sidewinder.core.storage.compression.Writer) Test(org.junit.Test)
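
Mid-test, the writer's raw buffer is snapshotted into a standalone byte[] before being handed to replaceFirstBuckets. The rewind/allocate/put/rewind sequence is plain java.nio; here is a minimal self-contained sketch of just that idiom (the BufferSnapshot class and its sample values are illustrative, not sidewinder code):

import java.nio.ByteBuffer;

public class BufferSnapshot {

    // Mirror of the copy sequence in the test above: rewind the source,
    // copy its readable contents into a fresh buffer, and expose the backing array.
    static byte[] snapshot(ByteBuffer src) {
        src.rewind();
        ByteBuffer copy = ByteBuffer.allocate(src.limit());
        copy.put(src);
        copy.rewind();
        return copy.array();
    }

    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.putLong(1497720652566L).putInt(42);
        buf.flip();
        // prints: copied 12 bytes
        System.out.println("copied " + snapshot(buf).length + " bytes");
    }
}

The copy matters because the original buffer stays registered with the allocator; the test then checks allocator.getCleanupCallback() to confirm the displaced buffer was queued for cleanup.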

Example 27 with Writer

Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

From the class TimeSeries, method loadBucketMap.

/**
 * Check for and recover an existing bucket map, if one exists, by
 * rebuilding a writer for each supplied buffer entry.
 *
 * @param bufferEntries
 *            buffer entries to restore writers from
 * @throws IOException
 *             if a buffer id can't be read
 */
public void loadBucketMap(List<Entry<String, BufferObject>> bufferEntries) throws IOException {
    Map<String, String> cacheConf = new HashMap<>(conf);
    logger.fine(() -> "Scanning buffer for:" + seriesId);
    for (Entry<String, BufferObject> entry : bufferEntries) {
        ByteBuffer duplicate = entry.getValue().getBuf();
        duplicate.rewind();
        // String series = getStringFromBuffer(duplicate);
        // if (!series.equalsIgnoreCase(seriesId)) {
        // continue;
        // }
        String tsBucket = entry.getKey();
        List<Writer> list = bucketMap.get(tsBucket);
        if (list == null) {
            list = Collections.synchronizedList(new ArrayList<>());
            bucketMap.put(tsBucket, list);
        }
        ByteBuffer slice = duplicate.slice();
        int codecId = (int) slice.get();
        // int listIndex = (int) slice.get();
        Class<Writer> classById = CompressionFactory.getClassById(codecId);
        Writer writer = getWriterInstance(classById);
        if (entry.getValue().getBufferId() == null) {
            throw new IOException("Buffer id can't be read:" + measurement.getDbName() + ":" + measurement.getMeasurementName() + " series:" + getSeriesId());
        }
        logger.fine(() -> "Loading bucketmap:" + seriesId + "\t" + tsBucket + " bufferid:" + entry.getValue().getBufferId());
        writer.setBufferId(entry.getValue().getBufferId());
        writer.configure(cacheConf, slice, false, START_OFFSET, true);
        list.add(writer);
        bucketCount++;
        logger.fine(() -> "Loaded bucketmap:" + seriesId + "\t" + tsBucket + " bufferid:" + entry.getValue().getBufferId());
    }
    sortBucketMap();
}
Also used : HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) IOException(java.io.IOException) ByteBuffer(java.nio.ByteBuffer) Writer(com.srotya.sidewinder.core.storage.compression.Writer)
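
Recovery hinges on a one-byte codec header at the start of each buffer slice, which CompressionFactory.getClassById resolves to a Writer class. A minimal sketch of that header-dispatch pattern follows, with a hypothetical name registry standing in for the factory (the ids and names in CODECS are assumptions, not sidewinder's actual codec table):

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

public class CodecDispatch {

    // Hypothetical stand-in for CompressionFactory.getClassById: codec id -> codec name.
    static final Map<Integer, String> CODECS = new HashMap<>();
    static {
        CODECS.put(0, "byzantine");
        CODECS.put(1, "gorilla");
    }

    static String codecFor(ByteBuffer buf) {
        // slice() gives an independent position over the same bytes,
        // so reading the header doesn't disturb the caller's buffer
        ByteBuffer slice = buf.slice();
        int codecId = (int) slice.get();
        String codec = CODECS.get(codecId);
        if (codec == null) {
            throw new IllegalStateException("Unknown codec id: " + codecId);
        }
        return codec;
    }

    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.put((byte) 1).rewind();
        System.out.println(codecFor(buf)); // gorilla
    }
}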

Example 28 with Writer

Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

From the class TimeSeries, method queryDataPoints.

/**
 * Extract {@link DataPoint}s for the supplied time range and value predicate.
 *
 * Each {@link DataPoint} has the supplied field name and tags set on it.
 *
 * @param appendFieldValueName
 *            field name to append to each datapoint
 * @param startTime
 *            beginning of the time range
 * @param endTime
 *            end of the time range
 * @param valuePredicate
 *            pushed-down filter for values
 * @return list of datapoints
 * @throws IOException
 */
public List<DataPoint> queryDataPoints(String appendFieldValueName, long startTime, long endTime, Predicate valuePredicate) throws IOException {
    if (startTime > endTime) {
        // swap start and end times if they were passed in reverse order (XOR swap, no temporary)
        startTime = startTime ^ endTime;
        endTime = endTime ^ startTime;
        startTime = startTime ^ endTime;
    }
    BetweenPredicate timeRangePredicate = new BetweenPredicate(startTime, endTime);
    logger.fine(getSeriesId() + " " + bucketMap.size() + " " + bucketCount + " " + startTime + "  " + endTime + " " + valuePredicate + " " + timeRangePredicate + " diff:" + (endTime - startTime));
    SortedMap<String, List<Writer>> series = correctTimeRangeScan(startTime, endTime);
    List<Reader> readers = new ArrayList<>();
    for (List<Writer> writers : series.values()) {
        for (Writer writer : writers) {
            readers.add(getReader(writer, timeRangePredicate, valuePredicate));
        }
    }
    List<DataPoint> points = new ArrayList<>();
    for (Reader reader : readers) {
        readerToDataPoints(points, reader);
    }
    return points;
}
Also used : BetweenPredicate(com.srotya.sidewinder.core.predicates.BetweenPredicate) ArrayList(java.util.ArrayList) Reader(com.srotya.sidewinder.core.storage.compression.Reader) List(java.util.List) Writer(com.srotya.sidewinder.core.storage.compression.Writer)
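
The guard at the top of queryDataPoints normalizes a reversed time range with a three-step XOR swap, exchanging the two longs without a temporary variable. A self-contained sketch of just that trick:

public class XorSwap {

    public static void main(String[] args) {
        long startTime = 1497720662566L; // reversed on purpose: start after end
        long endTime = 1497720652566L;
        if (startTime > endTime) {
            startTime = startTime ^ endTime; // startTime now holds start XOR end
            endTime = endTime ^ startTime;   // endTime becomes the original start
            startTime = startTime ^ endTime; // startTime becomes the original end
        }
        // prints: 1497720652566 <= 1497720662566
        System.out.println(startTime + " <= " + endTime);
    }
}

A temporary variable (or Math.min/Math.max) would read more plainly; the XOR form is equivalent for two distinct variables and simply avoids the extra local.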

Example 29 with Writer

Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

From the class TimeSeries, method getBucketMap.

/**
 * @return a flattened view of the bucket map, keyed by bucket name plus writer index
 */
public SortedMap<String, Writer> getBucketMap() {
    SortedMap<String, Writer> map = new TreeMap<>();
    for (Entry<String, List<Writer>> entry : bucketMap.entrySet()) {
        List<Writer> value = entry.getValue();
        for (int i = 0; i < value.size(); i++) {
            Writer bucketEntry = value.get(i);
            map.put(entry.getKey() + i, bucketEntry);
        }
    }
    return map;
}
Also used : ArrayList(java.util.ArrayList) List(java.util.List) TreeMap(java.util.TreeMap) Writer(com.srotya.sidewinder.core.storage.compression.Writer)
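
getBucketMap flattens a key-to-list map into a single sorted map by appending each writer's list index to its bucket key. The same shape with plain strings, as a generic sketch (FlattenBuckets and its sample data are illustrative, not sidewinder types):

import java.util.Arrays;
import java.util.List;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.TreeMap;

public class FlattenBuckets {

    // Flatten {bucket -> [v0, v1, ...]} into {bucket + index -> value},
    // mirroring the key scheme used by getBucketMap above.
    static SortedMap<String, String> flatten(SortedMap<String, List<String>> bucketMap) {
        SortedMap<String, String> map = new TreeMap<>();
        for (Entry<String, List<String>> entry : bucketMap.entrySet()) {
            List<String> value = entry.getValue();
            for (int i = 0; i < value.size(); i++) {
                map.put(entry.getKey() + i, value.get(i));
            }
        }
        return map;
    }

    public static void main(String[] args) {
        SortedMap<String, List<String>> buckets = new TreeMap<>();
        buckets.put("1497720000", Arrays.asList("writerA", "writerB"));
        // prints: {14977200000=writerA, 14977200001=writerB}
        System.out.println(flatten(buckets));
    }
}

One caveat of the concatenated-index scheme: bucket names that are prefixes of one another could in principle collide ("b" + 12 and "b1" + 2 both yield "b12"); uniformly sized bucket keys avoid that.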

Example 30 with Writer

Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.

From the class Measurement, method runCleanupOperation.

public default Set<String> runCleanupOperation(String operation, java.util.function.Function<TimeSeries, List<Writer>> op) throws IOException {
    Set<String> cleanupList = new HashSet<>();
    getLock().lock();
    try {
        for (TimeSeries entry : getTimeSeries()) {
            try {
                List<Writer> list = op.apply(entry);
                if (list == null) {
                    continue;
                }
                for (Writer timeSeriesBucket : list) {
                    cleanupList.add(timeSeriesBucket.getBufferId());
                    getLogger().fine("Adding buffer to cleanup " + operation + " for bucket:" + entry.getSeriesId() + " Offset:" + timeSeriesBucket.currentOffset());
                }
                getLogger().fine("Buffers " + operation + " for time series:" + entry.getSeriesId());
            } catch (Exception e) {
                getLogger().log(Level.SEVERE, "Error collecting " + operation, e);
            }
        }
        // cleanup these buffer ids
        if (cleanupList.size() > 0) {
            getLogger().info("For measurement:" + getMeasurementName() + " cleaned=" + cleanupList.size() + " buffers");
        }
        getMalloc().cleanupBufferIds(cleanupList);
    } finally {
        getLock().unlock();
    }
    return cleanupList;
}
Also used : Writer(com.srotya.sidewinder.core.storage.compression.Writer) IOException(java.io.IOException) HashSet(java.util.HashSet)
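
runCleanupOperation is essentially a template: take the measurement lock, apply a caller-supplied Function to every series, collect the returned buffer ids, and isolate per-series failures so one bad series doesn't abort the sweep. A generic sketch of that shape (CleanupRunner and its names are illustrative, not sidewinder's API):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Function;

public class CleanupRunner {

    private final ReentrantLock lock = new ReentrantLock();
    private final List<String> series = Arrays.asList("s1", "s2");

    // Apply op to every series while holding the lock; a failure on one
    // series is logged and skipped so the remaining series still run.
    Set<String> runCleanup(Function<String, List<String>> op) {
        Set<String> cleaned = new HashSet<>();
        lock.lock();
        try {
            for (String s : series) {
                try {
                    List<String> ids = op.apply(s);
                    if (ids != null) {
                        cleaned.addAll(ids);
                    }
                } catch (Exception e) {
                    System.err.println("Error collecting cleanup ids for " + s + ": " + e);
                }
            }
        } finally {
            // released even if iteration itself throws
            lock.unlock();
        }
        return cleaned;
    }

    public static void main(String[] args) {
        CleanupRunner runner = new CleanupRunner();
        Set<String> ids = runner.runCleanup(s -> Arrays.asList(s + "-buf0"));
        System.out.println(ids); // e.g. [s1-buf0, s2-buf0]; set order is unspecified
    }
}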

Aggregations

Writer (com.srotya.sidewinder.core.storage.compression.Writer): 35
ArrayList (java.util.ArrayList): 23
List (java.util.List): 16
Test (org.junit.Test): 16
ByteBuffer (java.nio.ByteBuffer): 15
Reader (com.srotya.sidewinder.core.storage.compression.Reader): 14
IOException (java.io.IOException): 12
HashMap (java.util.HashMap): 12
DataPoint (com.srotya.sidewinder.core.storage.DataPoint): 7
BetweenPredicate (com.srotya.sidewinder.core.predicates.BetweenPredicate): 6
Predicate (com.srotya.sidewinder.core.predicates.Predicate): 5
HashSet (java.util.HashSet): 5
Point (com.srotya.sidewinder.core.rpc.Point): 3
CompressionFactory (com.srotya.sidewinder.core.storage.compression.CompressionFactory): 3
RollOverException (com.srotya.sidewinder.core.storage.compression.RollOverException): 3
ValueWriter (com.srotya.sidewinder.core.storage.compression.ValueWriter): 3
Collections (java.util.Collections): 3
Comparator (java.util.Comparator): 3
Map (java.util.Map): 3
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 3