
Example 16 with TimeSeries

Use of com.srotya.sidewinder.core.storage.TimeSeries in project sidewinder by srotya.

From the class TestPersistentMeasurement, method testLinearizability:

@Test
public void testLinearizability() throws IOException, InterruptedException {
    for (int p = 0; p < 100; p++) {
        MiscUtils.delete(new File("target/db134/"));
        final long t1 = 1497720452566L;
        Measurement m = new PersistentMeasurement();
        m.configure(conf, engine, DBNAME, "m1", "target/db134/index", "target/db134/data", metadata, bgTaskPool);
        ExecutorService es = Executors.newFixedThreadPool(2, new BackgrounThreadFactory("tlinear"));
        AtomicBoolean wait = new AtomicBoolean(false);
        for (int i = 0; i < 2; i++) {
            final int th = i;
            es.submit(() -> {
                while (!wait.get()) {
                    try {
                        Thread.sleep(1);
                    } catch (InterruptedException e) {
                        // ignore; keep spinning until the gate opens
                    }
                }
                long t = t1 + th * 3;
                for (int j = 0; j < 100; j++) {
                    try {
                        TimeSeries ts = m.getOrCreateTimeSeries("vf1", Arrays.asList("t=1", "t=2"), 4096, false, conf);
                        long timestamp = t + j * 1000;
                        ts.addDataPoint(TimeUnit.MILLISECONDS, timestamp, j);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            });
        }
        es.shutdown();
        // open the gate so both writer threads start at the same time, then wait for them to finish
        wait.set(true);
        es.awaitTermination(100, TimeUnit.SECONDS);
        // both threads wrote 100 points each into the same series, and all of them land in a single bucket
        TimeSeries ts = m.getOrCreateTimeSeries("vf1", Arrays.asList("t=1", "t=2"), 4096, false, conf);
        List<DataPoint> dps = ts.queryDataPoints("vf1", t1 - 120, t1 + 1000_000, null);
        assertEquals(200, dps.size());
        assertEquals(1, ts.getBucketCount());
        m.close();
    }
}
Also used : Measurement(com.srotya.sidewinder.core.storage.Measurement) BackgrounThreadFactory(com.srotya.sidewinder.core.utils.BackgrounThreadFactory) TimeSeries(com.srotya.sidewinder.core.storage.TimeSeries) DataPoint(com.srotya.sidewinder.core.storage.DataPoint) IOException(java.io.IOException) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) ExecutorService(java.util.concurrent.ExecutorService) File(java.io.File) Test(org.junit.Test)
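
Stripped of the threading harness, the TimeSeries calls this test exercises reduce to the pattern below. This is only a sketch that assumes the same Measurement m and conf set up in the example; the timestamp and value passed here are placeholders.

TimeSeries ts = m.getOrCreateTimeSeries("vf1", Arrays.asList("t=1", "t=2"), 4096, false, conf);
ts.addDataPoint(TimeUnit.MILLISECONDS, System.currentTimeMillis(), 1.0);
List<DataPoint> points = ts.queryDataPoints("vf1", 0, Long.MAX_VALUE, null);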

Example 17 with TimeSeries

Use of com.srotya.sidewinder.core.storage.TimeSeries in project sidewinder by srotya.

From the class TestPersistentMeasurement, method testDataPointsRecovery:

@Test
public void testDataPointsRecovery() throws Exception {
    long ts = System.currentTimeMillis();
    MiscUtils.delete(new File("target/db132/"));
    List<String> tags = Arrays.asList("test=1", "test=2");
    PersistentMeasurement m = new PersistentMeasurement();
    Map<String, String> map = new HashMap<>();
    map.put("disk.compression.class", ByzantineWriter.class.getName());
    map.put("malloc.file.max", String.valueOf(1024 * 1024));
    try {
        m.configure(map, null, DBNAME, "m1", "target/db132/index", "target/db132/data", metadata, bgTaskPool);
        fail("Must throw invalid file max size exception");
    } catch (Exception e) {
        // expected: a 1 MB malloc.file.max is rejected as an invalid file max size
    }
    map.put("malloc.file.max", String.valueOf(2 * 1024 * 1024));
    m.configure(map, null, DBNAME, "m1", "target/db132/index", "target/db132/data", metadata, bgTaskPool);
    int LIMIT = 100000;
    for (int i = 0; i < LIMIT; i++) {
        TimeSeries t = m.getOrCreateTimeSeries("value", tags, 4096, false, map);
        t.addDataPoint(TimeUnit.MILLISECONDS, ts + i * 1000, 1L);
    }
    m.close();
    // reopen the measurement from disk and verify that all points are recovered
    m = new PersistentMeasurement();
    m.configure(map, null, DBNAME, "m1", "target/db132/index", "target/db132/data", metadata, bgTaskPool);
    List<Series> resultMap = new ArrayList<>();
    m.queryDataPoints("value", ts, ts + 1000 * LIMIT, null, null, resultMap);
    Iterator<Series> iterator = resultMap.iterator();
    assertEquals(LIMIT, iterator.next().getDataPoints().size());
    m.close();
}
Also used : TimeSeries(com.srotya.sidewinder.core.storage.TimeSeries) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) ByzantineWriter(com.srotya.sidewinder.core.storage.compression.byzantine.ByzantineWriter) ArrayList(java.util.ArrayList) IOException(java.io.IOException) DataPoint(com.srotya.sidewinder.core.storage.DataPoint) Series(com.srotya.sidewinder.core.storage.Series) File(java.io.File) Test(org.junit.Test)
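
Note that the Measurement-level query above fills a caller-supplied list rather than returning one. A minimal sketch of that call shape, assuming a configured PersistentMeasurement m as in the example and hypothetical startTs/endTs bounds:

List<Series> result = new ArrayList<>();
m.queryDataPoints("value", startTs, endTs, null, null, result);
for (Series s : result) {
    System.out.println(s.getDataPoints().size());
}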

Example 18 with TimeSeries

Use of com.srotya.sidewinder.core.storage.TimeSeries in project sidewinder by srotya.

From the class TestPersistentMeasurement, method testCompaction:

@Test
public void testCompaction() throws IOException {
    final long ts = 1484788896586L;
    MiscUtils.delete(new File("target/db45/"));
    List<String> tags = Arrays.asList("test=1", "test=2");
    PersistentMeasurement m = new PersistentMeasurement();
    Map<String, String> map = new HashMap<>();
    map.put("disk.compression.class", ByzantineWriter.class.getName());
    map.put("malloc.file.max", String.valueOf(2 * 1024 * 1024));
    map.put("malloc.ptrfile.increment", String.valueOf(1024));
    map.put("compaction.ratio", "1.2");
    map.put("compaction.enabled", "true");
    m.configure(map, null, DBNAME, "m1", "target/db45/index", "target/db45/data", metadata, bgTaskPool);
    int LIMIT = 7000;
    for (int i = 0; i < LIMIT; i++) {
        TimeSeries t = m.getOrCreateTimeSeries("value1", tags, 1024, false, map);
        t.addDataPoint(TimeUnit.MILLISECONDS, ts + i, i * 1.2);
    }
    assertEquals(1, m.getTimeSeries().size());
    TimeSeries series = m.getTimeSeries().iterator().next();
    assertEquals(1, series.getBucketRawMap().size());
    assertEquals(3, series.getBucketCount());
    assertEquals(3, series.getBucketRawMap().entrySet().iterator().next().getValue().size());
    assertEquals(1, series.getCompactionSet().size());
    // capture the largest per-writer point count before compaction for later comparison
    int maxDp = series.getBucketRawMap().values().stream().flatMap(v -> v.stream()).mapToInt(l -> l.getCount()).max().getAsInt();
    // check and read datapoint count before compaction
    List<DataPoint> queryDataPoints = series.queryDataPoints("", ts, ts + LIMIT + 1, null);
    assertEquals(LIMIT, queryDataPoints.size());
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = queryDataPoints.get(i);
        assertEquals(ts + i, dp.getTimestamp());
        assertEquals(i * 1.2, dp.getValue(), 0.01);
    }
    m.compact();
    // compaction merges writers: the bucket count drops from 3 to 2 and the pending compaction set is cleared
    assertEquals(2, series.getBucketCount());
    assertEquals(2, series.getBucketRawMap().entrySet().iterator().next().getValue().size());
    assertEquals(0, series.getCompactionSet().size());
    // the compacted writer holds at least as many points as the largest pre-compaction writer
    assertTrue(maxDp <= series.getBucketRawMap().values().stream().flatMap(v -> v.stream()).mapToInt(l -> l.getCount()).max().getAsInt());
    // validate query after compaction
    queryDataPoints = series.queryDataPoints("", ts, ts + LIMIT + 1, null);
    assertEquals(LIMIT, queryDataPoints.size());
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = queryDataPoints.get(i);
        assertEquals(ts + i, dp.getTimestamp());
        assertEquals(i * 1.2, dp.getValue(), 0.01);
    }
    // test buffer recovery after compaction, validate count
    m = new PersistentMeasurement();
    m.configure(map, null, DBNAME, "m1", "target/db45/index", "target/db45/data", metadata, bgTaskPool);
    series = m.getTimeSeries().iterator().next();
    queryDataPoints = series.queryDataPoints("", ts, ts + LIMIT + 1, null);
    assertEquals(LIMIT, queryDataPoints.size());
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = queryDataPoints.get(i);
        assertEquals(ts + i, dp.getTimestamp());
        assertEquals(i * 1.2, dp.getValue(), 0.01);
    }
    for (int i = 0; i < LIMIT; i++) {
        TimeSeries t = m.getOrCreateTimeSeries("value1", tags, 1024, false, map);
        t.addDataPoint(TimeUnit.MILLISECONDS, LIMIT + ts + i, i * 1.2);
    }
    // dump per-writer state (point count, read-only flag, byte at offset 1 of the raw buffer) for debugging
    series.getBucketRawMap().entrySet().iterator().next().getValue().stream().map(v -> "" + v.getCount() + ":" + v.isReadOnly() + ":" + (int) v.getRawBytes().get(1)).forEach(System.out::println);
    // test recovery again
    m = new PersistentMeasurement();
    m.configure(map, null, DBNAME, "m1", "target/db45/index", "target/db45/data", metadata, bgTaskPool);
    series = m.getTimeSeries().iterator().next();
    queryDataPoints = series.queryDataPoints("", ts - 1, ts + 2 + (LIMIT * 2), null);
    assertEquals(LIMIT * 2, queryDataPoints.size());
    for (int i = 0; i < LIMIT * 2; i++) {
        DataPoint dp = queryDataPoints.get(i);
        assertEquals("Error:" + i + " " + (dp.getTimestamp() - ts - i), ts + i, dp.getTimestamp());
    }
}
Also used : Measurement(com.srotya.sidewinder.core.storage.Measurement) Arrays(java.util.Arrays) BeforeClass(org.junit.BeforeClass) DBMetadata(com.srotya.sidewinder.core.storage.DBMetadata) MiscUtils(com.srotya.sidewinder.core.utils.MiscUtils) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) DataPoint(com.srotya.sidewinder.core.storage.DataPoint) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) MetricsRegistryService(com.srotya.sidewinder.core.monitoring.MetricsRegistryService) SetIndex(com.srotya.sidewinder.core.storage.mem.SetIndex) Map(java.util.Map) TagIndex(com.srotya.sidewinder.core.storage.TagIndex) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) Assert.fail(org.junit.Assert.fail) MemStorageEngine(com.srotya.sidewinder.core.storage.mem.MemStorageEngine) ExecutorService(java.util.concurrent.ExecutorService) StorageEngine(com.srotya.sidewinder.core.storage.StorageEngine) Reader(com.srotya.sidewinder.core.storage.compression.Reader) Iterator(java.util.Iterator) TimeSeries(com.srotya.sidewinder.core.storage.TimeSeries) Assert.assertTrue(org.junit.Assert.assertTrue) Set(java.util.Set) IOException(java.io.IOException) Test(org.junit.Test) ByzantineWriter(com.srotya.sidewinder.core.storage.compression.byzantine.ByzantineWriter) File(java.io.File) Executors(java.util.concurrent.Executors) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Tag(com.srotya.sidewinder.core.filters.Tag) BackgrounThreadFactory(com.srotya.sidewinder.core.utils.BackgrounThreadFactory) Series(com.srotya.sidewinder.core.storage.Series) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals)
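
For quick reference, the configuration keys these PersistentMeasurement tests rely on are collected below. The values are the ones used in the examples above; anything beyond that (defaults, units, valid ranges) is not shown here and should be treated as an assumption.

Map<String, String> conf = new HashMap<>();
// on-disk compression writer used by the tests
conf.put("disk.compression.class", ByzantineWriter.class.getName());
// maximum buffer file size; a 1 MB value is rejected in testDataPointsRecovery
conf.put("malloc.file.max", String.valueOf(2 * 1024 * 1024));
// pointer-file increment used by testCompaction
conf.put("malloc.ptrfile.increment", String.valueOf(1024));
// enable compaction and set the trigger ratio used by testCompaction
conf.put("compaction.enabled", "true");
conf.put("compaction.ratio", "1.2");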

Example 19 with TimeSeries

Use of com.srotya.sidewinder.core.storage.TimeSeries in project sidewinder by srotya.

From the class TestMemStorageEngine, method testCompaction:

@Test
public void testCompaction() throws IOException, InterruptedException {
    MemStorageEngine engine = new MemStorageEngine();
    HashMap<String, String> conf2 = new HashMap<>();
    conf2.put("default.bucket.size", "409600");
    conf2.put("compaction.enabled", "true");
    conf2.put("use.query.pool", "false");
    conf2.put("compaction.codec", "gorilla");
    conf2.put("compaction.delay", "1");
    conf2.put("compaction.frequency", "1");
    engine.configure(conf2, bgTasks);
    final long curr = 1497720652566L;
    String dbName = "test";
    String measurementName = "cpu";
    String valueFieldName = "value";
    String tag = "host=123123";
    List<String> tags = Arrays.asList(tag);
    for (int i = 1; i <= 10000; i++) {
        engine.writeDataPoint(MiscUtils.buildDataPoint(dbName, measurementName, valueFieldName, tags, curr + i * 1000, i * 1.1));
    }
    long ts = System.nanoTime();
    List<Series> queryDataPoints = engine.queryDataPoints(dbName, measurementName, valueFieldName, curr - 1000, curr + 10000 * 1000 + 1, null, null);
    ts = System.nanoTime() - ts;
    System.out.println("Before compaction:" + ts / 1000 + "us");
    assertEquals(1, queryDataPoints.size());
    assertEquals(10000, queryDataPoints.iterator().next().getDataPoints().size());
    List<DataPoint> dataPoints = queryDataPoints.iterator().next().getDataPoints();
    for (int i = 1; i <= 10000; i++) {
        DataPoint dp = dataPoints.get(i - 1);
        assertEquals("Bad ts:" + i, curr + i * 1000, dp.getTimestamp());
        assertEquals(dp.getValue(), i * 1.1, 0.001);
    }
    TimeSeries series = engine.getOrCreateTimeSeries(dbName, measurementName, valueFieldName, tags, 409600, false);
    SortedMap<String, List<Writer>> bucketRawMap = series.getBucketRawMap();
    assertEquals(1, bucketRawMap.size());
    int size = bucketRawMap.values().iterator().next().size();
    assertTrue(series.getCompactionSet().size() < size);
    assertTrue(size > 2);
    // give the scheduled background compaction (compaction.delay/compaction.frequency set to 1 above) time to run
    Thread.sleep(2000);
    ts = System.nanoTime();
    queryDataPoints = engine.queryDataPoints(dbName, measurementName, valueFieldName, curr - 1, curr + 20000 * 1000 + 1, null, null);
    ts = System.nanoTime() - ts;
    System.out.println("After compaction:" + ts / 1000 + "us");
    bucketRawMap = series.getBucketRawMap();
    assertEquals(2, bucketRawMap.values().iterator().next().size());
    assertEquals(10000, queryDataPoints.iterator().next().getDataPoints().size());
    dataPoints = queryDataPoints.iterator().next().getDataPoints();
    for (int i = 1; i <= 10000; i++) {
        DataPoint dp = dataPoints.get(i - 1);
        assertEquals("Bad ts:" + i, curr + i * 1000, dp.getTimestamp());
        assertEquals(dp.getValue(), i * 1.1, 0.001);
    }
}
Also used : TimeSeries(com.srotya.sidewinder.core.storage.TimeSeries) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) DataPoint(com.srotya.sidewinder.core.storage.DataPoint) Point(com.srotya.sidewinder.core.rpc.Point) Series(com.srotya.sidewinder.core.storage.Series) ArrayList(java.util.ArrayList) List(java.util.List) Test(org.junit.Test)
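
The engine-level API in this test follows the same write-then-query shape; a minimal sketch assuming a MemStorageEngine configured as above, with placeholder loop bounds and values:

long base = System.currentTimeMillis();
for (int i = 1; i <= 100; i++) {
    engine.writeDataPoint(MiscUtils.buildDataPoint("test", "cpu", "value", Arrays.asList("host=123123"), base + i * 1000, i * 1.1));
}
List<Series> result = engine.queryDataPoints("test", "cpu", "value", base, base + 100 * 1000 + 1, null, null);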

Example 20 with TimeSeries

Use of com.srotya.sidewinder.core.storage.TimeSeries in project sidewinder by srotya.

From the class PersistentMeasurement, method loadEntry:

private void loadEntry(String entry) {
    String[] split = entry.split(MD_SEPARATOR);
    String seriesId = split[0];
    logger.fine("Loading Timeseries:" + seriesId);
    try {
        String timeBucketSize = split[2];
        String isFp = split[1];
        TimeSeries timeSeries = new TimeSeries(this, compressionCodec, compactionCodec, seriesId, Integer.parseInt(timeBucketSize), metadata, Boolean.parseBoolean(isFp), conf);
        String[] split2 = seriesId.split(SERIESID_SEPARATOR);
        String valueField = split2[1];
        seriesId = split2[0];
        Integer seriesIdx = seriesMap.get(seriesId);
        SeriesFieldMap m = null;
        if (seriesIdx == null) {
            // first time this series id is seen: register it at the index recorded in the metadata entry
            seriesIdx = Integer.parseInt(split[3], 16);
            m = new SeriesFieldMap(seriesId);
            seriesMap.put(seriesId, seriesIdx);
            seriesList.add(seriesIdx, m);
        } else {
            m = seriesList.get(seriesIdx);
        }
        m.addSeries(valueField, timeSeries);
        logger.fine("Intialized Timeseries:" + seriesId);
    } catch (NumberFormatException | IOException e) {
        logger.log(Level.SEVERE, "Failed to load series:" + entry, e);
    }
}
Also used : TimeSeries(com.srotya.sidewinder.core.storage.TimeSeries) IOException(java.io.IOException) SeriesFieldMap(com.srotya.sidewinder.core.storage.SeriesFieldMap)
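
The layout of a metadata entry parsed by loadEntry, as inferred from the split indices above (the separator characters are whatever MD_SEPARATOR and SERIESID_SEPARATOR resolve to in PersistentMeasurement; this is an illustration, not the authoritative format):

// split[0] -> seriesId, itself "<seriesKey><SERIESID_SEPARATOR><valueField>"
// split[1] -> isFp, the floating-point flag (Boolean.parseBoolean)
// split[2] -> timeBucketSize (Integer.parseInt)
// split[3] -> series index, hex encoded (Integer.parseInt(split[3], 16))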

Aggregations

TimeSeries (com.srotya.sidewinder.core.storage.TimeSeries): 22
DataPoint (com.srotya.sidewinder.core.storage.DataPoint): 14
Test (org.junit.Test): 14
File (java.io.File): 12
LinkedHashMap (java.util.LinkedHashMap): 12
HashMap (java.util.HashMap): 11
IOException (java.io.IOException): 9
Series (com.srotya.sidewinder.core.storage.Series): 8
ArrayList (java.util.ArrayList): 8
ByzantineWriter (com.srotya.sidewinder.core.storage.compression.byzantine.ByzantineWriter): 6
Measurement (com.srotya.sidewinder.core.storage.Measurement): 5
BackgrounThreadFactory (com.srotya.sidewinder.core.utils.BackgrounThreadFactory): 5
ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService): 5
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 5
Point (com.srotya.sidewinder.core.rpc.Point): 4
List (java.util.List): 4
ExecutorService (java.util.concurrent.ExecutorService): 4
DBMetadata (com.srotya.sidewinder.core.storage.DBMetadata): 3
SeriesFieldMap (com.srotya.sidewinder.core.storage.SeriesFieldMap): 3
Tag (com.srotya.sidewinder.core.filters.Tag): 2