Use of com.srotya.sidewinder.core.storage.TimeSeries in project sidewinder by srotya: the class TestPersistentMeasurement, method testLinearizability.
@Test
public void testLinearizability() throws IOException, InterruptedException {
    for (int p = 0; p < 100; p++) {
        MiscUtils.delete(new File("target/db134/"));
        final long t1 = 1497720452566L;
        Measurement m = new PersistentMeasurement();
        m.configure(conf, engine, DBNAME, "m1", "target/db134/index", "target/db134/data", metadata, bgTaskPool);
        ExecutorService es = Executors.newFixedThreadPool(2, new BackgrounThreadFactory("tlinear"));
        AtomicBoolean wait = new AtomicBoolean(false);
        for (int i = 0; i < 2; i++) {
            final int th = i;
            es.submit(() -> {
                // spin until the main thread opens the gate, so both writers start together
                while (!wait.get()) {
                    try {
                        Thread.sleep(1);
                    } catch (InterruptedException e) {
                    }
                }
                long t = t1 + th * 3;
                for (int j = 0; j < 100; j++) {
                    try {
                        TimeSeries ts = m.getOrCreateTimeSeries("vf1", Arrays.asList("t=1", "t=2"), 4096, false, conf);
                        long timestamp = t + j * 1000;
                        ts.addDataPoint(TimeUnit.MILLISECONDS, timestamp, j);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            });
        }
        es.shutdown();
        wait.set(true); // release both writer threads at once
        es.awaitTermination(100, TimeUnit.SECONDS);
        TimeSeries ts = m.getOrCreateTimeSeries("vf1", Arrays.asList("t=1", "t=2"), 4096, false, conf);
        List<DataPoint> dps = ts.queryDataPoints("vf1", t1 - 120, t1 + 1000_000, null);
        // two threads x 100 writes with distinct timestamps = 200 points in one bucket
        assertEquals(200, dps.size());
        assertEquals(1, ts.getBucketCount());
        m.close();
    }
}
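The test releases both writers at the same instant by spinning on a shared AtomicBoolean. The JDK's CountDownLatch expresses the same start gate without busy-waiting; a minimal, self-contained sketch of that pattern (illustrative only, not part of the test):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class StartGateSketch {
    public static void main(String[] args) throws InterruptedException {
        CountDownLatch startGate = new CountDownLatch(1);
        ExecutorService es = Executors.newFixedThreadPool(2);
        for (int i = 0; i < 2; i++) {
            final int th = i;
            es.submit(() -> {
                try {
                    startGate.await(); // block until the gate opens
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return;
                }
                System.out.println("writer " + th + " released");
            });
        }
        es.shutdown();
        startGate.countDown(); // release both writers at once
        es.awaitTermination(10, TimeUnit.SECONDS);
    }
}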
Use of com.srotya.sidewinder.core.storage.TimeSeries in project sidewinder by srotya: the class TestPersistentMeasurement, method testDataPointsRecovery.
@Test
public void testDataPointsRecovery() throws Exception {
    long ts = System.currentTimeMillis();
    MiscUtils.delete(new File("target/db132/"));
    List<String> tags = Arrays.asList("test=1", "test=2");
    PersistentMeasurement m = new PersistentMeasurement();
    Map<String, String> map = new HashMap<>();
    map.put("disk.compression.class", ByzantineWriter.class.getName());
    map.put("malloc.file.max", String.valueOf(1024 * 1024));
    try {
        m.configure(map, null, DBNAME, "m1", "target/db132/index", "target/db132/data", metadata, bgTaskPool);
        fail("Must throw invalid file max size exception");
    } catch (Exception e) {
    }
    map.put("malloc.file.max", String.valueOf(2 * 1024 * 1024));
    m.configure(map, null, DBNAME, "m1", "target/db132/index", "target/db132/data", metadata, bgTaskPool);
    int LIMIT = 100000;
    for (int i = 0; i < LIMIT; i++) {
        TimeSeries t = m.getOrCreateTimeSeries("value", tags, 4096, false, map);
        t.addDataPoint(TimeUnit.MILLISECONDS, ts + i * 1000, 1L);
    }
    m.close();
    // reopen the measurement and verify all points are recovered from disk
    m = new PersistentMeasurement();
    m.configure(map, null, DBNAME, "m1", "target/db132/index", "target/db132/data", metadata, bgTaskPool);
    List<Series> resultMap = new ArrayList<>();
    m.queryDataPoints("value", ts, ts + 1000 * LIMIT, null, null, resultMap);
    Iterator<Series> iterator = resultMap.iterator();
    assertEquals(LIMIT, iterator.next().getDataPoints().size());
    m.close();
}
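The first configure() call is expected to fail because a 1 MiB malloc.file.max is below the measurement's minimum mapped-file size; the test then retries with 2 MiB and succeeds. A sketch of the kind of guard implied, assuming a hypothetical 2 MiB floor (the project's actual constant and exception type may differ):

import java.util.HashMap;
import java.util.Map;

public class FileMaxGuardSketch {
    // hypothetical floor, not the project's actual constant
    static final int MIN_FILE_MAX = 2 * 1024 * 1024;

    static void validateFileMax(Map<String, String> conf) {
        int fileMax = Integer.parseInt(conf.getOrDefault("malloc.file.max", String.valueOf(MIN_FILE_MAX)));
        if (fileMax < MIN_FILE_MAX) {
            throw new IllegalArgumentException("malloc.file.max too small: " + fileMax);
        }
    }

    public static void main(String[] args) {
        Map<String, String> conf = new HashMap<>();
        conf.put("malloc.file.max", String.valueOf(1024 * 1024));
        validateFileMax(conf); // throws, mirroring the test's first configure() call
    }
}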
Use of com.srotya.sidewinder.core.storage.TimeSeries in project sidewinder by srotya: the class TestPersistentMeasurement, method testCompaction.
@Test
public void testCompaction() throws IOException {
    final long ts = 1484788896586L;
    MiscUtils.delete(new File("target/db45/"));
    List<String> tags = Arrays.asList("test=1", "test=2");
    PersistentMeasurement m = new PersistentMeasurement();
    Map<String, String> map = new HashMap<>();
    map.put("disk.compression.class", ByzantineWriter.class.getName());
    map.put("malloc.file.max", String.valueOf(2 * 1024 * 1024));
    map.put("malloc.ptrfile.increment", String.valueOf(1024));
    map.put("compaction.ratio", "1.2");
    map.put("compaction.enabled", "true");
    m.configure(map, null, DBNAME, "m1", "target/db45/index", "target/db45/data", metadata, bgTaskPool);
    int LIMIT = 7000;
    for (int i = 0; i < LIMIT; i++) {
        TimeSeries t = m.getOrCreateTimeSeries("value1", tags, 1024, false, map);
        t.addDataPoint(TimeUnit.MILLISECONDS, ts + i, i * 1.2);
    }
    assertEquals(1, m.getTimeSeries().size());
    TimeSeries series = m.getTimeSeries().iterator().next();
    assertEquals(1, series.getBucketRawMap().size());
    assertEquals(3, series.getBucketCount());
    assertEquals(3, series.getBucketRawMap().entrySet().iterator().next().getValue().size());
    assertEquals(1, series.getCompactionSet().size());
    int maxDp = series.getBucketRawMap().values().stream().flatMap(v -> v.stream()).mapToInt(l -> l.getCount()).max().getAsInt();
    // check and read datapoint count before compaction
    List<DataPoint> queryDataPoints = series.queryDataPoints("", ts, ts + LIMIT + 1, null);
    assertEquals(LIMIT, queryDataPoints.size());
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = queryDataPoints.get(i);
        assertEquals(ts + i, dp.getTimestamp());
        assertEquals(i * 1.2, dp.getValue(), 0.01);
    }
    m.compact();
    // compaction merges the sealed buffers: 3 writers in the bucket become 2
    assertEquals(2, series.getBucketCount());
    assertEquals(2, series.getBucketRawMap().entrySet().iterator().next().getValue().size());
    assertEquals(0, series.getCompactionSet().size());
    assertTrue(maxDp <= series.getBucketRawMap().values().stream().flatMap(v -> v.stream()).mapToInt(l -> l.getCount()).max().getAsInt());
    // validate query results after compaction
    queryDataPoints = series.queryDataPoints("", ts, ts + LIMIT + 1, null);
    assertEquals(LIMIT, queryDataPoints.size());
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = queryDataPoints.get(i);
        assertEquals(ts + i, dp.getTimestamp());
        assertEquals(i * 1.2, dp.getValue(), 0.01);
    }
    // test buffer recovery after compaction, validate count
    m = new PersistentMeasurement();
    m.configure(map, null, DBNAME, "m1", "target/db45/index", "target/db45/data", metadata, bgTaskPool);
    series = m.getTimeSeries().iterator().next();
    queryDataPoints = series.queryDataPoints("", ts, ts + LIMIT + 1, null);
    assertEquals(LIMIT, queryDataPoints.size());
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = queryDataPoints.get(i);
        assertEquals(ts + i, dp.getTimestamp());
        assertEquals(i * 1.2, dp.getValue(), 0.01);
    }
    // write a second batch of points after recovery
    for (int i = 0; i < LIMIT; i++) {
        TimeSeries t = m.getOrCreateTimeSeries("value1", tags, 1024, false, map);
        t.addDataPoint(TimeUnit.MILLISECONDS, LIMIT + ts + i, i * 1.2);
    }
    series.getBucketRawMap().entrySet().iterator().next().getValue().stream().map(v -> "" + v.getCount() + ":" + v.isReadOnly() + ":" + (int) v.getRawBytes().get(1)).forEach(System.out::println);
    // test recovery again
    m = new PersistentMeasurement();
    m.configure(map, null, DBNAME, "m1", "target/db45/index", "target/db45/data", metadata, bgTaskPool);
    series = m.getTimeSeries().iterator().next();
    queryDataPoints = series.queryDataPoints("", ts - 1, ts + 2 + (LIMIT * 2), null);
    assertEquals(LIMIT * 2, queryDataPoints.size());
    for (int i = 0; i < LIMIT * 2; i++) {
        DataPoint dp = queryDataPoints.get(i);
        assertEquals("Error:" + i + " " + (dp.getTimestamp() - ts - i), ts + i, dp.getTimestamp());
    }
}
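All 7000 points land in a single time bucket because they are only 1 ms apart; the three entries under that bucket key are successive writer buffers that filled up, and compaction merges the sealed ones. A quick arithmetic check of the single-bucket claim, assuming the time-bucket size of 1024 is in seconds (as the 4096 default used elsewhere in these tests suggests):

public class BucketArithmeticSketch {
    public static void main(String[] args) {
        long ts = 1484788896586L;          // the test's base timestamp (ms)
        long bucketMillis = 1024L * 1000;  // assumed: 1024 s bucket size
        long first = ts / bucketMillis;
        long last = (ts + 6999) / bucketMillis; // last of 7000 points, 1 ms apart
        System.out.println(first == last);      // true: all points share one bucket key
    }
}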
Use of com.srotya.sidewinder.core.storage.TimeSeries in project sidewinder by srotya: the class TestMemStorageEngine, method testCompaction.
@Test
public void testCompaction() throws IOException, InterruptedException {
    MemStorageEngine engine = new MemStorageEngine();
    HashMap<String, String> conf2 = new HashMap<>();
    conf2.put("default.bucket.size", "409600");
    conf2.put("compaction.enabled", "true");
    conf2.put("use.query.pool", "false");
    conf2.put("compaction.codec", "gorilla");
    conf2.put("compaction.delay", "1");
    conf2.put("compaction.frequency", "1");
    engine.configure(conf2, bgTasks);
    final long curr = 1497720652566L;
    String dbName = "test";
    String measurementName = "cpu";
    String valueFieldName = "value";
    String tag = "host=123123";
    List<String> tags = Arrays.asList(tag);
    for (int i = 1; i <= 10000; i++) {
        engine.writeDataPoint(MiscUtils.buildDataPoint(dbName, measurementName, valueFieldName, tags, curr + i * 1000, i * 1.1));
    }
    long ts = System.nanoTime();
    List<Series> queryDataPoints = engine.queryDataPoints(dbName, measurementName, valueFieldName, curr - 1000, curr + 10000 * 1000 + 1, null, null);
    ts = System.nanoTime() - ts;
    System.out.println("Before compaction:" + ts / 1000 + "us");
    assertEquals(1, queryDataPoints.size());
    assertEquals(10000, queryDataPoints.iterator().next().getDataPoints().size());
    List<DataPoint> dataPoints = queryDataPoints.iterator().next().getDataPoints();
    for (int i = 1; i <= 10000; i++) {
        DataPoint dp = dataPoints.get(i - 1);
        assertEquals("Bad ts:" + i, curr + i * 1000, dp.getTimestamp());
        assertEquals(dp.getValue(), i * 1.1, 0.001);
    }
    TimeSeries series = engine.getOrCreateTimeSeries(dbName, measurementName, valueFieldName, tags, 409600, false);
    SortedMap<String, List<Writer>> bucketRawMap = series.getBucketRawMap();
    assertEquals(1, bucketRawMap.size());
    int size = bucketRawMap.values().iterator().next().size();
    assertTrue(series.getCompactionSet().size() < size);
    assertTrue(size > 2);
    // give the background compaction thread time to run at least once
    Thread.sleep(2000);
    ts = System.nanoTime();
    queryDataPoints = engine.queryDataPoints(dbName, measurementName, valueFieldName, curr - 1, curr + 20000 * 1000 + 1, null, null);
    ts = System.nanoTime() - ts;
    System.out.println("After compaction:" + ts / 1000 + "us");
    bucketRawMap = series.getBucketRawMap();
    assertEquals(2, bucketRawMap.values().iterator().next().size());
    assertEquals(10000, queryDataPoints.iterator().next().getDataPoints().size());
    dataPoints = queryDataPoints.iterator().next().getDataPoints();
    for (int i = 1; i <= 10000; i++) {
        DataPoint dp = dataPoints.get(i - 1);
        assertEquals("Bad ts:" + i, curr + i * 1000, dp.getTimestamp());
        assertEquals(dp.getValue(), i * 1.1, 0.001);
    }
}
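Unlike the PersistentMeasurement test, compaction here is not triggered explicitly: the Thread.sleep(2000) gives the background compactor (configured with compaction.delay=1 and compaction.frequency=1, presumably seconds) at least one pass. A generic sketch of such a schedule; the task body is a stand-in, not the engine's actual hook:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class CompactionScheduleSketch {
    public static void main(String[] args) throws InterruptedException {
        ScheduledExecutorService bg = Executors.newSingleThreadScheduledExecutor();
        // stand-in for the engine's compaction pass
        Runnable compactionPass = () -> System.out.println("compaction pass");
        bg.scheduleAtFixedRate(compactionPass, 1, 1, TimeUnit.SECONDS); // delay=1s, frequency=1s
        Thread.sleep(2500); // mirrors the test's sleep: at least two passes run
        bg.shutdown();
    }
}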
Use of com.srotya.sidewinder.core.storage.TimeSeries in project sidewinder by srotya: the class PersistentMeasurement, method loadEntry.
private void loadEntry(String entry) {
    String[] split = entry.split(MD_SEPARATOR);
    String seriesId = split[0];
    logger.fine("Loading Timeseries:" + seriesId);
    try {
        String timeBucketSize = split[2];
        String isFp = split[1];
        TimeSeries timeSeries = new TimeSeries(this, compressionCodec, compactionCodec, seriesId, Integer.parseInt(timeBucketSize), metadata, Boolean.parseBoolean(isFp), conf);
        String[] split2 = seriesId.split(SERIESID_SEPARATOR);
        String valueField = split2[1];
        seriesId = split2[0];
        Integer seriesIdx = seriesMap.get(seriesId);
        SeriesFieldMap m = null;
        if (seriesIdx == null) {
            // first field seen for this series key: register a new SeriesFieldMap at the persisted index
            seriesIdx = Integer.parseInt(split[3], 16);
            m = new SeriesFieldMap(seriesId);
            seriesMap.put(seriesId, seriesIdx);
            seriesList.add(seriesIdx, m);
        } else {
            m = seriesList.get(seriesIdx);
        }
        m.addSeries(valueField, timeSeries);
        logger.fine("Initialized Timeseries:" + seriesId);
    } catch (NumberFormatException | IOException e) {
        logger.log(Level.SEVERE, "Failed to load series:" + entry, e);
    }
}
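As the parsing above shows, loadEntry expects four MD_SEPARATOR-delimited fields: the series id (itself a SERIESID_SEPARATOR-joined series key and value-field name), the floating-point flag, the time-bucket size, and the series index in hex. A sketch of composing and re-parsing such an entry; the literal '#' and '~' separators are stand-ins for the project's constants:

public class MetadataEntrySketch {
    // stand-ins; the real MD_SEPARATOR and SERIESID_SEPARATOR are project constants
    static final String MD_SEPARATOR = "#";
    static final String SERIESID_SEPARATOR = "~";

    public static void main(String[] args) {
        String seriesId = "host=123" + SERIESID_SEPARATOR + "value";
        String entry = seriesId + MD_SEPARATOR + "false" + MD_SEPARATOR + "4096"
                + MD_SEPARATOR + Integer.toHexString(12);
        String[] split = entry.split(MD_SEPARATOR);
        System.out.println(split[0]);                        // series key + value field
        System.out.println(Boolean.parseBoolean(split[1]));  // isFp = false
        System.out.println(Integer.parseInt(split[2]));      // time bucket size = 4096
        System.out.println(Integer.parseInt(split[3], 16));  // series index = 12
    }
}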