Usage example of com.srotya.sidewinder.core.storage.TimeSeries in the sidewinder project by srotya.
From class WriterServiceImpl, method writeSeriesPoint:
@Override
public void writeSeriesPoint(RawTimeSeriesBucket request, StreamObserver<Ack> responseObserver) {
	// Replays raw, pre-compressed series buckets into the engine and acks the
	// client with 200 on success or 500 on any failure.
	Ack ack;
	try {
		TimeSeries series = engine.getOrCreateTimeSeries(request.getDbName(), request.getMeasurementName(),
				request.getValueFieldName(), new ArrayList<>(request.getTagsList()), request.getBucketSize(),
				request.getFp());
		for (Bucket bucket : request.getBucketsList()) {
			Writer writer = series.getOrCreateSeriesBucket(TimeUnit.MILLISECONDS, bucket.getHeaderTimestamp());
			writer.configure(conf, null, false, 1, true);
			writer.setCounter(bucket.getCount());
			// Read-only view avoids copying the bucket payload before bootstrap.
			writer.bootstrap(bucket.getData().asReadOnlyByteBuffer());
		}
		ack = Ack.newBuilder().setMessageId(request.getMessageId()).setResponseCode(200).build();
	} catch (Exception e) {
		// Log the cause instead of silently swallowing it — the client only sees
		// a bare 500 code, so without this the failure reason is lost entirely.
		java.util.logging.Logger.getLogger(getClass().getName()).log(java.util.logging.Level.SEVERE,
				"Failed to write series point for db:" + request.getDbName() + " measurement:"
						+ request.getMeasurementName(),
				e);
		ack = Ack.newBuilder().setMessageId(request.getMessageId()).setResponseCode(500).build();
	}
	responseObserver.onNext(ack);
	responseObserver.onCompleted();
}
Usage example of com.srotya.sidewinder.core.storage.TimeSeries in the sidewinder project by srotya.
From class TestDiskStorageEngine, method testConcurrentOperations:
@Test
public void testConcurrentOperations() throws Exception {
	// Verifies that two writers inserting 100 points each into the same series
	// produce exactly 200 queryable data points.
	MiscUtils.delete(new File("target/dst-6/data"));
	final StorageEngine engine = new DiskStorageEngine();
	Map<String, String> conf = new HashMap<>();
	conf.put("data.dir", "target/dst-6/data");
	conf.put("index.dir", "target/dst-6/index");
	engine.configure(conf, bgTasks);
	final long ts = System.currentTimeMillis();
	ExecutorService es = Executors.newFixedThreadPool(2, new BackgrounThreadFactory("wr1"));
	String measurementName = "mmm2";
	String valueFieldName = "v1";
	String dbName = "db9";
	String tag = "h=1";
	// Keep the futures: exceptions thrown inside submitted tasks are captured
	// by the Future and would otherwise be silently discarded, letting the
	// test proceed past failed writes.
	List<java.util.concurrent.Future<?>> futures = new ArrayList<>();
	for (int k = 0; k < 2; k++) {
		final int p = k;
		futures.add(es.submit(() -> {
			long t = ts + p;
			for (int i = 0; i < 100; i++) {
				Point dp = MiscUtils.buildDataPoint(dbName, measurementName, valueFieldName, Arrays.asList(tag),
						t + i * 1000, i);
				try {
					engine.writeDataPoint(dp);
				} catch (Exception e) {
					e.printStackTrace();
					throw new RuntimeException(e);
				}
			}
			System.err.println("Completed writes:" + 100 + " data points");
		}));
	}
	es.shutdown();
	// Fail fast if the writers did not finish in time instead of ignoring it.
	assertTrue(es.awaitTermination(100, TimeUnit.SECONDS));
	// Propagate any task failure (get() rethrows as ExecutionException).
	for (java.util.concurrent.Future<?> future : futures) {
		future.get();
	}
	assertEquals(1, engine.getAllMeasurementsForDb(dbName).size());
	assertEquals(1, engine.getMeasurementMap().size());
	try {
		TimeSeries timeSeries = engine.getTimeSeries(dbName, measurementName, valueFieldName, Arrays.asList(tag));
		assertNotNull(timeSeries);
	} catch (ItemNotFoundException e) {
		fail("Time series must exist");
	}
	List<Series> queryDataPoints = engine.queryDataPoints(dbName, measurementName, valueFieldName, ts,
			ts + 220 * 1000, null);
	assertEquals(1, queryDataPoints.size());
	Series next = queryDataPoints.iterator().next();
	// 2 writers x 100 points each.
	assertEquals(200, next.getDataPoints().size());
}
Usage example of com.srotya.sidewinder.core.storage.TimeSeries in the sidewinder project by srotya.
From class TestDiskStorageEngine, method testUpdateTimeSeriesRetention:
@Test
public void testUpdateTimeSeriesRetention() throws IOException {
	// Checks that a retention-policy update, applied at each level of
	// granularity, changes the series' retention bucket count.
	MiscUtils.delete(new File("target/dst-2/"));
	DiskStorageEngine storageEngine = new DiskStorageEngine();
	Map<String, String> config = new HashMap<>();
	config.put("data.dir", "target/dst-2/data");
	config.put("index.dir", "target/dst-2/index");
	storageEngine.configure(config, bgTasks);
	storageEngine.getOrCreateMeasurement("db1", "m1");
	storageEngine.updateDefaultTimeSeriesRetentionPolicy("db1", 10);
	assertEquals(10, storageEngine.getDbMetadataMap().get("db1").getRetentionHours());
	TimeSeries series = storageEngine.getOrCreateTimeSeries("db1", "m1", "vf1", Arrays.asList("t=1"), 4096, false);
	int initialBucketCount = series.getRetentionBuckets();
	// Update retention via every API level: default, database, measurement, series.
	storageEngine.updateDefaultTimeSeriesRetentionPolicy("db1", 30);
	storageEngine.updateTimeSeriesRetentionPolicy("db1", 30);
	storageEngine.updateTimeSeriesRetentionPolicy("db1", "m1", 40);
	storageEngine.updateTimeSeriesRetentionPolicy("db1", "m1", "vf1", Arrays.asList("t=1"), 60);
	assertTrue(initialBucketCount != series.getRetentionBuckets());
}
Usage example of com.srotya.sidewinder.core.storage.TimeSeries in the sidewinder project by srotya.
From class TestPersistentMeasurement, method testCompactionEmptyLineValidation:
@Test
public void testCompactionEmptyLineValidation() throws IOException {
	// Writes enough points to trigger compaction, runs GC + compaction, then
	// reopens the measurement from disk to validate the persisted state.
	final long ts = 1484788896586L;
	MiscUtils.delete(new File("target/db46/"));
	List<String> tags = Arrays.asList("test=1", "test=2");
	PersistentMeasurement m = new PersistentMeasurement();
	Map<String, String> map = new HashMap<>();
	map.put("disk.compression.class", ByzantineWriter.class.getName());
	map.put("malloc.file.max", String.valueOf(2 * 1024 * 1024));
	map.put("malloc.ptrfile.increment", String.valueOf(256));
	map.put("compaction.ratio", "1.2");
	map.put("compaction.enabled", "true");
	m.configure(map, null, DBNAME, "m1", "target/db46/index", "target/db46/data", metadata, bgTaskPool);
	int LIMIT = 34500;
	for (int i = 0; i < LIMIT; i++) {
		TimeSeries t = m.getOrCreateTimeSeries("value1", tags, 1024, false, map);
		t.addDataPoint(TimeUnit.MILLISECONDS, ts + i * 100, i * 1.2);
	}
	m.collectGarbage(null);
	System.err.println("Gc complete");
	m.compact();
	// Touch the series to ensure the compacted measurement is still readable.
	m.getTimeSeries().iterator().next();
	// Close before reopening from the same directories — the original leaked
	// the first instance's file handles (testDataPointsRecoveryPTR closes too).
	m.close();
	m = new PersistentMeasurement();
	m.configure(map, null, DBNAME, "m1", "target/db46/index", "target/db46/data", metadata, bgTaskPool);
	m.close();
}
Usage example of com.srotya.sidewinder.core.storage.TimeSeries in the sidewinder project by srotya.
From class TestPersistentMeasurement, method testDataPointsRecoveryPTR:
@Test
public void testDataPointsRecoveryPTR() throws Exception {
	// Writes one point into each of 100 value fields, closes the measurement,
	// reopens it from disk, and verifies all series are recovered via the
	// pointer file.
	long startTs = System.currentTimeMillis();
	MiscUtils.delete(new File("target/db290/"));
	List<String> tagList = Arrays.asList("test=1", "test=2");
	Map<String, String> conf = new HashMap<>();
	conf.put("disk.compression.class", ByzantineWriter.class.getName());
	conf.put("malloc.file.max", String.valueOf(2 * 1024 * 1024));
	conf.put("malloc.ptrfile.increment", String.valueOf(2 * 1024));
	PersistentMeasurement measurement = new PersistentMeasurement();
	measurement.configure(conf, null, DBNAME, "m1", "target/db290/index", "target/db290/data", metadata, bgTaskPool);
	final int seriesCount = 100;
	for (int i = 0; i < seriesCount; i++) {
		// One distinct value field per iteration: value0 .. value99.
		measurement.getOrCreateTimeSeries("value" + i, tagList, 4096, false, conf)
				.addDataPoint(TimeUnit.MILLISECONDS, startTs, 1L);
	}
	measurement.close();
	// Reopen from the same directories to exercise the recovery path.
	measurement = new PersistentMeasurement();
	measurement.configure(conf, null, DBNAME, "m1", "target/db290/index", "target/db290/data", metadata, bgTaskPool);
	List<Series> results = new ArrayList<>();
	measurement.queryDataPoints("value.*", startTs, startTs + 1000, null, null, results);
	assertEquals(seriesCount, results.size());
	measurement.close();
}
Aggregations (related usage examples).