Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.
In class TestByzantineReadWrite, method testWriteDataPoint:
@Test
public void testWriteDataPoint() throws IOException {
    ByteBuffer buf = ByteBuffer.allocateDirect(1024);
    ByzantineWriter bwriter = new ByzantineWriter();
    Writer writer = bwriter;
    // startOffset is a field of the test class
    writer.configure(new HashMap<>(), buf, true, startOffset, false);
    long ts = System.currentTimeMillis();
    writer.setHeaderTimestamp(ts);
    // Write 10 points, 1 ms apart
    for (int i = 0; i < 10; i++) {
        writer.addValue(ts + i, i);
    }
    assertEquals(10, bwriter.getCount());
    assertEquals(ts + 9, bwriter.getPrevTs());
    // Inspect the raw buffer: skip the first byte, then read back the
    // point count and the header timestamp written by the writer
    buf = bwriter.getBuf();
    buf.flip();
    buf.get();
    assertEquals(10, buf.getInt());
    assertEquals(ts, buf.getLong());
}
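From the assertions above, the serialized buffer appears to begin with a single byte (skipped by buf.get()), followed by a 4-byte point count and the 8-byte header timestamp. This layout is inferred from the test's reads, not from the writer's documentation.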
In class TestByzantineReadWrite, method testReadWriteDataPoints:
@Test
public void testReadWriteDataPoints() throws IOException {
    ByteBuffer buf = ByteBuffer.allocateDirect(1024);
    Writer writer = new ByzantineWriter();
    writer.configure(new HashMap<>(), buf, true, startOffset, false);
    long ts = System.currentTimeMillis();
    writer.setHeaderTimestamp(ts);
    // Write 100 points at 10 ms intervals via addValue
    for (int i = 0; i < 100; i++) {
        writer.addValue(ts + i * 10, i);
    }
    Reader reader = writer.getReader();
    for (int i = 0; i < 100; i++) {
        DataPoint pair = reader.readPair();
        assertEquals(ts + i * 10, pair.getTimestamp());
        assertEquals(i, pair.getLongValue());
    }
    // Append 100 more points via write(DataPoint); their timestamps
    // (1000 + ts + i * 10) continue the same 10 ms sequence
    for (int i = 0; i < 100; i++) {
        DataPoint dp = new DataPoint(1000 + ts + i * 10, i);
        writer.write(dp);
    }
    // A fresh reader sees all 200 points in timestamp order
    reader = writer.getReader();
    for (int i = 0; i < 200; i++) {
        DataPoint pair = reader.readPair();
        assertEquals(ts + i * 10, pair.getTimestamp());
        assertEquals(i % 100, pair.getLongValue());
    }
    System.out.println("Compression Ratio:" + writer.getCompressionRatio());
}
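The two tests above exercise the full write/read cycle. The following is a minimal sketch of that lifecycle distilled from them, stripped of assertions. It assumes the same Writer/Reader interface shown in the tests; the import paths for ByzantineWriter, Reader, and DataPoint and the names of configure's boolean flags are inferred, not confirmed against the library.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;

import com.srotya.sidewinder.core.storage.DataPoint;
import com.srotya.sidewinder.core.storage.compression.Reader;
import com.srotya.sidewinder.core.storage.compression.Writer;
import com.srotya.sidewinder.core.storage.compression.byzantine.ByzantineWriter;

public class WriterRoundTripSketch {

    public static void main(String[] args) throws IOException {
        ByteBuffer buf = ByteBuffer.allocateDirect(1024);
        Writer writer = new ByzantineWriter();
        // Arguments mirror the tests: (conf, buffer, isNew=true, startOffset=1,
        // readOnly=false); the flag names here are assumptions
        writer.configure(new HashMap<>(), buf, true, 1, false);

        long ts = System.currentTimeMillis();
        writer.setHeaderTimestamp(ts);
        // Append 10 points at 10 ms intervals
        for (int i = 0; i < 10; i++) {
            writer.addValue(ts + i * 10, i);
        }

        // Read everything back through a fresh Reader
        Reader reader = writer.getReader();
        for (int i = 0; i < 10; i++) {
            DataPoint dp = reader.readPair();
            System.out.println(dp.getTimestamp() + " -> " + dp.getLongValue());
        }
    }
}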
In class TestDiskStorageEngine, method testSeriesToDataPointConversion:
@Test
public void testSeriesToDataPointConversion() throws IOException {
    List<DataPoint> points = new ArrayList<>();
    long headerTimestamp = System.currentTimeMillis();
    HashMap<String, String> map = new HashMap<>();
    map.put("metadata.dir", "target/db1/mdq");
    map.put("index.dir", "target/db1/index");
    map.put("data.dir", "target/db1/data");
    map.put(StorageEngine.PERSISTENCE_DISK, "true");
    ByteBuffer buf = ByteBuffer.allocate(100);
    Writer timeSeries = new ByzantineWriter();
    timeSeries.configure(map, buf, true, 1, false);
    timeSeries.setHeaderTimestamp(headerTimestamp);
    timeSeries.addValue(headerTimestamp, 1L);
    // With no predicates, the single stored point is converted
    TimeSeries.seriesToDataPoints("value", Arrays.asList("test=1"), points, timeSeries, null, null, false);
    assertEquals(1, points.size());
    points.clear();
    // A time predicate that matches nothing filters out every point
    Predicate timepredicate = new BetweenPredicate(Long.MAX_VALUE, Long.MAX_VALUE);
    TimeSeries.seriesToDataPoints("value", Arrays.asList("test=1"), points, timeSeries, timepredicate, null, false);
    assertEquals(0, points.size());
}
In class TestMemStorageEngine, method testSeriesToDataPointConversion:
@Test
public void testSeriesToDataPointConversion() throws IOException {
    List<DataPoint> points = new ArrayList<>();
    long headerTimestamp = System.currentTimeMillis();
    ByteBuffer buf = ByteBuffer.allocate(1024);
    Writer timeSeries = new ByzantineWriter();
    // conf is a field of the test class
    timeSeries.configure(conf, buf, true, 1, false);
    timeSeries.setHeaderTimestamp(headerTimestamp);
    timeSeries.addValue(headerTimestamp, 1L);
    // With no predicates, the single stored point is converted
    TimeSeries.seriesToDataPoints("value", Arrays.asList("test=2"), points, timeSeries, null, null, false);
    assertEquals(1, points.size());
    points.clear();
    // A time predicate that matches nothing filters out every point
    Predicate timepredicate = new BetweenPredicate(Long.MAX_VALUE, Long.MAX_VALUE);
    TimeSeries.seriesToDataPoints("value", Arrays.asList("test=2"), points, timeSeries, timepredicate, null, false);
    assertEquals(0, points.size());
}
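As a hypothetical variation on the two conversion tests above, a BetweenPredicate spanning a real window keeps the points inside it instead of filtering everything out. The argument positions (time predicate fifth, value predicate sixth) follow the calls above; the method name and the window width are illustrative assumptions.

@Test
public void testSeriesToDataPointWindowFilter() throws IOException {
    List<DataPoint> points = new ArrayList<>();
    long headerTimestamp = System.currentTimeMillis();
    ByteBuffer buf = ByteBuffer.allocate(1024);
    Writer series = new ByzantineWriter();
    series.configure(new HashMap<>(), buf, true, 1, false);
    series.setHeaderTimestamp(headerTimestamp);
    series.addValue(headerTimestamp, 1L);
    // A +/- 1 s window that contains the single point written above
    Predicate window = new BetweenPredicate(headerTimestamp - 1000, headerTimestamp + 1000);
    TimeSeries.seriesToDataPoints("value", Arrays.asList("test=2"), points, series, window, null, false);
    assertEquals(1, points.size());
}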
In class TestTimeSeries, method testConcurrentSeriesCreate:
@Test(timeout = 10000)
public void testConcurrentSeriesCreate() throws IOException, InterruptedException {
    DBMetadata metadata = new DBMetadata(28);
    MockMeasurement measurement = new MockMeasurement(4096);
    HashMap<String, String> conf = new HashMap<>();
    conf.put("default.bucket.size", "4096");
    conf.put("compaction.enabled", "true");
    conf.put("use.query.pool", "false");
    final TimeSeries series = new TimeSeries(measurement, "byzantine", "gorilla", "asdasasd", 4096, metadata, true, conf);
    final AtomicBoolean control = new AtomicBoolean(true);
    ExecutorService c = Executors.newCachedThreadPool();
    final long curr = 1497720652566L;
    // Four threads race to append points; the timestamps cover a 4096 s
    // span, which crosses one bucket boundary, so two buckets are expected
    for (int i = 0; i < 4; i++) {
        c.submit(() -> {
            int k = 0;
            while (control.get()) {
                try {
                    long timestamp = curr + k * 4000;
                    if (timestamp < (1497720652566L + 4096 * 1000)) {
                        series.addDataPoint(TimeUnit.MILLISECONDS, timestamp, k);
                    } else {
                        break;
                    }
                    k++;
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }
    c.shutdown();
    Thread.sleep(1000);
    control.set(false);
    assertEquals(2, series.getBucketRawMap().size());
    // Every writer except the last in each bucket should be full
    for (Entry<String, List<Writer>> entry : series.getBucketRawMap().entrySet()) {
        for (int i = 0; i < entry.getValue().size() - 1; i++) {
            Writer writer = entry.getValue().get(i);
            assertTrue(writer.isFull());
        }
    }
}
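One design note on the shutdown sequence above: Thread.sleep(1000) assumes the four writer tasks finish within a second. A stricter alternative, sketched here as a hypothetical replacement for the three shutdown lines using only standard ExecutorService calls, waits for the tasks explicitly:

c.shutdown();
if (!c.awaitTermination(5, TimeUnit.SECONDS)) {
    control.set(false); // ask any still-running loops to exit
    c.shutdownNow();
}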