Usage example of com.srotya.sidewinder.core.storage.compression.Writer in the sidewinder project by srotya: class TestByzantineReadWrite, method testWriteRead.
@Test
public void testWriteRead() throws IOException {
    // End-to-end round-trip over four write paths of the Byzantine codec:
    // addValue with long values, addValue with double values, write(DataPoint),
    // and the bulk write(List<DataPoint>) overload. Each pass reuses the same
    // direct buffer after rewinding it.
    ByteBuffer buf = ByteBuffer.allocateDirect(1024 * 100);
    Writer writer = new ByzantineWriter();
    writer.configure(new HashMap<>(), buf, true, startOffset, false);
    long ts = System.currentTimeMillis();
    writer.setHeaderTimestamp(ts);
    int LIMIT = 10000;
    for (int i = 0; i < LIMIT; i++) {
        writer.addValue(ts + i * 1000, i);
    }
    System.out.println("Compression Ratio:" + writer.getCompressionRatio());
    Reader reader = writer.getReader();
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = reader.readPair();
        assertEquals(ts + i * 1000, dp.getTimestamp());
    }

    // Pass 2: floating-point values (value compression enabled via last flag).
    buf.rewind();
    writer = new ByzantineWriter();
    writer.configure(new HashMap<>(), buf, true, startOffset, true);
    ts = System.currentTimeMillis();
    writer.setHeaderTimestamp(ts);
    for (int i = 0; i < LIMIT; i++) {
        writer.addValue(ts + i * 1000, i * 1.1);
    }
    reader = writer.getReader();
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = reader.readPair();
        assertEquals(ts + i * 1000, dp.getTimestamp());
        // Fixed: the original passed startOffset (a buffer offset) as the
        // double-comparison tolerance; use a small epsilon instead.
        assertEquals(i * 1.1, dp.getValue(), 0.001);
    }

    // Pass 3: single-DataPoint write() API.
    buf.rewind();
    writer = new ByzantineWriter();
    writer.configure(new HashMap<>(), buf, true, startOffset, false);
    ts = System.currentTimeMillis();
    writer.setHeaderTimestamp(ts);
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = MiscUtils.buildDataPoint(ts + i * 1000, i);
        writer.write(dp);
    }
    reader = writer.getReader();
    assertEquals(LIMIT, reader.getPairCount());
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = reader.readPair();
        assertEquals(ts + i * 1000, dp.getTimestamp());
    }

    // Pass 4: bulk write(List<DataPoint>) API.
    buf.rewind();
    writer = new ByzantineWriter();
    writer.configure(new HashMap<>(), buf, true, startOffset, true);
    ts = System.currentTimeMillis();
    writer.setHeaderTimestamp(ts);
    List<DataPoint> dps = new ArrayList<>();
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = MiscUtils.buildDataPoint(ts + i * 1000, i);
        dps.add(dp);
    }
    writer.write(dps);
    reader = writer.getReader();
    assertEquals(LIMIT, reader.getPairCount());
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = reader.readPair();
        assertEquals(ts + i * 1000, dp.getTimestamp());
    }
}
Usage example of com.srotya.sidewinder.core.storage.compression.Writer in the sidewinder project by srotya: class TestByzantineReadWrite, method testWriteReadNoLock.
@Test
public void testWriteReadNoLock() throws IOException {
    // Write 100k sequential points (no locking requested) and verify that
    // every timestamp survives the compression round trip.
    final ByteBuffer buffer = ByteBuffer.allocateDirect(1024 * 1000);
    final Writer byzantine = new ByzantineWriter();
    byzantine.configure(new HashMap<>(), buffer, true, startOffset, false);
    final long baseTs = System.currentTimeMillis();
    byzantine.setHeaderTimestamp(baseTs);
    final int pointCount = 100000;
    for (int idx = 0; idx < pointCount; idx++) {
        byzantine.addValue(baseTs + idx * 1000, idx);
    }
    System.out.println("Compression Ratio:" + byzantine.getCompressionRatio());
    final Reader byzReader = byzantine.getReader();
    for (int idx = 0; idx < pointCount; idx++) {
        final DataPoint point = byzReader.readPair();
        assertEquals("Iteration:" + idx, baseTs + idx * 1000, point.getTimestamp());
    }
}
Usage example of com.srotya.sidewinder.core.storage.compression.Writer in the sidewinder project by srotya: class TestByzantineReadWrite, method testWriteReadConcurrent.
@Test
public void testWriteReadConcurrent() throws IOException, InterruptedException {
    // Four threads write disjoint timestamp ranges into one shared writer;
    // afterwards the reader must see the combined pair count.
    ByteBuffer buf = ByteBuffer.allocateDirect(1024 * 1024);
    Writer writer = new ByzantineWriter();
    Map<String, String> conf = new HashMap<>();
    writer.configure(conf, buf, true, startOffset, true);
    final long ts = System.currentTimeMillis();
    writer.setHeaderTimestamp(ts);
    int LIMIT = 10000;
    final AtomicInteger completed = new AtomicInteger(0);
    int THREAD_COUNT = 4;
    ExecutorService es = Executors.newFixedThreadPool(THREAD_COUNT);
    for (int j = 0; j < THREAD_COUNT; j++) {
        final int offset = j * LIMIT;
        es.submit(() -> {
            long t = ts + offset;
            for (int i = 0; i < LIMIT; i++) {
                try {
                    writer.addValue(t + i * 100, i);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
            completed.incrementAndGet();
        });
    }
    es.shutdown();
    // Fixed: the original ignored awaitTermination's result (100 ms timeout)
    // and then busy-waited in a loop that spins forever if a worker dies.
    // Bound the wait and fail the test explicitly on timeout instead.
    assertTrue("Writer threads did not finish in time", es.awaitTermination(30, TimeUnit.SECONDS));
    assertEquals(THREAD_COUNT, completed.get());
    System.out.println("Compression Ratio:" + writer.getCompressionRatio());
    Reader reader = writer.getReader();
    assertEquals(LIMIT * THREAD_COUNT, reader.getPairCount());
}
Usage example of com.srotya.sidewinder.core.storage.compression.Writer in the sidewinder project by srotya: class TestTimeSeries, method testCompactionGorilla.
@Test
public void testCompactionGorilla() throws IOException {
    // Write 10k points into a single bucket, trigger compaction to the
    // gorilla codec, and verify no data points are lost or corrupted.
    DBMetadata metadata = new DBMetadata(28);
    MockMeasurement measurement = new MockMeasurement(1024);
    HashMap<String, String> conf = new HashMap<>();
    conf.put("default.bucket.size", "409600");
    conf.put("compaction.enabled", "true");
    conf.put("use.query.pool", "false");
    conf.put("compaction.ratio", "1.1");
    final TimeSeries series = new TimeSeries(measurement, "byzantine", "gorilla", "asdasasd", 409600, metadata, true, conf);
    final long curr = 1497720652566L;
    String valueFieldName = "value";
    for (int i = 1; i <= 10000; i++) {
        series.addDataPoint(TimeUnit.MILLISECONDS, curr + i * 1000, i * 1.1);
    }
    SortedMap<String, List<Writer>> bucketRawMap = series.getBucketRawMap();
    assertEquals(1, bucketRawMap.size());
    int size = bucketRawMap.values().iterator().next().size();
    // Not every writer is eligible for compaction; the last one stays open.
    assertTrue(series.getCompactionSet().size() < size);
    assertTrue(size > 2);
    series.compact();
    List<DataPoint> dataPoints = series.queryDataPoints(valueFieldName, curr - 1000, curr + 10000 * 1000 + 1, null);
    // After compaction the bucket should collapse to a compacted writer plus
    // the still-open tail writer.
    bucketRawMap = series.getBucketRawMap();
    assertEquals(2, bucketRawMap.values().iterator().next().size());
    int count = 0;
    for (List<Writer> list : bucketRawMap.values()) {
        for (Writer writer : list) {
            Reader reader = writer.getReader();
            count += reader.getPairCount();
        }
    }
    assertEquals(10000, count);
    assertEquals(10000, dataPoints.size());
    for (int i = 1; i <= 10000; i++) {
        DataPoint dp = dataPoints.get(i - 1);
        assertEquals("Bad ts:" + i, curr + i * 1000, dp.getTimestamp());
        // Fixed: JUnit's assertEquals takes the expected value first; the
        // original had expected/actual swapped.
        assertEquals(i * 1.1, dp.getValue(), 0.001);
    }
}
Usage example of com.srotya.sidewinder.core.storage.compression.Writer in the sidewinder project by srotya: class TestTimeSeries, method testAddAndReadDataPoints.
@Test
public void testAddAndReadDataPoints() throws IOException {
    // Write three points, then read them back through the raw writer, the
    // DataPoint query API, and the reader query API; finally verify an
    // out-of-range query returns nothing.
    Measurement measurement = new MockMeasurement(100);
    DBMetadata metadata = new DBMetadata(24);
    TimeSeries series = new TimeSeries(measurement, compression, compaction, "43232", 4096, metadata, true, conf);
    long curr = System.currentTimeMillis();
    for (int i = 1; i <= 3; i++) {
        series.addDataPoint(TimeUnit.MILLISECONDS, curr + i, 2.2 * i);
    }
    assertEquals(1, series.getBucketMap().size());
    Writer writer = series.getBucketMap().values().iterator().next();
    assertEquals(3, writer.getCount());
    Reader reader = TimeSeries.getReader(writer, null, null);
    for (int i = 0; i < 3; i++) {
        reader.readPair();
    }
    try {
        reader.readPair();
        fail("The read shouldn't succeed");
    } catch (IOException e) {
        // expected: reading past the last pair must throw
    }
    // NOTE(review): start/end appear reversed (curr + 3, curr) — presumably
    // the query API normalizes the range; confirm against TimeSeries.
    List<DataPoint> values = series.queryDataPoints("value", curr + 3, curr, null);
    assertEquals(3, values.size());
    for (int i = 1; i <= 3; i++) {
        DataPoint dp = values.get(i - 1);
        // Fixed: JUnit's assertEquals takes the expected value first; the
        // original had expected/actual swapped.
        assertEquals("Value mismatch:" + dp.getValue() + "\t" + (2.2 * i) + "\t" + i, 2.2 * i, dp.getValue(), 0.01);
    }
    List<Reader> queryReaders = series.queryReader("value", Arrays.asList(), curr + 3, curr, null);
    assertEquals(1, queryReaders.size());
    reader = queryReaders.get(0);
    for (int i = 1; i <= 3; i++) {
        DataPoint dp = reader.readPair();
        assertEquals("Value mismatch:" + dp.getValue() + "\t" + (2.2 * i) + "\t" + i, 2.2 * i, dp.getValue(), 0.01);
    }
    values = series.queryDataPoints("value", curr - 1, curr - 1, null);
    assertEquals(0, values.size());
}
Aggregations of Writer usages end here.