Example usage of com.srotya.sidewinder.core.storage.DataPoint in project sidewinder (by srotya), taken from class TestByzantineReadWrite, method testWriteReadNoLock:
/**
 * Round-trip check for {@link ByzantineWriter} without locking: every
 * timestamp written must be read back, in order, through the writer's
 * {@link Reader}.
 */
@Test
public void testWriteReadNoLock() throws IOException {
	final int pointCount = 100000;
	ByteBuffer backingBuffer = ByteBuffer.allocateDirect(1024 * 1000);
	Writer writer = new ByzantineWriter();
	// Empty config map: the writer falls back to its defaults.
	writer.configure(new HashMap<>(), backingBuffer, true, startOffset, false);
	long baseTs = System.currentTimeMillis();
	writer.setHeaderTimestamp(baseTs);
	// One point per second, starting at the header timestamp.
	for (int idx = 0; idx < pointCount; idx++) {
		writer.addValue(baseTs + idx * 1000, idx);
	}
	System.out.println("Compression Ratio:" + writer.getCompressionRatio());
	Reader reader = writer.getReader();
	for (int idx = 0; idx < pointCount; idx++) {
		DataPoint dp = reader.readPair();
		assertEquals("Iteration:" + idx, baseTs + idx * 1000, dp.getTimestamp());
	}
}
Example usage of com.srotya.sidewinder.core.storage.DataPoint in project sidewinder (by srotya), taken from class TestByzantineReadWrite, method testDiskRecovery:
/**
 * Exercises the disk-recovery path of {@link ByzantineWriter}: writes a large
 * series, reads it back, then re-opens the same buffer with a fresh writer in
 * recovery mode and verifies the recovered pair count, the recovered data, and
 * that a further append is accepted.
 */
@Test
public void testDiskRecovery() throws IOException, InterruptedException {
	ByteBuffer buf = ByteBuffer.allocateDirect(1024 * 1024 * 10);
	ByzantineWriter writer = new ByzantineWriter();
	writer.configure(new HashMap<>(), buf, true, startOffset, true);
	long ots = System.currentTimeMillis();
	writer.setHeaderTimestamp(ots);
	int limit = 1_000_000;
	// One point per second, starting at the header timestamp.
	for (int i = 0; i < limit; i++) {
		writer.addValue(ots + i * 1000, i);
	}
	long elapsed = (System.currentTimeMillis() - ots);
	System.out.println("==>Byzantine Write time:" + elapsed + " data size:" + writer.getBuf().position());
	Reader reader = writer.getReader();
	assertEquals(limit, reader.getPairCount());
	try {
		for (int i = 0; i < limit; i++) {
			DataPoint pair = reader.readPair();
			assertEquals(ots + i * 1000, pair.getTimestamp());
			assertEquals(i, pair.getLongValue());
		}
	} catch (Exception e) {
		// Print before rethrowing so the failing iteration's stack is visible.
		e.printStackTrace();
		throw e;
	}
	System.out.println("Completed phase 1 reads");
	// Simulate recovery: a brand-new writer re-opens the same (rewound) buffer.
	writer = new ByzantineWriter();
	buf.rewind();
	writer.configure(new HashMap<>(), buf, false, startOffset, true);
	// Use the named limit instead of duplicating the magic constant 1000000.
	assertEquals(limit, writer.getCount());
	// Bug fix: the original called addValue(ts + 10000, 1) where ts held the
	// *elapsed write duration*, not a timestamp — producing a point far before
	// the series header timestamp. Append the next logical point instead.
	writer.addValue(ots + limit * 1000L, 1);
	try {
		reader = writer.getReader();
		// The original limit points must still read back intact after recovery.
		for (int i = 0; i < limit; i++) {
			DataPoint pair = reader.readPair();
			assertEquals(ots + i * 1000, pair.getTimestamp());
			assertEquals(i, pair.getLongValue());
		}
	} catch (Exception e) {
		e.printStackTrace();
		throw e;
	}
}
Example usage of com.srotya.sidewinder.core.storage.DataPoint in project sidewinder (by srotya), taken from class TestGorillaCompression, method testCompressUncompress:
/**
 * Round-trip check for {@link GorillaWriter}: every compressed point must be
 * readable back with an identical timestamp and value once the writer is
 * sealed read-only.
 */
@Test
public void testCompressUncompress() throws IOException {
	final int samples = 100;
	ByteBuffer storage = ByteBuffer.allocate(1024);
	GorillaWriter writer = new GorillaWriter();
	writer.configure(new HashMap<>(), storage, true, 0, false);
	long headerTs = System.currentTimeMillis();
	writer.setHeaderTimestamp(headerTs);
	// One point every 100 ms starting at the header timestamp.
	for (int s = 0; s < samples; s++) {
		writer.addValue(headerTs + s * 100, s);
	}
	writer.makeReadOnly();
	Reader reader = writer.getReader();
	assertEquals(samples, reader.getPairCount());
	for (int s = 0; s < samples; s++) {
		DataPoint pair = reader.readPair();
		assertEquals(headerTs + s * 100, pair.getTimestamp());
		assertEquals(s, pair.getLongValue());
	}
}
Example usage of com.srotya.sidewinder.core.storage.DataPoint in project sidewinder (by srotya), taken from class TestDiskStorageEngine, method testAddAndReaderDataPoints:
/**
 * End-to-end check of {@link DiskStorageEngine}: a tag-less data point is
 * rejected, a small tagged series is written and read back through query
 * readers, and the measurement can then be dropped cleanly.
 */
@Test
public void testAddAndReaderDataPoints() throws Exception {
	DiskStorageEngine engine = new DiskStorageEngine();
	// Start from a clean on-disk state.
	File dbDir = new File("target/db8/");
	if (dbDir.exists()) {
		MiscUtils.delete(dbDir);
	}
	HashMap<String, String> engineConf = new HashMap<>();
	engineConf.put("metadata.dir", "target/db8/mdq");
	engineConf.put("index.dir", "target/db8/index");
	engineConf.put("data.dir", "target/db8/data");
	engineConf.put(StorageEngine.PERSISTENCE_DISK, "true");
	engine.configure(engineConf, bgTasks);
	long curr = 1497720452566L;
	String dbName = "test";
	String measurementName = "cpu";
	String valueFieldName = "value";
	try {
		engine.writeDataPoint(MiscUtils.buildDataPoint(dbName, measurementName, valueFieldName, null, curr, 2.2 * 0));
		fail("Must reject the above datapoint due to missing tags");
	} catch (Exception e) {
		// expected: points with null tags must be rejected by the engine
	}
	// Three valid points, one per millisecond, values 2.2 * i.
	for (int i = 1; i <= 3; i++) {
		engine.writeDataPoint(MiscUtils.buildDataPoint(dbName, measurementName, valueFieldName, Arrays.asList(dbName + "=1"), curr + i, 2.2 * i));
	}
	assertEquals(1, engine.getAllMeasurementsForDb(dbName).size());
	LinkedHashMap<Reader, Boolean> readers = engine.queryReaders(dbName, measurementName, valueFieldName, curr, curr + 3);
	int pointsRead = 0;
	for (Entry<Reader, Boolean> entry : readers.entrySet()) {
		assertTrue(entry.getValue());
		Reader r = entry.getKey();
		// Drain the reader; RejectException signals end of data.
		boolean exhausted = false;
		while (!exhausted) {
			try {
				DataPoint dp = r.readPair();
				assertEquals(2.2 * (pointsRead + 1), dp.getValue(), 0.01);
				pointsRead++;
			} catch (RejectException e) {
				exhausted = true;
			}
		}
	}
	assertEquals(3, pointsRead);
	assertTrue(engine.checkIfExists(dbName, measurementName));
	try {
		engine.checkIfExists(dbName + "1");
	} catch (Exception e) {
		// lookup of a non-existent database may throw; deliberately ignored
	}
	engine.dropMeasurement(dbName, measurementName);
	assertEquals(0, engine.getAllMeasurementsForDb(dbName).size());
	engine.disconnect();
}
Example usage of com.srotya.sidewinder.core.storage.DataPoint in project sidewinder (by srotya), taken from class TestPersistentMeasurement, method testLinearizability:
// Stress-tests linearizability of PersistentMeasurement: 100 iterations, each
// releasing two writer threads simultaneously against the same series and
// asserting that all 200 points land in a single time bucket.
@Test
public void testLinearizability() throws IOException, InterruptedException {
for (int p = 0; p < 100; p++) {
// Fresh on-disk state for every iteration.
MiscUtils.delete(new File("target/db134/"));
final long t1 = 1497720452566L;
Measurement m = new PersistentMeasurement();
m.configure(conf, engine, DBNAME, "m1", "target/db134/index", "target/db134/data", metadata, bgTaskPool);
ExecutorService es = Executors.newFixedThreadPool(2, new BackgrounThreadFactory("tlinear"));
// Gate flag: both workers spin on it so they start writing at (almost)
// the same instant once the main thread flips it to true.
AtomicBoolean wait = new AtomicBoolean(false);
for (int i = 0; i < 2; i++) {
final int th = i;
es.submit(() -> {
// Busy-wait on the gate; the interrupt is deliberately swallowed so the
// spin-wait keeps polling until released.
while (!wait.get()) {
try {
Thread.sleep(1);
} catch (InterruptedException e) {
}
}
// Offset each thread's base time by 3 ms so the two threads write
// distinct timestamps into the same series.
long t = t1 + th * 3;
for (int j = 0; j < 100; j++) {
try {
TimeSeries ts = m.getOrCreateTimeSeries("vf1", Arrays.asList("t=1", "t=2"), 4096, false, conf);
long timestamp = t + j * 1000;
ts.addDataPoint(TimeUnit.MILLISECONDS, timestamp, j);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
// Both tasks are already queued; shutdown() only stops new submissions.
es.shutdown();
// Open the gate, then wait for both writers to drain.
wait.set(true);
es.awaitTermination(100, TimeUnit.SECONDS);
TimeSeries ts = m.getOrCreateTimeSeries("vf1", Arrays.asList("t=1", "t=2"), 4096, false, conf);
List<DataPoint> dps = ts.queryDataPoints("vf1", t1 - 120, t1 + 1000_000, null);
// 2 threads x 100 points each; all must be visible in one query window
// and must have landed in exactly one 4096-second bucket.
assertEquals(200, dps.size());
assertEquals(1, ts.getBucketCount());
m.close();
}
}
Aggregations