Usage example of com.srotya.sidewinder.core.storage.compression.Reader in project sidewinder by srotya.
From the class TestGorillaCompression, method testCompressUncompress:
@Test
public void testCompressUncompress() throws IOException {
	// Round-trips 100 timestamp/value pairs through a GorillaWriter and
	// verifies the Reader yields them back unchanged and in order.
	ByteBuffer buffer = ByteBuffer.allocate(1024);
	GorillaWriter gorillaWriter = new GorillaWriter();
	gorillaWriter.configure(new HashMap<>(), buffer, true, 0, false);
	long baseTs = System.currentTimeMillis();
	gorillaWriter.setHeaderTimestamp(baseTs);
	int written = 0;
	while (written < 100) {
		// Points are spaced 100 apart (millisecond base), value = loop index.
		gorillaWriter.addValue(baseTs + written * 100, written);
		written++;
	}
	gorillaWriter.makeReadOnly();
	Reader reader = gorillaWriter.getReader();
	assertEquals(100, reader.getPairCount());
	for (int j = 0; j < 100; j++) {
		DataPoint pair = reader.readPair();
		assertEquals(baseTs + j * 100, pair.getTimestamp());
		assertEquals(j, pair.getLongValue());
	}
}
Usage example of com.srotya.sidewinder.core.storage.compression.Reader in project sidewinder by srotya.
From the class TestDiskStorageEngine, method testAddAndReaderDataPoints:
@Test
public void testAddAndReaderDataPoints() throws Exception {
	// End-to-end disk-engine flow: a tag-less point must be rejected, three
	// tagged points must round-trip through queryReaders, and dropping the
	// measurement must leave the database empty.
	File dataDirectory = new File("target/db8/");
	if (dataDirectory.exists()) {
		// Start from a clean directory so stale state can't skew the counts.
		MiscUtils.delete(dataDirectory);
	}
	DiskStorageEngine engine = new DiskStorageEngine();
	HashMap<String, String> engineConf = new HashMap<>();
	engineConf.put("metadata.dir", "target/db8/mdq");
	engineConf.put("index.dir", "target/db8/index");
	engineConf.put("data.dir", "target/db8/data");
	engineConf.put(StorageEngine.PERSISTENCE_DISK, "true");
	engine.configure(engineConf, bgTasks);
	final long baseTs = 1497720452566L;
	final String dbName = "test";
	final String measurementName = "cpu";
	final String valueFieldName = "value";
	try {
		engine.writeDataPoint(MiscUtils.buildDataPoint(dbName, measurementName, valueFieldName, null, baseTs, 2.2 * 0));
		fail("Must reject the above datapoint due to missing tags");
	} catch (Exception expected) {
		// A null tag list is invalid; the write above is required to throw.
	}
	for (int i = 1; i <= 3; i++) {
		engine.writeDataPoint(MiscUtils.buildDataPoint(dbName, measurementName, valueFieldName, Arrays.asList(dbName + "=1"), baseTs + i, 2.2 * i));
	}
	assertEquals(1, engine.getAllMeasurementsForDb(dbName).size());
	LinkedHashMap<Reader, Boolean> readers = engine.queryReaders(dbName, measurementName, valueFieldName, baseTs, baseTs + 3);
	int pointCount = 0;
	for (Entry<Reader, Boolean> readerEntry : readers.entrySet()) {
		assertTrue(readerEntry.getValue());
		boolean drained = false;
		while (!drained) {
			try {
				DataPoint dp = readerEntry.getKey().readPair();
				assertEquals(2.2 * (pointCount + 1), dp.getValue(), 0.01);
				pointCount++;
			} catch (RejectException endOfData) {
				// The reader signals end-of-data by throwing; stop draining.
				drained = true;
			}
		}
	}
	assertEquals(3, pointCount);
	assertTrue(engine.checkIfExists(dbName, measurementName));
	try {
		// Lookup of a non-existent database may throw; either outcome is
		// acceptable here, so the exception is deliberately ignored.
		engine.checkIfExists(dbName + "1");
	} catch (Exception ignored) {
	}
	engine.dropMeasurement(dbName, measurementName);
	assertEquals(0, engine.getAllMeasurementsForDb(dbName).size());
	engine.disconnect();
}
Usage example of com.srotya.sidewinder.core.storage.compression.Reader in project sidewinder by srotya.
From the class TestMemStorageEngine, method testAddAndReaderDataPoints:
@Test
public void testAddAndReaderDataPoints() throws Exception {
	// In-memory engine flow: reject a tag-less point, write three tagged
	// points, read them back via queryReaders, then drop the measurement.
	MemStorageEngine engine = new MemStorageEngine();
	engine.configure(new HashMap<>(), bgTasks);
	final long baseTs = System.currentTimeMillis();
	final String dbName = "test";
	final String measurementName = "cpu";
	final String valueFieldName = "value";
	try {
		engine.writeDataPoint(MiscUtils.buildDataPoint(dbName, measurementName, valueFieldName, null, baseTs, 2.2 * 0));
		fail("Must reject the above datapoint due to missing tags");
	} catch (Exception expected) {
		// A null tag list is invalid; the write above is required to throw.
	}
	for (int i = 1; i <= 3; i++) {
		engine.writeDataPoint(MiscUtils.buildDataPoint(dbName, measurementName, valueFieldName, Arrays.asList(dbName + "=2"), baseTs + i, 2.2 * i));
	}
	assertEquals(1, engine.getAllMeasurementsForDb(dbName).size());
	LinkedHashMap<Reader, Boolean> readers = engine.queryReaders(dbName, measurementName, valueFieldName, baseTs, baseTs + 3);
	int pointCount = 0;
	for (Entry<Reader, Boolean> readerEntry : readers.entrySet()) {
		assertTrue(readerEntry.getValue());
		boolean drained = false;
		while (!drained) {
			try {
				DataPoint dp = readerEntry.getKey().readPair();
				assertEquals(2.2 * (pointCount + 1), dp.getValue(), 0.01);
				pointCount++;
			} catch (RejectException endOfData) {
				// The reader signals end-of-data by throwing; stop draining.
				drained = true;
			}
		}
	}
	assertEquals(3, pointCount);
	assertTrue(engine.checkIfExists(dbName, measurementName));
	try {
		// Lookup of a non-existent database may throw; either outcome is
		// acceptable here, so the exception is deliberately ignored.
		engine.checkIfExists(dbName + "1");
	} catch (Exception ignored) {
	}
	engine.dropMeasurement(dbName, measurementName);
	assertEquals(0, engine.getAllMeasurementsForDb(dbName).size());
}
Usage example of com.srotya.sidewinder.core.storage.compression.Reader in project sidewinder by srotya.
From the class TestTimeSeries, method testCompactionGorilla:
@Test
public void testCompactionGorilla() throws IOException {
	// Writes 10k points into one bucket, runs compaction, and verifies no
	// data is lost: the bucket collapses to 2 writers while all 10k points
	// stay readable with their original timestamps and values.
	DBMetadata metadata = new DBMetadata(28);
	MockMeasurement measurement = new MockMeasurement(1024);
	HashMap<String, String> conf = new HashMap<>();
	conf.put("default.bucket.size", "409600");
	conf.put("compaction.enabled", "true");
	conf.put("use.query.pool", "false");
	conf.put("compaction.ratio", "1.1");
	// NOTE(review): constructor args "byzantine"/"gorilla" look like the
	// timestamp and value codec names respectively — confirm argument order.
	final TimeSeries series = new TimeSeries(measurement, "byzantine", "gorilla", "asdasasd", 409600, metadata, true, conf);
	final long baseTs = 1497720652566L;
	String valueFieldName = "value";
	for (int i = 1; i <= 10000; i++) {
		series.addDataPoint(TimeUnit.MILLISECONDS, baseTs + i * 1000, i * 1.1);
	}
	SortedMap<String, List<Writer>> bucketRawMap = series.getBucketRawMap();
	assertEquals(1, bucketRawMap.size());
	int writersBeforeCompaction = bucketRawMap.values().iterator().next().size();
	// Compaction needs candidates, and more than two writers to have any effect.
	assertTrue(series.getCompactionSet().size() < writersBeforeCompaction);
	assertTrue(writersBeforeCompaction > 2);
	series.compact();
	List<DataPoint> dataPoints = series.queryDataPoints(valueFieldName, baseTs - 1000, baseTs + 10000 * 1000 + 1, null);
	bucketRawMap = series.getBucketRawMap();
	assertEquals(2, bucketRawMap.values().iterator().next().size());
	int totalPairs = 0;
	for (List<Writer> writers : bucketRawMap.values()) {
		for (Writer writer : writers) {
			totalPairs += writer.getReader().getPairCount();
		}
	}
	assertEquals(10000, totalPairs);
	assertEquals(10000, dataPoints.size());
	for (int i = 1; i <= 10000; i++) {
		DataPoint dp = dataPoints.get(i - 1);
		assertEquals("Bad ts:" + i, baseTs + i * 1000, dp.getTimestamp());
		assertEquals(dp.getValue(), i * 1.1, 0.001);
	}
}
Usage example of com.srotya.sidewinder.core.storage.compression.Reader in project sidewinder by srotya.
From the class TestTimeSeries, method testAddAndReadDataPoints:
@Test
public void testAddAndReadDataPoints() throws IOException {
	// Writes three points to a TimeSeries and reads them back three ways:
	// directly from the bucket Writer's Reader, via queryDataPoints, and via
	// queryReader. Also checks that reading past the end fails and that an
	// out-of-range query returns nothing.
	Measurement measurement = new MockMeasurement(100);
	DBMetadata metadata = new DBMetadata(24);
	TimeSeries series = new TimeSeries(measurement, compression, compaction, "43232", 4096, metadata, true, conf);
	long baseTs = System.currentTimeMillis();
	for (int i = 1; i <= 3; i++) {
		series.addDataPoint(TimeUnit.MILLISECONDS, baseTs + i, 2.2 * i);
	}
	assertEquals(1, series.getBucketMap().size());
	Writer writer = series.getBucketMap().values().iterator().next();
	assertEquals(3, writer.getCount());
	Reader reader = TimeSeries.getReader(writer, null, null);
	for (int i = 0; i < 3; i++) {
		reader.readPair();
	}
	try {
		// Only three pairs were written, so a fourth read must fail.
		reader.readPair();
		fail("The read shouldn't succeed");
	} catch (IOException expected) {
	}
	// NOTE(review): the range is passed end-first (baseTs + 3, baseTs) —
	// presumably the API normalizes the order; confirm against TimeSeries.
	List<DataPoint> values = series.queryDataPoints("value", baseTs + 3, baseTs, null);
	assertEquals(3, values.size());
	for (int i = 1; i <= 3; i++) {
		DataPoint dp = values.get(i - 1);
		assertEquals("Value mismatch:" + dp.getValue() + "\t" + (2.2 * i) + "\t" + i, dp.getValue(), 2.2 * i, 0.01);
	}
	List<Reader> queryReaders = series.queryReader("value", Arrays.asList(), baseTs + 3, baseTs, null);
	assertEquals(1, queryReaders.size());
	reader = queryReaders.get(0);
	for (int i = 1; i <= 3; i++) {
		DataPoint dp = reader.readPair();
		assertEquals("Value mismatch:" + dp.getValue() + "\t" + (2.2 * i) + "\t" + i, dp.getValue(), 2.2 * i, 0.01);
	}
	// A range entirely before the first written point must yield no data.
	values = series.queryDataPoints("value", baseTs - 1, baseTs - 1, null);
	assertEquals(0, values.size());
}
Aggregations