Usage example of com.srotya.sidewinder.core.storage.mem.MemMalloc in the project sidewinder by srotya.
From the class TestTimeSeries, method testReplaceSeries.
// Disabled test kept for reference: gzip-compaction variant of the scenario below.
// @Test
// public void testCompactionGzip() throws IOException {
// DBMetadata metadata = new DBMetadata(28);
// MockMeasurement measurement = new MockMeasurement(1024);
// HashMap<String, String> conf = new HashMap<>();
// conf.put("default.bucket.size", "409600");
// conf.put("compaction.enabled", "true");
// conf.put("use.query.pool", "false");
// conf.put("compaction.ratio", "1.1");
// conf.put("zip.block.size", "8");
//
// final TimeSeries series = new TimeSeries(measurement, "byzantine", "bzip",
// "asdasasd", 409600, metadata, true,
// conf);
// final long curr = 1497720652566L;
//
// String valueFieldName = "value";
//
// for (int i = 1; i <= 10000; i++) {
// series.addDataPoint(TimeUnit.MILLISECONDS, curr + i * 1000, i * 1.1);
// }
//
// SortedMap<String, List<Writer>> bucketRawMap = series.getBucketRawMap();
// assertEquals(1, bucketRawMap.size());
// int size = bucketRawMap.values().iterator().next().size();
// assertTrue(series.getCompactionSet().size() < size);
// assertTrue(size > 2);
// series.compact();
// List<DataPoint> dataPoints = series.queryDataPoints(valueFieldName, curr -
// 1000, curr + 10000 * 1000 + 1, null);
// bucketRawMap = series.getBucketRawMap();
// assertEquals(2, bucketRawMap.values().iterator().next().size());
// int count = 0;
// for (List<Writer> list : bucketRawMap.values()) {
// for (Writer writer : list) {
// Reader reader = writer.getReader();
// count += reader.getPairCount();
// }
// }
// assertEquals(10000, count);
// assertEquals(10000, dataPoints.size());
// for (int i = 1; i <= 10000; i++) {
// DataPoint dp = dataPoints.get(i - 1);
// assertEquals("Bad ts:" + i, curr + i * 1000, dp.getTimestamp());
// assertEquals(dp.getValue(), i * 1.1, 0.001);
// }
// }
/**
 * Verifies that replaceFirstBuckets() swaps the first bucket's compacted
 * writers for a raw serialized copy without losing any of the 10,000 data
 * points, and that the displaced buffers are queued for cleanup on the
 * measurement's allocator.
 *
 * @throws IOException if the series rejects a data point or query
 */
@Test
public void testReplaceSeries() throws IOException {
	DBMetadata metadata = new DBMetadata(28);
	MockMeasurement measurement = new MockMeasurement(1024);
	HashMap<String, String> conf = new HashMap<>();
	conf.put("default.bucket.size", "409600");
	conf.put("compaction.enabled", "true");
	conf.put("use.query.pool", "false");
	final TimeSeries series = new TimeSeries(measurement, "byzantine", "gorilla", "asdasasd", 409600, metadata, true,
			conf);
	final long curr = 1497720652566L;
	String valueFieldName = "value";
	// populate a single bucket with 10k points so several writers are created
	for (int i = 1; i <= 10000; i++) {
		series.addDataPoint(TimeUnit.MILLISECONDS, curr + i * 1000, i * 1.1);
	}
	SortedMap<String, List<Writer>> bucketRawMap = series.getBucketRawMap();
	int size = bucketRawMap.values().iterator().next().size();
	assertTrue(series.getCompactionSet().size() < size);
	assertTrue(size > 2);
	List<Writer> compact = series.compact();
	assertTrue(compact.size() > 0);
	// use assertEquals (not assertTrue on ==) so a failure reports both values
	assertEquals(2, bucketRawMap.values().iterator().next().size());
	List<Writer> next = bucketRawMap.values().iterator().next();
	Writer writer = next.get(0);
	// snapshot the first writer's raw bytes into a standalone byte array
	ByteBuffer buf = writer.getRawBytes();
	buf.rewind();
	ByteBuffer allocate = ByteBuffer.allocate(buf.limit());
	allocate.put(buf);
	allocate.rewind();
	byte[] array = allocate.array();
	assertEquals(buf.limit(), array.length);
	MemMalloc allocator = measurement.getAllocator();
	List<Entry<Long, byte[]>> list = new ArrayList<>();
	list.add(new AbstractMap.SimpleEntry<Long, byte[]>(writer.getHeaderTimestamp(), array));
	try {
		series.replaceFirstBuckets(bucketRawMap.firstKey(), list);
	} catch (InstantiationException | IllegalAccessException e) {
		// fail with the full exception context instead of printStackTrace()
		// (fail(e.getMessage()) could report "null" for message-less exceptions)
		fail("replaceFirstBuckets threw " + e);
	}
	// the replaced buffers must be registered for cleanup
	assertEquals(1, allocator.getCleanupCallback().size());
	List<DataPoint> dataPoints = series.queryDataPoints(valueFieldName, curr - 1000, curr + 10000 * 1000 + 1, null);
	// assert the size up front so a short result fails cleanly rather than
	// with IndexOutOfBoundsException inside the loop below
	assertEquals(10000, dataPoints.size());
	for (int i = 1; i <= 10000; i++) {
		DataPoint dp = dataPoints.get(i - 1);
		assertEquals("Bad ts:" + i, curr + i * 1000, dp.getTimestamp());
		// expected value first, actual second (the original had them swapped)
		assertEquals(i * 1.1, dp.getValue(), 0.001);
	}
}
Aggregations