Use of com.srotya.sidewinder.core.storage.LinkedByteString in the sidewinder project (by srotya): class DiskMalloc, method createNewBuffer.
@Override
public BufferObject createNewBuffer(LinkedByteString fieldId, Integer tsBucket, int newSize) throws IOException {
	logger.fine(() -> "Seriesid:" + fieldId + " requesting buffer of size:" + newSize);
	// Lazily create/rotate the active data file on first use (double-checked locking).
	// NOTE(review): for the unlocked outer null-check to be safe, the rafActiveFile
	// field should be declared volatile — confirm at the field declaration.
	if (rafActiveFile == null) {
		lock.lock();
		try {
			if (rafActiveFile == null) {
				filename = new ByteString(dataDirectory + "/data-" + String.format("%012d", fcnt) + ".dat");
				rafActiveFile = new RandomAccessFile(filename.toString(), "rwd");
				offset = 0;
				logger.info("Creating new datafile for measurement:" + filename);
				memoryMappedBuffer = rafActiveFile.getChannel().map(MapMode.READ_WRITE, 0, fileMapIncrement);
				fcnt++;
				if (enableMetricsCapture) {
					metricsFileRotation.inc();
				}
			}
		} finally {
			// BUGFIX: unlock must happen in a finally block. Previously an
			// IOException thrown by the RandomAccessFile constructor or by
			// map() left the lock permanently held, deadlocking every
			// subsequent buffer allocation.
			lock.unlock();
		}
	}
	lock.lock();
	try {
		// Remap when the requested slice would overflow the current mapping
		// (the "< 0" guard catches int overflow of curr + newSize).
		if (curr + newSize < 0 || curr + newSize > memoryMappedBuffer.remaining() + 1) {
			curr = 0;
			itr++;
			offset = (((long) (fileMapIncrement)) * itr);
			// If the next mapping window would exceed the per-file cap,
			// close the active file and recurse to open a fresh one.
			if (offset >= maxFileSize) {
				itr = 0;
				logger.info("Rotating datafile for measurement:" + measurementName + " closing active file:" + filename);
				memoryMappedBuffer.force();
				rafActiveFile.close();
				rafActiveFile = null;
				return createNewBuffer(fieldId, tsBucket, newSize);
			}
			// used for GC testing and debugging
			if (oldBufferReferences != null) {
				oldBufferReferences.put(filename, new WeakReference<MappedByteBuffer>(memoryMappedBuffer));
			}
			memoryMappedBuffer = rafActiveFile.getChannel().map(MapMode.READ_WRITE, offset, fileMapIncrement);
			logger.fine("Buffer expansion:" + offset + "\t\t" + curr);
			if (enableMetricsCapture) {
				metricsBufferResize.inc();
				metricsBufferSize.inc(fileMapIncrement);
			}
		}
		// Persist the pointer record so the buffer can be recovered on restart.
		LinkedByteString ptrKey = appendBufferPointersToDisk(fieldId, filename, curr, offset, newSize, tsBucket);
		// Hand out a slice limited to the requested size and advance the cursor.
		ByteBuffer buf = memoryMappedBuffer.slice();
		buf.limit(newSize);
		curr = curr + newSize;
		memoryMappedBuffer.position(curr);
		logger.fine(() -> "Position:" + buf.position() + "\t" + buf.limit() + "\t" + buf.capacity());
		if (enableMetricsCapture) {
			metricsBufferCounter.inc();
		}
		return new BufferObject(ptrKey, buf);
	} finally {
		lock.unlock();
	}
}
Use of com.srotya.sidewinder.core.storage.LinkedByteString in the sidewinder project (by srotya): class TestDownSamplingIFunction, method testBasicSumDownSampling.
@Test
public void testBasicSumDownSampling() throws IOException {
	// Time series: 100 timestamps at 1-second spacing from an unaligned base.
	Field timeField = new TimeField(measurement, new LinkedByteString().concat(new ByteString("time")), 121213, new HashMap<>());
	long baseTs = 1546755994280L;
	for (int i = 0; i < 100; i++) {
		timeField.addDataPoint(measurement, baseTs + i * 1000);
	}
	// Matching value series: 0..99.
	Field valueField = new ValueField(measurement, new LinkedByteString().concat(new ByteString("field1")), 121213, new HashMap<>());
	for (int i = 0; i < 100; i++) {
		valueField.addDataPoint(measurement, i);
	}

	// 5-second buckets with a sum reducer.
	DataPointIterator points = new DataPointIterator(timeField.queryReader(null, new NoLock()), valueField.queryReader(null, new NoLock()));
	DownsampleFunction downsampler = new DownsampleFunction(points, 5, TimeUnit.SECONDS, ((x, y) -> (x + y)));
	int bucketCount = 0;
	while (downsampler.hasNext()) {
		DataPoint point = downsampler.next();
		if (bucketCount < 20) {
			assertEquals(baseTs + bucketCount * 1000 * 5, point.getTimestamp());
		}
		bucketCount++;
	}
	assertEquals(21, bucketCount);

	// 10-second buckets with an averaging reducer over a fresh iterator.
	points = new DataPointIterator(timeField.queryReader(null, new NoLock()), valueField.queryReader(null, new NoLock()));
	downsampler = new DownsampleFunction(points, 10, TimeUnit.SECONDS, ((x, y) -> (x + y) / 2));
	bucketCount = 0;
	while (downsampler.hasNext()) {
		DataPoint point = downsampler.next();
		if (bucketCount < 10) {
			assertEquals(baseTs + bucketCount * 1000 * 10, point.getTimestamp());
		}
		bucketCount++;
	}
	assertEquals(10, bucketCount, 1);
}
Use of com.srotya.sidewinder.core.storage.LinkedByteString in the sidewinder project (by srotya): class MemMalloc, method createNewBuffer.
/**
 * Allocates an off-heap buffer of the requested size and returns it wrapped in
 * a BufferObject whose pointer key is seriesId + STR2 + tsBucket.
 */
public BufferObject createNewBuffer(LinkedByteString seriesId, Integer tsBucket, int newSize) throws IOException {
	LinkedByteString bufferKey = new LinkedByteString()
			.concat(seriesId)
			.concat(STR2)
			.concat(String.valueOf(tsBucket));
	return new BufferObject(bufferKey, ByteBuffer.allocateDirect(newSize));
}
Use of com.srotya.sidewinder.core.storage.LinkedByteString in the sidewinder project (by srotya): class TestBasicSingleFunctions, method before.
@Before
public void before() throws IOException {
	// Fresh mock measurement and a fixed time-compression codec per test.
	measurement = new MockMeasurement(32768, 100);
	TimeField.compressionClass = CompressionFactory.getTimeClassByName("byzantine");

	// Time series: 200 timestamps at 1-second spacing.
	Field timeField = new TimeField(measurement, new LinkedByteString().concat(new ByteString("time")), 121213, new HashMap<>());
	long baseTs = 1546755991280L;
	for (int i = 0; i < 200; i++) {
		timeField.addDataPoint(measurement, baseTs + i * 1000);
	}

	// Value series: 200 points forming a triangle wave — 0..99 ascending,
	// then 100..1 descending — matching the 200 timestamps above.
	Field valueField = new ValueField(measurement, new LinkedByteString().concat(new ByteString("field1")), 121213, new HashMap<>());
	for (int i = 0; i < 100; i++) {
		valueField.addDataPoint(measurement, i * 1L);
	}
	for (int i = 100; i > 0; i--) {
		valueField.addDataPoint(measurement, i * 1L);
	}

	itr = new DataPointIterator(timeField.queryReader(null, new NoLock()), valueField.queryReader(null, new NoLock()));
}
Use of com.srotya.sidewinder.core.storage.LinkedByteString in the sidewinder project (by srotya): class DiskMalloc, method sliceMappedBuffersForBuckets.
// Rebuilds the per-series buffer map from the pointer file: each pointer line
// identifies a (file, offset, pointer, size, tsBucket) tuple, which is resolved
// to a slice of the corresponding memory-mapped data file.
private void sliceMappedBuffersForBuckets(Map<String, MappedByteBuffer> bufferMap, Map<ByteString, List<Entry<Integer, BufferObject>>> seriesBuffers) throws IOException {
ptrCounter = 0;
// initializePtrFile() opens/loads the pointer file; presumably it also sets
// ptrCounter to the number of stored pointer lines — TODO confirm, otherwise
// the loop below never executes.
initializePtrFile();
for (int i = 0; i < ptrCounter; i++) {
// Each call advances ptrBuf past one line; line order must match write order.
String line = MiscUtils.getStringFromBuffer(ptrBuf).trim();
// SEPARATOR is regex-escaped because split() takes a regex.
String[] splits = line.split("\\" + SEPARATOR);
logger.finer("Reading line:" + Arrays.toString(splits));
// Field layout: [0]=seriesId [1]=fileName [2]=pointer [3]=offset [4]=size [5]=tsBucket(hex)
String fileName = splits[1];
int positionOffset = Integer.parseInt(splits[3]);
String seriesIdStr = splits[0];
int pointer = Integer.parseInt(splits[2]);
int size = Integer.parseInt(splits[4]);
// NOTE(review): a stale pointer line referencing a missing/rotated data file
// would make this null and NPE at buf.position() — confirm callers guarantee
// bufferMap covers every file named in the pointer file.
MappedByteBuffer buf = bufferMap.get(fileName);
int position = positionOffset + pointer;
buf.position(position);
String tsBucket = splits[5];
// Slice starts at the positioned offset; limit caps it to the stored size.
ByteBuffer slice = buf.slice();
slice.limit(size);
ByteString seriesId = new ByteString(seriesIdStr);
// Reassemble the pointer key in its canonical on-disk form; the file name
// part is interned via the cache to share the backing ByteString.
LinkedByteString bsLine = new LinkedByteString(BUF_PARTS_LENGTH);
bsLine.concat(seriesId).concat(SEPARATOR).concat(cache.get(new ByteString(splits[1]))).concat(SEPARATOR).concat(new ByteString(splits[2])).concat(SEPARATOR).concat(new ByteString(splits[3])).concat(SEPARATOR).concat(new ByteString(splits[4]));
List<Entry<Integer, BufferObject>> list = seriesBuffers.get(seriesId);
if (list == null) {
list = new ArrayList<>();
seriesBuffers.put(seriesId, list);
}
// tsBucket is stored as a base-16 string.
list.add(new AbstractMap.SimpleEntry<>(Integer.parseInt(tsBucket, 16), new BufferObject(bsLine, slice)));
}
}
Aggregations