Use of com.srotya.sidewinder.core.storage.BufferObject in project sidewinder by srotya.
The class DiskMalloc, method sliceMappedBuffersForBuckets:
private void sliceMappedBuffersForBuckets(Map<String, MappedByteBuffer> bufferMap, Map<String, List<Entry<String, BufferObject>>> seriesBuffers) throws IOException {
    ptrCounter = 0;
    initializePtrFile();
    for (int i = 0; i < ptrCounter; i++) {
        String line = MiscUtils.getStringFromBuffer(ptrBuf);
        String[] splits = line.split("\\" + SEPARATOR);
        logger.finer("reading line:" + Arrays.toString(splits));
        String seriesId = splits[0];
        String fileName = splits[1];
        int pointer = Integer.parseInt(splits[2]);
        int positionOffset = Integer.parseInt(splits[3]);
        int size = Integer.parseInt(splits[4]);
        MappedByteBuffer buf = bufferMap.get(fileName);
        // absolute position of this bucket inside the mapped data file
        int position = positionOffset + pointer;
        buf.position(position);
        String tsBucket = MiscUtils.getStringFromBuffer(buf);
        ByteBuffer slice = buf.slice();
        slice.limit(size);
        List<Entry<String, BufferObject>> list = seriesBuffers.get(seriesId);
        if (list == null) {
            list = new ArrayList<>();
            seriesBuffers.put(seriesId, list);
        }
        list.add(new AbstractMap.SimpleEntry<>(tsBucket, new BufferObject(line, slice)));
    }
}
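The recovery loop hinges on ByteBuffer.slice(): positioning the parent buffer and slicing yields an independent view that shares the same underlying bytes, and limiting that view fences it to the bucket's reserved size. A minimal, self-contained sketch of the mechanics (a heap buffer stands in for the memory-mapped file; the offsets are made up for illustration):

import java.nio.ByteBuffer;

public class SliceDemo {
    public static void main(String[] args) {
        // parent buffer standing in for the MappedByteBuffer of a data file
        ByteBuffer parent = ByteBuffer.allocate(64);
        int positionOffset = 0; // start of the mapped region
        int pointer = 8;        // offset of this bucket inside the region
        int size = 16;          // bytes reserved for the bucket
        parent.position(positionOffset + pointer);
        // slice() creates a view that starts at the parent's current position;
        // limiting it caps the view to the bucket's reserved size
        ByteBuffer slice = parent.slice();
        slice.limit(size);
        slice.putLong(42L); // writes through to parent at absolute offset 8
        System.out.println(parent.getLong(8)); // prints 42
    }
}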
The class DiskMalloc, method createNewBuffer (String series id and bucket):
@Override
public BufferObject createNewBuffer(String seriesId, String tsBucket, int newSize) throws IOException {
    logger.fine("Seriesid:" + seriesId + " requesting buffer of size:" + newSize);
    if (rafActiveFile == null) {
        lock.lock();
        try {
            if (rafActiveFile == null) {
                filename = dataDirectory + "/data-" + String.format("%012d", fcnt) + ".dat";
                rafActiveFile = new RandomAccessFile(filename, "rwd");
                offset = 0;
                logger.info("Creating new datafile for measurement:" + filename);
                memoryMappedBuffer = rafActiveFile.getChannel().map(MapMode.READ_WRITE, 0, fileMapIncrement);
                fcnt++;
                if (enableMetricsCapture) {
                    metricsFileRotation.inc();
                }
            }
        } finally {
            // release the lock even if opening or mapping the file throws
            lock.unlock();
        }
    }
    lock.lock();
    try {
        // overflow check plus: does the requested buffer still fit in the current mapping?
        if (curr + newSize < 0 || curr + newSize > memoryMappedBuffer.remaining() + 1) {
            curr = 0;
            itr++;
            offset = (((long) (fileMapIncrement)) * itr);
            // rotate to a new data file once the next mapping offset exceeds the maximum file size
            if (offset >= maxFileSize) {
                itr = 0;
                logger.info("Rotating datafile for measurement:" + measurementName + " closing active file:" + filename);
                rafActiveFile.close();
                rafActiveFile = null;
                return createNewBuffer(seriesId, tsBucket, newSize);
            }
            // used for GC testing and debugging
            if (oldBufferReferences != null) {
                oldBufferReferences.put(filename, new WeakReference<MappedByteBuffer>(memoryMappedBuffer));
            }
            memoryMappedBuffer = rafActiveFile.getChannel().map(MapMode.READ_WRITE, offset, fileMapIncrement);
            logger.fine("Buffer expansion:" + offset + "\t\t" + curr);
            if (enableMetricsCapture) {
                metricsBufferResize.inc();
                metricsBufferSize.inc(fileMapIncrement);
            }
        }
        String ptrKey = appendBufferPointersToDisk(seriesId, filename, curr, offset, newSize);
        MiscUtils.writeStringToBuffer(tsBucket, memoryMappedBuffer);
        ByteBuffer buf = memoryMappedBuffer.slice();
        buf.limit(newSize);
        curr = curr + newSize;
        memoryMappedBuffer.position(curr);
        logger.fine("Position:" + buf.position() + "\t" + buf.limit() + "\t" + buf.capacity());
        if (enableMetricsCapture) {
            metricsBufferCounter.inc();
        }
        return new BufferObject(ptrKey, buf);
    } finally {
        lock.unlock();
    }
}
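appendBufferPointersToDisk persists one line per allocation, and sliceMappedBuffersForBuckets above parses those lines back in the field order seriesId, fileName, pointer, positionOffset, size. A sketch of that round trip, assuming a hypothetical '|' separator (the real value is DiskMalloc's SEPARATOR constant):

public class PtrLineDemo {

    static final String SEPARATOR = "|"; // hypothetical stand-in for DiskMalloc's separator

    static String encode(String seriesId, String fileName, int pointer, int positionOffset, int size) {
        return seriesId + SEPARATOR + fileName + SEPARATOR + pointer + SEPARATOR + positionOffset + SEPARATOR + size;
    }

    public static void main(String[] args) {
        String line = encode("cpu.host1", "data-000000000001.dat", 1024, 0, 4096);
        // the recovery code splits with "\\" + SEPARATOR because '|' is a
        // regex metacharacter and String.split takes a regular expression
        String[] splits = line.split("\\" + SEPARATOR);
        int position = Integer.parseInt(splits[3]) + Integer.parseInt(splits[2]);
        System.out.println(splits[0] + " -> " + splits[1] + " @ " + position);
        // prints: cpu.host1 -> data-000000000001.dat @ 1024
    }
}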
The class DiskMalloc, method seriesBufferMap:
@Override
public Map<ByteString, List<Entry<Integer, BufferObject>>> seriesBufferMap() throws FileNotFoundException, IOException {
    Map<String, MappedByteBuffer> bufferMap = new ConcurrentHashMap<>();
    File[] listFiles = new File(dataDirectory).listFiles((dir, name) -> name.endsWith(".dat"));
    // zero-padded file counters make lexicographic order equal numeric order
    Arrays.sort(listFiles, Comparator.comparing(File::getName));
    for (File dataFile : listFiles) {
        try {
            RandomAccessFile raf = new RandomAccessFile(dataFile, "rw");
            MappedByteBuffer map = raf.getChannel().map(MapMode.READ_WRITE, 0, dataFile.length());
            bufferMap.put(dataFile.getName(), map);
            logger.info("Recovering data file:" + dataDirectory + "/" + dataFile.getName());
            // the mapping stays valid after the underlying channel is closed
            raf.close();
        } catch (Exception e) {
            logger.log(Level.SEVERE, "Failed to recover data files for measurement:" + measurementName, e);
        }
    }
    // seed the file counter from the highest-numbered recovered data file so
    // that newly created files do not overwrite the existing ones
    if (listFiles.length > 0) {
        fcnt = Integer.parseInt(listFiles[listFiles.length - 1].getName().replace("data-", "").replace(".dat", "")) + 1;
    }
    Map<ByteString, List<Entry<Integer, BufferObject>>> seriesBuffers = new HashMap<>();
    sliceMappedBuffersForBuckets(bufferMap, seriesBuffers);
    return seriesBuffers;
}
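The %012d zero-padding in the data file names is what makes the plain lexicographic sort above agree with numeric order, so the last element is always the newest file and the counter can be seeded from it. A quick self-contained check of that property (the names are illustrative):

import java.util.Arrays;

public class FileCounterDemo {
    public static void main(String[] args) {
        // names produced by String.format("%012d", n) sort lexicographically
        // in numeric order, so the last entry after sorting is the newest file
        String[] names = { "data-000000000002.dat", "data-000000000010.dat", "data-000000000001.dat" };
        Arrays.sort(names);
        String last = names[names.length - 1];
        int fcnt = Integer.parseInt(last.replace("data-", "").replace(".dat", "")) + 1;
        System.out.println("next file counter: " + fcnt); // prints 11
    }
}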
The class DiskMalloc, method createNewBuffer (LinkedByteString field id and Integer bucket):
@Override
public BufferObject createNewBuffer(LinkedByteString fieldId, Integer tsBucket, int newSize) throws IOException {
    logger.fine(() -> "Seriesid:" + fieldId + " requesting buffer of size:" + newSize);
    if (rafActiveFile == null) {
        lock.lock();
        try {
            if (rafActiveFile == null) {
                filename = new ByteString(dataDirectory + "/data-" + String.format("%012d", fcnt) + ".dat");
                rafActiveFile = new RandomAccessFile(filename.toString(), "rwd");
                offset = 0;
                logger.info("Creating new datafile for measurement:" + filename);
                memoryMappedBuffer = rafActiveFile.getChannel().map(MapMode.READ_WRITE, 0, fileMapIncrement);
                fcnt++;
                if (enableMetricsCapture) {
                    metricsFileRotation.inc();
                }
            }
        } finally {
            // release the lock even if opening or mapping the file throws
            lock.unlock();
        }
    }
    lock.lock();
    try {
        // overflow check plus: does the requested buffer still fit in the current mapping?
        if (curr + newSize < 0 || curr + newSize > memoryMappedBuffer.remaining() + 1) {
            curr = 0;
            itr++;
            offset = (((long) (fileMapIncrement)) * itr);
            // rotate to a new data file once the next mapping offset exceeds the maximum file size
            if (offset >= maxFileSize) {
                itr = 0;
                logger.info("Rotating datafile for measurement:" + measurementName + " closing active file:" + filename);
                // flush pending changes to disk before closing the rotated file
                memoryMappedBuffer.force();
                rafActiveFile.close();
                rafActiveFile = null;
                return createNewBuffer(fieldId, tsBucket, newSize);
            }
            // used for GC testing and debugging
            if (oldBufferReferences != null) {
                oldBufferReferences.put(filename, new WeakReference<MappedByteBuffer>(memoryMappedBuffer));
            }
            memoryMappedBuffer = rafActiveFile.getChannel().map(MapMode.READ_WRITE, offset, fileMapIncrement);
            logger.fine("Buffer expansion:" + offset + "\t\t" + curr);
            if (enableMetricsCapture) {
                metricsBufferResize.inc();
                metricsBufferSize.inc(fileMapIncrement);
            }
        }
        LinkedByteString ptrKey = appendBufferPointersToDisk(fieldId, filename, curr, offset, newSize, tsBucket);
        ByteBuffer buf = memoryMappedBuffer.slice();
        buf.limit(newSize);
        curr = curr + newSize;
        memoryMappedBuffer.position(curr);
        logger.fine(() -> "Position:" + buf.position() + "\t" + buf.limit() + "\t" + buf.capacity());
        if (enableMetricsCapture) {
            metricsBufferCounter.inc();
        }
        return new BufferObject(ptrKey, buf);
    } finally {
        lock.unlock();
    }
}
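Both createNewBuffer overloads lazily open the active file with double-checked locking: an unlocked fast-path check, then a re-check under a ReentrantLock, with the unlock in a finally block so a failed open cannot leak the lock. A stripped-down sketch of the pattern (the volatile on the field is an assumption here; the snippets above do not show the field declaration):

import java.util.concurrent.locks.ReentrantLock;

public class LazyInitDemo {

    private final ReentrantLock lock = new ReentrantLock();
    private volatile Object resource; // stands in for rafActiveFile

    Object get() {
        if (resource == null) {              // fast path: skip locking once initialized
            lock.lock();
            try {
                if (resource == null) {      // re-check: another thread may have won the race
                    resource = new Object(); // the expensive open/map work goes here
                }
            } finally {
                lock.unlock();               // released even if initialization throws
            }
        }
        return resource;
    }

    public static void main(String[] args) {
        LazyInitDemo d = new LazyInitDemo();
        System.out.println(d.get() == d.get()); // true: initialized exactly once
    }
}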
The class PersistentMeasurement, method loadTimeseriesInMeasurements:
@Override
public void loadTimeseriesInMeasurements() throws IOException {
    String fieldFilePath = getFieldMetadataPath();
    File file = new File(fieldFilePath);
    if (!file.exists()) {
        logger.warning("Field file missing for measurement:" + measurementName);
        return;
    } else {
        logger.fine("Field file exists:" + file.getAbsolutePath());
    }
    List<String> fieldEntries = MiscUtils.readAllLines(file);
    loadFieldList(fieldEntries);
    String mdFilePath = getMetadataPath();
    file = new File(mdFilePath);
    if (!file.exists()) {
        logger.warning("Metadata file missing for measurement:" + measurementName);
        return;
    } else {
        logger.fine("Metadata file exists:" + file.getAbsolutePath());
    }
    List<String> seriesEntries = MiscUtils.readAllLines(file);
    try {
        loadSeriesEntries(seriesEntries);
    } catch (Exception e) {
        throw new IOException(e);
    }
    ByteStringCache localCache = ByteStringCache.instance();
    Map<ByteString, List<Entry<Integer, BufferObject>>> seriesBuffers = malloc.seriesBufferMap();
    for (Entry<ByteString, List<Entry<Integer, BufferObject>>> entry : seriesBuffers.entrySet()) {
        // the buffer-map key is a composite: <seriesId><SERIESID_SEPARATOR><fieldName>
        ByteString[] split = entry.getKey().split(SERIESID_SEPARATOR);
        ByteString seriesId = localCache.get(split[0]);
        Integer seriesIndex = seriesMap.get(seriesId);
        Series series = seriesList.get(seriesIndex);
        // identity check: make the series hold the canonical cached ByteString instance
        if (series.getSeriesId() != seriesId) {
            seriesMap.put(seriesId, seriesIndex);
            series.setSeriesId(seriesId);
        }
        List<Entry<Integer, BufferObject>> list = entry.getValue();
        if (list != null) {
            try {
                String fieldName = split[1].toString();
                series.loadBuffers(this, fieldName, list, this.getConf());
            } catch (Exception e) {
                logger.log(Level.SEVERE, "Failed to load bucket map for:" + entry.getKey() + ":" + measurementName, e);
            }
        }
    }
    if (compactOnStart) {
        compact();
    }
    logger.info("Loaded measurement:" + measurementName);
}
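The load loop assumes each buffer-map key is the series id and the field name joined by SERIESID_SEPARATOR, which is why split[0] becomes the series id and split[1] the field name. A small sketch of that convention, with a hypothetical '#' separator standing in for the real constant (here plain Strings replace ByteString):

import java.util.AbstractMap;
import java.util.Map;

public class SeriesKeyDemo {

    // hypothetical separator; the real value is the SERIESID_SEPARATOR constant
    static final String SERIESID_SEPARATOR = "#";

    static Map.Entry<String, String> splitKey(String bufferKey) {
        String[] split = bufferKey.split(SERIESID_SEPARATOR);
        // split[0] = series id, split[1] = field name, mirroring the load loop
        return new AbstractMap.SimpleEntry<>(split[0], split[1]);
    }

    public static void main(String[] args) {
        Map.Entry<String, String> e = splitKey("host=web1,region=us#value");
        System.out.println("series:" + e.getKey() + " field:" + e.getValue());
        // prints: series:host=web1,region=us field:value
    }
}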