Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.
The class TimeSeries, method addDataPoints.
public void addDataPoints(TimeUnit unit, List<DataPoint> dps) throws IOException {
    // group incoming points by their target bucket writer so each writer
    // receives a single batched write call
    Map<Writer, List<DataPoint>> dpMap = new HashMap<>();
    for (DataPoint dp : dps) {
        Writer writer = getOrCreateSeriesBucket(unit, dp.getTimestamp());
        List<DataPoint> dpx;
        if (!dpMap.containsKey(writer)) {
            dpMap.put(writer, dpx = new ArrayList<>());
        } else {
            dpx = dpMap.get(writer);
        }
        dpx.add(dp);
    }
    for (Entry<Writer, List<DataPoint>> entry : dpMap.entrySet()) {
        entry.getKey().write(entry.getValue());
    }
}
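A minimal caller sketch, assuming an existing TimeSeries instance named series and DataPoint setters setTimestamp/setLongValue (the setter names are assumptions, not confirmed project API):

public void writeSample(TimeSeries series) throws IOException {
    // hypothetical: batch points so addDataPoints can group them per writer
    List<DataPoint> batch = new ArrayList<>();
    DataPoint dp = new DataPoint();
    dp.setTimestamp(System.currentTimeMillis()); // assumed setter
    dp.setLongValue(42L); // assumed setter
    batch.add(dp);
    series.addDataPoints(TimeUnit.MILLISECONDS, batch);
}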
Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.
The class TimeSeries, method collectGarbage.
/**
 * Cleans stale series by evicting time buckets that have aged past the
 * configured retention window.
 *
 * @return the list of writers backing the evicted buckets
 * @throws IOException
 */
public List<Writer> collectGarbage() throws IOException {
    List<Writer> gcedBuckets = new ArrayList<>();
    logger.finer("Retention buckets:" + retentionBuckets.get());
    // evict the oldest buckets until only retentionBuckets.get() remain
    while (bucketMap.size() > retentionBuckets.get()) {
        int oldSize = bucketMap.size();
        String key = bucketMap.firstKey();
        List<Writer> buckets = bucketMap.remove(key);
        for (Writer bucket : buckets) {
            // bucket.close();
            gcedBuckets.add(bucket);
            logger.log(Level.FINEST,
                    "GC," + measurement.getMeasurementName() + ":" + seriesId + " removing bucket:" + key
                            + ": as it passed retention period of:" + retentionBuckets.get() + ":old size:"
                            + oldSize + ":newsize:" + bucketMap.size() + ":");
        }
    }
    if (gcedBuckets.size() > 0) {
        logger.fine(() -> "GC," + measurement.getMeasurementName() + " buckets:" + gcedBuckets.size()
                + " retention size:" + retentionBuckets);
    }
    return gcedBuckets;
}
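Retention GC is typically driven on a schedule; a sketch, assuming the caller owns the scheduling (the executor wiring below is illustrative, not the project's actual runner):

public void scheduleGc(TimeSeries series) {
    ScheduledExecutorService gcExecutor = Executors.newSingleThreadScheduledExecutor();
    gcExecutor.scheduleAtFixedRate(() -> {
        try {
            // each pass trims the oldest buckets beyond the retention count
            List<Writer> reclaimed = series.collectGarbage();
            logger.fine(() -> "Reclaimed buckets:" + reclaimed.size());
        } catch (IOException e) {
            // log and retry next cycle; a failed GC pass is not fatal
            logger.log(Level.WARNING, "GC pass failed", e);
        }
    }, 1, 1, TimeUnit.MINUTES);
}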
Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.
The class TimeSeries, method queryPoints.
public List<long[]> queryPoints(String appendFieldValueName, List<String> appendTags, long startTime, long endTime, Predicate valuePredicate) throws IOException {
    if (startTime > endTime) {
        // XOR-swap start and end times (no temporary) if they were passed
        // in reverse order
        startTime = startTime ^ endTime;
        endTime = endTime ^ startTime;
        startTime = startTime ^ endTime;
    }
    BetweenPredicate timeRangePredicate = new BetweenPredicate(startTime, endTime);
    SortedMap<String, List<Writer>> series = correctTimeRangeScan(startTime, endTime);
    List<Reader> readers = new ArrayList<>();
    for (List<Writer> writers : series.values()) {
        for (Writer writer : writers) {
            readers.add(getReader(writer, timeRangePredicate, valuePredicate));
        }
    }
    List<long[]> points = new ArrayList<>();
    for (Reader reader : readers) {
        readerToPoints(points, reader);
    }
    return points;
}
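A usage sketch, assuming each returned long[] is a timestamp/value pair and that null predicates and tags mean "no filter" (both are assumptions):

public void printLastHour(TimeSeries series) throws IOException {
    long now = System.currentTimeMillis();
    // hypothetical call: no extra tags, no value filter
    List<long[]> points = series.queryPoints("value", null, now - 3_600_000L, now, null);
    for (long[] point : points) {
        // assumed layout: point[0] = timestamp, point[1] = raw long value
        System.out.println(point[0] + " -> " + point[1]);
    }
}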
Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.
The class TimeSeries, method queryReader.
/**
 * Extracts the list of readers for the supplied time range and value predicate.
 *
 * Each {@link DataPoint} has the appendFieldValue and appendTags set in it.
 *
 * @param appendFieldValueName
 *            field name to append to each datapoint
 * @param appendTags
 *            tags to append to each datapoint
 * @param startTime
 *            beginning of the time range
 * @param endTime
 *            end of the time range
 * @param valuePredicate
 *            pushed-down filter for values
 * @return list of readers
 * @throws IOException
 */
public List<Reader> queryReader(String appendFieldValueName, List<Tag> appendTags, long startTime, long endTime, Predicate valuePredicate) throws IOException {
    if (startTime > endTime) {
        // XOR-swap start and end times (no temporary) if they were passed
        // in reverse order
        startTime = startTime ^ endTime;
        endTime = endTime ^ startTime;
        startTime = startTime ^ endTime;
    }
    List<Reader> readers = new ArrayList<>();
    BetweenPredicate timeRangePredicate = new BetweenPredicate(startTime, endTime);
    SortedMap<String, List<Writer>> series = correctTimeRangeScan(startTime, endTime);
    for (List<Writer> writers : series.values()) {
        for (Writer writer : writers) {
            readers.add(getReader(writer, timeRangePredicate, valuePredicate));
        }
    }
    return readers;
}
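Downstream code drains the returned readers into points, much as readerToPoints does in queryPoints above; a sketch, where getPairCount() and readPair() are assumed accessor names:

public void drainReaders(List<Reader> readers) throws IOException {
    for (Reader reader : readers) {
        int count = reader.getPairCount(); // assumed accessor
        for (int i = 0; i < count; i++) {
            // assumed accessor; filtered points may be skipped by the reader
            DataPoint dp = reader.readPair();
            // process dp ...
        }
    }
}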
Use of com.srotya.sidewinder.core.storage.compression.Writer in project sidewinder by srotya.
The class TimeSeries, method getOrCreateSeriesBucket.
public Writer getOrCreateSeriesBucket(TimeUnit unit, long timestamp) throws IOException {
    String tsBucket = getTimeBucket(unit, timestamp, timeBucketSize);
    List<Writer> list = bucketMap.get(tsBucket);
    if (list == null) {
        // double-checked locking: only one thread creates the bucket list
        synchronized (bucketMap) {
            if ((list = bucketMap.get(tsBucket)) == null) {
                list = Collections.synchronizedList(new ArrayList<>());
                createNewWriter(timestamp, tsBucket, list);
                bucketMap.put(tsBucket, list);
                logger.fine(() -> "Creating new time series bucket:" + seriesId + ",measurement:"
                        + measurement.getMeasurementName());
            }
        }
    }
    synchronized (list) {
        Writer ans = list.get(list.size() - 1);
        if (ans.isFull()) {
            // re-read the tail writer under the list lock before rolling over
            if ((ans = list.get(list.size() - 1)).isFull()) {
                final Writer ansTmp = ans;
                logger.fine(() -> "Requesting new writer for:" + seriesId + ",measurement:"
                        + measurement.getMeasurementName() + " bucketcount:" + bucketCount + " pos:"
                        + ansTmp.getPosition());
                ans = createNewWriter(timestamp, tsBucket, list);
                // #COMPACTHRESHOLD
                if (compactionEnabled && list.size() > COMPACTION_THRESHOLD) {
                    // add older bucket to compaction queue
                    final List<Writer> listTmp = list;
                    logger.fine(() -> "Adding bucket to compaction set:" + listTmp.size());
                    compactionCandidateSet.put(tsBucket, list);
                }
            }
        }
        return ans;
        // Old code used for thread safety checks:
        // try {
        //     int idx = list.indexOf(ans);
        //     if (idx != (list.size() - 1)) {
        //         System.out.println("\n\nThread safety error\t" + idx + "\t" + list.size() + "\n\n");
        //     }
        // } catch (Exception e) {
        //     logger.log(Level.SEVERE, "Create new:" + "\tList:" + list + "\tbucket:" + tsBucket + "\t" + bucketMap, e);
        //     throw e;
        // }
    }
}
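For context, the bucket key that getOrCreateSeriesBucket looks up is derived by truncating the timestamp down to a bucket boundary; a minimal sketch of that idea (the real getTimeBucket implementation may differ):

static String timeBucket(TimeUnit unit, long timestamp, int timeBucketSize) {
    // illustrative-only: normalize to seconds, truncate to the bucket
    // boundary, and encode the boundary as the map key
    long seconds = unit.toSeconds(timestamp);
    long boundary = seconds - (seconds % timeBucketSize);
    return Long.toHexString(boundary);
}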