Use of com.srotya.sidewinder.core.storage.DataPoint in project sidewinder by srotya.
The class TestWindowedFunctions, method testIntegralFunction:
@Test
public void testIntegralFunction() throws Exception {
    double[] values = { 1.1, 2.2, 3.3, 4.4 };
    List<DataPoint> dps = new ArrayList<>();
    long ts = 1486617103629L;
    for (int i = 0; i < values.length; i++) {
        double d = values[i];
        ts = ts + (30_000);
        dps.add(MiscUtils.buildDataPoint(ts, d));
    }
    ReducingWindowedAggregator rwa = new IntegralFunction();
    rwa.init(new Object[] { 70, "smean" });
    Series series = new Series();
    series.setDataPoints(dps);
    series.setFp(true);
    List<DataPoint> result = rwa.apply(series).getDataPoints();
    assertEquals(2, result.size());
    assertEquals(1.1, result.get(0).getValue(), 0);
    assertEquals(9.9, result.get(1).getValue(), 0);
}
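The assertions follow from the bucketing: with points 30 seconds apart and a 70-second window, the first point lands in its own bucket (1.1) and the remaining three share the next one (2.2 + 3.3 + 4.4 = 9.9). A minimal sketch of that arithmetic, assuming fixed-size windows aligned on the epoch; the helper below is illustrative only and is not part of sidewinder:

// Hypothetical helper: sums values whose timestamps fall into the same fixed-size window.
// Uses java.util.TreeMap so the windows come out in timestamp order.
static double[] sumPerWindow(long[] timestamps, double[] values, long windowMs) {
    TreeMap<Long, Double> buckets = new TreeMap<>();
    for (int i = 0; i < values.length; i++) {
        buckets.merge(timestamps[i] / windowMs, values[i], Double::sum);
    }
    double[] out = new double[buckets.size()];
    int i = 0;
    for (double v : buckets.values()) {
        out[i++] = v;
    }
    return out;
}
// For the timestamps and values built above, sumPerWindow(ts, values, 70_000) yields { 1.1, 9.9 }.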
Use of com.srotya.sidewinder.core.storage.DataPoint in project sidewinder by srotya.
The class GrafanaUtils, method queryAndGetData:
public static void queryAndGetData(StorageEngine engine, String dbName, long startTs, long endTs, List<Target> output, TargetSeries targetSeriesEntry) throws IOException {
    List<Series> points;
    try {
        points = engine.queryDataPoints(dbName, targetSeriesEntry.getMeasurementName(), targetSeriesEntry.getFieldName(), startTs, endTs, targetSeriesEntry.getTagFilter(), null, targetSeriesEntry.getAggregationFunction());
    } catch (ItemNotFoundException e) {
        throw new NotFoundException(e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        throw new BadRequestException(e.getMessage());
    }
    if (points != null) {
        for (Series entry : points) {
            Target tar = new Target(entry.toString());
            List<DataPoint> dps = entry.getDataPoints();
            if (dps != null) {
                for (DataPoint point : dps) {
                    if (!entry.isFp()) {
                        tar.getDatapoints().add(new Number[] { point.getLongValue(), point.getTimestamp() });
                    } else {
                        tar.getDatapoints().add(new Number[] { point.getValue(), point.getTimestamp() });
                    }
                }
            }
            output.add(tar);
            tar.sort();
        }
    }
}
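A hedged usage sketch: each Target in the output list carries [value, timestamp] pairs in the order built by the inner loop above. The variable setup is omitted here, and "mydb" plus the surrounding objects are placeholders:

// engine, startTs, endTs and targetSeriesEntry are assumed to exist already.
List<Target> output = new ArrayList<>();
GrafanaUtils.queryAndGetData(engine, "mydb", startTs, endTs, output, targetSeriesEntry);
for (Target target : output) {
    for (Number[] pair : target.getDatapoints()) {
        // pair[0] is the value, pair[1] is the timestamp in milliseconds
        System.out.println(pair[1] + " -> " + pair[0]);
    }
}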
Use of com.srotya.sidewinder.core.storage.DataPoint in project sidewinder by srotya.
The class MultiSeriesFunction, method apply:
@Override
public List<Series> apply(List<Series> t) {
    List<Series> output = new ArrayList<>();
    boolean fp = t.get(0).isFp();
    List<List<DataPoint>> intermediate = new ArrayList<>();
    int size = t.get(0).getDataPoints().size();
    for (int i = 0; i < t.size(); i++) {
        Series ts = t.get(i);
        if (size != ts.getDataPoints().size()) {
            throw new IllegalArgumentException("Non-uniform series length");
        }
        intermediate.add(ts.getDataPoints());
    }
    List<DataPoint> compute = compute(intermediate, fp);
    Series series = new Series(compute);
    series.setFp(fp);
    series.setMeasurementName(t.get(0).getMeasurementName());
    series.setValueFieldName(name());
    series.setTags(Arrays.asList(new Tag("multiseries", "true")));
    output.add(series);
    return output;
}
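compute(...) and name() are the hooks a concrete subclass supplies. A hypothetical point-wise sum implementation might look like the sketch below; the exact signatures of the overridden methods and the DataPoint setters are assumed to mirror the getters and calls shown elsewhere on this page:

// Illustrative subclass, not part of sidewinder: sums the i-th point of every input series.
public class SumMultiSeriesFunction extends MultiSeriesFunction {

    @Override
    public List<DataPoint> compute(List<List<DataPoint>> series, boolean isFp) {
        List<DataPoint> output = new ArrayList<>();
        int length = series.get(0).size();
        for (int i = 0; i < length; i++) {
            double sum = 0;
            for (List<DataPoint> s : series) {
                sum += isFp ? s.get(i).getValue() : s.get(i).getLongValue();
            }
            DataPoint dp = new DataPoint();
            // reuse the timestamp of the first series for the combined point
            dp.setTimestamp(series.get(0).get(i).getTimestamp());
            if (isFp) {
                dp.setValue(sum);
            } else {
                dp.setLongValue((long) sum);
            }
            output.add(dp);
        }
        return output;
    }

    @Override
    public String name() {
        return "sum";
    }
}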
Use of com.srotya.sidewinder.core.storage.DataPoint in project sidewinder by srotya.
The class ReduceFunction, method apply:
@Override
public Series apply(Series dataPoints) {
    Series output = new Series(dataPoints.getMeasurementName(), dataPoints.getValueFieldName(), dataPoints.getTags());
    output.setFp(dataPoints.isFp());
    DataPoint single = new DataPoint();
    single.setTimestamp(dataPoints.getDataPoints().get(0).getTimestamp());
    aggregateToSingle(dataPoints.getDataPoints(), single, dataPoints.isFp());
    output.setDataPoints(Arrays.asList(single));
    return output;
}
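apply keeps the first timestamp and delegates the value to aggregateToSingle(...), which is where a concrete reducer collapses the series. A hypothetical maximum reducer, with the same hedges as above (method signature inferred from the call site, DataPoint setters assumed):

// Illustrative subclass, not part of sidewinder: reduces a series to its maximum value.
public class MaxReduceFunction extends ReduceFunction {

    @Override
    public void aggregateToSingle(List<DataPoint> dataPoints, DataPoint output, boolean isFp) {
        if (isFp) {
            double max = Double.NEGATIVE_INFINITY;
            for (DataPoint dp : dataPoints) {
                max = Math.max(max, dp.getValue());
            }
            output.setValue(max);
        } else {
            long max = Long.MIN_VALUE;
            for (DataPoint dp : dataPoints) {
                max = Math.max(max, dp.getLongValue());
            }
            output.setLongValue(max);
        }
    }
}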
Use of com.srotya.sidewinder.core.storage.DataPoint in project sidewinder by srotya.
The class TestByzantineReadWrite, method testWriteRead:
@Test
public void testWriteRead() throws IOException {
    ByteBuffer buf = ByteBuffer.allocateDirect(1024 * 100);
    Writer writer = new ByzantineWriter();
    writer.configure(new HashMap<>(), buf, true, startOffset, false);
    long ts = System.currentTimeMillis();
    writer.setHeaderTimestamp(ts);
    int LIMIT = 10000;
    // Pass 1: long values appended one at a time via addValue, timestamps verified through a Reader
    for (int i = 0; i < LIMIT; i++) {
        writer.addValue(ts + i * 1000, i);
    }
    System.out.println("Compression Ratio:" + writer.getCompressionRatio());
    Reader reader = writer.getReader();
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = reader.readPair();
        assertEquals(ts + i * 1000, dp.getTimestamp());
    }
    // Pass 2: floating-point values appended via addValue; both timestamp and value are verified
    buf.rewind();
    writer = new ByzantineWriter();
    writer.configure(new HashMap<>(), buf, true, startOffset, true);
    ts = System.currentTimeMillis();
    writer.setHeaderTimestamp(ts);
    for (int i = 0; i < LIMIT; i++) {
        writer.addValue(ts + i * 1000, i * 1.1);
    }
    reader = writer.getReader();
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = reader.readPair();
        assertEquals(ts + i * 1000, dp.getTimestamp());
        assertEquals(i * 1.1, dp.getValue(), startOffset);
    }
    // Pass 3: DataPoint objects written individually via write(DataPoint)
    buf.rewind();
    writer = new ByzantineWriter();
    writer.configure(new HashMap<>(), buf, true, startOffset, false);
    ts = System.currentTimeMillis();
    writer.setHeaderTimestamp(ts);
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = MiscUtils.buildDataPoint(ts + i * 1000, i);
        writer.write(dp);
    }
    reader = writer.getReader();
    assertEquals(LIMIT, reader.getPairCount());
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = reader.readPair();
        assertEquals(ts + i * 1000, dp.getTimestamp());
    }
    // Pass 4: a full List<DataPoint> written in a single write(List) call
    buf.rewind();
    writer = new ByzantineWriter();
    writer.configure(new HashMap<>(), buf, true, startOffset, true);
    ts = System.currentTimeMillis();
    writer.setHeaderTimestamp(ts);
    List<DataPoint> dps = new ArrayList<>();
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = MiscUtils.buildDataPoint(ts + i * 1000, i);
        dps.add(dp);
    }
    writer.write(dps);
    reader = writer.getReader();
    assertEquals(LIMIT, reader.getPairCount());
    for (int i = 0; i < LIMIT; i++) {
        DataPoint dp = reader.readPair();
        assertEquals(ts + i * 1000, dp.getTimestamp());
    }
}
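The read-back loops all follow the same pattern. A small hedged helper sketch (not from the test) that drains a Reader into a list, using only the Reader methods exercised above (getPairCount and readPair):

static List<DataPoint> drain(Reader reader) throws IOException {
    int count = reader.getPairCount();
    List<DataPoint> points = new ArrayList<>(count);
    for (int i = 0; i < count; i++) {
        points.add(reader.readPair());
    }
    return points;
}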