Example usage of io.cdap.cdap.api.dataset.lib.cube.TimeValue in the cdap project (caskdata): class LocalMRJobInfoFetcher, method getAggregates.
/**
 * Queries aggregate values for the given metrics and stores each value in {@code result}
 * under its mapped counter name.
 *
 * @param tags metric context tags to query with
 * @param metricsToCounters maps each metric name to the counter name used as the result key
 * @param result output map; every counter name gets an entry (zero when no data was found)
 * @throws IOException if the underlying metrics query fails
 */
private void getAggregates(Map<String, String> tags, Map<String, String> metricsToCounters, Map<String, Long> result) throws IOException {
  Collection<MetricTimeSeries> series = metricsSystemClient.query(tags, metricsToCounters.keySet());
  // Pre-populate every counter with zero so the result map is complete even for
  // metrics that returned no data.
  metricsToCounters.values().forEach(counterName -> result.put(counterName, 0L));
  for (MetricTimeSeries oneSeries : series) {
    // An aggregates query yields exactly one data point per returned series.
    TimeValue point = Iterables.getOnlyElement(oneSeries.getTimeValues());
    result.put(metricsToCounters.get(oneSeries.getMetricName()), point.getValue());
  }
}
Example usage of io.cdap.cdap.api.dataset.lib.cube.TimeValue in the cdap project (caskdata): class LocalMRJobInfoFetcher, method queryGroupedAggregates.
/**
 * Queries the metric store for one metric across all tasks of a certain TaskType,
 * grouping by instance id so that each task contributes its own row.
 *
 * @param tags metric context tags to query with
 * @param allTaskMetrics output table keyed by (taskId, counterName) -> value
 * @param metricsToCounters maps each metric name to the counter name used as the column key
 * @throws IOException if the underlying metrics query fails
 */
private void queryGroupedAggregates(Map<String, String> tags, Table<String, String, Long> allTaskMetrics, Map<String, String> metricsToCounters) throws IOException {
  Collection<MetricTimeSeries> series =
    metricsSystemClient.query(tags, metricsToCounters.keySet(), Collections.singleton(Constants.Metrics.Tag.INSTANCE_ID));
  for (MetricTimeSeries oneSeries : series) {
    // A grouped aggregate query returns a single data point per series.
    long value = Iterables.getOnlyElement(oneSeries.getTimeValues()).getValue();
    // The instance-id tag identifies which task this series belongs to.
    String taskId = oneSeries.getTagValues().get(Constants.Metrics.Tag.INSTANCE_ID);
    allTaskMetrics.put(taskId, metricsToCounters.get(oneSeries.getMetricName()), value);
  }
}
Example usage of io.cdap.cdap.api.dataset.lib.cube.TimeValue in the cdap project (caskdata): class PreviewDataPipelineTest, method getTotalMetric.
/**
 * Returns the SUM aggregate of the named metric under the given tags.
 *
 * @param tags metric context tags to query with
 * @param metricName fully-qualified metric name to total
 * @param previewManager source of the metric store to query
 * @return the total value, or 0 when no matching series or data point exists
 */
private long getTotalMetric(Map<String, String> tags, String metricName, PreviewManager previewManager) {
  // Totals query: start/end of 0 with SUM collapses everything into one data point.
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName, AggregationFunction.SUM, tags, new ArrayList<>());
  return previewManager.getMetricsQueryHelper().getMetricStore().query(query).stream()
    .findFirst()
    .map(MetricTimeSeries::getTimeValues)
    .filter(timeValues -> !timeValues.isEmpty())
    .map(timeValues -> timeValues.get(0).getValue())
    .orElse(0L);
}
Example usage of io.cdap.cdap.api.dataset.lib.cube.TimeValue in the cdap project (caskdata): class AbstractCubeTest, method testInterpolate.
@Test
public void testInterpolate() throws Exception {
// Exercises the cube's interpolators (Step and Linear) by writing sparse data
// points and verifying the values filled in between them.
Aggregation agg1 = new DefaultAggregation(ImmutableList.of("dim1", "dim2", "dim3"), ImmutableList.of("dim1", "dim2", "dim3"));
int resolution = 1;
Cube cube = getCube("myInterpolatedCube", new int[] { resolution }, ImmutableMap.of("agg1", agg1));
// test step interpolation: the earlier value (5) is carried forward until the
// next written point (3) at endTs
long startTs = 1;
long endTs = 10;
writeInc(cube, "metric1", startTs, 5, "1", "1", "1");
writeInc(cube, "metric1", endTs, 3, "1", "1", "1");
List<TimeValue> expectedTimeValues = Lists.newArrayList();
for (long i = startTs; i < endTs; i++) {
expectedTimeValues.add(new TimeValue(i, 5));
}
expectedTimeValues.add(new TimeValue(endTs, 3));
verifyCountQuery(cube, startTs, endTs, resolution, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1", "dim2", "1", "dim3", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), expectedTimeValues)), new Interpolators.Step());
// Delete the written data so each interpolation scenario starts from a clean slate.
Map<String, String> deleteTags = new LinkedHashMap<>();
deleteTags.put("dim1", "1");
deleteTags.put("dim2", "1");
deleteTags.put("dim3", "1");
// Match only aggregates whose dimension list starts with exactly (dim1, dim2, dim3).
Predicate<List<String>> predicate = aggregates -> Collections.indexOfSubList(aggregates, new ArrayList<>(deleteTags.keySet())) == 0;
CubeDeleteQuery query = new CubeDeleteQuery(startTs, endTs, resolution, deleteTags, Collections.singletonList("metric1"), predicate);
cube.delete(query);
// test small-slope linear interpolation: values between 5 and 3 over 4 steps
startTs = 1;
endTs = 5;
writeInc(cube, "metric1", startTs, 5, "1", "1", "1");
writeInc(cube, "metric1", endTs, 3, "1", "1", "1");
verifyCountQuery(cube, startTs, endTs, resolution, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1", "dim2", "1", "dim3", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 5, 2, 5, 3, 4, 4, 4, 5, 3))), new Interpolators.Linear());
query = new CubeDeleteQuery(startTs, endTs, resolution, deleteTags, Collections.singletonList("metric1"), predicate);
cube.delete(query);
// test big-slope linear interpolation: 100 -> 500 interpolates in steps of 100
writeInc(cube, "metric1", startTs, 100, "1", "1", "1");
writeInc(cube, "metric1", endTs, 500, "1", "1", "1");
verifyCountQuery(cube, startTs, endTs, resolution, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1", "dim2", "1", "dim3", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 100, 2, 200, 3, 300, 4, 400, 5, 500))), new Interpolators.Linear());
cube.delete(query);
// test limit on Interpolate: points exactly `limit` apart are NOT interpolated,
// so the gap stays filled with zeros instead of the carried-forward value
long limit = 20;
writeInc(cube, "metric1", 0, 10, "1", "1", "1");
writeInc(cube, "metric1", limit + 1, 50, "1", "1", "1");
expectedTimeValues.clear();
expectedTimeValues.add(new TimeValue(0, 10));
for (long i = 1; i <= limit; i++) {
expectedTimeValues.add(new TimeValue(i, 0));
}
expectedTimeValues.add(new TimeValue(limit + 1, 50));
verifyCountQuery(cube, 0, 21, resolution, "metric1", AggregationFunction.SUM, ImmutableMap.of("dim1", "1", "dim2", "1", "dim3", "1"), new ArrayList<String>(), ImmutableList.of(new TimeSeries("metric1", new HashMap<String, String>(), expectedTimeValues)), new Interpolators.Step(limit));
}
Example usage of io.cdap.cdap.api.dataset.lib.cube.TimeValue in the cdap project (caskdata): class AppFabricTestBase, method getTotalMetric.
/**
 * Returns the SUM aggregate of the system metric {@code metricName} for the given tags.
 *
 * @param metricName metric name, without the {@code "system."} prefix (added here)
 * @param tags metric context tags to query with
 * @return the total value, or 0 when no matching series or data point exists
 */
protected long getTotalMetric(String metricName, Map<String, String> tags) {
  // Totals query with SUM and no groupBy, so at most one series with one value comes back.
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, "system." + metricName, AggregationFunction.SUM, tags, Collections.emptyList());
  Collection<MetricTimeSeries> results = metricStore.query(query);
  if (results.isEmpty()) {
    return 0;
  }
  List<TimeValue> timeValues = results.iterator().next().getTimeValues();
  return timeValues.isEmpty() ? 0 : timeValues.get(0).getValue();
}
Aggregations