
Example 6 with TimeValue

Use of co.cask.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.

In class LocalMRJobInfoFetcher, method queryGroupedAggregates:

// queries MetricStore for one metric across all tasks of a certain TaskType, using GroupBy InstanceId
private void queryGroupedAggregates(Map<String, String> tags, Table<String, String, Long> allTaskMetrics, Map<String, String> metricsToCounters) {
    Map<String, AggregationFunction> metrics = Maps.newHashMap();
    // all map-reduce task metrics are gauges, so query the latest value for each
    for (String metric : metricsToCounters.keySet()) {
        metrics.put(metric, AggregationFunction.LATEST);
    }
    MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metrics, tags, ImmutableList.of(Constants.Metrics.Tag.INSTANCE_ID));
    Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery);
    for (MetricTimeSeries metricTimeSeries : query) {
        List<TimeValue> timeValues = metricTimeSeries.getTimeValues();
        TimeValue timeValue = Iterables.getOnlyElement(timeValues);
        String taskId = metricTimeSeries.getTagValues().get(Constants.Metrics.Tag.INSTANCE_ID);
        allTaskMetrics.put(taskId, metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue());
    }
}
Also used : AggregationFunction(co.cask.cdap.api.dataset.lib.cube.AggregationFunction) MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue)
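
A minimal sketch of how this helper might be invoked from within LocalMRJobInfoFetcher. The tag values, metric name, and counter label below are illustrative only, and HashBasedTable comes from Guava:

Map<String, String> tags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "PurchaseApp");
// hypothetical mapping from metric name to the counter it should be reported as
Map<String, String> metricsToCounters =
    ImmutableMap.of("system.process.entries.in", "MAP_INPUT_RECORDS");
Table<String, String, Long> allTaskMetrics = HashBasedTable.create();
queryGroupedAggregates(tags, allTaskMetrics, metricsToCounters);
// the table is now keyed by (task instance id, counter name) -> latest gauge value
for (Table.Cell<String, String, Long> cell : allTaskMetrics.cellSet()) {
    System.out.println(cell.getRowKey() + " " + cell.getColumnKey() + " = " + cell.getValue());
}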

Example 7 with TimeValue

Use of co.cask.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.

In class DefaultCube, method convertToQueryResult:

private Collection<TimeSeries> convertToQueryResult(CubeQuery query, Table<Map<String, String>, String, Map<Long, Long>> resultTable) {
    List<TimeSeries> result = Lists.newArrayList();
    // iterate over each group of dimension values (one table row per grouping)
    for (Map.Entry<Map<String, String>, Map<String, Map<Long, Long>>> row : resultTable.rowMap().entrySet()) {
        // iterate over each measure in this grouping
        for (Map.Entry<String, Map<Long, Long>> measureEntry : row.getValue().entrySet()) {
            // generate the time series for this grouping and measure
            int count = 0;
            List<TimeValue> timeValues = Lists.newArrayList();
            for (Map.Entry<Long, Long> timeValue : measureEntry.getValue().entrySet()) {
                timeValues.add(new TimeValue(timeValue.getKey(), timeValue.getValue()));
            }
            Collections.sort(timeValues);
            PeekingIterator<TimeValue> timeValueItor = Iterators.peekingIterator(new TimeSeriesInterpolator(timeValues, query.getInterpolator(), query.getResolution()).iterator());
            List<TimeValue> resultTimeValues = Lists.newArrayList();
            while (timeValueItor.hasNext()) {
                TimeValue timeValue = timeValueItor.next();
                resultTimeValues.add(new TimeValue(timeValue.getTimestamp(), timeValue.getValue()));
                if (++count >= query.getLimit()) {
                    break;
                }
            }
            result.add(new TimeSeries(measureEntry.getKey(), row.getKey(), resultTimeValues));
        }
    }
    return result;
}
Also used : TimeSeries(co.cask.cdap.api.dataset.lib.cube.TimeSeries) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue)
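
A minimal sketch of the timestamp-to-TimeValue conversion and sort used above. The sample points are made up; the sort relies on TimeValue ordering by timestamp, as the Collections.sort call in the method implies:

Map<Long, Long> rawPoints = ImmutableMap.of(1500000060L, 5L, 1500000000L, 3L);
List<TimeValue> timeValues = Lists.newArrayList();
for (Map.Entry<Long, Long> point : rawPoints.entrySet()) {
    timeValues.add(new TimeValue(point.getKey(), point.getValue()));
}
// order the points by timestamp before interpolation
Collections.sort(timeValues);
for (TimeValue timeValue : timeValues) {
    System.out.println(timeValue.getTimestamp() + " -> " + timeValue.getValue());
}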

Example 8 with TimeValue

Use of co.cask.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.

In class MetricsProcessorServiceTest, method assertMetricsResult:

private void assertMetricsResult(MetricStore metricStore, Map<String, String> metricsContext, Map<String, Long> expected) {
    for (Map.Entry<String, Long> metric : expected.entrySet()) {
        Collection<MetricTimeSeries> queryResult = metricStore.query(new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metric.getKey(), AggregationFunction.SUM, metricsContext, ImmutableList.<String>of()));
        MetricTimeSeries timeSeries = Iterables.getOnlyElement(queryResult);
        List<TimeValue> timeValues = timeSeries.getTimeValues();
        TimeValue timeValue = Iterables.getOnlyElement(timeValues);
        Assert.assertEquals(String.format("Actual value of metric: %s does not match expected", metric.getKey()), metric.getValue().longValue(), timeValue.getValue());
    }
}
Also used : MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue)
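
A sketch of a hypothetical call to this assertion helper; the context tags, metric name, and expected value are illustrative, and metricStore is assumed to be the store under test:

Map<String, String> metricsContext = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "MetricsTestApp");
// expect the SUM of each named metric to equal the given value
Map<String, Long> expected = ImmutableMap.of("system.test.metric", 10L);
assertMetricsResult(metricStore, metricsContext, expected);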

Example 9 with TimeValue

Use of co.cask.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.

In class FlowQueuePendingCorrector, method run:

/**
   * Corrects queue.pending metric for a flowlet.
   */
public void run(FlowId flowId, String producerFlowlet, String consumerFlowlet, String flowletQueue, FlowSpecification flow) throws Exception {
    System.out.println("Running queue.pending correction on flow '" + flowId + "' producerFlowlet '" + producerFlowlet + "' consumerFlowlet '" + consumerFlowlet + "' flowletQueue '" + flowletQueue + "'");
    Map<RunId, ProgramRuntimeService.RuntimeInfo> runtimeInfos = programRuntimeService.list(flowId);
    Preconditions.checkState(runtimeInfos.isEmpty(), "Cannot run tool when flow " + flowId + " is still running");
    SimpleQueueSpecificationGenerator queueSpecGenerator = new SimpleQueueSpecificationGenerator(flowId.getParent());
    Table<QueueSpecificationGenerator.Node, String, Set<QueueSpecification>> table = queueSpecGenerator.create(flow);
    Preconditions.checkArgument(table.contains(QueueSpecificationGenerator.Node.flowlet(producerFlowlet), consumerFlowlet), "Flowlet " + producerFlowlet + " is not emitting to " + consumerFlowlet);
    Set<QueueSpecification> queueSpecs = table.get(QueueSpecificationGenerator.Node.flowlet(producerFlowlet), consumerFlowlet);
    boolean validQueue = false;
    for (QueueSpecification queueSpec : queueSpecs) {
        if (queueSpec.getQueueName().getSimpleName().equals(flowletQueue)) {
            validQueue = true;
            break;
        }
    }
    Preconditions.checkArgument(validQueue, "Queue " + flowletQueue + " does not exist for the given flowlets");
    QueueName queueName = QueueName.fromFlowlet(flowId, producerFlowlet, flowletQueue);
    long consumerGroupId = FlowUtils.generateConsumerGroupId(flowId, consumerFlowlet);
    long correctQueuePendingValue;
    try {
        HBaseQueueDebugger.QueueStatistics stats = queueDebugger.scanQueue(queueName, consumerGroupId);
        correctQueuePendingValue = stats.getUnprocessed() + stats.getProcessedAndNotVisible();
    } catch (NotFoundException e) {
        // this is fine: the flowlet queue is defined in the flow spec, but the physical queue doesn't exist
        // (e.g. when running the upgrade tool from 2.8 to 3.0)
        correctQueuePendingValue = 0;
    }
    Map<String, String> tags = ImmutableMap.<String, String>builder().put(Constants.Metrics.Tag.NAMESPACE, flowId.getNamespace()).put(Constants.Metrics.Tag.APP, flowId.getApplication()).put(Constants.Metrics.Tag.FLOW, flowId.getProgram()).put(Constants.Metrics.Tag.CONSUMER, consumerFlowlet).put(Constants.Metrics.Tag.PRODUCER, producerFlowlet).put(Constants.Metrics.Tag.FLOWLET_QUEUE, flowletQueue).build();
    MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, 1, ImmutableMap.of("system.queue.pending", AggregationFunction.SUM), tags, ImmutableList.<String>of(), null);
    Collection<MetricTimeSeries> results = metricStore.query(query);
    long queuePending;
    if (results.isEmpty()) {
        queuePending = 0;
    } else {
        System.out.println("Got results: " + GSON.toJson(results));
        Preconditions.checkState(results.size() == 1);
        List<TimeValue> timeValues = results.iterator().next().getTimeValues();
        Preconditions.checkState(timeValues.size() == 1);
        TimeValue timeValue = timeValues.get(0);
        queuePending = timeValue.getValue();
    }
    metricsCollectionService.startAndWait();
    MetricsContext collector = metricsCollectionService.getContext(tags);
    collector.gauge("queue.pending", correctQueuePendingValue);
    System.out.printf("Adjusted system.queue.pending metric from %d to %d (tags %s)\n", queuePending, correctQueuePendingValue, GSON.toJson(tags));
    // stop will flush the metrics
    metricsCollectionService.stopAndWait();
}
Also used : HBaseQueueDebugger(co.cask.cdap.data.tools.HBaseQueueDebugger) Set(java.util.Set) MetricsContext(co.cask.cdap.api.metrics.MetricsContext) NotFoundException(co.cask.cdap.common.NotFoundException) MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) SimpleQueueSpecificationGenerator(co.cask.cdap.internal.app.queue.SimpleQueueSpecificationGenerator) QueueSpecification(co.cask.cdap.app.queue.QueueSpecification) RunId(org.apache.twill.api.RunId) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) QueueName(co.cask.cdap.common.queue.QueueName) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue)
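
The write-back at the end of the method boils down to emitting a gauge through a MetricsContext. A minimal sketch of that path, assuming a started MetricsCollectionService; the tag values and corrected count are illustrative only:

Map<String, String> tags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "PurchaseFlowApp");
MetricsContext collector = metricsCollectionService.getContext(tags);
// overwrite the pending count with the corrected value (0 here, purely illustrative)
collector.gauge("queue.pending", 0L);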

Example 10 with TimeValue

Use of co.cask.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.

In class SparkMetricsIntegrationTestRun, method getTotalCounter:

private long getTotalCounter(Map<String, String> context, String metricName) throws Exception {
    MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName, AggregationFunction.SUM, context, new ArrayList<String>());
    try {
        Collection<MetricTimeSeries> result = getMetricsManager().query(query);
        if (result.isEmpty()) {
            return 0;
        }
        // since this is a totals query with no groupBy, there is at most one time series
        List<TimeValue> timeValues = result.iterator().next().getTimeValues();
        if (timeValues.isEmpty()) {
            return 0;
        }
        // since it is a totals query, there is only one value
        return timeValues.get(0).getValue();
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
Also used : MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue)
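
A hypothetical test usage of the helper above; the context tags, metric name, and assertion are illustrative only:

Map<String, String> context = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "SparkMetricsApp");
// totals query: a single aggregated value (or 0 if no data points exist)
long totalRecords = getTotalCounter(context, "system.process.records.in");
Assert.assertTrue(totalRecords >= 0);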

Aggregations

TimeValue (co.cask.cdap.api.dataset.lib.cube.TimeValue) 20
MetricTimeSeries (co.cask.cdap.api.metrics.MetricTimeSeries) 12
MetricDataQuery (co.cask.cdap.api.metrics.MetricDataQuery) 11
Test (org.junit.Test) 5
AggregationFunction (co.cask.cdap.api.dataset.lib.cube.AggregationFunction) 4
DimensionValue (co.cask.cdap.api.dataset.lib.cube.DimensionValue) 4
Map (java.util.Map) 4
TimeSeries (co.cask.cdap.api.dataset.lib.cube.TimeSeries) 3
InMemoryMetricsTable (co.cask.cdap.data2.dataset2.lib.table.inmemory.InMemoryMetricsTable) 2
ImmutableList (com.google.common.collect.ImmutableList) 2
IOException (java.io.IOException) 2
ArrayList (java.util.ArrayList) 2
LinkedHashMap (java.util.LinkedHashMap) 2
List (java.util.List) 2
Cube (co.cask.cdap.api.dataset.lib.cube.Cube) 1
CubeDeleteQuery (co.cask.cdap.api.dataset.lib.cube.CubeDeleteQuery) 1
CubeExploreQuery (co.cask.cdap.api.dataset.lib.cube.CubeExploreQuery) 1
CubeFact (co.cask.cdap.api.dataset.lib.cube.CubeFact) 1
CubeQuery (co.cask.cdap.api.dataset.lib.cube.CubeQuery) 1
Interpolators (co.cask.cdap.api.dataset.lib.cube.Interpolators) 1