Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
From class MetricsHandlerTestRun, method testInterpolate:
@Test
public void testInterpolate() throws Exception {
  long start = System.currentTimeMillis() / 1000;
  long end = start + 3;
  Map<String, String> sliceBy = getFlowletContext("interspace", "WordCount1", "WordCounter", "run1", "splitter");
  // write two counter values three seconds apart, leaving a two-second gap to interpolate
  MetricValues value = new MetricValues(sliceBy, "reads", start, 100, MetricType.COUNTER);
  metricStore.add(value);
  value = new MetricValues(sliceBy, "reads", end, 400, MetricType.COUNTER);
  metricStore.add(value);
  // step interpolation holds the last known value: 100 + 100 + 100 + 400 = 700 over 4 points
  verifyRangeQueryResult("/v3/metrics/query?" + getTags("interspace", "WordCount1", "WordCounter", "splitter")
                           + "&metric=system.reads&interpolate=step&start=" + start + "&end=" + end, 4, 700);
  // linear interpolation fills the gap evenly: 100 + 200 + 300 + 400 = 1000 over 4 points
  verifyRangeQueryResult("/v3/metrics/query?" + getTags("interspace", "WordCount1", "WordCounter", "splitter")
                           + "&metric=system.reads&interpolate=linear&start=" + start + "&end=" + end, 4, 1000);
  // delete the metrics added for this interpolation test so they do not leak into other tests
  MetricDeleteQuery deleteQuery = new MetricDeleteQuery(start, end, sliceBy);
  metricStore.delete(deleteQuery);
}
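The expected sums in the two assertions follow directly from the interpolation modes. The standalone sketch below reproduces the arithmetic in plain Java; it is only an illustration of the numbers, not CDAP's Interpolator implementation:

public class InterpolationSketch {
  public static void main(String[] args) {
    long startValue = 100, endValue = 400;
    int gap = 3; // end = start + 3, so 4 data points in total
    long stepSum = 0, linearSum = 0;
    for (int i = 0; i <= gap; i++) {
      // step: hold the last known value until the next real data point
      stepSum += (i < gap) ? startValue : endValue;
      // linear: interpolate evenly between the two known values
      linearSum += startValue + (endValue - startValue) * i / gap;
    }
    System.out.println(stepSum + " " + linearSum); // prints "700 1000"
  }
}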
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
From class MetricsHandlerTestRun, method testResultLimit:
@Test
public void testResultLimit() throws Exception {
  long start = 1;
  Map<String, String> sliceBy = getFlowletContext("resolutions", "WordCount1", "WordCounter", "run1", "splitter");
  // at 1 second
  metricStore.add(new MetricValues(sliceBy, "reads", start, 1, MetricType.COUNTER));
  // at 30 seconds
  metricStore.add(new MetricValues(sliceBy, "reads", start + 30, 1, MetricType.COUNTER));
  // at 1 minute
  metricStore.add(new MetricValues(sliceBy, "reads", start + 60, 1, MetricType.COUNTER));
  // at 10 minutes
  metricStore.add(new MetricValues(sliceBy, "reads", start + 600, 1, MetricType.COUNTER));
  // at 1 hour
  metricStore.add(new MetricValues(sliceBy, "reads", start + 3600, 1, MetricType.COUNTER));
  // count of 1 returns a single record
  verifyRangeQueryResult("/v3/metrics/query?" + getTags("resolutions", "WordCount1", "WordCounter", "splitter")
                           + "&metric=system.reads&resolution=auto&count=1&start=" + start + "&end=" + (start + 600), 1, 1);
  // count is greater than the number of data points in the time range
  verifyRangeQueryResult("/v3/metrics/query?" + getTags("resolutions", "WordCount1", "WordCounter", "splitter")
                           + "&metric=system.reads&resolution=auto&count=6&start=" + start + "&end=" + (start + 600), 4, 4);
  // count is less than the number of data points in the time range
  verifyRangeQueryResult("/v3/metrics/query?" + getTags("resolutions", "WordCount1", "WordCounter", "splitter")
                           + "&metric=system.reads&resolution=auto&count=2&start=" + (start - 1) + "&end=" + (start + 3600), 2, 3);
}
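The inline string concatenation obscures the shape of the /v3/metrics/query call. A small helper like the hypothetical buildQuery below (not part of CDAP or this test class, just an illustration) assembles the same URL from named parameters:

private String buildQuery(String tags, String metric, String resolution, int count, long start, long end) {
  // mirrors the inline concatenation in testResultLimit, one query parameter per argument
  return "/v3/metrics/query?" + tags
      + "&metric=" + metric
      + "&resolution=" + resolution
      + "&count=" + count
      + "&start=" + start
      + "&end=" + end;
}

With it, the second assertion above would read: verifyRangeQueryResult(buildQuery(getTags("resolutions", "WordCount1", "WordCounter", "splitter"), "system.reads", "auto", 6, start, start + 600), 4, 4).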
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
From class WorkflowStatsSLAHttpHandlerTest, method setupRuns:
/*
 * This helper is used only for the details and compare endpoints, not the statistics endpoint, because
 * the statistics endpoint needs to handle the number of Spark runs differently and also has tests for a
 * specific run's Spark job.
 */
private List<RunId> setupRuns(WorkflowId workflowProgram, ProgramId mapreduceProgram, ProgramId sparkProgram,
                              Store store, int count) throws Exception {
  List<RunId> runIdList = new ArrayList<>();
  long startTime = System.currentTimeMillis();
  long currentTimeMillis;
  for (int i = 0; i < count; i++) {
    // the workflow runs every 5 minutes
    currentTimeMillis = startTime + (i * TimeUnit.MINUTES.toMillis(5));
    RunId workflowRunId = RunIds.generate(currentTimeMillis);
    runIdList.add(workflowRunId);
    store.setStart(workflowProgram, workflowRunId.getId(), RunIds.getTime(workflowRunId, TimeUnit.SECONDS));
    // the MapReduce job starts 2 seconds after the workflow
    RunId mapreduceRunid = RunIds.generate(currentTimeMillis + TimeUnit.SECONDS.toMillis(2));
    Map<String, String> systemArgs = ImmutableMap.of(
      ProgramOptionConstants.WORKFLOW_NODE_ID, mapreduceProgram.getProgram(),
      ProgramOptionConstants.WORKFLOW_NAME, workflowProgram.getProgram(),
      ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId.getId());
    store.setStart(mapreduceProgram, mapreduceRunid.getId(), RunIds.getTime(mapreduceRunid, TimeUnit.SECONDS),
                   null, ImmutableMap.<String, String>of(), systemArgs);
    // the MapReduce job runs for 17 seconds (started at +2 seconds, stopped at +19 seconds)
    store.setStop(mapreduceProgram, mapreduceRunid.getId(),
                  TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 19, ProgramRunStatus.COMPLETED);
    Map<String, String> mapTypeContext = ImmutableMap.of(
      Constants.Metrics.Tag.NAMESPACE, mapreduceProgram.getNamespace(),
      Constants.Metrics.Tag.APP, mapreduceProgram.getApplication(),
      Constants.Metrics.Tag.MAPREDUCE, mapreduceProgram.getProgram(),
      Constants.Metrics.Tag.RUN_ID, mapreduceRunid.toString(),
      Constants.Metrics.Tag.MR_TASK_TYPE, MapReduceMetrics.TaskType.Mapper.getId());
    metricStore.add(new MetricValues(mapTypeContext, MapReduceMetrics.METRIC_INPUT_RECORDS, 10, 38L, MetricType.GAUGE));
    // the Spark job starts 20 seconds after the workflow
    systemArgs = ImmutableMap.of(
      ProgramOptionConstants.WORKFLOW_NODE_ID, sparkProgram.getProgram(),
      ProgramOptionConstants.WORKFLOW_NAME, workflowProgram.getProgram(),
      ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId.getId());
    RunId sparkRunid = RunIds.generate(currentTimeMillis + TimeUnit.SECONDS.toMillis(20));
    store.setStart(sparkProgram, sparkRunid.getId(), RunIds.getTime(sparkRunid, TimeUnit.SECONDS),
                   null, ImmutableMap.<String, String>of(), systemArgs);
    // the Spark job runs for 38 seconds (started at +20 seconds, stopped at +58 seconds)
    long stopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 58;
    store.setStop(sparkProgram, sparkRunid.getId(), stopTime, ProgramRunStatus.COMPLETED);
    // the workflow runs for 1 minute in total
    long workflowStopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 60;
    store.setStop(workflowProgram, workflowRunId.getId(), workflowStopTime, ProgramRunStatus.COMPLETED);
  }
  return runIdList;
}
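A caller of this helper would look roughly like the sketch below; the program IDs and the store are assumed to be initialized elsewhere in the test class, so this is a usage illustration rather than code from the original test:

// seed two workflow runs, each with one MapReduce run and one Spark run recorded in the store
List<RunId> runIds = setupRuns(workflowProgram, mapreduceProgram, sparkProgram, store, 2);
// one RunId per seeded workflow run, ready to be queried through the details and compare endpoints
Assert.assertEquals(2, runIds.size());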
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
From class MessagingMetricsCollectionService, method publish:
@Override
protected void publish(Iterator<MetricValues> metrics) throws Exception {
  int size = topicPayloads.size();
  while (metrics.hasNext()) {
    encoderOutputStream.reset();
    MetricValues metricValues = metrics.next();
    // encode the MetricValues into bytes
    recordWriter.encode(metricValues, encoder);
    // choose a topic from the hash code of the MetricValues' tags and append the encoded
    // payload to that topic's list, so metrics with the same tags always share a topic
    topicPayloads.get(Math.abs(metricValues.getTags().hashCode() % size))
      .addPayload(encoderOutputStream.toByteArray());
  }
  publishMetric(topicPayloads.values());
}
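The topic-selection line is a simple hash partitioner. The standalone sketch below shows the same idea outside CDAP; the class and method names are illustrative only:

import java.util.HashMap;
import java.util.Map;

public class HashPartitionSketch {
  // Same scheme as above: a tag map is routed to one of numTopics buckets by its hash code.
  static int topicFor(Map<String, String> tags, int numTopics) {
    // Math.abs keeps the index non-negative for negative hash codes;
    // identical tag maps always land on the same topic, spreading distinct tag sets across topics
    return Math.abs(tags.hashCode() % numTopics);
  }

  public static void main(String[] args) {
    Map<String, String> tags = new HashMap<>();
    tags.put("namespace", "default");
    tags.put("app", "WordCount1");
    System.out.println(topicFor(tags, 10)); // stable index in [0, 10)
  }
}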
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
From class MessagingMetricsProcessorService, method persistMetrics:
/**
 * Persists metrics into the metric store.
 *
 * @param metricValues a non-empty deque of {@link MetricValues}
 */
private void persistMetrics(Deque<MetricValues> metricValues) throws Exception {
  long now = System.currentTimeMillis();
  long lastMetricTime = metricValues.peekLast().getTimestamp();
  // processing delay: how far the newest metric's timestamp (in seconds) lags behind the current time
  long delay = now - TimeUnit.SECONDS.toMillis(lastMetricTime);
  // append meta metrics about the processor itself: a counter for processed metrics and a gauge for the delay
  metricValues.add(new MetricValues(metricsContextMap, TimeUnit.MILLISECONDS.toSeconds(now),
                                    ImmutableList.of(
                                      new MetricValue(processMetricName, MetricType.COUNTER, metricValues.size()),
                                      new MetricValue(delayMetricName, MetricType.GAUGE, delay))));
  metricStore.add(metricValues);
  metricsProcessedCount += metricValues.size();
  PROGRESS_LOG.debug("{} metrics persisted. Last metric's timestamp: {}. Metrics process delay: {}ms",
                     metricsProcessedCount, lastMetricTime, delay);
}
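Note the unit conversion in the delay computation: MetricValues timestamps are in seconds, while System.currentTimeMillis() returns milliseconds. A tiny sketch of the same arithmetic with made-up numbers:

import java.util.concurrent.TimeUnit;

public class DelaySketch {
  public static void main(String[] args) {
    long now = 1_500_000_123_456L;        // current time in milliseconds
    long lastMetricTime = 1_500_000_120L; // newest metric's timestamp in seconds
    // convert the metric timestamp to milliseconds before subtracting
    long delay = now - TimeUnit.SECONDS.toMillis(lastMetricTime);
    System.out.println(delay + "ms");     // prints "3456ms" of processing delay
  }
}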