Use of io.cdap.cdap.api.metrics.MetricValues in project cdap by caskdata.
From the class TaskWorkerMetricsTest, method testWrappedRequest.
@Test
public void testWrappedRequest() throws IOException {
  String taskClassName = TaskWorkerServiceTest.TestRunnableClass.class.getName();
  String wrappedClassName = "testClassName";
  RunnableTaskRequest req = RunnableTaskRequest.getBuilder(taskClassName)
    .withParam("100")
    .withEmbeddedTaskRequest(RunnableTaskRequest.getBuilder(wrappedClassName).build())
    .build();
  String reqBody = GSON.toJson(req);
  HttpResponse response = HttpRequests.execute(
    HttpRequest.post(uri.resolve("/v3Internal/worker/run").toURL()).withBody(reqBody).build(),
    new DefaultHttpRequestConfig(false));
  TaskWorkerTestUtil.waitForServiceCompletion(taskWorkerStateFuture);
  Assert.assertEquals(HttpURLConnection.HTTP_OK, response.getResponseCode());
  Assert.assertEquals(1, published.size());
  // check that the request metrics are present
  MetricValues metricValues = published.get(0);
  Assert.assertTrue(hasMetric(metricValues, Constants.Metrics.TaskWorker.REQUEST_COUNT));
  Assert.assertTrue(hasMetric(metricValues, Constants.Metrics.TaskWorker.REQUEST_LATENCY_MS));
  // check that the clz tag is set to the wrapped (embedded) class name
  Assert.assertEquals(wrappedClassName, metricValues.getTags().get(Constants.Metrics.Tag.CLASS));
}
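Both this test and testSimpleRequest below call a hasMetric helper that the listing does not show. A minimal sketch of such a helper, assuming MetricValues.getMetrics() returns the batch's MetricValue entries as in the CDAP metrics API; the exact body is an assumption:

private boolean hasMetric(MetricValues metricValues, String metricName) {
  // scan the metrics emitted in this batch for one with the given name
  for (MetricValue metricValue : metricValues.getMetrics()) {
    if (metricValue.getName().equals(metricName)) {
      return true;
    }
  }
  return false;
}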
Use of io.cdap.cdap.api.metrics.MetricValues in project cdap by caskdata.
From the class TaskWorkerMetricsTest, method beforeTest.
@Before
public void beforeTest() {
  CConfiguration cConf = createCConf();
  SConfiguration sConf = SConfiguration.create();
  published = new ArrayList<>();
  // capture every published MetricValues batch so the tests can assert on it
  AggregatedMetricsCollectionService mockMetricsCollector = new AggregatedMetricsCollectionService(1000L) {
    @Override
    protected void publish(Iterator<MetricValues> metrics) {
      Iterators.addAll(published, metrics);
    }
  };
  mockMetricsCollector.startAndWait();
  taskWorkerService = new TaskWorkerService(cConf, sConf, new InMemoryDiscoveryService(),
                                            (namespaceId, retryStrategy) -> null, mockMetricsCollector);
  taskWorkerStateFuture = TaskWorkerTestUtil.getServiceCompletionFuture(taskWorkerService);
  // start the service
  taskWorkerService.startAndWait();
  InetSocketAddress addr = taskWorkerService.getBindAddress();
  this.uri = URI.create(String.format("http://%s:%s", addr.getHostName(), addr.getPort()));
}
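beforeTest starts TaskWorkerService, but the listing shows no matching teardown. A minimal sketch of one, assuming the service exposes the Guava stopAndWait() counterpart of the startAndWait() call used above; the method is an assumption, not taken from the listing:

@After
public void afterTest() {
  // stop the service if a test left it running
  if (taskWorkerService != null) {
    taskWorkerService.stopAndWait();
  }
}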
Use of io.cdap.cdap.api.metrics.MetricValues in project cdap by caskdata.
From the class TaskWorkerMetricsTest, method testSimpleRequest.
@Test
public void testSimpleRequest() throws IOException {
  String taskClassName = TaskWorkerServiceTest.TestRunnableClass.class.getName();
  RunnableTaskRequest req = RunnableTaskRequest.getBuilder(taskClassName).withParam("100").build();
  String reqBody = GSON.toJson(req);
  HttpResponse response = HttpRequests.execute(
    HttpRequest.post(uri.resolve("/v3Internal/worker/run").toURL()).withBody(reqBody).build(),
    new DefaultHttpRequestConfig(false));
  TaskWorkerTestUtil.waitForServiceCompletion(taskWorkerStateFuture);
  Assert.assertEquals(HttpURLConnection.HTTP_OK, response.getResponseCode());
  Assert.assertEquals(1, published.size());
  // check that the request metrics are present
  MetricValues metricValues = published.get(0);
  Assert.assertTrue(hasMetric(metricValues, Constants.Metrics.TaskWorker.REQUEST_LATENCY_MS));
  Assert.assertTrue(hasMetric(metricValues, Constants.Metrics.TaskWorker.REQUEST_COUNT));
  // check that the clz tag is set to the task class name
  Assert.assertEquals(taskClassName, metricValues.getTags().get(Constants.Metrics.Tag.CLASS));
}
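The two tests and the setup method reference several fields of TaskWorkerMetricsTest that the listing omits. A plausible sketch of their declarations; the names come from the snippets, but the exact types, in particular the type parameter of the completion future, are assumptions:

private static final Gson GSON = new Gson();
// MetricValues batches captured by the mock collector in beforeTest
private List<MetricValues> published;
private TaskWorkerService taskWorkerService;
private CompletableFuture<Service.State> taskWorkerStateFuture;
private URI uri;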
Use of io.cdap.cdap.api.metrics.MetricValues in project cdap by caskdata.
From the class WorkflowStatsSLAHttpHandlerTest, method setupRuns.
/*
 * This helper is used only for the details and compare endpoints, not the statistics endpoint,
 * because the statistics endpoint needs to handle the number of Spark runs differently and also
 * has tests for a specific run's Spark job.
 */
private List<RunId> setupRuns(WorkflowId workflowProgram, ProgramId mapreduceProgram,
                              ProgramId sparkProgram, Store store, int count, ArtifactId artifactId) {
  List<RunId> runIdList = new ArrayList<>();
  long startTime = System.currentTimeMillis();
  long currentTimeMillis;
  for (int i = 0; i < count; i++) {
    // the workflow runs every 5 minutes
    currentTimeMillis = startTime + (i * TimeUnit.MINUTES.toMillis(5));
    RunId workflowRunId = RunIds.generate(currentTimeMillis);
    runIdList.add(workflowRunId);
    setStartAndRunning(workflowProgram, workflowRunId.getId(), artifactId);
    // the MapReduce job starts 2 seconds after the workflow started
    RunId mapreduceRunid = RunIds.generate(currentTimeMillis + TimeUnit.SECONDS.toMillis(2));
    Map<String, String> systemArgs = ImmutableMap.of(
      ProgramOptionConstants.WORKFLOW_NODE_ID, mapreduceProgram.getProgram(),
      ProgramOptionConstants.WORKFLOW_NAME, workflowProgram.getProgram(),
      ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId.getId());
    setStartAndRunning(mapreduceProgram, mapreduceRunid.getId(), ImmutableMap.of(), systemArgs, artifactId);
    // the MapReduce job stops at +19s, i.e. it ran for 17 seconds
    store.setStop(mapreduceProgram.run(mapreduceRunid.getId()),
                  TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 19,
                  ProgramRunStatus.COMPLETED, AppFabricTestHelper.createSourceId(++sourceId));
    Map<String, String> mapTypeContext = ImmutableMap.of(
      Constants.Metrics.Tag.NAMESPACE, mapreduceProgram.getNamespace(),
      Constants.Metrics.Tag.APP, mapreduceProgram.getApplication(),
      Constants.Metrics.Tag.MAPREDUCE, mapreduceProgram.getProgram(),
      Constants.Metrics.Tag.RUN_ID, mapreduceRunid.toString(),
      Constants.Metrics.Tag.MR_TASK_TYPE, MapReduceMetrics.TaskType.Mapper.getId());
    metricStore.add(new MetricValues(mapTypeContext, MapReduceMetrics.METRIC_INPUT_RECORDS, 10, 38L,
                                     MetricType.GAUGE));
    // the Spark job starts 20 seconds after the workflow started
    systemArgs = ImmutableMap.of(
      ProgramOptionConstants.WORKFLOW_NODE_ID, sparkProgram.getProgram(),
      ProgramOptionConstants.WORKFLOW_NAME, workflowProgram.getProgram(),
      ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId.getId());
    RunId sparkRunid = RunIds.generate(currentTimeMillis + TimeUnit.SECONDS.toMillis(20));
    setStartAndRunning(sparkProgram, sparkRunid.getId(), ImmutableMap.of(), systemArgs, artifactId);
    // the Spark job stops at +58s, i.e. it ran for 38 seconds
    long stopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 58;
    store.setStop(sparkProgram.run(sparkRunid.getId()), stopTime, ProgramRunStatus.COMPLETED,
                  AppFabricTestHelper.createSourceId(++sourceId));
    // the workflow stops at +60s, i.e. it ran for 1 minute
    long workflowStopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 60;
    store.setStop(workflowProgram.run(workflowRunId.getId()), workflowStopTime, ProgramRunStatus.COMPLETED,
                  AppFabricTestHelper.createSourceId(++sourceId));
  }
  return runIdList;
}
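A hypothetical call site for this helper, assuming the test has already created the program IDs, the store, and the artifact ID; a count of 2 produces two complete workflow runs spaced 5 minutes apart:

// hypothetical usage: set up two workflow runs for the details/compare endpoint tests
List<RunId> runIds = setupRuns(workflowProgram, mapreduceProgram, sparkProgram, store, 2, artifactId);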
Use of io.cdap.cdap.api.metrics.MetricValues in project cdap by caskdata.
From the class MetricsHandlerTest, method testInterpolate.
@Test
public void testInterpolate() throws Exception {
  long start = System.currentTimeMillis() / 1000;
  long end = start + 3;
  Map<String, String> sliceBy = getServiceContext("interspace", "WordCount1", "WordCounter", "run1", "splitter");
  MetricValues value = new MetricValues(sliceBy, "reads", start, 100, MetricType.COUNTER);
  metricStore.add(value);
  value = new MetricValues(sliceBy, "reads", end, 400, MetricType.COUNTER);
  metricStore.add(value);
  verifyRangeQueryResult("/v3/metrics/query?" + getTags("interspace", "WordCount1", "WordCounter", "splitter")
                           + "&metric=system.reads&interpolate=step&start=" + start + "&end=" + end, 4, 700);
  verifyRangeQueryResult("/v3/metrics/query?" + getTags("interspace", "WordCount1", "WordCounter", "splitter")
                           + "&metric=system.reads&interpolate=linear&start=" + start + "&end=" + end, 4, 1000);
  Map<String, String> deleteTags = new LinkedHashMap<>();
  deleteTags.put(Constants.Metrics.Tag.NAMESPACE, "interspace");
  deleteTags.put(Constants.Metrics.Tag.APP, "WordCount1");
  deleteTags.put(Constants.Metrics.Tag.SERVICE, "WordCounter");
  // delete the metrics that were added to test the interpolators
  MetricDeleteQuery deleteQuery = new MetricDeleteQuery(start, end, Collections.emptySet(), deleteTags,
                                                        new ArrayList<>(deleteTags.keySet()));
  metricStore.delete(deleteQuery);
}
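The expected values in the two verifyRangeQueryResult calls follow from the interpolation semantics: the store holds 100 at start and 400 at end = start + 3, and the range query materializes one point per second. A worked illustration of the assumed series (not part of the original test):

// step interpolation holds the previous value until the next data point arrives:
//   start: 100, start+1: 100, start+2: 100, start+3: 400  -> 4 points, sum 700
// linear interpolation fills the gap along a straight line between the two points:
//   start: 100, start+1: 200, start+2: 300, start+3: 400  -> 4 points, sum 1000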