Use of io.cdap.cdap.proto.WorkflowStatistics in project cdap (identical in the caskdata and cdapio forks).
The class WorkflowStatsSLAHttpHandlerTest, method testStatistics.
@Test
public void testStatistics() throws Exception {
  deploy(WorkflowApp.class, 200);
  String workflowName = "FunWorkflow";
  String mapreduceName = "ClassicWordCount";
  String sparkName = "SparkWorkflowTest";
  ProgramId workflowProgram = WORKFLOW_APP.workflow(workflowName);
  ProgramId mapreduceProgram = WORKFLOW_APP.mr(mapreduceName);
  ProgramId sparkProgram = WORKFLOW_APP.spark(sparkName);
  ArtifactId artifactId = WORKFLOW_APP.getNamespaceId().artifact("testArtifact", "1.0").toApiArtifactId();
  long startTime = System.currentTimeMillis();
  long currentTimeMillis = startTime;
  String outlierRunId = null;
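  // Seed ten workflow runs, five minutes apart; iteration i == 4 will be the deliberate outlier.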
  for (int i = 0; i < 10; i++) {
    // workflow runs every 5 minutes
    currentTimeMillis = startTime + (i * TimeUnit.MINUTES.toMillis(5));
    RunId workflowRunId = RunIds.generate(currentTimeMillis);
    setStartAndRunning(workflowProgram, workflowRunId.getId(), artifactId);
    // MR job starts 2 seconds after the workflow starts
    RunId mapreduceRunid = RunIds.generate(currentTimeMillis + TimeUnit.SECONDS.toMillis(2));
    Map<String, String> systemArgs = ImmutableMap.of(
        ProgramOptionConstants.WORKFLOW_NODE_ID, mapreduceName,
        ProgramOptionConstants.WORKFLOW_NAME, workflowName,
        ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId.getId());
    setStartAndRunning(mapreduceProgram, mapreduceRunid.getId(), ImmutableMap.of(), systemArgs, artifactId);
    // map-reduce job runs for 17 seconds (starts at +2s, stops at +19s)
    store.setStop(mapreduceProgram.run(mapreduceRunid),
        TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 19,
        ProgramRunStatus.COMPLETED, AppFabricTestHelper.createSourceId(++sourceId));
    // This makes sure that not all runs have Spark programs in them
    if (i < 5) {
      // spark starts 20 seconds after the workflow starts
      RunId sparkRunid = RunIds.generate(currentTimeMillis + TimeUnit.SECONDS.toMillis(20));
      systemArgs = ImmutableMap.of(
          ProgramOptionConstants.WORKFLOW_NODE_ID, sparkProgram.getProgram(),
          ProgramOptionConstants.WORKFLOW_NAME, workflowName,
          ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId.getId());
      setStartAndRunning(sparkProgram, sparkRunid.getId(), ImmutableMap.of(), systemArgs, artifactId);
      // spark job runs for 38 seconds (starts at +20s, stops at +58s)
      long stopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 58;
      if (i == 4) {
        // spark job runs for 100 seconds here, 62 seconds longer than the 38-second average of the other runs
        stopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 120;
      }
      store.setStop(sparkProgram.run(sparkRunid.getId()), stopTime,
          ProgramRunStatus.COMPLETED, AppFabricTestHelper.createSourceId(++sourceId));
    }
    // workflow runs for 1 minute
    long workflowStopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 60;
    if (i == 4) {
      // the spark job ran longer for this run, making it the outlier
      workflowStopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 122;
      outlierRunId = workflowRunId.getId();
    }
    store.setStop(workflowProgram.run(workflowRunId.getId()), workflowStopTime,
        ProgramRunStatus.COMPLETED, AppFabricTestHelper.createSourceId(++sourceId));
  }
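  // Query statistics over the whole window at the 99th percentile; the end bound extends two minutes past the last run.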
  String request = String.format(
      "%s/namespaces/%s/apps/%s/workflows/%s/statistics?start=%s&end=%s&percentile=%s",
      Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(), WorkflowApp.class.getSimpleName(),
      workflowProgram.getProgram(), TimeUnit.MILLISECONDS.toSeconds(startTime),
      TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + TimeUnit.MINUTES.toSeconds(2), "99");
  HttpResponse response = doGet(request);
  WorkflowStatistics workflowStatistics =
      readResponse(response, new TypeToken<WorkflowStatistics>() { }.getType());
  PercentileInformation percentileInformation = workflowStatistics.getPercentileInformationList().get(0);
  Assert.assertEquals(1, percentileInformation.getRunIdsOverPercentile().size());
  Assert.assertEquals(outlierRunId, percentileInformation.getRunIdsOverPercentile().get(0));
  Assert.assertEquals("5", workflowStatistics.getNodes().get(sparkName).get("runs"));
  request = String.format(
      "%s/namespaces/%s/apps/%s/workflows/%s/statistics?start=%s&end=%s&percentile=%s&percentile=%s",
      Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(), WorkflowApp.class.getSimpleName(),
      workflowProgram.getProgram(), "now", "0", "90", "95");
  response = doGet(request);
  Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.code(), response.getResponseCode());
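  // Percentile values outside [0, 100], such as 950, are rejected as well.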
  request = String.format(
      "%s/namespaces/%s/apps/%s/workflows/%s/statistics?start=%s&end=%s&percentile=%s&percentile=%s",
      Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(), WorkflowApp.class.getSimpleName(),
      workflowProgram.getProgram(), "now", "0", "90.0", "950");
  response = doGet(request);
  Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.code(), response.getResponseCode());
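  // After the app is deleted, the endpoint still returns 200 OK with a plain-text no-statistics message.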
  Id.Application appId = new Id.Application(Id.Namespace.DEFAULT, WorkflowApp.class.getSimpleName());
  deleteApp(appId, HttpResponseStatus.OK.code());
  request = String.format(
      "%s/namespaces/%s/apps/%s/workflows/%s/statistics?start=%s&end=%s&percentile=%s",
      Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(), WorkflowApp.class.getSimpleName(),
      workflowProgram.getProgram(), 0, System.currentTimeMillis(), "99");
  response = doGet(request);
  Assert.assertEquals(HttpResponseStatus.OK.code(), response.getResponseCode());
  Assert.assertTrue(response.getResponseBodyAsString().startsWith("There are no statistics associated with this workflow : "));
}
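Why does exactly one run land over the 99th percentile in the assertions above? The test records ten workflow durations: nine 60-second runs and the 122-second outlier. Below is a minimal, self-contained sketch of the arithmetic (not part of the CDAP source), assuming a linear-interpolation percentile; the handler's exact method may differ, but any common definition places p99 between 60 and 122 seconds, leaving exactly one run above it.

import java.util.Arrays;

public class PercentileSketch {
  public static void main(String[] args) {
    // Durations in seconds of the ten workflow runs the test records:
    // nine 60-second runs and one 122-second outlier (iteration i == 4).
    double[] durations = new double[10];
    Arrays.fill(durations, 60.0);
    durations[4] = 122.0;
    Arrays.sort(durations);

    // Linearly interpolated 99th percentile (the common "R-7" definition).
    double pos = 0.99 * (durations.length - 1);  // 8.91
    int lo = (int) Math.floor(pos);
    double p99 = durations[lo] + (pos - lo) * (durations[lo + 1] - durations[lo]);
    System.out.printf("p99 = %.2f s%n", p99);    // ~116.42

    // Exactly one of the ten runs exceeds that threshold: the outlier.
    long over = Arrays.stream(durations).filter(d -> d > p99).count();
    System.out.println("runs over p99 = " + over);  // 1
  }
}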
Use of io.cdap.cdap.proto.WorkflowStatistics in project cdap (identical in the caskdata and cdapio forks).
The class WorkflowStatsSLAHttpHandler, method workflowStats.
/**
 * Returns the statistics for a given workflow.
 *
 * @param request The request
 * @param responder The responder
 * @param namespaceId The namespace the application is in
 * @param appId The application the workflow is in
 * @param workflowId The workflow whose statistics should be shown
 * @param start The start time of the range
 * @param end The end time of the range
 * @param percentiles The percentile values for which run-time statistics are reported
 */
@GET
@Path("apps/{app-id}/workflows/{workflow-id}/statistics")
public void workflowStats(HttpRequest request, HttpResponder responder,
    @PathParam("namespace-id") String namespaceId,
    @PathParam("app-id") String appId,
    @PathParam("workflow-id") String workflowId,
    @QueryParam("start") @DefaultValue("now-1d") String start,
    @QueryParam("end") @DefaultValue("now") String end,
    @QueryParam("percentile") @DefaultValue("90.0") List<Double> percentiles) throws Exception {
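  // start and end accept time-math expressions such as "now" and "now-1d", resolved here to epoch seconds.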
  long startTime = TimeMathParser.parseTimeInSeconds(start);
  long endTime = TimeMathParser.parseTimeInSeconds(end);
  if (startTime < 0) {
    throw new BadRequestException("Invalid start time. The time you entered was : " + startTime);
  } else if (endTime < 0) {
    throw new BadRequestException("Invalid end time. The time you entered was : " + endTime);
  } else if (endTime < startTime) {
    throw new BadRequestException("Start time : " + startTime + " cannot be larger than end time : " + endTime);
  }
  for (double i : percentiles) {
    if (i < 0.0 || i > 100.0) {
      throw new BadRequestException("Percentile values have to be greater than or equal to 0 and"
          + " less than or equal to 100. Invalid input was " + Double.toString(i));
    }
  }
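  // Build the workflow id and delegate the aggregation to the store; a null result means no statistics exist in the range.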
  WorkflowId workflow = new WorkflowId(namespaceId, appId, workflowId);
  WorkflowStatistics workflowStatistics = store.getWorkflowStatistics(workflow, startTime, endTime, percentiles);
  if (workflowStatistics == null) {
    responder.sendString(HttpResponseStatus.OK, "There are no statistics associated with this workflow : "
        + workflowId + " in the specified time range.");
    return;
  }
  responder.sendJson(HttpResponseStatus.OK, GSON.toJson(workflowStatistics));
}
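For completeness, here is a hypothetical client-side sketch (not part of the CDAP source) showing how this endpoint might be called. The host, port, and app/workflow names are assumptions for a local instance; start and end take the same time-math expressions the handler parses above.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class WorkflowStatsClientSketch {
  public static void main(String[] args) throws Exception {
    // Assumed local router address; substitute your own CDAP instance.
    String url = "http://localhost:11015/v3/namespaces/default/apps/WorkflowApp"
        + "/workflows/FunWorkflow/statistics?start=now-1d&end=now&percentile=90&percentile=99";
    HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
    conn.setRequestMethod("GET");
    // The handler replies 200 OK either way: a JSON WorkflowStatistics document,
    // or a plain-text "There are no statistics associated with this workflow : ..." message.
    try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line);
      }
    }
  }
}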