Use of io.cdap.cdap.api.artifact.ArtifactId in project cdap by caskdata.
The class PipelineSpecGeneratorTest, method setupTests.
@BeforeClass
public static void setupTests() {
  // populate some mock plugins
  MockPluginConfigurer pluginConfigurer = new MockPluginConfigurer();
  Set<ArtifactId> artifactIds = ImmutableSet.of(ARTIFACT_ID);
  pluginConfigurer.addMockPlugin(BatchSource.PLUGIN_TYPE, "mocksource",
                                 MockPlugin.builder().setOutputSchema(SCHEMA_A).build(), artifactIds);
  pluginConfigurer.addMockPlugin(BatchSource.PLUGIN_TYPE, "mocksource2",
                                 MockPlugin.builder().setOutputSchema(SCHEMA_A2).build(), artifactIds);
  pluginConfigurer.addMockPlugin(Transform.PLUGIN_TYPE, "mockA",
                                 MockPlugin.builder().setOutputSchema(SCHEMA_A).setErrorSchema(SCHEMA_B).build(),
                                 artifactIds);
  pluginConfigurer.addMockPlugin(Transform.PLUGIN_TYPE, "mockB",
                                 MockPlugin.builder().setOutputSchema(SCHEMA_B).build(), artifactIds);
  pluginConfigurer.addMockPlugin(Transform.PLUGIN_TYPE, "mockABC",
                                 MockPlugin.builder().setOutputSchema(SCHEMA_ABC).build(), artifactIds);
  pluginConfigurer.addMockPlugin(BatchSink.PLUGIN_TYPE, "mocksink", MockPlugin.builder().build(), artifactIds);
  pluginConfigurer.addMockPlugin(Action.PLUGIN_TYPE, "mockaction", MockPlugin.builder().build(), artifactIds);
  pluginConfigurer.addMockPlugin(Condition.PLUGIN_TYPE, "mockcondition", MockPlugin.builder().build(), artifactIds);
  pluginConfigurer.addMockPlugin(BatchJoiner.PLUGIN_TYPE, "mockjoiner", MockPlugin.builder().build(), artifactIds);
  pluginConfigurer.addMockPlugin(ErrorTransform.PLUGIN_TYPE, "mockerror", MockPlugin.builder().build(), artifactIds);
  pluginConfigurer.addMockPlugin(SplitterTransform.PLUGIN_TYPE, "mocksplit",
                                 new MockSplitter(ImmutableMap.of("portA", SCHEMA_A, "portB", SCHEMA_B)),
                                 artifactIds);
  pluginConfigurer.addMockPlugin(BatchJoiner.PLUGIN_TYPE, "mockautojoiner", new MockAutoJoin(), artifactIds);
  pluginConfigurer.addMockPlugin(BatchSQLEngine.PLUGIN_TYPE, "mocksqlengine", new MockSQLEngine(), artifactIds);
  specGenerator = new BatchPipelineSpecGenerator(NamespaceId.DEFAULT.getNamespace(), pluginConfigurer, null,
                                                 ImmutableSet.of(BatchSource.PLUGIN_TYPE),
                                                 ImmutableSet.of(BatchSink.PLUGIN_TYPE),
                                                 Engine.MAPREDUCE, MOCK_FEATURE_FLAGS_PROVIDER);
}
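The fixture registers one mock plugin per plugin type and then builds a spec generator for the MapReduce engine. A test built on it would typically feed the generator an ETLBatchConfig and inspect the resulting spec. The following is a hedged sketch (imports omitted to match the snippets above; ETLBatchConfig, ETLStage, and ETLPlugin are the cdap-etl proto classes, and the exact builder and generateSpec signatures may differ across CDAP versions):

// hedged sketch: exercising the generator configured above; exact builder and
// generateSpec signatures may vary between CDAP versions
ETLBatchConfig config = ETLBatchConfig.builder()
  .addStage(new ETLStage("source", new ETLPlugin("mocksource", BatchSource.PLUGIN_TYPE, ImmutableMap.of())))
  .addStage(new ETLStage("sink", new ETLPlugin("mocksink", BatchSink.PLUGIN_TYPE, ImmutableMap.of())))
  .addConnection("source", "sink")
  .build();
BatchPipelineSpec spec = specGenerator.generateSpec(config);
Assert.assertEquals(2, spec.getStages().size());  // one stage spec per configured stage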
Use of io.cdap.cdap.api.artifact.ArtifactId in project cdap by caskdata.
The class WorkflowStatsSLAHttpHandlerTest, method testStatistics.
@Test
public void testStatistics() throws Exception {
  deploy(WorkflowApp.class, 200);
  String workflowName = "FunWorkflow";
  String mapreduceName = "ClassicWordCount";
  String sparkName = "SparkWorkflowTest";
  ProgramId workflowProgram = WORKFLOW_APP.workflow(workflowName);
  ProgramId mapreduceProgram = WORKFLOW_APP.mr(mapreduceName);
  ProgramId sparkProgram = WORKFLOW_APP.spark(sparkName);
  ArtifactId artifactId = WORKFLOW_APP.getNamespaceId().artifact("testArtifact", "1.0").toApiArtifactId();
  long startTime = System.currentTimeMillis();
  long currentTimeMillis = startTime;
  String outlierRunId = null;
  for (int i = 0; i < 10; i++) {
    // the workflow runs every 5 minutes
    currentTimeMillis = startTime + (i * TimeUnit.MINUTES.toMillis(5));
    RunId workflowRunId = RunIds.generate(currentTimeMillis);
    setStartAndRunning(workflowProgram, workflowRunId.getId(), artifactId);
    // the MapReduce job starts 2 seconds after the workflow
    RunId mapreduceRunid = RunIds.generate(currentTimeMillis + TimeUnit.SECONDS.toMillis(2));
    Map<String, String> systemArgs = ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, mapreduceName,
                                                     ProgramOptionConstants.WORKFLOW_NAME, workflowName,
                                                     ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId.getId());
    setStartAndRunning(mapreduceProgram, mapreduceRunid.getId(), ImmutableMap.of(), systemArgs, artifactId);
    // the MapReduce job runs for 17 seconds (starts at +2s, stops at +19s)
    store.setStop(mapreduceProgram.run(mapreduceRunid), TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 19,
                  ProgramRunStatus.COMPLETED, AppFabricTestHelper.createSourceId(++sourceId));
    // make sure that not all runs contain a Spark program
    if (i < 5) {
      // Spark starts 20 seconds after the workflow
      RunId sparkRunid = RunIds.generate(currentTimeMillis + TimeUnit.SECONDS.toMillis(20));
      systemArgs = ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, sparkProgram.getProgram(),
                                   ProgramOptionConstants.WORKFLOW_NAME, workflowName,
                                   ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId.getId());
      setStartAndRunning(sparkProgram, sparkRunid.getId(), ImmutableMap.of(), systemArgs, artifactId);
      // the Spark job runs for 38 seconds
      long stopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 58;
      if (i == 4) {
        // for this run the Spark job runs for 100 seconds, 62 seconds longer than the 38-second average
        stopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 120;
      }
      store.setStop(sparkProgram.run(sparkRunid.getId()), stopTime, ProgramRunStatus.COMPLETED,
                    AppFabricTestHelper.createSourceId(++sourceId));
    }
    // the workflow runs for 1 minute
    long workflowStopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 60;
    if (i == 4) {
      // the Spark job ran longer for this run, making it the outlier
      workflowStopTime = TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + 122;
      outlierRunId = workflowRunId.getId();
    }
    store.setStop(workflowProgram.run(workflowRunId.getId()), workflowStopTime, ProgramRunStatus.COMPLETED,
                  AppFabricTestHelper.createSourceId(++sourceId));
  }
  String request = String.format("%s/namespaces/%s/apps/%s/workflows/%s/statistics?start=%s&end=%s&percentile=%s",
                                 Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(),
                                 WorkflowApp.class.getSimpleName(), workflowProgram.getProgram(),
                                 TimeUnit.MILLISECONDS.toSeconds(startTime),
                                 TimeUnit.MILLISECONDS.toSeconds(currentTimeMillis) + TimeUnit.MINUTES.toSeconds(2),
                                 "99");
  HttpResponse response = doGet(request);
  WorkflowStatistics workflowStatistics =
    readResponse(response, new TypeToken<WorkflowStatistics>() { }.getType());
  PercentileInformation percentileInformation = workflowStatistics.getPercentileInformationList().get(0);
  Assert.assertEquals(1, percentileInformation.getRunIdsOverPercentile().size());
  Assert.assertEquals(outlierRunId, percentileInformation.getRunIdsOverPercentile().get(0));
  Assert.assertEquals("5", workflowStatistics.getNodes().get(sparkName).get("runs"));
  // requests with an invalid time range or invalid percentile values must be rejected
  request = String.format("%s/namespaces/%s/apps/%s/workflows/%s/statistics?start=%s&end=%s&percentile=%s&percentile=%s",
                          Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(),
                          WorkflowApp.class.getSimpleName(), workflowProgram.getProgram(), "now", "0", "90", "95");
  response = doGet(request);
  Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.code(), response.getResponseCode());
  request = String.format("%s/namespaces/%s/apps/%s/workflows/%s/statistics?start=%s&end=%s&percentile=%s&percentile=%s",
                          Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(),
                          WorkflowApp.class.getSimpleName(), workflowProgram.getProgram(), "now", "0", "90.0", "950");
  response = doGet(request);
  Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.code(), response.getResponseCode());
  // once the app is deleted, the endpoint reports that no statistics exist
  Id.Application appId = new Id.Application(Id.Namespace.DEFAULT, WorkflowApp.class.getSimpleName());
  deleteApp(appId, HttpResponseStatus.OK.code());
  request = String.format("%s/namespaces/%s/apps/%s/workflows/%s/statistics?start=%s&end=%s&percentile=%s",
                          Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(),
                          WorkflowApp.class.getSimpleName(), workflowProgram.getProgram(), 0,
                          System.currentTimeMillis(), "99");
  response = doGet(request);
  Assert.assertEquals(HttpResponseStatus.OK.code(), response.getResponseCode());
  Assert.assertTrue(response.getResponseBodyAsString()
                      .startsWith("There are no statistics associated with this workflow : "));
}
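Spelled out, the statistics request assembled above expands to a URL of the following shape (timestamps are illustrative; start and end are epoch seconds, and percentile may repeat):

GET /v3/namespaces/default/apps/WorkflowApp/workflows/FunWorkflow/statistics?start=1000000000&end=1000003120&percentile=99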
Use of io.cdap.cdap.api.artifact.ArtifactId in project cdap by caskdata.
The class WorkflowStatsSLAHttpHandlerTest, method testCompare.
@Test
public void testCompare() throws Exception {
  deploy(WorkflowApp.class, 200);
  String workflowName = "FunWorkflow";
  String mapreduceName = "ClassicWordCount";
  String sparkName = "SparkWorkflowTest";
  WorkflowId workflowProgram = WORKFLOW_APP.workflow(workflowName);
  ProgramId mapreduceProgram = WORKFLOW_APP.mr(mapreduceName);
  ProgramId sparkProgram = WORKFLOW_APP.spark(sparkName);
  ArtifactId artifactId = WORKFLOW_APP.getNamespaceId().artifact("testArtifact", "1.0").toApiArtifactId();
  List<RunId> workflowRunIdList = setupRuns(workflowProgram, mapreduceProgram, sparkProgram, store, 2, artifactId);
  RunId workflowRun1 = workflowRunIdList.get(0);
  RunId workflowRun2 = workflowRunIdList.get(1);
  String request = String.format("%s/namespaces/%s/apps/%s/workflows/%s/runs/%s/compare?other-run-id=%s",
                                 Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(),
                                 WorkflowApp.class.getSimpleName(), workflowProgram.getProgram(),
                                 workflowRun1.getId(), workflowRun2.getId());
  HttpResponse response = doGet(request);
  Collection<WorkflowStatsComparison.ProgramNodes> workflowStatistics =
    readResponse(response, new TypeToken<Collection<WorkflowStatsComparison.ProgramNodes>>() { }.getType());
  Assert.assertNotNull(workflowStatistics.iterator().next());
  Assert.assertEquals(2, workflowStatistics.size());
  for (WorkflowStatsComparison.ProgramNodes node : workflowStatistics) {
    if (node.getProgramType() == ProgramType.MAPREDUCE) {
      Assert.assertEquals(38L, (long) node.getWorkflowProgramDetailsList().get(0)
        .getMetrics().get(TaskCounter.MAP_INPUT_RECORDS.name()));
    }
  }
}
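For reference, the compare request built above expands to a URL of this shape (the run ids are illustrative placeholders):

GET /v3/namespaces/default/apps/WorkflowApp/workflows/FunWorkflow/runs/<run-id-1>/compare?other-run-id=<run-id-2>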
Use of io.cdap.cdap.api.artifact.ArtifactId in project cdap by caskdata.
The class DefaultAppConfigurer, method createSpecification.
public ApplicationSpecification createSpecification(@Nullable String applicationName,
                                                    @Nullable String applicationVersion) {
  // applicationName can be null only for apps before 3.2 that were not upgraded
  ArtifactScope scope = artifactId.getNamespace().equals(Id.Namespace.SYSTEM)
    ? ArtifactScope.SYSTEM : ArtifactScope.USER;
  ArtifactId artifactId = new ArtifactId(this.artifactId.getName(), this.artifactId.getVersion(), scope);
  String namespace = deployNamespace.toEntityId().getNamespace();
  String appName = applicationName == null ? name : applicationName;
  String appVersion = applicationVersion == null ? ApplicationId.DEFAULT_VERSION : applicationVersion;
  Map<String, ScheduleCreationSpec> builtScheduleSpecs = new HashMap<>();
  for (Map.Entry<String, ScheduleCreationSpec> entry : scheduleSpecs.entrySet()) {
    // if the ScheduleCreationSpec is really a builder, build the actual ScheduleCreationSpec
    if (entry.getValue() instanceof DefaultScheduleBuilder.ScheduleCreationBuilder) {
      DefaultScheduleBuilder.ScheduleCreationBuilder builder =
        (DefaultScheduleBuilder.ScheduleCreationBuilder) entry.getValue();
      builtScheduleSpecs.put(entry.getKey(), builder.build(namespace, appName, appVersion));
    } else {
      builtScheduleSpecs.put(entry.getKey(), entry.getValue());
    }
  }
  return new DefaultApplicationSpecification(appName, appVersion, ProjectInfo.getVersion().toString(),
                                             description, configuration, artifactId, getDatasetModules(),
                                             getDatasetSpecs(), mapReduces, sparks, workflows, services,
                                             builtScheduleSpecs, workers, getPlugins());
}
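The scope selection on the first line is the only place the artifact's namespace matters here: artifacts deployed in the system namespace become SYSTEM-scoped, everything else USER-scoped. A minimal sketch of that rule in isolation (the artifact names and versions are illustrative):

// hedged sketch: the SYSTEM vs. USER scope mapping used above, with illustrative ids
Id.Artifact systemArtifact = Id.Artifact.from(Id.Namespace.SYSTEM, "cdap-data-pipeline", "6.7.0");
Id.Artifact userArtifact = Id.Artifact.from(Id.Namespace.DEFAULT, "my-app", "1.0.0");
ArtifactScope systemScope = systemArtifact.getNamespace().equals(Id.Namespace.SYSTEM)
  ? ArtifactScope.SYSTEM : ArtifactScope.USER;  // -> SYSTEM
ArtifactScope userScope = userArtifact.getNamespace().equals(Id.Namespace.SYSTEM)
  ? ArtifactScope.SYSTEM : ArtifactScope.USER;  // -> USER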
Use of io.cdap.cdap.api.artifact.ArtifactId in project cdap by caskdata.
The class SystemAppTask, method run.
@Override
public void run(RunnableTaskContext context) throws Exception {
  ArtifactId systemAppArtifactId = context.getArtifactId();
  if (systemAppArtifactId == null) {
    throw new IllegalArgumentException("Missing artifactId from the system app task request");
  }
  LOG.debug("Received system app task for artifact {}", systemAppArtifactId);
  Injector injector = createInjector(cConf);
  ArtifactRepository artifactRepository = injector.getInstance(ArtifactRepository.class);
  Impersonator impersonator = injector.getInstance(Impersonator.class);
  String systemAppNamespace = context.getNamespace();
  Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.from(systemAppNamespace),
                                            systemAppArtifactId.getName(), systemAppArtifactId.getVersion());
  ArtifactLocalizerClient localizerClient = injector.getInstance(ArtifactLocalizerClient.class);
  File artifactLocation = localizerClient.getUnpackedArtifactLocation(
    Artifacts.toProtoArtifactId(new NamespaceId(systemAppNamespace), systemAppArtifactId));
  EntityImpersonator classLoaderImpersonator = new EntityImpersonator(artifactId.toEntityId(), impersonator);
  try (CloseableClassLoader artifactClassLoader = artifactRepository.createArtifactClassLoader(
         new ArtifactDescriptor(artifactId.getNamespace().getId(), artifactId.toArtifactId(),
                                Locations.toLocation(artifactLocation)), classLoaderImpersonator);
       SystemAppTaskContext systemAppTaskContext =
         buildTaskSystemAppContext(injector, systemAppNamespace, systemAppArtifactId, artifactClassLoader)) {
    RunnableTaskRequest taskRequest = context.getEmbeddedRequest();
    String taskClassName = taskRequest.getClassName();
    if (taskClassName == null) {
      LOG.debug("No system app task to execute");
      return;
    }
    LOG.debug("Requested to run system app task {}", taskClassName);
    Class<?> clazz = artifactClassLoader.loadClass(taskClassName);
    if (!(RunnableTask.class.isAssignableFrom(clazz))) {
      throw new ClassCastException(String.format("%s is not a RunnableTask", taskClassName));
    }
    LOG.debug("Launching system app task {}", taskClassName);
    RunnableTask runnableTask = (RunnableTask) injector.getInstance(clazz);
    // delegate result writing and termination control to the outer context
    RunnableTaskContext runnableTaskContext =
      new RunnableTaskContext(taskRequest.getParam().getSimpleParam(), null, null, null, systemAppTaskContext) {

        @Override
        public void writeResult(byte[] data) throws IOException {
          context.writeResult(data);
        }

        @Override
        public void setTerminateOnComplete(boolean terminate) {
          context.setTerminateOnComplete(terminate);
        }

        @Override
        public boolean isTerminateOnComplete() {
          return context.isTerminateOnComplete();
        }
      };
    runnableTask.run(runnableTaskContext);
    LOG.debug("System app task completed {}", taskClassName);
  }
}
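The loadClass/isAssignableFrom check above implies the contract a system app task must satisfy: implement RunnableTask and write its result through the supplied context. A minimal hedged sketch of such a task follows (imports omitted to match the snippets above; EchoTask is illustrative, and getParam() is assumed to expose the string parameter that the RunnableTaskContext constructor above receives):

// hedged sketch of a task the loader above could execute
public class EchoTask implements RunnableTask {

  @Override
  public void run(RunnableTaskContext context) throws Exception {
    // echo the task parameter back; the overridden writeResult above forwards
    // the bytes to the outer task context
    String param = context.getParam();  // assumed accessor for the simple param
    context.writeResult(param == null ? new byte[0] : param.getBytes(StandardCharsets.UTF_8));
  }
}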