Use of io.cdap.cdap.api.app.ApplicationSpecification in project cdap by cdapio.
The class DistributedWorkflowProgramRunner, method validateOptions.
@Override
protected void validateOptions(Program program, ProgramOptions options) {
  super.validateOptions(program, options);
  // Extract and verify parameters
  ApplicationSpecification appSpec = program.getApplicationSpecification();
  Preconditions.checkNotNull(appSpec, "Missing application specification.");
  ProgramType processorType = program.getType();
  Preconditions.checkNotNull(processorType, "Missing processor type.");
  Preconditions.checkArgument(processorType == ProgramType.WORKFLOW, "Only WORKFLOW process type is supported.");
  WorkflowSpecification spec = appSpec.getWorkflows().get(program.getName());
  Preconditions.checkNotNull(spec, "Missing WorkflowSpecification for %s", program.getName());
  for (WorkflowNode node : spec.getNodes()) {
    if (node.getType().equals(WorkflowNodeType.ACTION)) {
      SystemArguments.validateTransactionTimeout(options.getUserArguments().asMap(), cConf, "action", node.getNodeId());
    }
  }
}
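The call to SystemArguments.validateTransactionTimeout checks that any per-action transaction timeout supplied in the user arguments is a usable number. A minimal, standalone sketch of that kind of check is below; the argument key pattern and the maximum are illustrative assumptions, not CDAP's actual constants or implementation.

import java.util.Map;

// Minimal sketch (not CDAP code): validate an optional per-action transaction
// timeout from the user arguments against a configured maximum.
public final class TimeoutValidation {

  // "tx.timeout.action.<node>" is a hypothetical key pattern used for illustration only.
  static void validateTimeout(Map<String, String> userArgs, String nodeId, int maxTimeoutSeconds) {
    String raw = userArgs.get("tx.timeout.action." + nodeId);
    if (raw == null) {
      return;  // nothing to validate
    }
    int timeout;
    try {
      timeout = Integer.parseInt(raw);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Transaction timeout must be a number, got: " + raw, e);
    }
    if (timeout <= 0 || timeout > maxTimeoutSeconds) {
      throw new IllegalArgumentException(
        "Transaction timeout for action " + nodeId + " must be in (0, " + maxTimeoutSeconds + "], got " + timeout);
    }
  }
}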
Use of io.cdap.cdap.api.app.ApplicationSpecification in project cdap by cdapio.
The class LineageAdmin, method computeWorkflowInnerPrograms.
/**
 * Computes the inner programs and program runs based on the program relations and adds them to the collections.
 *
 * @param toVisitPrograms the collection of programs to visit next
 * @param programWorkflowMap the map from inner program run id to its workflow run id
 * @param programRelations the program relations of the dataset
 */
private void computeWorkflowInnerPrograms(Set<ProgramId> toVisitPrograms, Map<ProgramRunId, ProgramRunId> programWorkflowMap, Set<Relation> programRelations) {
  // Step 1: walk through the program relations, pick out the MapReduce and Spark programs that
  // could be inner programs of a workflow, and get the appSpec for each program to determine
  // what other programs are in the workflow
  Map<ApplicationId, ApplicationSpecification> appSpecs = new HashMap<>();
  Set<ProgramRunId> possibleInnerPrograms = new HashSet<>();
  programRelations.forEach(relation -> {
    ProgramType type = relation.getProgram().getType();
    if (type.equals(ProgramType.MAPREDUCE) || type.equals(ProgramType.SPARK)) {
      possibleInnerPrograms.add(relation.getProgramRunId());
      appSpecs.computeIfAbsent(relation.getProgram().getParent(), store::getApplication);
    }
  });
  // Step 2: get the run record for each possible inner program; the run record contains the
  // workflow information, so fetch the workflow run id and add it to the map
  Map<ProgramRunId, RunRecordDetail> runRecords = store.getRuns(possibleInnerPrograms);
  Set<ProgramRunId> workflowRunIds = new HashSet<>();
  runRecords.entrySet().stream().filter(e -> e.getValue() != null).forEach(entry -> {
    ProgramRunId programRunId = entry.getKey();
    RunRecordDetail runRecord = entry.getValue();
    if (runRecord.getSystemArgs().containsKey(ProgramOptionConstants.WORKFLOW_RUN_ID)) {
      ProgramRunId wfRunId = extractWorkflowRunId(programRunId, runRecord);
      programWorkflowMap.put(programRunId, wfRunId);
      workflowRunIds.add(wfRunId);
    }
  });
  // Step 3: fetch the run records of the workflows; the properties of a workflow run record list all
  // of its inner program run ids, which are compared with the app spec to get the type of each program
  runRecords = store.getRuns(workflowRunIds);
  runRecords.entrySet().stream().filter(e -> e.getValue() != null).forEach(entry -> {
    ProgramRunId programRunId = entry.getKey();
    RunRecordDetail runRecord = entry.getValue();
    extractAndAddInnerPrograms(toVisitPrograms, programWorkflowMap, appSpecs, programRunId, runRecord);
  });
}
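Step 1 relies on Map.computeIfAbsent with a method reference so that each application specification is fetched from the store at most once, however many of its programs appear in the relations. A minimal standalone sketch of that caching idiom, with hypothetical stand-ins for CDAP's Store and ApplicationSpecification:

import java.util.HashMap;
import java.util.Map;

// Sketch of the computeIfAbsent caching idiom from Step 1.
// "SpecStore" and "Spec" are illustrative stand-ins, not CDAP types.
class SpecCache {
  interface SpecStore {
    Spec getApplication(String appId);  // expensive lookup
  }

  static class Spec {
  }

  private final Map<String, Spec> cache = new HashMap<>();
  private final SpecStore store;

  SpecCache(SpecStore store) {
    this.store = store;
  }

  Spec get(String appId) {
    // Each application is fetched from the store at most once; later calls hit the cache.
    // If the store returns null, no mapping is recorded and the lookup is retried next time.
    return cache.computeIfAbsent(appId, store::getApplication);
  }
}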
Use of io.cdap.cdap.api.app.ApplicationSpecification in project cdap by cdapio.
The class CoreSchedulerServiceTest, method testProgramEvents.
@Test
@Category(XSlowTests.class)
public void testProgramEvents() throws Exception {
  // Deploy the app
  deploy(AppWithMultipleSchedules.class, 200);
  CConfiguration cConf = getInjector().getInstance(CConfiguration.class);
  TopicId programEventTopic = NamespaceId.SYSTEM.topic(cConf.get(Constants.AppFabric.PROGRAM_STATUS_RECORD_EVENT_TOPIC));
  ProgramStateWriter programStateWriter = new MessagingProgramStateWriter(cConf, messagingService);
  // These notifications should not trigger the program
  ProgramRunId anotherWorkflowRun = ANOTHER_WORKFLOW.run(RunIds.generate());
  ArtifactId artifactId = ANOTHER_WORKFLOW.getNamespaceId().artifact("test", "1.0").toApiArtifactId();
  ApplicationSpecification appSpec = new DefaultApplicationSpecification(
    AppWithMultipleSchedules.NAME, ApplicationId.DEFAULT_VERSION, ProjectInfo.getVersion().toString(), "desc", null,
    artifactId, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(),
    Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(),
    Collections.emptyMap());
  ProgramDescriptor programDescriptor = new ProgramDescriptor(anotherWorkflowRun.getParent(), appSpec);
  BasicArguments systemArgs = new BasicArguments(ImmutableMap.of(ProgramOptionConstants.SKIP_PROVISIONING, Boolean.TRUE.toString()));
  ProgramOptions programOptions = new SimpleProgramOptions(anotherWorkflowRun.getParent(), systemArgs, new BasicArguments(), false);
  programStateWriter.start(anotherWorkflowRun, programOptions, null, programDescriptor);
  programStateWriter.running(anotherWorkflowRun, null);
  long lastProcessed = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
  programStateWriter.error(anotherWorkflowRun, null);
  waitUntilProcessed(programEventTopic, lastProcessed);
  ProgramRunId someWorkflowRun = SOME_WORKFLOW.run(RunIds.generate());
  programDescriptor = new ProgramDescriptor(someWorkflowRun.getParent(), appSpec);
  programStateWriter.start(someWorkflowRun, new SimpleProgramOptions(someWorkflowRun.getParent(), systemArgs, new BasicArguments()), null, programDescriptor);
  programStateWriter.running(someWorkflowRun, null);
  lastProcessed = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
  programStateWriter.killed(someWorkflowRun);
  waitUntilProcessed(programEventTopic, lastProcessed);
  Assert.assertEquals(0, getRuns(TRIGGERED_WORKFLOW, ProgramRunStatus.ALL));
  // Enable the schedule
  scheduler.enableSchedule(APP_MULT_ID.schedule(AppWithMultipleSchedules.WORKFLOW_COMPLETED_SCHEDULE));
  // Start a program with user arguments
  startProgram(ANOTHER_WORKFLOW, ImmutableMap.of(AppWithMultipleSchedules.ANOTHER_RUNTIME_ARG_KEY, AppWithMultipleSchedules.ANOTHER_RUNTIME_ARG_VALUE), 200);
  // Wait for a completed run record
  waitForCompleteRuns(1, TRIGGERED_WORKFLOW);
  assertProgramRuns(TRIGGERED_WORKFLOW, ProgramRunStatus.COMPLETED, 1);
  RunRecord run = getProgramRuns(TRIGGERED_WORKFLOW, ProgramRunStatus.COMPLETED).get(0);
  Map<String, List<WorkflowTokenDetail.NodeValueDetail>> tokenData = getWorkflowToken(TRIGGERED_WORKFLOW, run.getPid(), null, null).getTokenData();
  // There should be 2 entries in tokenData
  Assert.assertEquals(2, tokenData.size());
  // The value of TRIGGERED_RUNTIME_ARG_KEY should be ANOTHER_RUNTIME_ARG_VALUE from the triggering workflow
  Assert.assertEquals(AppWithMultipleSchedules.ANOTHER_RUNTIME_ARG_VALUE, tokenData.get(AppWithMultipleSchedules.TRIGGERED_RUNTIME_ARG_KEY).get(0).getValue());
  // The value of TRIGGERED_TOKEN_KEY should be ANOTHER_TOKEN_VALUE from the triggering workflow
  Assert.assertEquals(AppWithMultipleSchedules.ANOTHER_TOKEN_VALUE, tokenData.get(AppWithMultipleSchedules.TRIGGERED_TOKEN_KEY).get(0).getValue());
}
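The test publishes program state transitions through the ProgramStateWriter and then blocks on helpers such as waitUntilProcessed and waitForCompleteRuns until the messaging system has caught up. A minimal sketch of that poll-until-condition pattern (not the test's actual helpers) could look like this:

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.BooleanSupplier;

// Sketch: repeatedly evaluate a condition until it holds or a timeout expires.
final class Poll {
  static void until(BooleanSupplier condition, long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
    long deadline = System.nanoTime() + unit.toNanos(timeout);
    while (!condition.getAsBoolean()) {
      if (System.nanoTime() >= deadline) {
        throw new TimeoutException("Condition not met within " + timeout + " " + unit);
      }
      TimeUnit.MILLISECONDS.sleep(100);  // back off briefly between checks
    }
  }
}

A caller would pass a lambda such as () -> completedRunCount() >= 1, where completedRunCount is a hypothetical accessor for the run store being polled.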
Use of io.cdap.cdap.api.app.ApplicationSpecification in project cdap by cdapio.
The class MetadataSubscriberServiceTest, method testProfileMetadata.
@Test
public void testProfileMetadata() throws Exception {
  Injector injector = getInjector();
  ApplicationSpecification appSpec = Specifications.from(new AppWithWorkflow());
  ApplicationId appId = NamespaceId.DEFAULT.app(appSpec.getName());
  ProgramId workflowId = appId.workflow("SampleWorkflow");
  ScheduleId scheduleId = appId.schedule("tsched1");
  // publish a creation event for a schedule that will never exist;
  // this tests that such a message is eventually discarded.
  // note that for this test we configure a fast retry strategy and a small number of retries,
  // so this only costs a few seconds of delay
  publishBogusCreationEvent();
  // the mds should have no properties yet, since we haven't started the MetadataSubscriberService
  MetadataStorage mds = injector.getInstance(MetadataStorage.class);
  Assert.assertEquals(Collections.emptyMap(), mds.read(new Read(workflowId.toMetadataEntity())).getProperties());
  Assert.assertEquals(Collections.emptyMap(), mds.read(new Read(scheduleId.toMetadataEntity())).getProperties());
  // add an app with a workflow to the app meta store.
  // note: since we bypass app-fabric when adding this app, no ENTITY_CREATION message
  // will be published for the app (that happens in the app lifecycle service). Therefore this
  // app must exist before assigning the profile for the namespace, otherwise the app's
  // programs will not receive the profile metadata.
  Store store = injector.getInstance(DefaultStore.class);
  store.addApplication(appId, appSpec);
  // set the default namespace to use the profile; since the MetadataSubscriberService is not started yet,
  // this should not affect the mds
  PreferencesService preferencesService = injector.getInstance(PreferencesService.class);
  preferencesService.setProperties(NamespaceId.DEFAULT, Collections.singletonMap(SystemArguments.PROFILE_NAME, ProfileId.NATIVE.getScopedName()));
  // add a schedule to the schedule store
  ProgramScheduleService scheduleService = injector.getInstance(ProgramScheduleService.class);
  scheduleService.add(new ProgramSchedule("tsched1", "one time schedule", workflowId, Collections.emptyMap(), new TimeTrigger("* * ? * 1"), ImmutableList.of()));
  // add a new profile in the default namespace
  ProfileService profileService = injector.getInstance(ProfileService.class);
  ProfileId myProfile = new ProfileId(NamespaceId.DEFAULT.getNamespace(), "MyProfile");
  Profile profile1 = new Profile("MyProfile", Profile.NATIVE.getLabel(), Profile.NATIVE.getDescription(), Profile.NATIVE.getScope(), Profile.NATIVE.getProvisioner());
  profileService.saveProfile(myProfile, profile1);
  // add a second profile in the default namespace
  ProfileId myProfile2 = new ProfileId(NamespaceId.DEFAULT.getNamespace(), "MyProfile2");
  Profile profile2 = new Profile("MyProfile2", Profile.NATIVE.getLabel(), Profile.NATIVE.getDescription(), Profile.NATIVE.getScope(), Profile.NATIVE.getProvisioner());
  profileService.saveProfile(myProfile2, profile2);
  try {
    // Verify the workflow profile metadata is updated to the default profile
    Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata is updated to the default profile
    Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // set the default namespace to use MyProfile
    preferencesService.setProperties(NamespaceId.DEFAULT, Collections.singletonMap(SystemArguments.PROFILE_NAME, "USER:MyProfile"));
    // Verify the workflow profile metadata is updated to MyProfile
    Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata is updated to MyProfile
    Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // set the app level to use MyProfile2
    preferencesService.setProperties(appId, Collections.singletonMap(SystemArguments.PROFILE_NAME, "USER:MyProfile2"));
    // set the instance level to the system profile
    preferencesService.setProperties(Collections.singletonMap(SystemArguments.PROFILE_NAME, ProfileId.NATIVE.getScopedName()));
    // Verify the workflow profile metadata is updated to MyProfile2, which is set at the app level
    Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata is updated to MyProfile2, which is set at the app level
    Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // removing the preferences at the instance level should not affect the metadata
    preferencesService.deleteProperties();
    // Verify the workflow profile metadata still shows MyProfile2 from the app level
    Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata still shows MyProfile2 from the app level
    Tasks.waitFor(myProfile2.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // removing the app level preference should let the programs/schedules use the namespace level preference
    preferencesService.deleteProperties(appId);
    // Verify the workflow profile metadata is updated to MyProfile
    Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata is updated to MyProfile
    Tasks.waitFor(myProfile.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // remove the namespace level preference so no preference is left
    preferencesService.deleteProperties(NamespaceId.DEFAULT);
    // Verify the workflow profile metadata falls back to the default profile
    Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, workflowId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Verify the schedule profile metadata falls back to the default profile
    Tasks.waitFor(ProfileId.NATIVE.getScopedName(), () -> getProfileProperty(mds, scheduleId), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  } finally {
    // stop and clean up the store
    preferencesService.deleteProperties(NamespaceId.DEFAULT);
    preferencesService.deleteProperties();
    preferencesService.deleteProperties(appId);
    store.removeAll(NamespaceId.DEFAULT);
    scheduleService.delete(scheduleId);
    profileService.disableProfile(myProfile);
    profileService.disableProfile(myProfile2);
    profileService.deleteAllProfiles(myProfile.getNamespaceId());
    mds.apply(new MetadataMutation.Drop(workflowId.toMetadataEntity()), MutationOptions.DEFAULT);
    mds.apply(new MetadataMutation.Drop(scheduleId.toMetadataEntity()), MutationOptions.DEFAULT);
  }
}
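The sequence of preference changes and waits above exercises the resolution order for the profile property: an app-level preference overrides the namespace level, which overrides the instance level, and the native profile applies when nothing is set. A minimal standalone sketch of that layering; the class and method names are illustrative, not CDAP's preference API:

import java.util.Map;

// Sketch of the layered resolution the test exercises: app > namespace > instance > built-in default.
final class ProfileResolution {
  static String resolveProfile(Map<String, String> appPrefs,
                               Map<String, String> namespacePrefs,
                               Map<String, String> instancePrefs,
                               String profileKey,
                               String defaultProfile) {
    if (appPrefs.containsKey(profileKey)) {
      return appPrefs.get(profileKey);        // app level wins
    }
    if (namespacePrefs.containsKey(profileKey)) {
      return namespacePrefs.get(profileKey);  // then namespace level
    }
    if (instancePrefs.containsKey(profileKey)) {
      return instancePrefs.get(profileKey);   // then instance level
    }
    return defaultProfile;                    // otherwise the native/default profile
  }
}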
Use of io.cdap.cdap.api.app.ApplicationSpecification in project cdap by cdapio.
The class ProgramExistenceVerifier, method ensureExists.
@Override
public void ensureExists(ProgramId programId) throws ApplicationNotFoundException, ProgramNotFoundException {
  ApplicationId appId = programId.getParent();
  ApplicationSpecification appSpec = store.getApplication(appId);
  if (appSpec == null) {
    throw new ApplicationNotFoundException(appId);
  }
  ProgramType programType = programId.getType();
  Set<String> programNames = null;
  if (programType == ProgramType.MAPREDUCE && appSpec.getMapReduce() != null) {
    programNames = appSpec.getMapReduce().keySet();
  } else if (programType == ProgramType.WORKFLOW && appSpec.getWorkflows() != null) {
    programNames = appSpec.getWorkflows().keySet();
  } else if (programType == ProgramType.SERVICE && appSpec.getServices() != null) {
    programNames = appSpec.getServices().keySet();
  } else if (programType == ProgramType.SPARK && appSpec.getSpark() != null) {
    programNames = appSpec.getSpark().keySet();
  } else if (programType == ProgramType.WORKER && appSpec.getWorkers() != null) {
    programNames = appSpec.getWorkers().keySet();
  }
  if (programNames != null && programNames.contains(programId.getProgram())) {
    // the program exists
    return;
  }
  throw new ProgramNotFoundException(programId);
}
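The if/else chain simply selects the name set for the requested program type and then tests membership. A compact standalone sketch of the same pattern using a map keyed by type instead of the chain; the enum and exception are illustrative stand-ins for CDAP's ProgramType and ProgramNotFoundException:

import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;

// Sketch: pick the name set for the requested program type, then verify membership.
final class ExistenceCheck {
  enum Kind { MAPREDUCE, WORKFLOW, SERVICE, SPARK, WORKER }

  static void ensureExists(Map<Kind, Set<String>> programsByKind, Kind kind, String name) {
    Set<String> names = programsByKind.get(kind);
    if (names == null || !names.contains(name)) {
      throw new NoSuchElementException(kind + " program not found: " + name);
    }
  }
}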