use of io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.
the class DataPipelineTest method deployPipelineWithSchedule.
private WorkflowManager deployPipelineWithSchedule(
    String pipelineName, Engine engine, String triggeringPipelineName,
    ArgumentMapping key1Mapping, String expectedKey1Value,
    PluginPropertyMapping key2Mapping, String expectedKey2Value) throws Exception {
  String tableName = "actionScheduleTable" + pipelineName + engine;
  String sourceName = "macroActionWithScheduleInput-" + pipelineName + engine;
  String sinkName = "macroActionWithScheduleOutput-" + pipelineName + engine;
  String key1 = key1Mapping.getTarget();
  String key2 = key2Mapping.getTarget();
  ETLBatchConfig etlConfig = ETLBatchConfig.builder()
    .addStage(new ETLStage("action1", MockAction.getPlugin(tableName, "row1", "column1", String.format("${%s}", key1))))
    .addStage(new ETLStage("action2", MockAction.getPlugin(tableName, "row2", "column2", String.format("${%s}", key2))))
    .addStage(new ETLStage("source", MockSource.getPlugin(sourceName)))
    .addStage(new ETLStage("filter1", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key1))))
    .addStage(new ETLStage("filter2", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key2))))
    .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
    .addConnection("action1", "action2")
    .addConnection("action2", "source")
    .addConnection("source", "filter1")
    .addConnection("filter1", "filter2")
    .addConnection("filter2", "sink")
    .setEngine(engine)
    .build();
  AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
  ApplicationId appId = NamespaceId.DEFAULT.app(pipelineName);
  ApplicationManager appManager = deployApplication(appId, appRequest);
  // there should be only two programs - one workflow and one mapreduce/spark
  Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  // Use expectedKey1Value and expectedKey2Value as the names of two records, so that only the
  // record "samuel" passes both string-value filters.
  StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
  StructuredRecord recordKey1Value = StructuredRecord.builder(schema).set("name", expectedKey1Value).build();
  StructuredRecord recordKey2Value = StructuredRecord.builder(schema).set("name", expectedKey2Value).build();
  // write all three records to the source
  DataSetManager<Table> inputManager = getDataset(sourceName);
  MockSource.writeInput(inputManager, ImmutableList.of(recordSamuel, recordKey1Value, recordKey2Value));
  String defaultNamespace = NamespaceId.DEFAULT.getNamespace();
  // Use properties from the triggering pipeline as values for runtime arguments key1 and key2
  TriggeringPropertyMapping propertyMapping =
      new TriggeringPropertyMapping(ImmutableList.of(key1Mapping), ImmutableList.of(key2Mapping));
  ProgramStatusTrigger completeTrigger = new ProgramStatusTrigger(
      new WorkflowId(defaultNamespace, triggeringPipelineName, SmartWorkflow.NAME),
      ImmutableSet.of(ProgramStatus.COMPLETED));
  ScheduleId scheduleId = appId.schedule("completeSchedule");
  appManager.addSchedule(new ScheduleDetail(
      scheduleId.getNamespace(), scheduleId.getApplication(), scheduleId.getVersion(),
      scheduleId.getSchedule(), "",
      new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, SmartWorkflow.NAME),
      ImmutableMap.of(SmartWorkflow.TRIGGERING_PROPERTIES_MAPPING, GSON.toJson(propertyMapping)),
      completeTrigger, ImmutableList.of(), Schedulers.JOB_QUEUE_TIMEOUT_MILLIS, null, null));
  appManager.enableSchedule(scheduleId);
  return appManager.getWorkflowManager(SmartWorkflow.NAME);
}
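A minimal driver sketch, not taken from the source: it assumes a triggeringWorkflowManager obtained from the already-deployed triggering pipeline, and uses the cdap-test ProgramManager methods start and waitForRun; the pipeline names and the timeout are made up.

// Hypothetical call site for the helper above.
WorkflowManager triggeredWorkflow = deployPipelineWithSchedule(
    "tailPipeline", Engine.SPARK, "headPipeline",
    key1Mapping, "expected1", key2Mapping, "expected2");
// Completing the triggering pipeline satisfies the ProgramStatusTrigger, so the
// schedule launches the triggered workflow with the mapped runtime arguments.
triggeringWorkflowManager.start(); // hypothetical manager for "headPipeline"
triggeringWorkflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
triggeredWorkflow.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);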
use of io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.
the class ScheduleTaskRunnerTest method testRuntimeArgumentResolution.
@Test
public void testRuntimeArgumentResolution() throws Exception {
  Injector injector = Guice.createInjector(new ConfigModule(), binder -> {
    binder.bind(OwnerAdmin.class).to(NoOpOwnerAdmin.class);
    binder.bind(NamespaceAdmin.class).to(InMemoryNamespaceAdmin.class);
    binder.bind(NamespaceQueryAdmin.class).to(InMemoryNamespaceAdmin.class);
    binder.bind(PreferencesFetcher.class)
        .toInstance(new FakePreferencesFetcher(Collections.singletonMap("key", "should-be-overridden")));
  });
  PropertiesResolver propertiesResolver = injector.getInstance(PropertiesResolver.class);
  ApplicationId appId = NamespaceId.DEFAULT.app("app");
  ProgramSchedule programSchedule = new ProgramSchedule(
      "schedule", "desc", appId.workflow("wf2"), Collections.singletonMap("key", "val"),
      new ProgramStatusTrigger(appId.workflow("wf1")), Collections.emptyList());
  Map<String, String> userArgs = ScheduleTaskRunner.getUserArgs(programSchedule, propertiesResolver);
  // the schedule's own property wins over the preference value "should-be-overridden"
  Assert.assertEquals("val", userArgs.get("key"));
}
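For quick reference, the ProgramStatusTrigger constructions on this page take three shapes; a condensed sketch reusing identifiers from the surrounding tests (appId, PROG1_ID, SmartWorkflow.NAME):

// Program id only, as in the test above.
ProgramStatusTrigger onAnyStatus = new ProgramStatusTrigger(appId.workflow("wf1"));
// Program id plus varargs statuses, as in the store test below.
ProgramStatusTrigger onTerminal = new ProgramStatusTrigger(
    PROG1_ID, ProgramStatus.COMPLETED, ProgramStatus.FAILED, ProgramStatus.KILLED);
// Program id plus an explicit status set, as in the pipeline test above.
ProgramStatusTrigger onCompleted = new ProgramStatusTrigger(
    new WorkflowId("default", "somePipeline", SmartWorkflow.NAME),
    ImmutableSet.of(ProgramStatus.COMPLETED));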
use of io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.
the class ProgramScheduleStoreDatasetTest method testDeleteScheduleByTriggeringProgram.
@Test
public void testDeleteScheduleByTriggeringProgram() {
  TransactionRunner transactionRunner = getTransactionRunner();
  SatisfiableTrigger prog1Trigger = new ProgramStatusTrigger(
      PROG1_ID, ProgramStatus.COMPLETED, ProgramStatus.FAILED, ProgramStatus.KILLED);
  SatisfiableTrigger prog2Trigger = new ProgramStatusTrigger(
      PROG2_ID, ProgramStatus.COMPLETED, ProgramStatus.FAILED, ProgramStatus.KILLED);
  final ProgramSchedule sched1 = new ProgramSchedule("sched1", "a program status trigger", PROG3_ID,
      ImmutableMap.of("propper", "popper"), prog1Trigger, ImmutableList.<Constraint>of());
  final ProgramSchedule sched2 = new ProgramSchedule("sched2", "a program status trigger", PROG3_ID,
      ImmutableMap.of("propper", "popper"), prog2Trigger, ImmutableList.<Constraint>of());
  final ProgramSchedule schedOr = new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID,
      ImmutableMap.of("propper", "popper"),
      new OrTrigger(new PartitionTrigger(DS1_ID, 1),
                    prog1Trigger,
                    new AndTrigger(new OrTrigger(prog1Trigger, prog2Trigger), new PartitionTrigger(DS2_ID, 1)),
                    new OrTrigger(prog2Trigger)),
      ImmutableList.<Constraint>of());
  final ProgramSchedule schedAnd = new ProgramSchedule("schedAnd", "an AND trigger", PROG3_ID,
      ImmutableMap.of("propper", "popper"),
      new AndTrigger(new PartitionTrigger(DS1_ID, 1),
                     prog2Trigger,
                     new AndTrigger(prog1Trigger, new PartitionTrigger(DS2_ID, 1))),
      ImmutableList.<Constraint>of());
  TransactionRunners.run(transactionRunner, context -> {
    ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
    store.addSchedules(ImmutableList.of(sched1, sched2, schedOr, schedAnd));
  });
  TransactionRunners.run(transactionRunner, context -> {
    ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
    // ProgramStatus event for PROG1_ID should trigger only sched1, schedOr, schedAnd
    Assert.assertEquals(ImmutableSet.of(sched1, schedOr, schedAnd), toScheduleSet(
        store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
    // ProgramStatus event for PROG2_ID should trigger only sched2, schedOr, schedAnd
    Assert.assertEquals(ImmutableSet.of(sched2, schedOr, schedAnd), toScheduleSet(
        store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
  });
  // update or delete all schedules triggered by PROG1_ID
  TransactionRunners.run(transactionRunner, context -> {
    ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
    store.modifySchedulesTriggeredByDeletedProgram(PROG1_ID);
  });
  final ProgramSchedule schedOrNew = new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID,
      ImmutableMap.of("propper", "popper"),
      new OrTrigger(new PartitionTrigger(DS1_ID, 1),
                    new AndTrigger(prog2Trigger, new PartitionTrigger(DS2_ID, 1)),
                    prog2Trigger),
      ImmutableList.of());
  TransactionRunners.run(transactionRunner, context -> {
    ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
    // ProgramStatus event for PROG1_ID should trigger no schedules after modifying schedules triggered by it
    Assert.assertEquals(Collections.emptySet(), toScheduleSet(
        store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
    Assert.assertEquals(Collections.emptySet(), toScheduleSet(
        store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.FAILED))));
    Assert.assertEquals(Collections.emptySet(), toScheduleSet(
        store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.KILLED))));
    // ProgramStatus event for PROG2_ID should trigger only sched2 and schedOrNew
    Assert.assertEquals(ImmutableSet.of(sched2, schedOrNew), toScheduleSet(
        store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
  });
  // update or delete all schedules triggered by PROG2_ID
  TransactionRunners.run(transactionRunner, context -> {
    ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
    store.modifySchedulesTriggeredByDeletedProgram(PROG2_ID);
  });
  final ProgramSchedule schedOrNew1 = new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID,
      ImmutableMap.of("propper", "popper"), new PartitionTrigger(DS1_ID, 1), ImmutableList.of());
  final Set<ProgramSchedule> ds1Schedules = new HashSet<>();
  TransactionRunners.run(transactionRunner, context -> {
    ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
    // ProgramStatus event for PROG2_ID should trigger no schedules after modifying schedules triggered by it
    Assert.assertEquals(Collections.emptySet(), toScheduleSet(
        store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.COMPLETED))));
    Assert.assertEquals(Collections.emptySet(), toScheduleSet(
        store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
    Assert.assertEquals(Collections.emptySet(), toScheduleSet(
        store.findSchedules(Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.KILLED))));
    // event for DS1 should trigger only schedOrNew1 since all other schedules are deleted
    ds1Schedules.addAll(toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID))));
  });
  Assert.assertEquals(ImmutableSet.of(schedOrNew1), ds1Schedules);
}
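To make the composite-trigger assertions easier to follow, here is how schedOr collapses as programs are deleted; this restates the expectations encoded in the test above, it is not additional behavior:

// schedOr before any deletion:
//   OR( PartitionTrigger(DS1, 1),
//       prog1Trigger,
//       AND( OR(prog1Trigger, prog2Trigger), PartitionTrigger(DS2, 1) ),
//       OR(prog2Trigger) )
// After modifySchedulesTriggeredByDeletedProgram(PROG1_ID), every trigger on
// PROG1_ID is pruned and degenerate branches are flattened, giving schedOrNew:
//   OR( PartitionTrigger(DS1, 1),
//       AND( prog2Trigger, PartitionTrigger(DS2, 1) ),
//       prog2Trigger )
// After deleting PROG2_ID as well, only PartitionTrigger(DS1, 1) remains
// (schedOrNew1); sched1, sched2 and schedAnd can no longer be satisfied and
// are removed entirely.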
use of io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.
the class ConfiguratorTest method testAppWithConfig.
@Test
public void testAppWithConfig() throws Exception {
  LocationFactory locationFactory = new LocalLocationFactory(TMP_FOLDER.newFolder());
  Location appJar = AppJarHelper.createDeploymentJar(locationFactory, ConfigTestApp.class);
  Id.Artifact artifactId =
      Id.Artifact.from(Id.Namespace.DEFAULT, ConfigTestApp.class.getSimpleName(), "1.0.0");
  CConfiguration cConf = CConfiguration.create();
  ArtifactRepository baseArtifactRepo = new DefaultArtifactRepository(
      cConf, null, null, null, new DummyProgramRunnerFactory(), new DefaultImpersonator(cConf, null));
  ArtifactRepository artifactRepo =
      new AuthorizationArtifactRepository(baseArtifactRepo, authEnforcer, authenticationContext);
  PluginFinder pluginFinder = new LocalPluginFinder(artifactRepo);
  ConfigTestApp.ConfigClass config = new ConfigTestApp.ConfigClass("myTable");
  AppDeploymentInfo appDeploymentInfo = new AppDeploymentInfo(
      artifactId.toEntityId(), appJar, NamespaceId.DEFAULT,
      new ApplicationClass(ConfigTestApp.class.getName(), "", null), null, null,
      new Gson().toJson(config));
  // Create a testable configurator and provide it the application.
  Configurator configurator = new InMemoryConfigurator(
      cConf, pluginFinder, new DefaultImpersonator(cConf, null), artifactRepo, null, appDeploymentInfo);
  ListenableFuture<ConfigResponse> result = configurator.config();
  ConfigResponse response = result.get(10, TimeUnit.SECONDS);
  Assert.assertNotNull(response);
  AppSpecInfo appSpecInfo = response.getAppSpecInfo();
  if (appSpecInfo == null) {
    throw new IllegalStateException("Failed to deploy application");
  }
  ApplicationSpecification specification = appSpecInfo.getAppSpec();
  Assert.assertNotNull(specification);
  Assert.assertEquals(1, specification.getDatasets().size());
  Assert.assertTrue(specification.getDatasets().containsKey("myTable"));
  // Create a deployment info without the app configuration
  appDeploymentInfo = new AppDeploymentInfo(
      artifactId.toEntityId(), appJar, NamespaceId.DEFAULT,
      new ApplicationClass(ConfigTestApp.class.getName(), "", null), null, null, null);
  Configurator configuratorWithoutConfig = new InMemoryConfigurator(
      cConf, pluginFinder, new DefaultImpersonator(cConf, null), artifactRepo, null, appDeploymentInfo);
  result = configuratorWithoutConfig.config();
  response = result.get(10, TimeUnit.SECONDS);
  Assert.assertNotNull(response);
  appSpecInfo = response.getAppSpecInfo();
  if (appSpecInfo == null) {
    throw new IllegalStateException("Failed to deploy application");
  }
  specification = appSpecInfo.getAppSpec();
  Assert.assertNotNull(specification);
  Assert.assertEquals(1, specification.getDatasets().size());
  Assert.assertTrue(specification.getDatasets().containsKey(ConfigTestApp.DEFAULT_TABLE));
  Assert.assertNotNull(specification.getProgramSchedules().get(ConfigTestApp.SCHEDULE_NAME));
  ProgramStatusTrigger trigger = (ProgramStatusTrigger)
      specification.getProgramSchedules().get(ConfigTestApp.SCHEDULE_NAME).getTrigger();
  Assert.assertEquals(ConfigTestApp.WORKFLOW_NAME, trigger.getProgramId().getProgram());
}
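As context for the last two assertions, an application can declare such a schedule in its configure method. The following is a hedged sketch using the CDAP ScheduleBuilder API as commonly used; it is not quoted from ConfigTestApp, and ExampleApp, the workflow classes, and the names are hypothetical.

public class ExampleApp extends AbstractApplication {
  static final String SCHEDULE_NAME = "exampleSchedule";
  static final String WORKFLOW_NAME = "upstreamWorkflow";

  @Override
  public void configure() {
    addWorkflow(new UpstreamWorkflow());   // hypothetical workflow named WORKFLOW_NAME
    addWorkflow(new DownstreamWorkflow()); // hypothetical workflow to be scheduled
    // Launch the downstream workflow whenever the upstream workflow completes;
    // this surfaces in the ApplicationSpecification as a ProgramStatusTrigger.
    schedule(buildSchedule(SCHEDULE_NAME, ProgramType.WORKFLOW, "DownstreamWorkflow")
        .triggerOnProgramStatus(ProgramType.WORKFLOW, WORKFLOW_NAME, ProgramStatus.COMPLETED));
  }
}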
use of io.cdap.cdap.internal.app.runtime.schedule.trigger.ProgramStatusTrigger in project cdap by caskdata.
the class ProgramScheduleStoreDataset method modifySchedulesTriggeredByDeletedProgram.
/**
 * Update all schedules that can be triggered by the given deleted program. A schedule is removed if
 * the only {@link ProgramStatusTrigger} in it is triggered by the deleted program. A schedule with a
 * composite trigger is updated if the composite trigger can still be satisfied after the program is
 * deleted; otherwise the schedule is deleted as well.
 *
 * @param programId the id of the deleted program whose triggered schedules should be updated or deleted
 * @return the schedules that were deleted
 */
public List<ProgramSchedule> modifySchedulesTriggeredByDeletedProgram(ProgramId programId) throws IOException {
  long deleteTime = System.currentTimeMillis();
  List<ProgramSchedule> deleted = new ArrayList<>();
  Set<ProgramScheduleRecord> scheduleRecords = new HashSet<>();
  for (ProgramStatus status : ProgramStatus.values()) {
    scheduleRecords.addAll(findSchedules(Schedulers.triggerKeyForProgramStatus(programId, status)));
  }
  for (ProgramScheduleRecord scheduleRecord : scheduleRecords) {
    ProgramSchedule schedule = scheduleRecord.getSchedule();
    markScheduleAsDeleted(schedule.getScheduleId(), deleteTime);
    triggerStore.deleteAll(Range.singleton(getScheduleKeys(schedule.getScheduleId())));
    if (schedule.getTrigger() instanceof AbstractSatisfiableCompositeTrigger) {
      // get the updated composite trigger by removing the program status trigger of the given program
      Trigger updatedTrigger =
          ((AbstractSatisfiableCompositeTrigger) schedule.getTrigger()).getTriggerWithDeletedProgram(programId);
      if (updatedTrigger == null) {
        deleted.add(schedule);
        continue;
      }
      // if the updated composite trigger is not null, add the schedule back with updated composite trigger
      try {
        addScheduleWithStatus(new ProgramSchedule(schedule.getName(), schedule.getDescription(),
                                                  schedule.getProgramId(), schedule.getProperties(),
                                                  updatedTrigger, schedule.getConstraints(),
                                                  schedule.getTimeoutMillis()),
                              scheduleRecord.getMeta().getStatus(), System.currentTimeMillis());
      } catch (AlreadyExistsException e) {
        // this should never happen
        LOG.warn("Failed to add the schedule '{}' triggered by '{}' with updated trigger '{}', "
                   + "skip adding this schedule.", schedule.getScheduleId(), programId, updatedTrigger, e);
      }
    } else {
      deleted.add(schedule);
    }
  }
  return deleted;
}
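A typical call site, mirroring the store test earlier on this page: the cleanup runs inside a transaction after a program is deleted. The transactionRunner, deletedProgramId, and LOG names are assumed here.

TransactionRunners.run(transactionRunner, context -> {
  ProgramScheduleStoreDataset store = Schedulers.getScheduleStore(context);
  // Deletes schedules that depended solely on the deleted program and rewrites
  // composite triggers that can still be satisfied without it.
  List<ProgramSchedule> removed = store.modifySchedulesTriggeredByDeletedProgram(deletedProgramId);
  LOG.debug("Removed {} schedules triggered by deleted program {}", removed.size(), deletedProgramId);
});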