use of co.cask.cdap.internal.app.runtime.schedule.trigger.SatisfiableTrigger in project cdap by caskdata.
the class ProgramScheduleStoreDatasetTest method testDeleteScheduleByTriggeringProgram.
@Test
public void testDeleteScheduleByTriggeringProgram() throws Exception {
  DatasetFramework dsFramework = getInjector().getInstance(DatasetFramework.class);
  TransactionSystemClient txClient = getInjector().getInstance(TransactionSystemClient.class);
  TransactionExecutorFactory txExecutorFactory = new DynamicTransactionExecutorFactory(txClient);
  dsFramework.truncateInstance(Schedulers.STORE_DATASET_ID);
  final ProgramScheduleStoreDataset store =
    dsFramework.getDataset(Schedulers.STORE_DATASET_ID, new HashMap<String, String>(), null);
  Assert.assertNotNull(store);
  TransactionExecutor txExecutor =
    txExecutorFactory.createExecutor(Collections.singleton((TransactionAware) store));
  SatisfiableTrigger prog1Trigger =
    new ProgramStatusTrigger(PROG1_ID, ProgramStatus.COMPLETED, ProgramStatus.FAILED, ProgramStatus.KILLED);
  SatisfiableTrigger prog2Trigger =
    new ProgramStatusTrigger(PROG2_ID, ProgramStatus.COMPLETED, ProgramStatus.FAILED, ProgramStatus.KILLED);
  final ProgramSchedule sched1 =
    new ProgramSchedule("sched1", "a program status trigger", PROG3_ID, ImmutableMap.of("propper", "popper"),
                        prog1Trigger, ImmutableList.<Constraint>of());
  final ProgramSchedule sched2 =
    new ProgramSchedule("sched2", "a program status trigger", PROG3_ID, ImmutableMap.of("propper", "popper"),
                        prog2Trigger, ImmutableList.<Constraint>of());
  final ProgramSchedule schedOr =
    new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID, ImmutableMap.of("propper", "popper"),
                        new OrTrigger(new PartitionTrigger(DS1_ID, 1), prog1Trigger,
                                      new AndTrigger(new OrTrigger(prog1Trigger, prog2Trigger),
                                                     new PartitionTrigger(DS2_ID, 1)),
                                      new OrTrigger(prog2Trigger)),
                        ImmutableList.<Constraint>of());
  final ProgramSchedule schedAnd =
    new ProgramSchedule("schedAnd", "an AND trigger", PROG3_ID, ImmutableMap.of("propper", "popper"),
                        new AndTrigger(new PartitionTrigger(DS1_ID, 1), prog2Trigger,
                                       new AndTrigger(prog1Trigger, new PartitionTrigger(DS2_ID, 1))),
                        ImmutableList.<Constraint>of());
  txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      store.addSchedules(ImmutableList.of(sched1, sched2, schedOr, schedAnd));
    }
  });
  txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      // ProgramStatus event for PROG1_ID should trigger only sched1, schedOr, schedAnd
      Assert.assertEquals(ImmutableSet.of(sched1, schedOr, schedAnd),
                          toScheduleSet(store.findSchedules(
                            Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
      // ProgramStatus event for PROG2_ID should trigger only sched2, schedOr, schedAnd
      Assert.assertEquals(ImmutableSet.of(sched2, schedOr, schedAnd),
                          toScheduleSet(store.findSchedules(
                            Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
    }
  });
  // update or delete all schedules triggered by PROG1_ID
  txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      store.modifySchedulesTriggeredByDeletedProgram(PROG1_ID);
    }
  });
  final ProgramSchedule schedOrNew =
    new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID, ImmutableMap.of("propper", "popper"),
                        new OrTrigger(new PartitionTrigger(DS1_ID, 1),
                                      new AndTrigger(prog2Trigger, new PartitionTrigger(DS2_ID, 1)),
                                      prog2Trigger),
                        ImmutableList.of());
  txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      // ProgramStatus event for PROG1_ID should trigger no schedules after modifying schedules triggered by it
      Assert.assertEquals(Collections.emptySet(),
                          toScheduleSet(store.findSchedules(
                            Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.COMPLETED))));
      Assert.assertEquals(Collections.emptySet(),
                          toScheduleSet(store.findSchedules(
                            Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.FAILED))));
      Assert.assertEquals(Collections.emptySet(),
                          toScheduleSet(store.findSchedules(
                            Schedulers.triggerKeyForProgramStatus(PROG1_ID, ProgramStatus.KILLED))));
      // ProgramStatus event for PROG2_ID should trigger only sched2 and schedOrNew
      Assert.assertEquals(ImmutableSet.of(sched2, schedOrNew),
                          toScheduleSet(store.findSchedules(
                            Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
    }
  });
  // update or delete all schedules triggered by PROG2_ID
  txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      store.modifySchedulesTriggeredByDeletedProgram(PROG2_ID);
    }
  });
  final ProgramSchedule schedOrNew1 =
    new ProgramSchedule("schedOr", "an OR trigger", PROG3_ID, ImmutableMap.of("propper", "popper"),
                        new PartitionTrigger(DS1_ID, 1), ImmutableList.of());
  final Set<ProgramSchedule> ds1Schedules = new HashSet<>();
  txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      // ProgramStatus event for PROG2_ID should trigger no schedules after modifying schedules triggered by it
      Assert.assertEquals(Collections.emptySet(),
                          toScheduleSet(store.findSchedules(
                            Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.COMPLETED))));
      Assert.assertEquals(Collections.emptySet(),
                          toScheduleSet(store.findSchedules(
                            Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.FAILED))));
      Assert.assertEquals(Collections.emptySet(),
                          toScheduleSet(store.findSchedules(
                            Schedulers.triggerKeyForProgramStatus(PROG2_ID, ProgramStatus.KILLED))));
      // event for DS1 should trigger only schedOrNew1 since all other schedules are deleted
      ds1Schedules.addAll(toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID))));
    }
  });
  Assert.assertEquals(ImmutableSet.of(schedOrNew1), ds1Schedules);
}
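The toScheduleSet helper used in the assertions above is not part of this snippet. A minimal sketch of such a helper, assuming findSchedules returns a collection of ProgramScheduleRecord objects exposing getSchedule() (an assumption; adjust to the store's actual return type), could look like this:

// Hypothetical sketch: collapse the records returned by findSchedules into a set of schedules
// for order-insensitive assertions. The ProgramScheduleRecord type and getSchedule() accessor
// are assumptions for illustration.
private static Set<ProgramSchedule> toScheduleSet(Collection<ProgramScheduleRecord> records) {
  Set<ProgramSchedule> schedules = new HashSet<>();
  for (ProgramScheduleRecord record : records) {
    schedules.add(record.getSchedule());
  }
  return schedules;
}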
use of co.cask.cdap.internal.app.runtime.schedule.trigger.SatisfiableTrigger in project cdap by caskdata.
the class ScheduleTaskRunner method launch.
public void launch(Job job) throws Exception {
  ProgramSchedule schedule = job.getSchedule();
  ProgramId programId = schedule.getProgramId();
  Map<String, String> userArgs = Maps.newHashMap();
  userArgs.putAll(schedule.getProperties());
  userArgs.putAll(propertiesResolver.getUserProperties(Id.Program.fromEntityId(programId)));
  Map<String, String> systemArgs = Maps.newHashMap();
  systemArgs.putAll(propertiesResolver.getSystemProperties(Id.Program.fromEntityId(programId)));
  // Let the triggers update the arguments first before setting the triggering schedule info
  ((SatisfiableTrigger) job.getSchedule().getTrigger())
    .updateLaunchArguments(job.getSchedule(), job.getNotifications(), userArgs, systemArgs);
  TriggeringScheduleInfo triggeringScheduleInfo = getTriggeringScheduleInfo(job);
  systemArgs.put(ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO, GSON.toJson(triggeringScheduleInfo));
  execute(programId, systemArgs, userArgs);
  LOG.info("Successfully started program {} in schedule {}.", schedule.getProgramId(), schedule.getName());
}
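For context, updateLaunchArguments gives each trigger a chance to inject trigger-specific runtime arguments before the program is started. The following sketch is not CDAP's actual TimeTrigger implementation; it only illustrates, under assumed argument keys, how a time-based trigger might surface information from its notifications to the launched program:

// Illustrative only -- not the real CDAP implementation. The keys "triggering.cron.expression"
// and "logical.start.time", and the "logicalStartTime" notification property, are assumptions.
public void updateLaunchArguments(ProgramSchedule schedule, List<Notification> notifications,
                                  Map<String, String> userArgs, Map<String, String> systemArgs) {
  // A time trigger could expose the cron expression that fired the schedule ...
  systemArgs.put("triggering.cron.expression", getCronExpression());
  // ... and a logical start time carried in one of the notifications, if present.
  for (Notification notification : notifications) {
    String logicalStartTime = notification.getProperties().get("logicalStartTime");
    if (logicalStartTime != null) {
      systemArgs.put("logical.start.time", logicalStartTime);
      break;
    }
  }
}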
use of co.cask.cdap.internal.app.runtime.schedule.trigger.SatisfiableTrigger in project cdap by caskdata.
the class TimeScheduler method getCronTriggerKeyMap.
/**
 * @return a map with cron expressions as keys and the corresponding {@link TriggerKey} as values.
 *         Trigger keys are created from the program name, program type and schedule name (plus the cron
 *         expression when the schedule's trigger is a composite trigger). A key is returned in the
 *         TimeScheduler#PAUSED_NEW_TRIGGERS_GROUP group if the trigger exists in that group; otherwise the
 *         {@link TriggerKey} is created with a null group, which resolves to {@link Key#DEFAULT_GROUP}.
 * @throws org.quartz.SchedulerException if there is an error querying the underlying Quartz scheduler
 */
private Map<String, TriggerKey> getCronTriggerKeyMap(ProgramSchedule schedule) throws org.quartz.SchedulerException {
  ProgramId program = schedule.getProgramId();
  SchedulableProgramType programType = program.getType().getSchedulableType();
  co.cask.cdap.api.schedule.Trigger trigger = schedule.getTrigger();
  Map<String, TriggerKey> cronTriggerKeyMap = new HashMap<>();
  // Get the set of TimeTriggers if the schedule's trigger is a composite trigger
  if (trigger instanceof AbstractSatisfiableCompositeTrigger) {
    Set<SatisfiableTrigger> triggerSet =
      ((AbstractSatisfiableCompositeTrigger) trigger).getUnitTriggers().get(ProtoTrigger.Type.TIME);
    if (triggerSet == null) {
      return ImmutableMap.of();
    }
    for (SatisfiableTrigger timeTrigger : triggerSet) {
      String cron = ((TimeTrigger) timeTrigger).getCronExpression();
      String triggerName =
        AbstractTimeSchedulerService.getTriggerName(program, programType, schedule.getName(), cron);
      cronTriggerKeyMap.put(cron, triggerKeyForName(triggerName));
    }
    return cronTriggerKeyMap;
  }
  // No need to include the cron expression in the trigger key if the trigger is not a composite trigger
  String triggerName = AbstractTimeSchedulerService.scheduleIdFor(program, programType, schedule.getName());
  cronTriggerKeyMap.put(((TimeTrigger) schedule.getTrigger()).getCronExpression(), triggerKeyForName(triggerName));
  return cronTriggerKeyMap;
}
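As a usage sketch, the returned map lets a caller address each Quartz trigger behind a schedule individually. Assuming the class holds a field named scheduler of type org.quartz.Scheduler (the field name is an assumption here), a caller could pause every time trigger backing the schedule:

// Hypothetical caller: pause every Quartz trigger backing the schedule's time trigger(s).
// Assumes a 'scheduler' field of type org.quartz.Scheduler.
private void pauseTimeTriggers(ProgramSchedule schedule) throws org.quartz.SchedulerException {
  for (Map.Entry<String, TriggerKey> entry : getCronTriggerKeyMap(schedule).entrySet()) {
    TriggerKey key = entry.getValue();
    if (scheduler.checkExists(key)) {
      scheduler.pauseTrigger(key);
    }
  }
}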
use of co.cask.cdap.internal.app.runtime.schedule.trigger.SatisfiableTrigger in project cdap by caskdata.
the class ScheduleTaskRunner method getTriggeringScheduleInfo.
private TriggeringScheduleInfo getTriggeringScheduleInfo(Job job) {
  TriggerInfoContext triggerInfoContext = new TriggerInfoContext(job, store);
  SatisfiableTrigger trigger = (SatisfiableTrigger) job.getSchedule().getTrigger();
  List<TriggerInfo> triggerInfo = trigger.getTriggerInfos(triggerInfoContext);
  ProgramSchedule schedule = job.getSchedule();
  return new DefaultTriggeringScheduleInfo(schedule.getName(), schedule.getDescription(), triggerInfo,
                                           schedule.getProperties());
}
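The info built here is what launch() serializes under ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO. A minimal sketch of reading it back on the program side, assuming the running program has access to its system arguments as a Map<String, String> and that GSON is configured with the same type adapters used for serialization (both assumptions for this illustration):

// Sketch: deserialize the triggering schedule info from the program's system arguments.
String json = systemArgs.get(ProgramOptionConstants.TRIGGERING_SCHEDULE_INFO);
if (json != null) {
  TriggeringScheduleInfo info = GSON.fromJson(json, DefaultTriggeringScheduleInfo.class);
  // The schedule name and description come from the ProgramSchedule that fired the run.
  LOG.debug("Program was started by schedule {} ({})", info.getName(), info.getDescription());
}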