Use of co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger in project cdap by caskdata.
The class CoreSchedulerServiceTest, method addListDeleteSchedules:
@Test
public void addListDeleteSchedules() throws Exception {
  // verify that list returns nothing
  Assert.assertTrue(scheduler.listSchedules(APP1_ID).isEmpty());
  Assert.assertTrue(scheduler.listSchedules(PROG1_ID).isEmpty());
  // add a schedule for app1
  ProgramSchedule tsched1 = new ProgramSchedule("tsched1", "one time schedule", PROG1_ID, ImmutableMap.of("prop1", "nn"), new TimeTrigger("* * ? * 1"), ImmutableList.<Constraint>of());
  scheduler.addSchedule(tsched1);
  Assert.assertEquals(tsched1, scheduler.getSchedule(TSCHED1_ID));
  Assert.assertEquals(ImmutableList.of(tsched1), scheduler.listSchedules(APP1_ID));
  Assert.assertEquals(ImmutableList.of(tsched1), scheduler.listSchedules(PROG1_ID));
  // add three more schedules, one for the same program, one for the same app, one for another app
  ProgramSchedule psched1 = new ProgramSchedule("psched1", "one partition schedule", PROG1_ID, ImmutableMap.of("prop3", "abc"), new PartitionTrigger(DS1_ID, 1), ImmutableList.<Constraint>of());
  ProgramSchedule tsched11 = new ProgramSchedule("tsched11", "two times schedule", PROG11_ID, ImmutableMap.of("prop2", "xx"), new TimeTrigger("* * ? * 1,2"), ImmutableList.<Constraint>of());
  ProgramSchedule psched2 = new ProgramSchedule("psched2", "two partition schedule", PROG2_ID, ImmutableMap.of("propper", "popper"), new PartitionTrigger(DS2_ID, 2), ImmutableList.<Constraint>of());
  scheduler.addSchedules(ImmutableList.of(psched1, tsched11, psched2));
  Assert.assertEquals(psched1, scheduler.getSchedule(PSCHED1_ID));
  Assert.assertEquals(tsched11, scheduler.getSchedule(TSCHED11_ID));
  Assert.assertEquals(psched2, scheduler.getSchedule(PSCHED2_ID));
  // list by app and program
  Assert.assertEquals(ImmutableList.of(psched1, tsched1), scheduler.listSchedules(PROG1_ID));
  Assert.assertEquals(ImmutableList.of(tsched11), scheduler.listSchedules(PROG11_ID));
  Assert.assertEquals(ImmutableList.of(psched2), scheduler.listSchedules(PROG2_ID));
  Assert.assertEquals(ImmutableList.of(psched1, tsched1, tsched11), scheduler.listSchedules(APP1_ID));
  Assert.assertEquals(ImmutableList.of(psched2), scheduler.listSchedules(APP2_ID));
  // delete one schedule
  scheduler.deleteSchedule(TSCHED1_ID);
  verifyNotFound(scheduler, TSCHED1_ID);
  Assert.assertEquals(ImmutableList.of(psched1), scheduler.listSchedules(PROG1_ID));
  Assert.assertEquals(ImmutableList.of(tsched11), scheduler.listSchedules(PROG11_ID));
  Assert.assertEquals(ImmutableList.of(psched2), scheduler.listSchedules(PROG2_ID));
  Assert.assertEquals(ImmutableList.of(psched1, tsched11), scheduler.listSchedules(APP1_ID));
  Assert.assertEquals(ImmutableList.of(psched2), scheduler.listSchedules(APP2_ID));
  // attempt to delete it again along with another one that exists
  try {
    scheduler.deleteSchedules(ImmutableList.of(TSCHED1_ID, TSCHED11_ID));
    Assert.fail("expected NotFoundException");
  } catch (NotFoundException e) {
    // expected
  }
  Assert.assertEquals(ImmutableList.of(psched1), scheduler.listSchedules(PROG1_ID));
  Assert.assertEquals(ImmutableList.of(tsched11), scheduler.listSchedules(PROG11_ID));
  Assert.assertEquals(ImmutableList.of(psched2), scheduler.listSchedules(PROG2_ID));
  Assert.assertEquals(ImmutableList.of(psched1, tsched11), scheduler.listSchedules(APP1_ID));
  Assert.assertEquals(ImmutableList.of(psched2), scheduler.listSchedules(APP2_ID));
  // attempt to add it back together with a schedule that exists
  try {
    scheduler.addSchedules(ImmutableList.of(tsched1, tsched11));
    Assert.fail("expected AlreadyExistsException");
  } catch (AlreadyExistsException e) {
    // expected
  }
  Assert.assertEquals(ImmutableList.of(psched1), scheduler.listSchedules(PROG1_ID));
  Assert.assertEquals(ImmutableList.of(tsched11), scheduler.listSchedules(PROG11_ID));
  Assert.assertEquals(ImmutableList.of(psched2), scheduler.listSchedules(PROG2_ID));
  Assert.assertEquals(ImmutableList.of(psched1, tsched11), scheduler.listSchedules(APP1_ID));
  Assert.assertEquals(ImmutableList.of(psched2), scheduler.listSchedules(APP2_ID));
  // add it back, delete all schedules for one app
  scheduler.addSchedule(tsched1);
  scheduler.deleteSchedules(APP1_ID);
  verifyNotFound(scheduler, TSCHED1_ID);
  verifyNotFound(scheduler, PSCHED1_ID);
  verifyNotFound(scheduler, TSCHED11_ID);
  Assert.assertEquals(ImmutableList.of(), scheduler.listSchedules(PROG1_ID));
  Assert.assertEquals(ImmutableList.of(), scheduler.listSchedules(PROG11_ID));
  Assert.assertEquals(ImmutableList.of(psched2), scheduler.listSchedules(PROG2_ID));
  Assert.assertEquals(ImmutableList.of(), scheduler.listSchedules(APP1_ID));
  Assert.assertEquals(ImmutableList.of(psched2), scheduler.listSchedules(APP2_ID));
}
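Note: verifyNotFound is a private helper of the test class and is not included in this snippet. A plausible sketch, assuming the scheduler exposes getSchedule(ScheduleId) and throws NotFoundException for unknown schedule IDs (the Scheduler and ScheduleId types here are assumptions, not taken from the snippet):
private static void verifyNotFound(Scheduler scheduler, ScheduleId scheduleId) throws Exception {
  try {
    // sketch only: the actual helper in CoreSchedulerServiceTest may differ
    scheduler.getSchedule(scheduleId);
    Assert.fail("expected NotFoundException for " + scheduleId);
  } catch (NotFoundException e) {
    // expected: the schedule was deleted or never added
  }
}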
Use of co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger in project cdap by caskdata.
The class ProgramScheduleStoreDatasetTest, method testFindSchedulesByEventAndUpdateSchedule:
@Test
public void testFindSchedulesByEventAndUpdateSchedule() throws Exception {
  DatasetFramework dsFramework = getInjector().getInstance(DatasetFramework.class);
  TransactionSystemClient txClient = getInjector().getInstance(TransactionSystemClient.class);
  TransactionExecutorFactory txExecutorFactory = new DynamicTransactionExecutorFactory(txClient);
  final ProgramScheduleStoreDataset store = dsFramework.getDataset(Schedulers.STORE_DATASET_ID, new HashMap<String, String>(), null);
  Assert.assertNotNull(store);
  TransactionExecutor txExecutor = txExecutorFactory.createExecutor(Collections.singleton((TransactionAware) store));
  final ProgramSchedule sched11 = new ProgramSchedule("sched11", "one partition schedule", PROG1_ID, ImmutableMap.of("prop3", "abc"), new PartitionTrigger(DS1_ID, 1), ImmutableList.<Constraint>of());
  final ProgramSchedule sched12 = new ProgramSchedule("sched12", "two partition schedule", PROG1_ID, ImmutableMap.of("propper", "popper"), new PartitionTrigger(DS2_ID, 2), ImmutableList.<Constraint>of());
  final ProgramSchedule sched22 = new ProgramSchedule("sched22", "twentytwo partition schedule", PROG2_ID, ImmutableMap.of("nn", "4"), new PartitionTrigger(DS2_ID, 22), ImmutableList.<Constraint>of());
  txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      // event for DS1 or DS2 should trigger nothing. validate it returns an empty collection
      Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID)).isEmpty());
      Assert.assertTrue(store.findSchedules(Schedulers.triggerKeyForPartition(DS2_ID)).isEmpty());
    }
  });
  txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      store.addSchedules(ImmutableList.of(sched11, sched12, sched22));
    }
  });
  txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      // event for DS1 should trigger only sched11
      Assert.assertEquals(ImmutableSet.of(sched11), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID))));
      // event for DS2 triggers only sched12 and sched22
      Assert.assertEquals(ImmutableSet.of(sched12, sched22), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS2_ID))));
    }
  });
  final ProgramSchedule sched11New = new ProgramSchedule(sched11.getName(), "time schedule", PROG1_ID, ImmutableMap.of("timeprop", "time"), new TimeTrigger("* * * * *"), ImmutableList.<Constraint>of());
  final ProgramSchedule sched12New = new ProgramSchedule(sched12.getName(), "one partition schedule", PROG1_ID, ImmutableMap.of("pp", "p"), new PartitionTrigger(DS1_ID, 2), ImmutableList.<Constraint>of());
  final ProgramSchedule sched22New = new ProgramSchedule(sched22.getName(), "one streamsize schedule", PROG2_ID, ImmutableMap.of("ss", "s"), new StreamSizeTrigger(NS_ID.stream("stream"), 1), ImmutableList.<Constraint>of());
  txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      store.updateSchedule(sched11New);
      store.updateSchedule(sched12New);
      store.updateSchedule(sched22New);
    }
  });
  txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      // event for DS1 should trigger only sched12New after update
      Assert.assertEquals(ImmutableSet.of(sched12New), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS1_ID))));
      // event for DS2 triggers no schedule after update
      Assert.assertEquals(ImmutableSet.<ProgramSchedule>of(), toScheduleSet(store.findSchedules(Schedulers.triggerKeyForPartition(DS2_ID))));
    }
  });
}
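Note: toScheduleSet is another helper of the test class that is not shown here. A hedged sketch, assuming findSchedules returns record objects that expose the matched ProgramSchedule (the ProgramScheduleRecord type and its getSchedule() accessor are assumptions, not taken from this snippet):
private static Set<ProgramSchedule> toScheduleSet(Collection<ProgramScheduleRecord> records) {
  Set<ProgramSchedule> schedules = new HashSet<>();
  for (ProgramScheduleRecord record : records) {
    // keep only the schedule itself so it can be compared against the expected ImmutableSet
    schedules.add(record.getSchedule());
  }
  return schedules;
}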
Use of co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger in project cdap by caskdata.
The class Schedulers, method toScheduleCreationSpec:
public static ScheduleCreationSpec toScheduleCreationSpec(NamespaceId deployNamespace, Schedule schedule, String programName, Map<String, String> properties) {
  Trigger trigger;
  if (schedule instanceof TimeSchedule) {
    // a time schedule becomes a TimeTrigger built from its cron entry
    trigger = new TimeTrigger(((TimeSchedule) schedule).getCronEntry());
  } else {
    // the only other schedule type handled here is a stream-size schedule, which becomes a
    // StreamSizeTrigger on the stream resolved in the deployment namespace
    StreamSizeSchedule streamSizeSchedule = (StreamSizeSchedule) schedule;
    trigger = new StreamSizeTrigger(deployNamespace.stream(streamSizeSchedule.getStreamName()), streamSizeSchedule.getDataTriggerMB());
  }
  // a max-concurrent-runs run constraint, if set, is carried over as a ConcurrencyConstraint
  Integer maxConcurrentRuns = schedule.getRunConstraints().getMaxConcurrentRuns();
  List<Constraint> constraints = maxConcurrentRuns == null ? ImmutableList.<Constraint>of() : ImmutableList.<Constraint>of(new ConcurrencyConstraint(maxConcurrentRuns));
  return new ScheduleCreationSpec(schedule.getName(), schedule.getDescription(), programName, properties, trigger, constraints, Schedulers.JOB_QUEUE_TIMEOUT_MILLIS);
}
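The null check on maxConcurrentRuns is repeated in toProgramSchedule below. As a standalone sketch of that shared translation (the helper name is illustrative and not part of CDAP):
// illustrative helper: an absent max-concurrent-runs value means no constraint at all
private static List<Constraint> toConstraints(Integer maxConcurrentRuns) {
  return maxConcurrentRuns == null
    ? ImmutableList.<Constraint>of()
    : ImmutableList.<Constraint>of(new ConcurrencyConstraint(maxConcurrentRuns));
}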
Use of co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger in project cdap by caskdata.
The class Schedulers, method toProgramSchedule:
public static ProgramSchedule toProgramSchedule(ApplicationId appId, ScheduleSpecification spec) {
  Schedule schedule = spec.getSchedule();
  ProgramType programType = ProgramType.valueOfSchedulableType(spec.getProgram().getProgramType());
  ProgramId programId = appId.program(programType, spec.getProgram().getProgramName());
  Trigger trigger;
  if (schedule instanceof TimeSchedule) {
    TimeSchedule timeSchedule = (TimeSchedule) schedule;
    trigger = new TimeTrigger(timeSchedule.getCronEntry());
  } else {
    StreamSizeSchedule streamSchedule = (StreamSizeSchedule) schedule;
    StreamId streamId = programId.getNamespaceId().stream(streamSchedule.getStreamName());
    trigger = new StreamSizeTrigger(streamId, streamSchedule.getDataTriggerMB());
  }
  Integer maxConcurrentRuns = schedule.getRunConstraints().getMaxConcurrentRuns();
  List<Constraint> constraints = maxConcurrentRuns == null ? ImmutableList.<Constraint>of() : ImmutableList.<Constraint>of(new ConcurrencyConstraint(maxConcurrentRuns));
  return new ProgramSchedule(schedule.getName(), schedule.getDescription(), programId, spec.getProperties(), trigger, constraints);
}
Use of co.cask.cdap.internal.app.runtime.schedule.trigger.TimeTrigger in project cdap by caskdata.
The class JobQueueDataset, method isTriggerSatisfied:
private boolean isTriggerSatisfied(Trigger trigger, List<Notification> notifications) {
  if (trigger instanceof TimeTrigger || trigger instanceof StreamSizeTrigger) {
    // a time or stream-size trigger is satisfied by the single notification with which the job is initially created
    return true;
  }
  if (trigger instanceof PartitionTrigger) {
    PartitionTrigger partitionTrigger = (PartitionTrigger) trigger;
    // sum the partition counts reported by all notifications received so far for this job
    int numPartitions = 0;
    for (Notification notification : notifications) {
      String numPartitionsString = notification.getProperties().get("numPartitions");
      numPartitions += Integer.parseInt(numPartitionsString);
    }
    return numPartitions >= partitionTrigger.getNumPartitions();
  }
  throw new IllegalArgumentException("Unknown trigger class: " + trigger.getClass());
}
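A minimal self-contained sketch of the partition-count accumulation above, written against plain property maps so it does not depend on how CDAP's Notification objects are constructed (the helper name and signature are illustrative only):
// e.g. two notifications reporting "numPartitions" = "2" and "3" satisfy a PartitionTrigger that requires 5
static boolean enoughPartitions(int requiredPartitions, List<Map<String, String>> notificationProperties) {
  int total = 0;
  for (Map<String, String> properties : notificationProperties) {
    // each partition notification carries the number of new partitions it announces
    total += Integer.parseInt(properties.get("numPartitions"));
  }
  return total >= requiredPartitions;
}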