Example use of io.cdap.cdap.api.schedule.SchedulableProgramType in the project cdap by caskdata:
class TimeScheduler, method addProgramSchedule.
public void addProgramSchedule(ProgramSchedule schedule) throws AlreadyExistsException, SchedulerException {
  // Fail fast: make sure none of the schedule's triggers already exist in Quartz
  // before registering anything, so a duplicate never leaves a partially-added schedule behind.
  try {
    Map<String, TriggerKey> cronToTriggerKey = getCronTriggerKeyMap(schedule);
    for (TriggerKey key : cronToTriggerKey.values()) {
      assertTriggerDoesNotExist(key);
    }
    ProgramId programId = schedule.getProgramId();
    SchedulableProgramType schedulableType = programId.getType().getSchedulableType();
    JobDetail jobDetail = addJob(programId, schedulableType);
    // Register one Quartz trigger per cron expression, all pointing at the same job
    for (Map.Entry<String, TriggerKey> cronAndKey : cronToTriggerKey.entrySet()) {
      scheduleJob(cronAndKey.getValue(), schedule.getName(), cronAndKey.getKey(), jobDetail);
    }
  } catch (org.quartz.SchedulerException e) {
    // Wrap Quartz's checked exception in CDAP's own SchedulerException, preserving the cause
    throw new SchedulerException(e);
  }
}
Example use of io.cdap.cdap.api.schedule.SchedulableProgramType in the project cdap by caskdata:
class TimeScheduler, method getScheduledRuntime.
/**
 * Collects the previous or next fire times of all Quartz triggers registered for the given program.
 *
 * @param program the program whose scheduled runtimes are requested
 * @param previousRuntimeRequested if {@code true}, return previous fire times; otherwise next fire times
 * @return a list of {@link ScheduledRuntime}, one per trigger that has a usable fire time
 * @throws IllegalArgumentException if the program type is not schedulable
 * @throws SchedulerException if the underlying Quartz scheduler fails
 */
private List<ScheduledRuntime> getScheduledRuntime(ProgramId program, boolean previousRuntimeRequested) throws SchedulerException {
  List<ScheduledRuntime> scheduledRuntimes = new ArrayList<>();
  SchedulableProgramType schedulableType = program.getType().getSchedulableType();
  if (schedulableType == null) {
    throw new IllegalArgumentException("Program " + program + " cannot be scheduled");
  }
  try {
    for (Trigger trigger : scheduler.getTriggersOfJob(jobKeyFor(program, schedulableType))) {
      long time;
      if (previousRuntimeRequested) {
        if (trigger.getPreviousFireTime() == null) {
          // previous fire time can be null for the triggers which are not yet fired
          continue;
        }
        time = trigger.getPreviousFireTime().getTime();
      } else {
        // skip the trigger that is not enabled, since it cannot launch program as scheduled
        if (scheduler.getTriggerState(trigger.getKey()) == Trigger.TriggerState.PAUSED) {
          continue;
        }
        // BUGFIX: Quartz returns null from getNextFireTime() for a trigger that will not
        // fire again (e.g. past its end time). Guard against it like the previous-fire
        // branch above does, instead of throwing an NPE.
        if (trigger.getNextFireTime() == null) {
          continue;
        }
        time = trigger.getNextFireTime().getTime();
      }
      ScheduledRuntime runtime = new ScheduledRuntime(trigger.getKey().toString(), time);
      scheduledRuntimes.add(runtime);
    }
  } catch (org.quartz.SchedulerException e) {
    throw new SchedulerException(e);
  }
  return scheduledRuntimes;
}
Example use of io.cdap.cdap.api.schedule.SchedulableProgramType in the project cdap by caskdata:
class TimeScheduler, method getCronTriggerKeyMap.
/**
 * Builds a map from cron expression to the Quartz {@link TriggerKey} used for that expression.
 * Trigger keys are derived from the program name, program type and schedule name (plus the cron
 * expression when the schedule's trigger is a composite trigger). A key is returned from
 * TimeScheduler#PAUSED_NEW_TRIGGERS_GROUP if it exists in that group; otherwise the
 * {@link TriggerKey} is prepared with a null group, which resolves to {@link Key#DEFAULT_GROUP}.
 *
 * @throws org.quartz.SchedulerException if the Quartz scheduler cannot be queried
 */
private Map<String, TriggerKey> getCronTriggerKeyMap(ProgramSchedule schedule) throws org.quartz.SchedulerException {
  ProgramId programId = schedule.getProgramId();
  SchedulableProgramType schedulableType = programId.getType().getSchedulableType();
  io.cdap.cdap.api.schedule.Trigger scheduleTrigger = schedule.getTrigger();
  Map<String, TriggerKey> cronToTriggerKey = new HashMap<>();
  if (scheduleTrigger instanceof AbstractSatisfiableCompositeTrigger) {
    // A composite trigger may contain several time triggers; collect each one's cron expression.
    Set<SatisfiableTrigger> timeTriggers =
        ((AbstractSatisfiableCompositeTrigger) scheduleTrigger).getUnitTriggers().get(ProtoTrigger.Type.TIME);
    if (timeTriggers == null) {
      return ImmutableMap.of();
    }
    for (SatisfiableTrigger unitTrigger : timeTriggers) {
      String cronExpression = ((TimeTrigger) unitTrigger).getCronExpression();
      // The cron expression is folded into the trigger name to keep keys unique
      // when one composite schedule carries multiple time triggers.
      String triggerName =
          AbstractTimeSchedulerService.getTriggerName(programId, schedulableType, schedule.getName(), cronExpression);
      cronToTriggerKey.put(cronExpression, triggerKeyForName(triggerName));
    }
    return cronToTriggerKey;
  }
  // Non-composite trigger: the schedule id alone identifies the single time trigger,
  // so the cron expression is not needed in the trigger key.
  String triggerName = AbstractTimeSchedulerService.scheduleIdFor(programId, schedulableType, schedule.getName());
  cronToTriggerKey.put(((TimeTrigger) scheduleTrigger).getCronExpression(), triggerKeyForName(triggerName));
  return cronToTriggerKey;
}
Example use of io.cdap.cdap.api.schedule.SchedulableProgramType in the project cdap by caskdata:
class WorkflowDriver, method executeAction.
private void executeAction(WorkflowActionNode node, WorkflowToken token) throws Exception {
  // Record the node as running so workflow status queries can see it.
  status.put(node.getNodeId(), node);
  CountDownLatch executorTerminateLatch = new CountDownLatch(1);
  ExecutorService executor = createExecutor(1, executorTerminateLatch, "action-" + node.getNodeId() + "-%d");
  try {
    // Execute the action on a dedicated single-thread executor and block until it finishes.
    Future<?> completion = executor.submit((Callable<Void>) () -> {
      SchedulableProgramType programType = node.getProgram().getProgramType();
      String programName = node.getProgram().getProgramName();
      String prettyProgramType = ProgramType.valueOf(programType.name()).getPrettyName();
      ProgramWorkflowRunner programWorkflowRunner =
          workflowProgramRunnerFactory.getProgramWorkflowRunner(programType, token, node.getNodeId(), nodeStates);
      // this should not happen, since null is only passed in from WorkflowDriver, only when calling configure
      if (programWorkflowRunner == null) {
        throw new UnsupportedOperationException("Operation not allowed.");
      }
      Runnable programRunner = programWorkflowRunner.create(programName);
      LOG.info("Starting {} Program '{}' in workflow", prettyProgramType, programName);
      programRunner.run();
      LOG.info("{} Program '{}' in workflow completed", prettyProgramType, programName);
      return null;
    });
    completion.get();
  } catch (Throwable t) {
    // Rethrow checked Exceptions as-is; wrap anything else.
    Throwables.propagateIfPossible(t, Exception.class);
    throw Throwables.propagate(t);
  } finally {
    // Shut the executor down, wait for its thread to terminate, then clear the node status —
    // in this order, so status never reports a node whose thread is still running.
    executor.shutdownNow();
    executorTerminateLatch.await();
    status.remove(node.getNodeId());
  }
  workflowStateWriter.setWorkflowToken(workflowRunId, token);
}
Example use of io.cdap.cdap.api.schedule.SchedulableProgramType in the project cdap by caskdata:
class DistributedWorkflowProgramRunner, method setupLaunchConfig.
@Override
protected void setupLaunchConfig(ProgramLaunchConfig launchConfig, Program program, ProgramOptions options, CConfiguration cConf, Configuration hConf, File tempDir) throws IOException {
  WorkflowSpecification spec = program.getApplicationSpecification().getWorkflows().get(program.getName());
  List<ClassAcceptor> acceptors = new ArrayList<>();
  acceptors.add(launchConfig.getClassAcceptor());
  // Only MapReduce and Spark nodes get their own launch configuration;
  // CUSTOM_ACTION nodes execute inside the workflow driver itself.
  Set<SchedulableProgramType> containerTypes = EnumSet.of(SchedulableProgramType.MAPREDUCE, SchedulableProgramType.SPARK);
  // Plain loop (rather than a stream pipeline) so the IOException from the nested
  // setupLaunchConfig call propagates unchanged.
  for (Object workflowNode : spec.getNodeIdMap().values()) {
    if (!(workflowNode instanceof WorkflowActionNode)) {
      continue;
    }
    ScheduleProgramInfo programInfo = ((WorkflowActionNode) workflowNode).getProgram();
    if (!containerTypes.contains(programInfo.getProgramType())) {
      continue;
    }
    ProgramType programType = ProgramType.valueOfSchedulableType(programInfo.getProgramType());
    ProgramRunner runner = programRunnerFactory.create(programType);
    try {
      if (runner instanceof DistributedProgramRunner) {
        // Delegate to the child program's own runner, with a ProgramOptions whose
        // user arguments are scoped down to that specific program.
        ProgramId childProgramId = program.getId().getParent().program(programType, programInfo.getProgramName());
        Map<String, String> childUserArgs = RuntimeArguments.extractScope(
            childProgramId.getType().getScope(), childProgramId.getProgram(), options.getUserArguments().asMap());
        ProgramOptions childOptions =
            new SimpleProgramOptions(childProgramId, options.getArguments(), new BasicArguments(childUserArgs));
        ((DistributedProgramRunner) runner).setupLaunchConfig(
            launchConfig, Programs.create(cConf, program, childProgramId, runner), childOptions, cConf, hConf, tempDir);
        acceptors.add(launchConfig.getClassAcceptor());
      }
    } finally {
      if (runner instanceof Closeable) {
        Closeables.closeQuietly((Closeable) runner);
      }
    }
  }
  // Combine all collected acceptors into a single one for the workflow launch.
  launchConfig.setClassAcceptor(new AndClassAcceptor(acceptors));
  // Derive the driver's default resources from the programs inside the workflow,
  // giving the workflow driver at least 768 mb of container memory.
  Map<String, Resources> runnableResources = Maps.transformValues(launchConfig.getRunnables(), this::getResources);
  Resources driverResources = maxResources(new Resources(768), findDriverResources(spec.getNodes(), runnableResources));
  // Replace the child runnables with a single runnable for the workflow driver itself.
  launchConfig.clearRunnables();
  // Extract scoped runtime arguments meant only for the workflow, not for child nodes.
  Map<String, String> workflowArgs = RuntimeArguments.extractScope("task", "workflow", options.getUserArguments().asMap());
  launchConfig.addRunnable(spec.getName(), new WorkflowTwillRunnable(spec.getName()), 1, workflowArgs, driverResources, 0);
}
Aggregations